[iOS] add missing QuickTime plug-in replacement API
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2014 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27
28 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
29
30 #import "MediaPlayerPrivateAVFoundationObjC.h"
31
32 #import "AVTrackPrivateAVFObjCImpl.h"
33 #import "AudioTrackPrivateAVFObjC.h"
34 #import "AuthenticationChallenge.h"
35 #import "BlockExceptions.h"
36 #import "CDMSessionAVFoundationObjC.h"
37 #import "ExceptionCodePlaceholder.h"
38 #import "FloatConversion.h"
39 #import "FloatConversion.h"
40 #import "FrameView.h"
41 #import "GraphicsContext.h"
42 #import "GraphicsContextCG.h"
43 #import "InbandTextTrackPrivateAVFObjC.h"
44 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
45 #import "OutOfBandTextTrackPrivateAVF.h"
46 #import "URL.h"
47 #import "Logging.h"
48 #import "PlatformTimeRanges.h"
49 #import "SecurityOrigin.h"
50 #import "SoftLinking.h"
51 #import "TextTrackRepresentation.h"
52 #import "UUID.h"
53 #import "VideoTrackPrivateAVFObjC.h"
54 #import "WebCoreAVFResourceLoader.h"
55 #import "WebCoreSystemInterface.h"
56 #import <objc/runtime.h>
57 #import <runtime/DataView.h>
58 #import <runtime/JSCInlines.h>
59 #import <runtime/TypedArrayInlines.h>
60 #import <runtime/Uint16Array.h>
61 #import <runtime/Uint32Array.h>
62 #import <runtime/Uint8Array.h>
63 #import <wtf/CurrentTime.h>
64 #import <wtf/Functional.h>
65 #import <wtf/text/CString.h>
66 #import <wtf/text/StringBuilder.h>
67
68 #if ENABLE(AVF_CAPTIONS)
69 #include "TextTrack.h"
70 #endif
71
72 #import <AVFoundation/AVFoundation.h>
73 #if PLATFORM(IOS)
74 #import <CoreImage/CoreImage.h>
75 #else
76 #import <QuartzCore/CoreImage.h>
77 #endif
78 #import <CoreMedia/CoreMedia.h>
79
80 #if USE(VIDEOTOOLBOX)
81 #import <CoreVideo/CoreVideo.h>
82 #import <VideoToolbox/VideoToolbox.h>
83 #endif
84
85 #if ENABLE(AVF_CAPTIONS)
86 // Note: This must be defined before our SOFT_LINK macros:
87 @class AVMediaSelectionOption;
88 @interface AVMediaSelectionOption (OutOfBandExtensions)
89 @property (nonatomic, readonly) NSString* outOfBandSource;
90 @property (nonatomic, readonly) NSString* outOfBandIdentifier;
91 @end
92 #endif
93
94 #if PLATFORM(IOS)
95 @class AVPlayerItem;
96 @interface AVPlayerItem (WebKitExtensions)
97 @property (nonatomic, copy) NSString* dataYouTubeID;
98 @end
99 #endif
100
101 typedef AVMetadataItem AVMetadataItemType;
102
103 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
104 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreMedia)
105 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
106 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreVideo)
107
108 #if USE(VIDEOTOOLBOX)
109 SOFT_LINK_FRAMEWORK_OPTIONAL(VideoToolbox)
110 #endif
111
112 SOFT_LINK(CoreMedia, CMTimeCompare, int32_t, (CMTime time1, CMTime time2), (time1, time2))
113 SOFT_LINK(CoreMedia, CMTimeMakeWithSeconds, CMTime, (Float64 seconds, int32_t preferredTimeScale), (seconds, preferredTimeScale))
114 SOFT_LINK(CoreMedia, CMTimeGetSeconds, Float64, (CMTime time), (time))
115 SOFT_LINK(CoreMedia, CMTimeRangeGetEnd, CMTime, (CMTimeRange range), (range))
116
117 SOFT_LINK(CoreVideo, CVPixelBufferGetWidth, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
118 SOFT_LINK(CoreVideo, CVPixelBufferGetHeight, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
119 SOFT_LINK(CoreVideo, CVPixelBufferGetBaseAddress, void*, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
120 SOFT_LINK(CoreVideo, CVPixelBufferGetBytesPerRow, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
121 SOFT_LINK(CoreVideo, CVPixelBufferGetDataSize, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
122 SOFT_LINK(CoreVideo, CVPixelBufferGetPixelFormatType, OSType, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
123 SOFT_LINK(CoreVideo, CVPixelBufferLockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
124 SOFT_LINK(CoreVideo, CVPixelBufferUnlockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
125
126 #if USE(VIDEOTOOLBOX)
127 SOFT_LINK(VideoToolbox, VTPixelTransferSessionCreate, OSStatus, (CFAllocatorRef allocator, VTPixelTransferSessionRef *pixelTransferSessionOut), (allocator, pixelTransferSessionOut))
128 SOFT_LINK(VideoToolbox, VTPixelTransferSessionTransferImage, OSStatus, (VTPixelTransferSessionRef session, CVPixelBufferRef sourceBuffer, CVPixelBufferRef destinationBuffer), (session, sourceBuffer, destinationBuffer))
129 #endif
130
131 SOFT_LINK_CLASS(AVFoundation, AVPlayer)
132 SOFT_LINK_CLASS(AVFoundation, AVPlayerItem)
133 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemVideoOutput)
134 SOFT_LINK_CLASS(AVFoundation, AVPlayerLayer)
135 SOFT_LINK_CLASS(AVFoundation, AVURLAsset)
136 SOFT_LINK_CLASS(AVFoundation, AVAssetImageGenerator)
137 SOFT_LINK_CLASS(AVFoundation, AVMetadataItem)
138
139 SOFT_LINK_CLASS(CoreImage, CIContext)
140 SOFT_LINK_CLASS(CoreImage, CIImage)
141
142 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
143 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
144 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
145 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
146 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
147 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
148 SOFT_LINK_POINTER(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
149 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
150 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
151 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
152 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
153 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
154 SOFT_LINK_POINTER(CoreVideo, kCVPixelBufferPixelFormatTypeKey, NSString *)
155
156 SOFT_LINK_CONSTANT(CoreMedia, kCMTimeZero, CMTime)
157
158 #define AVPlayer getAVPlayerClass()
159 #define AVPlayerItem getAVPlayerItemClass()
160 #define AVPlayerLayer getAVPlayerLayerClass()
161 #define AVURLAsset getAVURLAssetClass()
162 #define AVAssetImageGenerator getAVAssetImageGeneratorClass()
163 #define AVMetadataItem getAVMetadataItemClass()
164
165 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
166 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
167 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
168 #define AVMediaTypeVideo getAVMediaTypeVideo()
169 #define AVMediaTypeAudio getAVMediaTypeAudio()
170 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
171 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
172 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
173 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
174 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
175 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
176 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
177 #define kCVPixelBufferPixelFormatTypeKey getkCVPixelBufferPixelFormatTypeKey()
178
179 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
180 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
181 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
182
183 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
184 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
185 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
186
187 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
188 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
189 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
190 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
191
192 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
193 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
194 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
195 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
196 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
197 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
198 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
199 #endif
200
201 #if ENABLE(AVF_CAPTIONS)
202 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
203 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
204 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
205 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
206 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
207 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
208 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
209 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
210 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
211
212 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
213 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
214 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
215 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
216 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
217 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
218 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
219 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
220 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
221 #endif
222
223 #define kCMTimeZero getkCMTimeZero()
224
225 using namespace WebCore;
226
227 enum MediaPlayerAVFoundationObservationContext {
228     MediaPlayerAVFoundationObservationContextPlayerItem,
229     MediaPlayerAVFoundationObservationContextPlayer
230 };
231
232 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
233 @interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
234 #else
235 @interface WebCoreAVFMovieObserver : NSObject
236 #endif
237 {
238     MediaPlayerPrivateAVFoundationObjC* m_callback;
239     int m_delayCallbacks;
240 }
241 -(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
242 -(void)disconnect;
243 -(void)metadataLoaded;
244 -(void)didEnd:(NSNotification *)notification;
245 -(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
246 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
247 - (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
248 - (void)outputSequenceWasFlushed:(id)output;
249 #endif
250 @end
251
252 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
253 @interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
254     MediaPlayerPrivateAVFoundationObjC* m_callback;
255 }
256 - (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
257 - (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
258 - (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
259 @end
260 #endif
261
262 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
263 @interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
264     MediaPlayerPrivateAVFoundationObjC *m_callback;
265     dispatch_semaphore_t m_semaphore;
266 }
267 - (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
268 - (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
269 - (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
270 - (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
271 @end
272 #endif
273
274 namespace WebCore {
275
276 static NSArray *assetMetadataKeyNames();
277 static NSArray *itemKVOProperties();
278
279 #if !LOG_DISABLED
// Returns a static human-readable string for a boolean, for LOG() output.
static const char *boolString(bool val)
{
    static const char *const strings[] = { "false", "true" };
    return strings[val ? 1 : 0];
}
284 #endif
285
286 #if ENABLE(ENCRYPTED_MEDIA_V2)
typedef HashMap<MediaPlayer*, MediaPlayerPrivateAVFoundationObjC*> PlayerToPrivateMapType;
// Global map from a public MediaPlayer to its AVFoundation-private
// implementation; entries are added in the constructor and removed in the
// destructor. Used by encrypted-media (CDM) code to locate the private player.
// Note: removed the stray semicolon that followed the function's closing
// brace (an extra empty declaration at namespace scope).
static PlayerToPrivateMapType& playerToPrivateMap()
{
    DEPRECATED_DEFINE_STATIC_LOCAL(PlayerToPrivateMapType, map, ());
    return map;
}
293 #endif
294
295 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Lazily creates the single serial queue on which the AVAssetResourceLoader
// delegate callbacks are delivered.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t queue;
    static dispatch_once_t once;
    dispatch_once(&once, ^{
        queue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return queue;
}
305 #endif
306
307 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Lazily creates the single serial queue on which AVPlayerItemVideoOutput
// pull-delegate callbacks are delivered.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_queue_t queue;
    static dispatch_once_t once;
    dispatch_once(&once, ^{
        queue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return queue;
}
317 #endif
318
// Factory entry point handed to the MediaEngineRegistrar; creates a new
// private player for the given MediaPlayer.
// Fix: the opening brace of the function body was missing (source line 320
// was empty), which made this and everything after it fail to parse.
PassOwnPtr<MediaPlayerPrivateInterface> MediaPlayerPrivateAVFoundationObjC::create(MediaPlayer* player)
{
    return adoptPtr(new MediaPlayerPrivateAVFoundationObjC(player));
}
323
// Registers this engine with MediaPlayer if AVFoundation can be soft-linked
// on this system; otherwise does nothing.
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    registrar(create, getSupportedTypes, supportsType, 0, 0, 0, supportsKeySystem);
}
329
// Constructs the AVFoundation-backed media player. The heavyweight
// AVFoundation objects (AVURLAsset, AVPlayer, AVPlayerItem) are created
// lazily during load, not here; this only creates the Objective-C
// observer/delegate helpers and initializes cached state to defaults.
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_weakPtrFactory(this)
#if PLATFORM(IOS)
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
#endif
    // Receives KVO callbacks and NSNotifications on our behalf.
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
    , m_videoOutputSemaphore(nullptr)
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
#endif
    , m_currentTrack(0)
    , m_cachedDuration(MediaPlayer::invalidTime())
    , m_cachedRate(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
#if ENABLE(IOS_AIRPLAY)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    // Register in the global map so CDM session code can find this player.
    playerToPrivateMap().set(player, this);
#endif
}
363
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    playerToPrivateMap().remove(player());
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Detach the resource-loader delegate first so in-flight loading
    // callbacks on the loader queue cannot reach a dead player.
    [m_loaderDelegate.get() setCallback:0];
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    [m_videoOutputDelegate setCallback:0];
    [m_videoOutput setDelegate:nil queue:0];
    // Explicit dispatch_release: assumes this file builds without ARC/GCD
    // object lifetime management — TODO confirm against build flags.
    if (m_videoOutputSemaphore)
        dispatch_release(m_videoOutputSemaphore);
#endif
    // Releases the asset, item, player, observers, and rendering objects.
    cancelLoad();
}
381
// Cancels any in-progress loading and releases every AVFoundation object,
// returning the player to its unloaded state. Also called from the
// destructor, so it must tolerate partially-constructed state.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::cancelLoad(%p)", this);
    tearDownVideoRendering();

    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Remove the legible (caption) output from the item before dropping it.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    // Unregister every KVO observation added in createAVPlayerItem() before
    // releasing the item; an observed object must not outlive its observers.
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        
        m_avPlayerItem = nil;
    }
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;
        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"rate"];
#if ENABLE(IOS_AIRPLAY)
        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"externalPlaybackActive"];
#endif
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedTracks = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = 0;

    setIgnoreLoadStateChanges(false);
}
436
// True when an AVPlayerLayer exists to composite video directly.
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_videoLayer ? true : false;
}
441
// True when a non-layer rendering path is available for painting frames
// into a GraphicsContext: either an AVPlayerItemVideoOutput (preferred)
// or the AVAssetImageGenerator fallback.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (m_videoOutput)
        return true;
#endif
    return m_imageGenerator ? true : false;
}
450
// Creates whichever context-painting renderer this build supports:
// an AVPlayerItemVideoOutput when available, otherwise the slower
// AVAssetImageGenerator path.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
459
// Lazily creates the AVAssetImageGenerator used as the software fallback
// for painting video frames into a graphics context. No-op until an asset
// exists, and no-op if the generator was already created.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p)", this);

    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    // Zero tolerance both ways: we want the frame at exactly the requested time.
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
}
476
// Tears down all context renderers. destroyImageGenerator() is called
// unconditionally because both rendering paths may have existed over time.
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
#endif
    destroyImageGenerator();
}
484
// Releases the AVAssetImageGenerator, if any.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (!m_imageGenerator)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator(%p) - destroying  %p", this, m_imageGenerator.get());

    // nullptr (not 0) for smart-pointer reset, matching the rest of this file
    // (see the cached-range resets in cancelLoad()).
    m_imageGenerator = nullptr;
}
494
// Lazily creates the AVPlayerLayer used for hardware-composited rendering.
// The actual creation is bounced to the main thread; a weak pointer guards
// against this player being destroyed before the callback runs.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_videoLayer)
        return;

    auto weakThis = createWeakPtr();
    callOnMainThread([this, weakThis] {
        if (!weakThis)
            return;

        // Re-check: state may have changed between scheduling and execution.
        if (!m_avPlayer || m_videoLayer)
            return;

        m_videoLayer = adoptNS([[AVPlayerLayer alloc] init]);
        [m_videoLayer.get() setPlayer:m_avPlayer.get()];
        [m_videoLayer.get() setBackgroundColor:cachedCGColor(Color::black, ColorSpaceDeviceRGB)];
#ifndef NDEBUG
        [m_videoLayer.get() setName:@"Video layer"];
#endif
        updateVideoLayerGravity();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoLayer(%p) - returning %p", this, m_videoLayer.get());

#if PLATFORM(IOS)
        // If fullscreen was entered before the layer existed, attach it now.
        if (m_videoFullscreenLayer) {
            [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
            [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
        }
#endif
        // Tell the client we can now render via the layer path.
        player()->mediaPlayerClient()->mediaPlayerRenderingModeChanged(player());
    });
}
526
// Detaches the AVPlayerLayer from its player and releases it.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer(%p) - destroying %p", this, m_videoLayer.get());

    // Break the layer -> player association before dropping our reference.
    [m_videoLayer.get() setPlayer:nil];

    // nullptr (not 0) for smart-pointer reset, matching the rest of this file.
    m_videoLayer = nullptr;
}
538
// Reports whether a displayable video frame exists: when rendering through
// the layer we ask the layer itself; otherwise we report whether a frame
// has ever been painted through the context path.
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    if (currentRenderingMode() != MediaRenderingToLayer)
        return m_videoFrameHasDrawn;

    return m_videoLayer && [m_videoLayer.get() isReadyForDisplay];
}
546
547 #if ENABLE(AVF_CAPTIONS)
// Maps a WebCore text-track kind to the AVFoundation media-characteristics
// array used when registering out-of-band tracks with an AVURLAsset.
// FIXME: Match these to correct types:
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    switch (kind) {
    case PlatformTextTrack::Description:
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, nil];
    case PlatformTextTrack::Forced:
        return [NSArray arrayWithObjects: AVMediaCharacteristicContainsOnlyForcedSubtitles, nil];
    case PlatformTextTrack::Caption:
    case PlatformTextTrack::Subtitle:
    default:
        // Captions, subtitles, and any unknown kind share the same mapping today.
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];
    }
}
565     
// Entry point invoked when an out-of-band track's mode changes; forwards to
// the base class so media-selection state can be resynchronized.
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
570     
// Pushes the mode (showing/hidden/disabled) of each out-of-band track source
// onto the matching OutOfBandTextTrackPrivateAVF. Sources and tracks are
// matched by the stringified unique ID stored in the AVMediaSelectionOption's
// out-of-band identifier.
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    
    for (auto& textTrack : m_textTracks) {
        // Only out-of-band tracks are driven by PlatformTextTrack sources.
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;
        
        RefPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> currentOption = trackPrivate->mediaSelectionOption();
        
        for (auto& track : outOfBandTrackSources) {
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();
            
            if (![[currentOption.get() outOfBandIdentifier] isEqual: reinterpret_cast<const NSString*>(uniqueID.get())])
                continue;
            
            // Translate the PlatformTextTrack mode enum to the inband enum;
            // unknown modes fall back to Hidden.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;
            
            textTrack->setMode(mode);
            break;
        }
    }
}
601 #endif
602
// Creates the AVURLAsset for the given URL, passing reference restrictions,
// HTTP header fields (Referer / User-Agent), optional iTunes query-component
// inheritance, and any out-of-band caption tracks as creation options.
// Fix: the out-of-band NSMutableArray was created with a bare +1 alloc and
// never released (this file is compiled without ARC); it now uses adoptNS so
// the RetainPtr releases it.
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
{
    if (m_avAsset)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(%p)", this);

    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);

    // Do not allow local<->remote cross-references inside the media resource.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject: [NSNumber numberWithBool: TRUE] forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

#if ENABLE(AVF_CAPTIONS)
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        // adoptNS balances the +1 alloc; the previous bare pointer leaked.
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:
                [NSDictionary dictionaryWithObjectsAndKeys:
                    reinterpret_cast<const NSString*>(label.get()), AVOutOfBandAlternateTrackDisplayNameKey,
                    reinterpret_cast<const NSString*>(language.get()), AVOutOfBandAlternateTrackExtendedLanguageTagKey,
                    [NSNumber numberWithBool: (trackSource->isDefault() ? YES : NO)], AVOutOfBandAlternateTrackIsDefaultKey,
                    reinterpret_cast<const NSString*>(uniqueID.get()), AVOutOfBandAlternateTrackIdentifierKey,
                    reinterpret_cast<const NSString*>(url.get()), AVOutOfBandAlternateTrackSourceKey,
                    mediaDescriptionForKind(trackSource->kind()), AVOutOfBandAlternateTrackMediaCharactersticsKey,
                    nil]];
        }

        [options.get() setObject: outOfBandTracks.get() forKey: AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif
    
    NSURL *cocoaURL = URL(ParsedURLString, url);
    m_avAsset = adoptNS([[AVURLAsset alloc] initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Route custom-scheme / blocked-resource loads through our delegate.
    [[m_avAsset.get() resourceLoader] setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];
#endif

    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
667
// Lazily creates the AVPlayer, installs KVO observers (rate, and AirPlay
// external-playback state where enabled), and attaches any player item that
// was created first. Observers are removed in cancelLoad().
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayer(%p)", this);

    setDelayCallbacks(true);

    m_avPlayer = adoptNS([[AVPlayer alloc] init]);
    [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"rate" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
#if ENABLE(IOS_AIRPLAY)
    [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"externalPlaybackActive" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Let AVFoundation pick media selections (e.g. captions) automatically.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:YES];
#endif

#if ENABLE(IOS_AIRPLAY)
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

    // The item may have been created before the player; attach it now.
    if (m_avPlayerItem)
        [m_avPlayer.get() replaceCurrentItemWithPlayerItem:m_avPlayerItem.get()];

    setDelayCallbacks(false);
}
696
// Lazily creates the AVPlayerItem for the current asset, registers for its
// did-play-to-end notification and KVO properties, attaches it to the player
// if one exists, and configures legible (caption) output.
// Fix: removed a stray leading space before the PLATFORM(IOS) #endif.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem(%p)", this);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([[AVPlayerItem alloc] initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // Observe every property in itemKVOProperties(); removed in cancelLoad().
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    // The player may have been created before the item; attach it now.
    if (m_avPlayer)
        [m_avPlayer.get() replaceCurrentItemWithPlayerItem:m_avPlayerItem.get()];

#if PLATFORM(IOS)
    AtomicString value;
    if (player()->doesHaveAttribute("data-youtube-id", &value))
        [m_avPlayerItem.get() setDataYouTubeID: value];
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    m_legibleOutput = adoptNS([[AVPlayerItemLegibleOutput alloc] initWithMediaSubtypesForNativeRepresentation:[NSArray array]]);
    // WebCore renders captions itself; keep AVFoundation from drawing them too.
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

    setDelayCallbacks(false);
}
738
// Issues the asynchronous "playable" query against the asset, exactly once,
// and posts AssetPlayabilityKnown back on the main thread when it completes.
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::checkPlayability(%p)", this);
    auto weakThis = createWeakPtr();

    NSArray *keys = [NSArray arrayWithObject:@"playable"];
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:keys completionHandler:^{
        callOnMainThread([weakThis] {
            if (!weakThis)
                return;
            weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
755
// Kicks off asynchronous loading of the asset metadata keys; the observer is
// notified when loading finishes.
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);
    // loadValuesAsynchronouslyForKeys: retains its keys argument for the
    // duration of the operation; the previous unbalanced -retain was a leak.
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{
        [m_objcObserver.get() metadataLoaded];
    }];
}
763
// Maps the cached AVPlayerItem status (plus buffering flags) onto the
// cross-platform ItemStatus enumeration.
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    switch (m_cachedItemStatus) {
    case AVPlayerItemStatusUnknown:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    case AVPlayerItemStatusFailed:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    default:
        break;
    }

    // Status is "ready"; refine it using the cached buffering state, most
    // optimistic first.
    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
782
// Exposes the underlying AVPlayer to clients that need the platform object.
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformMedia(%p)", this);
    PlatformMedia media;
    media.type = PlatformMedia::AVFoundationMediaPlayerType;
    media.media.avfMediaPlayer = m_avPlayer.get();
    return media;
}
791
// The layer the compositor should host when video is rendered to a layer
// (may be null before the video layer is created).
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    return m_videoLayer.get();
}
796
797 #if PLATFORM(IOS)
// Moves the video layer (and any text track representation layer) into the
// given fullscreen container layer, or detaches them when it is cleared.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer)
{
    if (m_videoFullscreenLayer == videoFullscreenLayer)
        return;

    // Detach the video layer from the previous fullscreen container first.
    if (m_videoFullscreenLayer)
       [m_videoLayer removeFromSuperlayer];

    m_videoFullscreenLayer = videoFullscreenLayer;

    CGRect frame = CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height());

    // The video layer goes at the bottom of the container's sublayer stack.
    if (m_videoFullscreenLayer && m_videoLayer) {
        [m_videoLayer setFrame:frame];
        [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
    }

    // Captions sit above the video, sized to the video's displayed rect when a
    // video layer exists, otherwise to the full fullscreen frame.
    if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
        CGRect textFrame = m_videoLayer ? [m_videoLayer videoRect] : frame;
        [m_textTrackRepresentationLayer setFrame:textFrame];
        [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
    }
}
821
// Resizes the fullscreen video and caption layers to match the new frame.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenFrame = frame;
    if (!m_videoFullscreenLayer)
        return;

    if (m_videoLayer) {
        CGRect videoBounds = CGRectMake(0, 0, frame.width(), frame.height());
        [m_videoLayer setFrame:videoBounds];
    }

    if (!m_textTrackRepresentationLayer)
        return;

    // Track the video's displayed rect when a video layer exists.
    CGRect textFrame = m_videoLayer ? [m_videoLayer videoRect] : static_cast<CGRect>(frame);
    [m_textTrackRepresentationLayer setFrame:textFrame];
}
836
// Translates the cross-platform gravity value to the AVPlayerLayer constant
// and applies it to the video layer.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;
    if (!m_videoLayer)
        return;

    NSString *layerGravity;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        layerGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        layerGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        layerGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
        // Fall back to aspect-preserving resize, matching the old default.
        layerGravity = AVLayerVideoGravityResizeAspect;
        break;
    }

    [m_videoLayer setVideoGravity:layerGravity];
}
855
// Returns the most recently delivered timed metadata, or nil if none.
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    return m_currentMetaData ? m_currentMetaData.get() : nil;
}
862
// Returns the player item's access log as a string, or the empty string when
// no item or no log data is available.
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemAccessLog *log = [m_avPlayerItem.get() accessLog];
    NSData *logData = [log extendedLogData];
    // -[NSString initWithData:encoding:] does not accept nil data, and the
    // log (or its extended data) may legitimately be unavailable.
    if (!logData)
        return emptyString();

    RetainPtr<NSString> logString = adoptNS([[NSString alloc] initWithData:logData encoding:[log extendedLogDataStringEncoding]]);
    return logString.get();
}
873
// Returns the player item's error log as a string, or the empty string when
// no item or no log data is available.
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *log = [m_avPlayerItem.get() errorLog];
    NSData *logData = [log extendedLogData];
    // -[NSString initWithData:encoding:] does not accept nil data, and the
    // log (or its extended data) may legitimately be unavailable.
    if (!logData)
        return emptyString();

    RetainPtr<NSString> logString = adoptNS([[NSString alloc] initWithData:logData encoding:[log extendedLogDataStringEncoding]]);
    return logString.get();
}
884 #endif
885
// Shows or hides the video layer without triggering implicit CA animations.
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    BOOL hidden = isVisible ? NO : YES;
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    if (m_videoLayer)
        [m_videoLayer.get() setHidden:hidden];
    [CATransaction commit];
}
894     
// Starts playback by pushing the requested rate to the AVPlayer, suppressing
// the KVO callbacks the rate change generates.
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPlay(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    float rate = requestedRate();
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
906
// Pauses playback by setting the AVPlayer rate to zero, suppressing the KVO
// callbacks the rate change generates.
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPause(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = 0;
    // -setRate: takes a float; pass 0, not nil (nil is a pointer constant).
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
918
// Reports the media duration in seconds, infinity for indefinite streams, or
// MediaPlayer::invalidTime() when no duration is known.
float MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will
    // fetch the answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaPlayer::invalidTime();

    // Prefer the player item's duration once it is ready to play; some assets
    // never report a duration of their own.
    CMTime duration = (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        ? [m_avPlayerItem.get() duration]
        : [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(duration))
        return narrowPrecisionToFloat(CMTimeGetSeconds(duration));

    if (CMTIME_IS_INDEFINITE(duration))
        return std::numeric_limits<float>::infinity();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %.0f", this, MediaPlayer::invalidTime());
    return MediaPlayer::invalidTime();
}
944
// Returns the current playback position in seconds, clamped at zero.
float MediaPlayerPrivateAVFoundationObjC::currentTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return 0;

    CMTime itemTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(itemTime))
        return 0;

    // Clamp tiny negative values that can be reported near the start.
    return std::max(narrowPrecisionToFloat(CMTimeGetSeconds(itemTime)), 0.0f);
}
956
// Seeks the player item to `time` within the given tolerances and reports
// completion back on the main thread via seekCompleted().
void MediaPlayerPrivateAVFoundationObjC::seekToTime(double time, double negativeTolerance, double positiveTolerance)
{
    // Seeking generates several event callbacks; hold them until we are done.
    setDelayCallbacks(true);

    CMTime target = CMTimeMakeWithSeconds(time, 600);
    CMTime toleranceBefore = CMTimeMakeWithSeconds(negativeTolerance, 600);
    CMTime toleranceAfter = CMTimeMakeWithSeconds(positiveTolerance, 600);

    auto weakThis = createWeakPtr();

    [m_avPlayerItem.get() seekToTime:target toleranceBefore:toleranceBefore toleranceAfter:toleranceAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            // The player may have been destroyed before the seek finished.
            if (auto strongThis = weakThis.get())
                strongThis->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
980
// Forwards the volume (0..1 per MediaPlayer convention) to the AVPlayer.
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
    // Ignore volume changes before metadata is available.
    if (!metaDataAvailable())
        return;

    [m_avPlayer.get() setVolume:volume];
}
988
// Caption visibility is managed elsewhere in this engine; this override only
// logs the request.
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    // The parameter is only consumed by the LOG below, which compiles away in
    // release builds; UNUSED_PARAM silences the resulting warning.
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - set to %s", this, boolString(closedCaptionsVisible));
}
998
// Re-applies the rate requested by the cross-platform layer to the AVPlayer,
// suppressing the KVO callbacks the rate change generates.
void MediaPlayerPrivateAVFoundationObjC::updateRate()
{
    setDelayCallbacks(true);
    float rate = requestedRate();
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1006
// Reports the cached playback rate (updated alongside every setRate: call),
// or 0 before metadata is available.
float MediaPlayerPrivateAVFoundationObjC::rate() const
{
    return metaDataAvailable() ? m_cachedRate : 0;
}
1014
// Converts the cached AVPlayerItem loadedTimeRanges into PlatformTimeRanges.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto ranges = PlatformTimeRanges::create();

    if (!m_avPlayerItem)
        return ranges;

    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        float start = narrowPrecisionToFloat(CMTimeGetSeconds(range.start));
        float end = narrowPrecisionToFloat(CMTimeGetSeconds(CMTimeRangeGetEnd(range)));
        ranges->add(start, end);
    }
    return ranges;
}
1032
// Returns the earliest seekable time across the cached seekable ranges, or 0
// when no valid range exists.
double MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return 0;

    bool foundValidRange = false;
    double earliest = std::numeric_limits<double>::infinity();
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundValidRange = true;
        double rangeStart = CMTimeGetSeconds(range.start);
        if (rangeStart < earliest)
            earliest = rangeStart;
    }
    return foundValidRange ? earliest : 0;
}
1052
// Returns the latest seekable time across the seekable ranges, fetching them
// from the player item if the KVO cache has not been populated yet.
double MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    double latest = 0;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        double rangeEnd = CMTimeGetSeconds(CMTimeRangeGetEnd(range));
        if (rangeEnd > latest)
            latest = rangeEnd;
    }
    return latest;
}
1070
// Returns the latest buffered time across the cached loaded ranges.
float MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
#if !PLATFORM(IOS) && __MAC_OS_X_VERSION_MIN_REQUIRED <= 1080
    // AVFoundation on Mountain Lion will occasionally not send a KVO notification
    // when loadedTimeRanges changes when there is no video output. In that case
    // update the cached value explicitly.
    if (!hasLayerRenderer() && !hasContextRenderer())
        m_cachedLoadedRanges = [m_avPlayerItem loadedTimeRanges];
#endif

    if (!m_cachedLoadedRanges)
        return 0;

    float latest = 0;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        float rangeEnd = narrowPrecisionToFloat(CMTimeGetSeconds(CMTimeRangeGetEnd(range)));
        if (rangeEnd > latest)
            latest = rangeEnd;
    }

    return latest;
}
1097
// Sums the sample data length of every cached track.
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    // Accumulate in an unsigned value matching the return type; the previous
    // signed accumulator relied on an implicit sign conversion at return.
    unsigned long long totalMediaSize = 0;
    for (AVPlayerItemTrack *thisTrack in m_cachedTracks.get())
        totalMediaSize += [[thisTrack assetTrack] totalSampleDataLength];

    return totalMediaSize;
}
1109
// Adopts the AVAsset created by the loading machinery (nil clears it).
void MediaPlayerPrivateAVFoundationObjC::setAsset(id asset)
{
    m_avAsset = asset;
}
1114
// Computes the aggregate loading status of the asset's metadata keys: the
// most conservative of the individual key statuses, refined to "playable"
// when the asset reports it can play.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    for (NSString *keyName in assetMetadataKeyNames()) {
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:nil];

        // At least one key has not finished loading yet.
        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading;

        // At least one key could not be loaded.
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed;

        // Loading of at least one key was cancelled.
        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled;
    }

    bool isPlayable = [[m_avAsset.get() valueForKey:@"playable"] boolValue];
    return isPlayable ? MediaPlayerAVAssetStatusPlayable : MediaPlayerAVAssetStatusLoaded;
}
1138
// Software-paints the current video frame into the graphics context,
// preferring the video-output path when a decoded frame is available.
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext* context, const IntRect& rect)
{
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

    // NOTE: when video output is compiled in, the dangling "else" below pairs
    // with the paintWithImageGenerator() call after the #endif.
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    // Record that at least one frame has been painted.
    m_videoFrameHasDrawn = true;
}
1159
// MediaPlayerPrivate paint entry point for the software rendering path.
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext* context, const IntRect& rect)
{
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    // We can ignore the request if we are already rendering to a layer.
    if (currentRenderingMode() == MediaRenderingToLayer)
        return;

    paintCurrentFrameInContext(context, rect);
}
1171
// Paints the frame at the current time using AVAssetImageGenerator (the
// fallback path when no video output frame is available).
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext* context, const IntRect& rect)
{
    RetainPtr<CGImageRef> image = createImageForTimeInRect(currentTime(), rect);
    if (!image)
        return;

    GraphicsContextStateSaver stateSaver(*context);
    // CGContextDrawImage uses a flipped coordinate system relative to the
    // GraphicsContext; translate to the bottom of the rect and flip vertically.
    context->translate(rect.x(), rect.y() + rect.height());
    context->scale(FloatSize(1.0f, -1.0f));
    context->setImageInterpolationQuality(InterpolationLow);
    IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context->platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
    // No manual release needed: RetainPtr drops the image at scope exit (the
    // old explicit "image = 0" reset was redundant).
}
1185
// Returns the set of MIME types AVFoundation reports it can play. The set is
// built on first use and cached for the life of the process.
static HashSet<String> mimeTypeCache()
{
    DEPRECATED_DEFINE_STATIC_LOCAL(HashSet<String>, cache, ());
    static bool typeListInitialized = false;

    if (typeListInitialized)
        return cache;
    typeListInitialized = true;

    NSArray *types = [AVURLAsset audiovisualMIMETypes];
    for (NSString *mimeType in types)
        cache.add(mimeType);

    return cache;
}

// Synchronously snapshots the movie at `time`, constrained to fit `rect`,
// using the lazily-created AVAssetImageGenerator.
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const IntRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    // Re-wrap the snapshot in the device RGB color space before returning it.
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), deviceRGBColorSpaceRef()));

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return image;
}
1223
// MediaPlayer factory callback: reports every MIME type this engine handles.
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String>& supportedTypes)
{
    supportedTypes = mimeTypeCache();
}

1229 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// Only the FairPlay Streaming key system identifiers are recognized.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringCase(keySystem, "com.apple.fps")
        || equalIgnoringCase(keySystem, "com.apple.fps.1_0");
}
1236 #endif
1237
// Implements the engine's canPlayType() answer for the given container,
// codecs, and (optionally) EME key system.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(ENCRYPTED_MEDIA)
    // From: <http://dvcs.w3.org/hg/html-media/raw-file/eme-v0.1b/encrypted-media/encrypted-media.html#dom-canplaytype>
    // In addition to the steps in the current specification, this method must run the following steps:

    // 1. Check whether the Key System is supported with the specified container and codec type(s) by following the steps for the first matching condition from the following list:
    //    If keySystem is null, continue to the next step.
    if (!parameters.keySystem.isNull() && !parameters.keySystem.isEmpty()) {
        // If keySystem contains an unrecognized or unsupported Key System, return the empty string
        if (!keySystemIsSupported(parameters.keySystem))
            return MediaPlayer::IsNotSupported;

        // If the Key System specified by keySystem does not support decrypting the container and/or codec specified in the rest of the type string.
        // (AVFoundation does not provide an API which would allow us to determine this, so this is a no-op)
    }

    // 2. Return "maybe" or "probably" as appropriate per the existing specification of canPlayType().
#endif

#if ENABLE(MEDIA_SOURCE)
    // MSE playback is handled by a different engine.
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif

    if (!mimeTypeCache().contains(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type, (NSString *)parameters.codecs];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1274
// Reports whether this engine can handle the given EME key system, optionally
// restricted to a specific container MIME type.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (keySystem.isEmpty())
        return false;

    if (!keySystemIsSupported(keySystem))
        return false;

    if (!mimeType.isEmpty() && !mimeTypeCache().contains(mimeType))
        return false;

    return true;
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
    return false;
#endif
}
1293
1294 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// AVAssetResourceLoader delegate hook: returns true when we will service the
// request ourselves (either by surfacing a key request to the page or by
// streaming the resource through WebCore's loader).
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    // Key requests arrive with the "skd" scheme; surface them to the page as a
    // keyNeeded event instead of loading them here.
    if (scheme == "skd") {
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
        // Size prefix written little-endian.
        initDataView->set<uint32_t>(0, keyURISize, true);

        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, 4, keyURI.length());
        // NOTE(review): length / sizeof(unsigned char) == length; confirm the
        // intended element count is keyURI.length() UTF-16 code units.
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

#if ENABLE(ENCRYPTED_MEDIA)
        if (!player()->keyNeeded("com.apple.lskd", emptyString(), static_cast<const unsigned char*>(initDataBuffer->data()), initDataBuffer->byteLength()))
#elif ENABLE(ENCRYPTED_MEDIA_V2)
        RefPtr<Uint8Array> initData = Uint8Array::create(initDataBuffer, 0, initDataBuffer->byteLength());
        if (!player()->keyNeeded(initData.get()))
#endif
            return false;

        // Remember the request so a CDM session can later fulfill it.
        m_keyURIToRequestMap.set(keyURI, avRequest);
        return true;
    }
#endif

    // All other requests are streamed into AVFoundation by a dedicated loader.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1330
// Forwards media-loading authentication challenges to the MediaPlayer client.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForResponseToAuthenticationChallenge(NSURLAuthenticationChallenge* nsChallenge)
{
#if USE(CFNETWORK)
    UNUSED_PARAM(nsChallenge);
    // FIXME: <rdar://problem/15799844>
    return false;
#else
    // Wrap the platform challenge and let the client decide whether to wait.
    AuthenticationChallenge challenge(nsChallenge);

    return player()->shouldWaitForResponseToAuthenticationChallenge(challenge);
#endif
}
1343
// AVAssetResourceLoader delegate hook: the request was cancelled, so stop the
// loader feeding it, if one is still registered.
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    // (The URL scheme previously computed here was never used.)
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest))
        resourceLoader->stopLoading();
}
1353
// A loader finished with this request; drop our bookkeeping entry for it.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
1358 #endif
1359
// The engine is usable only when both soft-linked frameworks are present.
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    return AVFoundationLibrary() && CoreMediaLibrary();
}
1364
// Maps a requested time to the closest exact media sample time. Currently an
// identity mapping in both branches (see FIXME below).
float MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(float timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1373
// Applies the aspect-ratio policy to the video layer without triggering
// implicit CA animations.
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

    NSString *gravity = AVLayerVideoGravityResize;
    if (shouldMaintainAspectRatio())
        gravity = AVLayerVideoGravityResizeAspect;

    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    [m_videoLayer.get() setVideoGravity:gravity];
    [CATransaction commit];
}
1385
// Invoked whenever the asset/item track collection changes: refreshes the
// cached hasVideo/hasAudio/hasCaptions flags, reconciles the audio/video
// track wrappers, and fires characteristicsChanged if the primary audio
// language changed.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the previous primary audio language so we can detect a change.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        setHasVideo([[m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual] count]);
        setHasAudio([[m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible] count]);
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif
    } else {
        bool hasVideo = false;
        bool hasAudio = false;
        // With a player item, only the enabled tracks count.
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                if ([[assetTrack mediaType] isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([[assetTrack mediaType] isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                }
            }
        }
        setHasVideo(hasVideo);
        setHasAudio(hasAudio);


#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // With media selection groups, caption availability comes from the
    // legible selection group rather than from closed-caption tracks.
    if (AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia()) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Fall back to legacy closed-caption handling when no selection options exist.
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    LOG(Media, "WebCoreAVFMovieObserver:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s",
        this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

    setDelayCharacteristicsChangedNotification(false);
}
1465
1466 #if ENABLE(VIDEO_TRACK)
// Diffs the player item's current tracks of `trackType` against `oldItems`
// (the wrappers created previously), creates wrappers for newly-added tracks,
// updates `oldItems` in place, and notifies the MediaPlayer of removals and
// additions via the supplied member-function pointers.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Tracks of the requested media type currently present in the item.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    // Tracks our existing wrappers point at.
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    typedef Vector<RefT> ItemVector;
    for (auto i = oldItems.begin(); i != oldItems.end(); ++i)
        [oldTracks addObject:(*i)->playerItemTrack()];

    // removed = old - new; added = new - old.
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    // Partition existing wrappers into survivors and those whose track is gone.
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto i = oldItems.begin(); i != oldItems.end(); ++i) {
        if ([removedTracks containsObject:(*i)->playerItemTrack()])
            removedItems.append(*i);
        else
            replacementItems.append(*i);
    }

    // Create wrappers for the brand-new tracks.
    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the client only after the bookkeeping above is consistent.
    for (auto i = removedItems.begin(); i != removedItems.end(); ++i)
        (player->*removedFunction)(*i);

    for (auto i = addedItems.begin(); i != addedItems.end(); ++i)
        (player->*addedFunction)(*i);
}
1507
// Diffs the cached player-item tracks against m_audioTracks and notifies the
// MediaPlayer of added/removed audio tracks.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
}
1512
// Diffs the cached player-item tracks against m_videoTracks and notifies the
// MediaPlayer of added/removed video tracks.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
}
1517
// A platform-rendered text track representation is only needed on iOS while a
// fullscreen video layer is installed; everywhere else WebCore renders captions.
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
#if PLATFORM(IOS)
    return !!m_videoFullscreenLayer;
#else
    return false;
#endif
}
1526
// Installs (or removes) the platform layer that renders text tracks while in
// iOS fullscreen. The layer is re-parented under m_videoFullscreenLayer and
// sized to the video rect. No-op on non-iOS platforms.
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
#if PLATFORM(IOS)
    PlatformLayer* representationLayer = representation ? representation->platformLayer() : nil;
    if (representationLayer == m_textTrackRepresentationLayer)
        return;

    // Detach the old layer before replacing it so it does not linger in the tree.
    if (m_textTrackRepresentationLayer)
        [m_textTrackRepresentationLayer removeFromSuperlayer];

    m_textTrackRepresentationLayer = representationLayer;

    if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
        // Prefer the actual video rect; fall back to the fullscreen frame when
        // no video layer exists yet.
        CGRect textFrame = m_videoLayer ? [m_videoLayer videoRect] : CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height());

        [m_textTrackRepresentationLayer setFrame:textFrame];
        [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
    }

#else
    UNUSED_PARAM(representation);
#endif
}
1550 #endif // ENABLE(VIDEO_TRACK)
1551
// Pushes the cached presentation size to the base class as the natural size.
// Without an asset there is nothing meaningful to report.
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (m_avAsset)
        setNaturalSize(roundedIntSize(m_cachedPresentationSize));
}
1559
#if PLATFORM(IOS)
// FIXME: Implement for iOS in WebKit System Interface.
// Stub: always returns nil on iOS, which makes the resolved origin below a
// "null" origin until the WKSI implementation lands.
static inline NSURL *wkAVAssetResolvedURL(AVAsset*)
{
    return nil;
}
#endif
1567
// Returns true when the URL the asset actually resolved to (after redirects)
// has the same scheme/host/port as the URL that was requested.
// NOTE(review): on iOS wkAVAssetResolvedURL is stubbed to nil (above), so the
// resolved origin is derived from a null URL there — confirm intended.
bool MediaPlayerPrivateAVFoundationObjC::hasSingleSecurityOrigin() const 
{
    if (!m_avAsset)
        return false;
    
    RefPtr<SecurityOrigin> resolvedOrigin = SecurityOrigin::create(URL(wkAVAssetResolvedURL(m_avAsset.get())));
    RefPtr<SecurityOrigin> requestedOrigin = SecurityOrigin::createFromString(assetURL());
    return resolvedOrigin->isSameSchemeHostPort(requestedOrigin.get());
}
1577
1578 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Lazily creates the AVPlayerItemVideoOutput used to pull decoded pixel
// buffers, attaches it to the current player item, and blocks briefly until
// the output reports media data is available (see
// waitForVideoOutputMediaDataWillChange). No-op without a player item or if
// an output already exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // 4:2:2 YCbCr; VideoToolbox converts to BGRA later in createPixelBuffer().
    NSDictionary* attributes = @{ (NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_422YpCbCr8) };
#else
    // Request BGRA directly. Dictionary literal for consistency with the
    // VideoToolbox branch above.
    NSDictionary* attributes = @{ (NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#endif
    m_videoOutput = adoptNS([[getAVPlayerItemVideoOutputClass() alloc] initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];

    waitForVideoOutputMediaDataWillChange();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p) - returning %p", this, m_videoOutput.get());
}
1603
// Detaches and releases the AVPlayerItemVideoOutput, if one exists.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput(%p) - destroying  %p", this, m_videoOutput.get());
    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];

    m_videoOutput = 0;
}
1615
// Pulls the pixel buffer for the item's current time from the video output.
// Returns null when no new buffer is available (or on allocation failure).
// With VideoToolbox enabled, the pulled 4:2:2 buffer is converted to 32BGRA
// via a lazily-created VTPixelTransferSession.
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::createPixelBuffer()
{
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);
    // createVideoOutput() is a no-op without a player item; bail gracefully in
    // release builds rather than messaging nil below and lying about state.
    if (!m_videoOutput)
        return 0;

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return 0;

    RetainPtr<CVPixelBufferRef> buffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
    if (!buffer)
        return 0;

#if USE(VIDEOTOOLBOX)
    // Create a VTPixelTransferSession, if necessary, as we cannot guarantee timely delivery of ARGB pixels.
    if (!m_pixelTransferSession) {
        VTPixelTransferSessionRef session = 0;
        VTPixelTransferSessionCreate(kCFAllocatorDefault, &session);
        m_pixelTransferSession = adoptCF(session);
    }

    // Initialize and check the result: on failure CVPixelBufferCreate leaves the
    // out-parameter untouched, so the previous code could adoptCF() garbage.
    CVPixelBufferRef outputBuffer = nullptr;
    if (CVPixelBufferCreate(kCFAllocatorDefault, CVPixelBufferGetWidth(buffer.get()), CVPixelBufferGetHeight(buffer.get()), kCVPixelFormatType_32BGRA, 0, &outputBuffer) != kCVReturnSuccess || !outputBuffer)
        return 0;
    VTPixelTransferSessionTransferImage(m_pixelTransferSession.get(), buffer.get(), outputBuffer);
    buffer = adoptCF(outputBuffer);
#endif

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createPixelBuffer(%p) - creating buffer took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return buffer;
}
1656
// True when a frame can be painted right now: either a frame was already
// captured (m_lastImage) or the video output has a fresh pixel buffer for the
// item's current time. Creates the video output on demand.
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;

    if (!m_lastImage) {
        if (!m_videoOutput)
            createVideoOutput();
        return [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]];
    }

    return true;
}
1670
// CGDataProviderDirectCallbacks trio used by createImageFromPixelBuffer().
// |info| is the CVPixelBufferRef that was CFRetain'd before the provider was
// created; the three callbacks lock/unlock its base address around reads and
// finally balance that retain when the provider is destroyed.

// Locks the buffer (read-only) and hands CG the raw pixel base address.
static const void* CVPixelBufferGetBytePointerCallback(void* info)
{
    CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
    return CVPixelBufferGetBaseAddress(pixelBuffer);
}

// Balances the lock taken in CVPixelBufferGetBytePointerCallback.
static void CVPixelBufferReleaseBytePointerCallback(void* info, const void*)
{
    CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
}

// Balances the CFRetain performed in createImageFromPixelBuffer().
static void CVPixelBufferReleaseInfoCallback(void* info)
{
    CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(info);
    CFRelease(pixelBuffer);
}
1689
// Wraps a 32BGRA CVPixelBuffer in a CGImage without copying pixel data: the
// image reads directly from the buffer through a direct-access data provider.
// The buffer is retained here and released by CVPixelBufferReleaseInfoCallback
// when the provider is torn down.
static RetainPtr<CGImageRef> createImageFromPixelBuffer(CVPixelBufferRef pixelBuffer)
{
    // pixelBuffer will be of type kCVPixelFormatType_32BGRA.
    ASSERT(CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_32BGRA);

    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
    size_t byteLength = CVPixelBufferGetDataSize(pixelBuffer);
    // Little-endian + alpha-first maps BGRA memory order onto CG's ARGB view.
    // NOTE(review): kCGImageAlphaFirst (non-premultiplied) — confirm the video
    // pipeline never produces premultiplied alpha here.
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaFirst;

    CFRetain(pixelBuffer); // Balanced by CVPixelBufferReleaseInfoCallback in providerCallbacks.
    CGDataProviderDirectCallbacks providerCallbacks = { 0, CVPixelBufferGetBytePointerCallback, CVPixelBufferReleaseBytePointerCallback, 0, CVPixelBufferReleaseInfoCallback };
    RetainPtr<CGDataProviderRef> provider = adoptCF(CGDataProviderCreateDirect(pixelBuffer, byteLength, &providerCallbacks));

    return adoptCF(CGImageCreate(width, height, 8, 32, bytesPerRow, deviceRGBColorSpaceRef(), bitmapInfo, provider.get(), NULL, false, kCGRenderingIntentDefault));
}
1707
// Refreshes m_lastImage from the newest available pixel buffer.
void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
{
    RetainPtr<CVPixelBufferRef> pixelBuffer = createPixelBuffer();

    // copyPixelBufferForItemTime:itemTimeForDisplay: returns nil once the buffer
    // for the requested time has been vended; keep displaying the previous
    // image (if any) in that case.
    if (!pixelBuffer)
        return;

    m_lastImage = createImageFromPixelBuffer(pixelBuffer.get());
}
1718
// Paints the most recent video frame from the video output into |context|,
// scaling the full image into |outputRect|.
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext* context, const IntRect& outputRect)
{
    updateLastImage();
    if (!m_lastImage)
        return;

    GraphicsContextStateSaver stateSaver(*context);

    IntRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    context->drawNativeImage(m_lastImage.get(), imageRect.size(), ColorSpaceDeviceRGB, outputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();
}
1736
// Returns the CGImage for the current playback time (refreshing it first), or
// null if no frame has ever been available. The returned pointer is borrowed
// from m_lastImage.
PassNativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage.get();
}
1742
// Blocks the calling thread (up to 1 second) until the video output signals
// that media data is available; outputMediaDataWillChange() signals the
// semaphore from the pull-delegate queue.
// NOTE(review): the lazily-created semaphore is never released here — confirm
// it is balanced in the destructor or intentionally lives as long as |this|.
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for 1 second.
    long result = dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(0, 1 * NSEC_PER_SEC));

    if (result)
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange(%p) timed out", this);
}
1756
// Delegate callback from the video output's pull queue; wakes up any thread
// parked in waitForVideoOutputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutput*)
{
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
1761 #endif
1762
1763 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// Parses the EME initData blob into its three fields. Expected layout:
// [4 bytes: keyURI length][N bytes: keyURI][4 bytes: contentID length],
// [N bytes: contentID], [4 bytes: certificate length][N bytes: certificate].
// All lengths are little-endian byte counts; keyURI and keyID are UTF-16.
// Returns false on any malformed/truncated input. initData is untrusted, so
// every length field is bounds-checked with overflow-safe comparisons.
bool MediaPlayerPrivateAVFoundationObjC::extractKeyURIKeyIDAndCertificateFromInitData(Uint8Array* initData, String& keyURI, String& keyID, RefPtr<Uint8Array>& certificate)
{
    if (initData->byteLength() < 4)
        return false;

    RefPtr<ArrayBuffer> initDataBuffer = initData->buffer();

    // Use a DataView to read uint32 values from the buffer, as Uint32Array requires the reads be aligned on 4-byte boundaries. 
    RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
    uint32_t offset = 0;
    bool status = true;

    uint32_t keyURILength = initDataView->get<uint32_t>(offset, true, &status);
    offset += 4;
    // After a successful 4-byte read, offset <= length, so the subtraction below
    // cannot wrap. Comparing "length > total - offset" (instead of the previous
    // "offset + length > total") prevents a huge attacker-supplied length from
    // overflowing uint32 and slipping past the bounds check.
    if (!status || keyURILength > initData->length() - offset)
        return false;

    RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, offset, keyURILength);
    if (!keyURIArray)
        return false;

    keyURI = String(keyURIArray->data(), keyURILength / sizeof(unsigned short));
    offset += keyURILength;

    uint32_t keyIDLength = initDataView->get<uint32_t>(offset, true, &status);
    offset += 4;
    if (!status || keyIDLength > initData->length() - offset)
        return false;

    RefPtr<Uint16Array> keyIDArray = Uint16Array::create(initDataBuffer, offset, keyIDLength);
    if (!keyIDArray)
        return false;

    keyID = String(keyIDArray->data(), keyIDLength / sizeof(unsigned short));
    offset += keyIDLength;

    uint32_t certificateLength = initDataView->get<uint32_t>(offset, true, &status);
    offset += 4;
    if (!status || certificateLength > initData->length() - offset)
        return false;

    certificate = Uint8Array::create(initDataBuffer, offset, certificateLength);
    if (!certificate)
        return false;

    return true;
}
1813 #endif
1814
1815 #if ENABLE(ENCRYPTED_MEDIA)
// EME v1: builds a streaming content key request from the initData (keyURI,
// contentID and application certificate), sends the resulting key-request
// bytes to the page via keyMessage(), and parks the pending
// AVAssetResourceLoadingRequest under a freshly minted session ID until
// addKey()/cancelKeyRequest() resolves it.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::generateKeyRequest(const String& keySystem, const unsigned char* initDataPtr, unsigned initDataLength)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    RefPtr<Uint8Array> initData = Uint8Array::create(initDataPtr, initDataLength);
    String keyURI;
    String keyID;
    RefPtr<Uint8Array> certificate;
    if (!extractKeyURIKeyIDAndCertificateFromInitData(initData.get(), keyURI, keyID, certificate))
        return MediaPlayer::InvalidPlayerState;

    // The keyURI must match a resource-loading request previously intercepted
    // by the asset resource loader delegate.
    if (!m_keyURIToRequestMap.contains(keyURI))
        return MediaPlayer::InvalidPlayerState;

    String sessionID = createCanonicalUUIDString();

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_keyURIToRequestMap.get(keyURI);

    RetainPtr<NSData> certificateData = adoptNS([[NSData alloc] initWithBytes:certificate->baseAddress() length:certificate->byteLength()]);
    NSString* assetStr = keyID;
    RetainPtr<NSData> assetID = [NSData dataWithBytes: [assetStr cStringUsingEncoding:NSUTF8StringEncoding] length:[assetStr lengthOfBytesUsingEncoding:NSUTF8StringEncoding]];
    NSError* error = 0;
    RetainPtr<NSData> keyRequest = [avRequest.get() streamingContentKeyRequestDataForApp:certificateData.get() contentIdentifier:assetID.get() options:nil error:&error];

    if (!keyRequest) {
        // Surface the underlying platform error code to the page.
        NSError* underlyingError = [[error userInfo] objectForKey:NSUnderlyingErrorKey];
        player()->keyError(keySystem, sessionID, MediaPlayerClient::DomainError, [underlyingError code]);
        return MediaPlayer::NoError;
    }

    RefPtr<ArrayBuffer> keyRequestBuffer = ArrayBuffer::create([keyRequest.get() bytes], [keyRequest.get() length]);
    RefPtr<Uint8Array> keyRequestArray = Uint8Array::create(keyRequestBuffer, 0, keyRequestBuffer->byteLength());
    player()->keyMessage(keySystem, sessionID, keyRequestArray->data(), keyRequestArray->byteLength(), URL());

    // Move ownership of the AVAssetResourceLoadingRequest from the keyURIToRequestMap to the sessionIDToRequestMap:
    m_sessionIDToRequestMap.set(sessionID, avRequest);
    m_keyURIToRequestMap.remove(keyURI);

    return MediaPlayer::NoError;
}
1857
// EME v1: delivers the key bytes to the AVAssetResourceLoadingRequest that was
// parked under |sessionID| by generateKeyRequest(), completing the loading
// request and notifying the page. initDataPtr/initDataLength are unused by
// this key system.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::addKey(const String& keySystem, const unsigned char* keyPtr, unsigned keyLength, const unsigned char* initDataPtr, unsigned initDataLength, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_sessionIDToRequestMap.get(sessionID);
    RetainPtr<NSData> keyData = adoptNS([[NSData alloc] initWithBytes:keyPtr length:keyLength]);
    [[avRequest.get() dataRequest] respondWithData:keyData.get()];
    [avRequest.get() finishLoading];
    // The request is fully satisfied; drop our reference to it.
    m_sessionIDToRequestMap.remove(sessionID);

    player()->keyAdded(keySystem, sessionID);

    UNUSED_PARAM(initDataPtr);
    UNUSED_PARAM(initDataLength);
    return MediaPlayer::NoError;
}
1878
// EME v1: abandons the outstanding key exchange for |sessionID| by dropping
// the parked resource-loading request.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::cancelKeyRequest(const String& keySystem, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    // HashMap::remove() reports whether the session actually existed.
    if (m_sessionIDToRequestMap.remove(sessionID))
        return MediaPlayer::NoError;

    return MediaPlayer::InvalidPlayerState;
}
1890 #endif
1891
1892 #if ENABLE(ENCRYPTED_MEDIA_V2)
// EME v2: transfers ownership of the pending resource-loading request for
// |keyURI| to the caller, removing it from the map.
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
1897
// EME v2: creates a CDM session for a supported key system, or null otherwise.
std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem)
{
    if (keySystemIsSupported(keySystem))
        return std::make_unique<CDMSessionAVFoundationObjC>(this);

    return nullptr;
}
1905 #endif
1906
1907 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
// Reconciles legacy (CEA-608 style) closed-caption tracks on platforms without
// legible-output support: every cached AVPlayerItemTrack of type
// AVMediaTypeClosedCaption is matched against the existing wrappers; unmatched
// platform tracks get new wrappers appended, and wrappers left in
// removedTextTracks are reported as gone via processNewAndRemovedTextTracks().
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    // Start by assuming every current text track was removed; tracks we can
    // still match below are rescued from this list.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            // Bug fix: index removedTextTracks (which |i| iterates), not
            // m_textTracks — after removedTextTracks.remove() below, or after
            // m_textTracks.append() in the outer loop, the two vectors no
            // longer line up and the old code compared the wrong track.
            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(removedTextTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;
        
        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
1942 #endif
1943
1944 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Returns the asset's legible (caption/subtitle) media selection group, or nil
// when the asset is missing or its selection-option metadata has not finished
// loading yet — querying the group before the key is loaded would block.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    if (!m_avAsset)
        return nil;
    
    if ([m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] != AVKeyValueStatusLoaded)
        return nil;
    
    return [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible];
}
1955
// Reconciles the asset's legible media-selection options with our text-track
// wrappers: options already wrapped are rescued from removedTextTracks, brand
// new options get wrappers appended (out-of-band options are marked as already
// reported to avoid double counting), and whatever remains in
// removedTextTracks is reported as removed.
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Assume all wrappers are stale; matched ones are removed from this list.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        // Iterate backwards so remove() does not disturb unvisited indices.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy CC tracks are handled elsewhere, never by selection options.
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2011
// Forwards an array of attributed cue strings (at |time|) to the currently
// selected text track; dropped when no track is selected.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, double time)
{
    if (m_currentTrack)
        m_currentTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), time);
}
2019
// Clears any pending cue state on the currently selected text track.
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::flushCues(%p)", this);

    if (m_currentTrack)
        m_currentTrack->resetCueValues();
}
2029 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2030
// Selects |track| as the active in-band text track, routing the selection to
// the right mechanism for its category: legacy closed captions toggle the
// player's CC display, while in-band/out-of-band tracks select the matching
// media-selection option in the legible group. Passing null deselects all.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTrack == track)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setCurrentTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");
        
    m_currentTrack = track;

    if (track) {
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
        // Deselect: clear both the legible selection and legacy CC display.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
    }

}
2059
// Returns the BCP-47-ish language of the primary audio track, caching the
// result in m_languageOfPrimaryAudioTrack (presumably a mutable member, since
// this method is const — TODO confirm). Resolution order: cached value; the
// currently selected audible media-selection option; else the language of a
// sole ungrouped audio track; else the empty string.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        m_languageOfPrimaryAudioTrack = emptyString();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - %lu audio tracks, returning emptyString()", this, static_cast<unsigned long>(tracks ? [tracks count] : 0));
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

#if !LOG_DISABLED
    if (m_languageOfPrimaryAudioTrack == emptyString())
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
    else
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
#endif

    return m_languageOfPrimaryAudioTrack;
}
2101
2102 #if ENABLE(IOS_AIRPLAY)
// True when the AVPlayer is routing video to an external (AirPlay/TV-out)
// playback target.
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    if (!m_avPlayer)
        return false;

    bool isWireless = [m_avPlayer.get() isExternalPlaybackActive];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless(%p) - returning %s", this, boolString(isWireless));
    return isWireless;
}
2112
// Maps the WebKitSystemInterface external-device type of the current AVPlayer
// onto MediaPlayer's wireless target enum. TargetTypeNone when no player
// exists. The switch is exhaustive over the wk enum; falling out of it is a
// programming error.
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

    switch (wkExernalDeviceTypeForPlayer(m_avPlayer.get())) {
    case wkExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case wkExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case wkExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;
}
2130
// Returns the display name of the external playback device (e.g. the Apple TV
// name), or the empty string when there is no player.
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();

    String targetName = wkExernalDeviceDisplayNameForPlayer(m_avPlayer.get());
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName(%p) - returning %s", this, targetName.utf8().data());

    return targetName;
}
2141
// Returns whether wireless (external) video playback is disabled. The cached
// m_allowsWirelessVideoPlayback stores the positive sense, so the result is
// its negation; with a live AVPlayer the cache is refreshed first.
// NOTE(review): mutation of m_allowsWirelessVideoPlayback inside a const
// method implies it is a mutable member — confirm in the header.
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;
    
    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled(%p) - returning %s", this, boolString(!m_allowsWirelessVideoPlayback));

    return !m_allowsWirelessVideoPlayback;
}
2152
// Records the (positive-sense) wireless playback allowance and, when a player
// exists, pushes it through to AVPlayer's allowsExternalPlayback.
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(%p) - %s", this, boolString(disabled));
    m_allowsWirelessVideoPlayback = !disabled;
    if (m_avPlayer)
        [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
}
2162 #endif
2163
// --- KVO change handlers for AVPlayerItem status-related properties. ---
// Each "WillChange" increments m_pendingStatusChanges and its paired
// "DidChange" decrements it; updateStates() only runs once the last pending
// change of a batch has landed, so a burst of related KVO notifications is
// coalesced into a single state update.

// New AVPlayerItem.status value arrived; recompute readiness/network state.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;

    updateStates();
}

void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    m_pendingStatusChanges++;
}

void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}

void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    m_pendingStatusChanges++;
}

void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}

void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    m_pendingStatusChanges++;
}

void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2212
// --- KVO change handlers that cache the new value and fan out. ---

// Seekable ranges changed: cache, notify base class, refresh states.
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray> seekableRanges)
{
    m_cachedSeekableRanges = seekableRanges;

    seekableTimeRangesChanged();
    updateStates();
}

// Buffered (loaded) ranges changed: cache, notify base class, refresh states.
void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray> loadedRanges)
{
    m_cachedLoadedRanges = loadedRanges;

    loadedTimeRangesChanged();
    updateStates();
}

// Timed metadata arrived; NSNull is what KVO delivers for "no metadata", so
// both nil and NSNull leave the previous metadata untouched.
void MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(RetainPtr<NSArray> metadata)
{
    if (!metadata || [metadata isKindOfClass:[NSNull class]])
        return;

    m_currentMetaData = metadata;
}

// The item's track list changed: cache it, then re-derive audio/video/text
// track state and player readiness.
void MediaPlayerPrivateAVFoundationObjC::tracksDidChange(RetainPtr<NSArray> tracks)
{
    m_cachedTracks = tracks;

    tracksChanged();
    updateStates();
}
2244
// Main-thread handler for the "hasEnabledAudio" KVO change. Routed through
// tracksChanged() because audio enablement is a property of the track set.
void MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange(bool hasEnabledAudio)
{
    m_cachedHasEnabledAudio = hasEnabledAudio;

    tracksChanged();
    updateStates();
}
2252
// Main-thread handler for the AVPlayerItem "presentationSize" KVO change:
// cache the new size, then notify and re-evaluate the player state.
void MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange(FloatSize size)
{
    m_cachedPresentationSize = size;

    sizeChanged();
    updateStates();
}
2260
// Main-thread handler for the AVPlayerItem "duration" KVO change: cache the
// new value (seconds), then invalidate the previously cached duration so it
// is re-fetched — presumably by the MediaPlayerPrivateAVFoundation base class.
void MediaPlayerPrivateAVFoundationObjC::durationDidChange(double duration)
{
    m_cachedDuration = duration;

    invalidateCachedDuration();
}
2267
// Main-thread handler for the AVPlayer "rate" KVO change. Note the order:
// the state machine is updated before rateChanged() fires.
void MediaPlayerPrivateAVFoundationObjC::rateDidChange(double rate)
{
    m_cachedRate = rate;

    updateStates();
    rateChanged();
}
2275     
2276 #if ENABLE(IOS_AIRPLAY)
// Main-thread handler for the AVPlayer "externalPlaybackActive" KVO change
// (AirPlay); simply forwards to the generic change notification.
void MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange()
{
    playbackTargetIsWirelessChanged();
}
2281 #endif
2282
// Returns the AVAsset property keys whose values must be loaded before the
// asset can be inspected. The array is built exactly once and intentionally
// never released (it lives for the lifetime of the process).
//
// Fix: the previous check-then-assign lazy initialization was not
// thread-safe; dispatch_once guarantees a single, race-free construction.
NSArray* assetMetadataKeyNames()
{
    static NSArray* keys;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        // Built with alloc/init (not an autoreleased literal) so the static
        // holds a +1 reference under manual reference counting.
        keys = [[NSArray alloc] initWithObjects:@"duration",
                    @"naturalSize",
                    @"preferredTransform",
                    @"preferredVolume",
                    @"preferredRate",
                    @"playable",
                    @"tracks",
                    @"availableMediaCharacteristicsWithMediaSelectionOptions",
                   nil];
    });
    return keys;
}
2299
// Returns the AVPlayerItem key paths this player observes via KVO. The array
// is built exactly once and intentionally never released.
//
// Fix: the previous check-then-assign lazy initialization was not
// thread-safe; dispatch_once guarantees a single, race-free construction.
NSArray* itemKVOProperties()
{
    static NSArray* keys;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        // Built with alloc/init (not an autoreleased literal) so the static
        // holds a +1 reference under manual reference counting.
        keys = [[NSArray alloc] initWithObjects:@"presentationSize",
                @"status",
                @"asset",
                @"tracks",
                @"seekableTimeRanges",
                @"loadedTimeRanges",
                @"playbackLikelyToKeepUp",
                @"playbackBufferFull",
                @"playbackBufferEmpty",
                @"duration",
                @"hasEnabledAudio",
                @"timedMetadata",
                nil];
    });
    return keys;
}
2320
2321 } // namespace WebCore
2322
@implementation WebCoreAVFMovieObserver

// The observer keeps a raw back-pointer to the player-private object; the
// owner must call -disconnect before that object is destroyed.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

// Severs the link to the player and cancels any pending performSelector
// requests targeting this observer so no stale callback fires afterwards.
- (void)disconnect
{
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    m_callback = 0;
}

// Invoked when asynchronous loading of the asset's metadata keys completes;
// bounces an AssetMetadataLoaded notification to the main thread.
- (void)metadataLoaded
{
    if (!m_callback)
        return;

    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
}

// End-of-playback notification handler (registration site is outside this
// view — presumably AVPlayerItemDidPlayToEndTimeNotification).
- (void)didEnd:(NSNotification *)unusedNotification
{
    UNUSED_PARAM(unusedNotification);
    if (!m_callback)
        return;
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
}

// Central KVO dispatch for both the AVPlayerItem and the AVPlayer (told apart
// via the observation context). Binds the appropriate player-private member
// function to the new value, then schedules it on the main thread guarded by
// a WeakPtr so a destroyed player is never called into.
- (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context
{
    UNUSED_PARAM(object);
    id newValue = [change valueForKey:NSKeyValueChangeNewKey];

    LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - keyPath = %s", self, [keyPath UTF8String]);

    if (!m_callback)
        return;

    // Observations registered with NSKeyValueObservingOptionPrior deliver an
    // extra notification, flagged with this key, before the value changes.
    bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];

    WTF::Function<void ()> function;

    // "Prior" notifications for the buffering-status keys increment the
    // player's pending-status-change counter via the ...WillChange methods.
    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && willChange) {
        if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange, m_callback);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && !willChange) {
        // A value changed for an AVPlayerItem
        if ([keyPath isEqualToString:@"status"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange, m_callback, [newValue intValue]);
        else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"asset"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::setAsset, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"loadedTimeRanges"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"seekableTimeRanges"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"tracks"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::tracksDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"hasEnabledAudio"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"presentationSize"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange, m_callback, FloatSize([newValue sizeValue]));
        else if ([keyPath isEqualToString:@"duration"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::durationDidChange, m_callback, CMTimeGetSeconds([newValue CMTimeValue]));
        else if ([keyPath isEqualToString:@"timedMetadata"] && newValue)
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::metadataDidArrive, m_callback, RetainPtr<NSArray>(newValue));
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayer && !willChange) {
        // A value changed for an AVPlayer.
        if ([keyPath isEqualToString:@"rate"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::rateDidChange, m_callback, [newValue doubleValue]);
#if ENABLE(IOS_AIRPLAY)
        else if ([keyPath isEqualToString:@"externalPlaybackActive"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange, m_callback);
#endif
    }

    // Unrecognized key path (or one filtered out above): nothing to do.
    if (function.isNull())
        return;

    auto weakThis = m_callback->createWeakPtr();
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis, function]{
        // weakThis and function both refer to the same MediaPlayerPrivateAVFoundationObjC instance. If the WeakPtr has
        // been cleared, the underlying object has been destroyed, and it is unsafe to call function().
        if (!weakThis)
            return;
        function();
    }));
}

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// AVPlayerItemLegibleOutput delegate: new attributed caption strings are
// available for the given item time. Hops to the main thread before touching
// the player.
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime
{
    UNUSED_PARAM(output);
    UNUSED_PARAM(nativeSamples);

    if (!m_callback)
        return;

    // Retain self and the strings so both survive until the main-thread
    // lambda runs; m_callback is re-checked there in case of disconnect.
    RetainPtr<WebCoreAVFMovieObserver> strongSelf = self;
    RetainPtr<NSArray> strongStrings = strings;
    callOnMainThread([strongSelf, strongStrings, itemTime] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (!callback)
            return;
        callback->processCue(strongStrings.get(), CMTimeGetSeconds(itemTime));
    });
}

// Legible output was flushed (e.g. after a seek); drop any queued cues.
- (void)outputSequenceWasFlushed:(id)output
{
    UNUSED_PARAM(output);

    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFMovieObserver> strongSelf = self;
    callOnMainThread([strongSelf] {
        if (MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback)
            callback->flushCues();
    });
}
#endif

@end
2465
2466 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
@implementation WebCoreAVFLoaderDelegate

// Raw back-pointer to the player; cleared via -setCallback: when the player
// goes away.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

// AVAssetResourceLoaderDelegate. Called on AVFoundation's loading thread;
// the decision is deferred to the player on the main thread, so YES is
// returned to keep the request pending until that decision is made.
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    // Retain self and the request so both survive until the lambda runs.
    RetainPtr<WebCoreAVFLoaderDelegate> strongSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> strongRequest = loadingRequest;
    callOnMainThread([strongSelf, strongRequest] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (!callback) {
            // Player is gone: fail the request so AVFoundation does not stall.
            [strongRequest finishLoadingWithError:nil];
            return;
        }

        // If the player declines to service the request, fail it immediately.
        if (!callback->shouldWaitForLoadingOfResource(strongRequest.get()))
            [strongRequest finishLoadingWithError:nil];
    });

    return YES;
}

// AVAssetResourceLoaderDelegate. Server-trust challenges are declined
// outright; other challenges are forwarded to the player on the main thread.
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForResponseToAuthenticationChallenge:(NSURLAuthenticationChallenge *)challenge
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    if ([[[challenge protectionSpace] authenticationMethod] isEqualToString:NSURLAuthenticationMethodServerTrust])
        return NO;

    RetainPtr<WebCoreAVFLoaderDelegate> strongSelf = self;
    RetainPtr<NSURLAuthenticationChallenge> strongChallenge = challenge;
    callOnMainThread([strongSelf, strongChallenge] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (!callback) {
            // Player is gone: cancel rather than leave the challenge hanging.
            [[strongChallenge sender] cancelAuthenticationChallenge:strongChallenge.get()];
            return;
        }

        if (!callback->shouldWaitForResponseToAuthenticationChallenge(strongChallenge.get()))
            [[strongChallenge sender] cancelAuthenticationChallenge:strongChallenge.get()];
    });

    return YES;
}

// AVAssetResourceLoaderDelegate: AVFoundation no longer needs the resource;
// tell the player on the main thread so it can tear down the in-flight load.
- (void)resourceLoader:(AVAssetResourceLoader *)resourceLoader didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFLoaderDelegate> strongSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> strongRequest = loadingRequest;
    callOnMainThread([strongSelf, strongRequest] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (callback)
            callback->didCancelLoadingRequest(strongRequest.get());
    });
}

// Re-points (or clears, with null) the back-pointer to the player.
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    m_callback = callback;
}
@end
2545 #endif
2546
2547 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
@implementation WebCoreAVFPullDelegate

// Raw back-pointer to the player; cleared via -setCallback: on teardown.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

// Re-points (or clears, with null) the back-pointer to the player.
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    m_callback = callback;
}

// AVPlayerItemOutputPullDelegate: new video frames will become available;
// forward to the player if it is still alive.
- (void)outputMediaDataWillChange:(AVPlayerItemVideoOutput *)output
{
    if (!m_callback)
        return;
    m_callback->outputMediaDataWillChange(output);
}

// AVPlayerItemOutputPullDelegate: intentionally ignored.
- (void)outputSequenceWasFlushed:(AVPlayerItemVideoOutput *)output
{
    UNUSED_PARAM(output);
}
@end
2574 #endif
2575
2576 #endif