a02b2f758db3ba025695b49f035f6a72346634cd
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2014 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27
28 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
29
30 #import "MediaPlayerPrivateAVFoundationObjC.h"
31
32 #import "AVTrackPrivateAVFObjCImpl.h"
33 #import "AudioTrackPrivateAVFObjC.h"
34 #import "AuthenticationChallenge.h"
35 #import "BlockExceptions.h"
36 #import "CDMSessionAVFoundationObjC.h"
37 #import "ExceptionCodePlaceholder.h"
38 #import "FloatConversion.h"
39 #import "FloatConversion.h"
40 #import "FrameView.h"
41 #import "GraphicsContext.h"
42 #import "GraphicsContextCG.h"
43 #import "InbandTextTrackPrivateAVFObjC.h"
44 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
45 #import "OutOfBandTextTrackPrivateAVF.h"
46 #import "URL.h"
47 #import "Logging.h"
48 #import "PlatformTimeRanges.h"
49 #import "SecurityOrigin.h"
50 #import "SoftLinking.h"
51 #import "UUID.h"
52 #import "VideoTrackPrivateAVFObjC.h"
53 #import "WebCoreAVFResourceLoader.h"
54 #import "WebCoreSystemInterface.h"
55 #import <objc/runtime.h>
56 #import <runtime/DataView.h>
57 #import <runtime/JSCInlines.h>
58 #import <runtime/TypedArrayInlines.h>
59 #import <runtime/Uint16Array.h>
60 #import <runtime/Uint32Array.h>
61 #import <runtime/Uint8Array.h>
62 #import <wtf/CurrentTime.h>
63 #import <wtf/Functional.h>
64 #import <wtf/text/CString.h>
65 #import <wtf/text/StringBuilder.h>
66
67 #if ENABLE(AVF_CAPTIONS)
68 #include "TextTrack.h"
69 #endif
70
71 #import <AVFoundation/AVFoundation.h>
72 #if PLATFORM(IOS)
73 #import <CoreImage/CoreImage.h>
74 #else
75 #import <QuartzCore/CoreImage.h>
76 #endif
77 #import <CoreMedia/CoreMedia.h>
78
79 #if USE(VIDEOTOOLBOX)
80 #import <CoreVideo/CoreVideo.h>
81 #import <VideoToolbox/VideoToolbox.h>
82 #endif
83
84 #if ENABLE(AVF_CAPTIONS)
85 // Note: This must be defined before our SOFT_LINK macros:
86 @class AVMediaSelectionOption;
87 @interface AVMediaSelectionOption (OutOfBandExtensions)
88 @property (nonatomic, readonly) NSString* outOfBandSource;
89 @property (nonatomic, readonly) NSString* outOfBandIdentifier;
90 @end
91 #endif
92
93 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
94 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreMedia)
95 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
96 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreVideo)
97
98 #if USE(VIDEOTOOLBOX)
99 SOFT_LINK_FRAMEWORK_OPTIONAL(VideoToolbox)
100 #endif
101
102 SOFT_LINK(CoreMedia, CMTimeCompare, int32_t, (CMTime time1, CMTime time2), (time1, time2))
103 SOFT_LINK(CoreMedia, CMTimeMakeWithSeconds, CMTime, (Float64 seconds, int32_t preferredTimeScale), (seconds, preferredTimeScale))
104 SOFT_LINK(CoreMedia, CMTimeGetSeconds, Float64, (CMTime time), (time))
105 SOFT_LINK(CoreMedia, CMTimeRangeGetEnd, CMTime, (CMTimeRange range), (range))
106
107 SOFT_LINK(CoreVideo, CVPixelBufferGetWidth, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
108 SOFT_LINK(CoreVideo, CVPixelBufferGetHeight, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
109 SOFT_LINK(CoreVideo, CVPixelBufferGetBaseAddress, void*, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
110 SOFT_LINK(CoreVideo, CVPixelBufferGetBytesPerRow, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
111 SOFT_LINK(CoreVideo, CVPixelBufferGetDataSize, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
112 SOFT_LINK(CoreVideo, CVPixelBufferGetPixelFormatType, OSType, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
113 SOFT_LINK(CoreVideo, CVPixelBufferLockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
114 SOFT_LINK(CoreVideo, CVPixelBufferUnlockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
115
116 #if USE(VIDEOTOOLBOX)
117 SOFT_LINK(VideoToolbox, VTPixelTransferSessionCreate, OSStatus, (CFAllocatorRef allocator, VTPixelTransferSessionRef *pixelTransferSessionOut), (allocator, pixelTransferSessionOut))
118 SOFT_LINK(VideoToolbox, VTPixelTransferSessionTransferImage, OSStatus, (VTPixelTransferSessionRef session, CVPixelBufferRef sourceBuffer, CVPixelBufferRef destinationBuffer), (session, sourceBuffer, destinationBuffer))
119 #endif
120
121 SOFT_LINK_CLASS(AVFoundation, AVPlayer)
122 SOFT_LINK_CLASS(AVFoundation, AVPlayerItem)
123 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemVideoOutput)
124 SOFT_LINK_CLASS(AVFoundation, AVPlayerLayer)
125 SOFT_LINK_CLASS(AVFoundation, AVURLAsset)
126 SOFT_LINK_CLASS(AVFoundation, AVAssetImageGenerator)
127 SOFT_LINK_CLASS(CoreImage, CIContext)
128 SOFT_LINK_CLASS(CoreImage, CIImage)
129
130 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
131 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
132 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
133 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
134 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
135 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
136 SOFT_LINK_POINTER(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
137 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
138 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
139 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
140 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
141 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
142 SOFT_LINK_POINTER(CoreVideo, kCVPixelBufferPixelFormatTypeKey, NSString *)
143
144 SOFT_LINK_CONSTANT(CoreMedia, kCMTimeZero, CMTime)
145
146 #define AVPlayer getAVPlayerClass()
147 #define AVPlayerItem getAVPlayerItemClass()
148 #define AVPlayerLayer getAVPlayerLayerClass()
149 #define AVURLAsset getAVURLAssetClass()
150 #define AVAssetImageGenerator getAVAssetImageGeneratorClass()
151
152 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
153 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
154 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
155 #define AVMediaTypeVideo getAVMediaTypeVideo()
156 #define AVMediaTypeAudio getAVMediaTypeAudio()
157 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
158 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
159 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
160 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
161 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
162 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
163 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
164 #define kCVPixelBufferPixelFormatTypeKey getkCVPixelBufferPixelFormatTypeKey()
165
166 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
167 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
168 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
169
170 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
171 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
172 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
173
174 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
175 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
176 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
177 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
178
179 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
180 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
181 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
182 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
183 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
184 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
185 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
186 #endif
187
188 #if ENABLE(AVF_CAPTIONS)
189 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
190 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
191 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
192 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
193 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
194 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
195 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
196 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
197 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
198
199 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
200 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
201 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
202 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
203 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
204 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
205 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
206 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
207 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
208 #endif
209
210 #define kCMTimeZero getkCMTimeZero()
211
212 using namespace WebCore;
213
// KVO context values used to distinguish observations registered on the
// AVPlayerItem from those registered on the AVPlayer itself (see
// createAVPlayerItem() / createAVPlayer()).
enum MediaPlayerAVFoundationObservationContext {
    MediaPlayerAVFoundationObservationContextPlayerItem,
    MediaPlayerAVFoundationObservationContextPlayer
};
218
// Objective-C helper that receives KVO change notifications, the
// did-play-to-end notification, and (where supported) legible-output caption
// callbacks on behalf of a MediaPlayerPrivateAVFoundationObjC instance.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
#else
@interface WebCoreAVFMovieObserver : NSObject
#endif
{
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Back-pointer to the owner; cleared by -disconnect.
    int m_delayCallbacks;
}
-(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
-(void)disconnect;
-(void)metadataLoaded;
-(void)didEnd:(NSNotification *)notification;
// NOTE(review): keyPath has no explicit type (defaults to id) and context is
// declared with a custom enum rather than the standard void* — confirm this
// matches the method's implementation elsewhere in this file.
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;
#endif
@end
238
239 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Bridges AVAssetResourceLoader delegate callbacks (delivered on
// globalLoaderDelegateQueue()) back to the owning player.
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Back-pointer; reset via -setCallback:0 in the destructor.
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
@end
247 #endif
248
249 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Receives AVPlayerItemVideoOutput pull notifications (delivered on
// globalPullDelegateQueue()) for the owning player.
@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    MediaPlayerPrivateAVFoundationObjC *m_callback; // Back-pointer; reset via -setCallback:0 in the destructor.
    dispatch_semaphore_t m_semaphore;
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
@end
259 #endif
260
261 namespace WebCore {
262
263 static NSArray *assetMetadataKeyNames();
264 static NSArray *itemKVOProperties();
265
266 #if !LOG_DISABLED
// Renders a bool as a C string for LOG() messages.
static const char *boolString(bool val)
{
    if (val)
        return "true";
    return "false";
}
271 #endif
272
273 #if ENABLE(ENCRYPTED_MEDIA_V2)
typedef HashMap<MediaPlayer*, MediaPlayerPrivateAVFoundationObjC*> PlayerToPrivateMapType;
// Global map from a MediaPlayer to its private implementation, populated in
// the constructor and cleared in the destructor, so encrypted-media sessions
// can locate the private player.
static PlayerToPrivateMapType& playerToPrivateMap()
{
    DEPRECATED_DEFINE_STATIC_LOCAL(PlayerToPrivateMapType, map, ());
    return map;
} // Dropped the stray trailing ';' after the function body.
280 #endif
281
282 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Returns the process-wide serial queue used for all AVAssetResourceLoader
// delegate callbacks. Created once and intentionally never released.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_once_t createQueueOnce;
    static dispatch_queue_t loaderQueue;
    dispatch_once(&createQueueOnce, ^{
        loaderQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return loaderQueue;
}
292 #endif
293
294 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Returns the process-wide serial queue on which AVPlayerItemVideoOutput pull
// notifications are delivered. Lazily created exactly once.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_queue_t pullQueue;
    static dispatch_once_t pullQueueOnce;
    dispatch_once(&pullQueueOnce, ^{
        pullQueue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return pullQueue;
}
304 #endif
305
// Factory registered with the media engine; hands ownership to the caller.
PassOwnPtr<MediaPlayerPrivateInterface> MediaPlayerPrivateAVFoundationObjC::create(MediaPlayer* player)
{
    // The opening brace was missing here, leaving the function body outside
    // any definition — a syntax error.
    return adoptPtr(new MediaPlayerPrivateAVFoundationObjC(player));
}
310
// Registers this backend with the media engine, but only if the AVFoundation
// frameworks can actually be soft-linked at runtime.
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    registrar(create, getSupportedTypes, supportsType, 0, 0, 0, supportsKeySystem);
}
316
// The Objective-C observer/delegate helpers are created eagerly so they exist
// before any AVFoundation object is configured to call back into this player.
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_weakPtrFactory(this)
#if PLATFORM(IOS)
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
#endif
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
    , m_videoOutputSemaphore(nullptr)
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
#endif
    , m_currentTrack(0)
    , m_cachedDuration(MediaPlayer::invalidTime())
    , m_cachedRate(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
#if ENABLE(IOS_AIRPLAY)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    // Register in the global map so CDM sessions can locate this private player.
    playerToPrivateMap().set(player, this);
#endif
}
350
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    playerToPrivateMap().remove(player());
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Null out the delegate back-pointers first so no late callback from a
    // background queue can reach this object while it is being destroyed.
    [m_loaderDelegate.get() setCallback:0];
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    [m_videoOutputDelegate setCallback:0];
    [m_videoOutput setDelegate:nil queue:0];
    // File is compiled without ARC-managed dispatch objects; release manually.
    if (m_videoOutputSemaphore)
        dispatch_release(m_videoOutputSemaphore);
#endif
    // cancelLoad() tears down observers, the player item, and the player.
    cancelLoad();
}
368
// Tears down all AVFoundation state: rendering, observers, the asset, the
// player item, and the player. Ordering matters — every KVO observation must
// be removed before the observed object is released.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::cancelLoad(%p)", this);
    tearDownVideoRendering();

    // Stop all notification-center and observer callbacks from reaching us.
    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Detach the legible (caption) output from the item before dropping it.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    // Remove every key-path observation added in createAVPlayerItem().
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];

        m_avPlayerItem = nil;
    }
    // Mirror the time observer and KVO registrations added in createAVPlayer().
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;
        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"rate"];
#if ENABLE(IOS_AIRPLAY)
        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"externalPlaybackActive"];
#endif
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedTracks = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = 0;

    setIgnoreLoadStateChanges(false);
}
423
// Layer-backed rendering is available once the AVPlayerLayer has been created.
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return !!m_videoLayer;
}
428
// Context (paint-based) rendering is available through either the video
// output (when supported) or the image generator.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    return m_videoOutput || m_imageGenerator;
#else
    return m_imageGenerator;
#endif
}
437
// Sets up the paint path: prefer AVPlayerItemVideoOutput when the SDK supports
// it, otherwise fall back to AVAssetImageGenerator-based snapshots.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
446
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p)", this);

    // Needs a loaded asset; never recreate an existing generator.
    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    // Honor the track transform and clean aperture, and request zero time
    // tolerance so a generated image corresponds exactly to the asked-for time.
    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
}
463
// Inverse of createContextVideoRenderer(): tears down whichever paint path
// exists. Both calls are safe no-ops when nothing was created.
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
#endif
    destroyImageGenerator();
}
471
// Releases the AVAssetImageGenerator, if one was ever created.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (!m_imageGenerator)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator(%p) - destroying  %p", this, m_imageGenerator.get());
    m_imageGenerator = nullptr;
}
481
// Creates the AVPlayerLayer used for layer-backed rendering. The actual layer
// creation is deferred to the main thread.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_videoLayer)
        return;

    // Use a weak pointer so the deferred block is a no-op if this player is
    // destroyed before the main-thread hop completes.
    auto weakThis = createWeakPtr();
    callOnMainThread([this, weakThis] {
        if (!weakThis)
            return;

        // Re-check: state may have changed while the block was queued.
        if (!m_avPlayer || m_videoLayer)
            return;

        m_videoLayer = adoptNS([[AVPlayerLayer alloc] init]);
        [m_videoLayer.get() setPlayer:m_avPlayer.get()];
        [m_videoLayer.get() setBackgroundColor:cachedCGColor(Color::black, ColorSpaceDeviceRGB)];
#ifndef NDEBUG
        [m_videoLayer.get() setName:@"Video layer"];
#endif
        updateVideoLayerGravity();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoLayer(%p) - returning %p", this, m_videoLayer.get());

#if PLATFORM(IOS)
        // If fullscreen was entered before the layer existed, parent it now.
        if (m_videoFullscreenLayer) {
            [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
            [m_videoFullscreenLayer addSublayer:m_videoLayer.get()];
        }
#endif
        player()->mediaPlayerClient()->mediaPlayerRenderingModeChanged(player());
    });
}
513
// Detaches and releases the AVPlayerLayer, if one exists.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer(%p) - destroying %p", this, m_videoLayer.get());

    // Disconnect the layer from the player before dropping our reference.
    [m_videoLayer.get() setPlayer:nil];
    m_videoLayer = nullptr;
}
525
// When rendering through the layer, defer to AVPlayerLayer's own readiness;
// otherwise report whether painting has ever produced a frame.
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    if (currentRenderingMode() != MediaRenderingToLayer)
        return m_videoFrameHasDrawn;

    return m_videoLayer && [m_videoLayer.get() isReadyForDisplay];
}
533
534 #if ENABLE(AVF_CAPTIONS)
// Maps a platform text-track kind to the AVFoundation media-characteristic
// array used when registering out-of-band tracks. Unknown kinds get the same
// characteristics as captions/subtitles.
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    // FIXME: Match these to correct types:
    switch (kind) {
    case PlatformTextTrack::Caption:
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];
    case PlatformTextTrack::Subtitle:
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];
    case PlatformTextTrack::Description:
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, nil];
    case PlatformTextTrack::Forced:
        return [NSArray arrayWithObjects: AVMediaCharacteristicContainsOnlyForcedSubtitles, nil];
    default:
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];
    }
}
552     
// Forwards a platform text-track mode change to the base class so caption
// selection can be resynchronized.
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
557     
// Pushes each out-of-band track source's mode (hidden/disabled/showing) onto
// the matching OutOfBandTextTrackPrivateAVF, matching tracks by the unique id
// stashed in the media selection option's out-of-band identifier.
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();

    for (auto& textTrack : m_textTracks) {
        // Only out-of-band tracks are driven from this path.
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;

        RefPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> currentOption = trackPrivate->mediaSelectionOption();

        for (auto& track : outOfBandTrackSources) {
            // The identifier was registered as a stringified unique id in
            // createAVAssetForURL(); compare against the same representation.
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();

            if (![[currentOption.get() outOfBandIdentifier] isEqual: reinterpret_cast<const NSString*>(uniqueID.get())])
                continue;

            // Default to Hidden when the source mode is unrecognized.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;

            textTrack->setMode(mode);
            break;
        }
    }
}
588 #endif
589
// Builds the AVURLAsset for |url|, configuring reference restrictions, HTTP
// header fields, out-of-band caption tracks, and the resource loader delegate.
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
{
    if (m_avAsset)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(%p)", this);

    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);    

    // Disallow cross-origin local<->remote references inside the media file.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject: [NSNumber numberWithBool: TRUE] forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

#if ENABLE(AVF_CAPTIONS)
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        // Use adoptNS so the array is released when this scope exits; a bare
        // +alloc/-init with no matching -release leaked it here before.
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            // The unique id doubles as the AV identifier so the option can be
            // matched back to its source in synchronizeTextTrackState().
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:
                [NSDictionary dictionaryWithObjectsAndKeys:
                    reinterpret_cast<const NSString*>(label.get()), AVOutOfBandAlternateTrackDisplayNameKey,
                    reinterpret_cast<const NSString*>(language.get()), AVOutOfBandAlternateTrackExtendedLanguageTagKey,
                    [NSNumber numberWithBool: (trackSource->isDefault() ? YES : NO)], AVOutOfBandAlternateTrackIsDefaultKey,
                    reinterpret_cast<const NSString*>(uniqueID.get()), AVOutOfBandAlternateTrackIdentifierKey,
                    reinterpret_cast<const NSString*>(url.get()), AVOutOfBandAlternateTrackSourceKey,
                    mediaDescriptionForKind(trackSource->kind()), AVOutOfBandAlternateTrackMediaCharactersticsKey,
                    nil]];
        }

        [options.get() setObject: outOfBandTracks.get() forKey: AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif
    
    NSURL *cocoaURL = URL(ParsedURLString, url);
    m_avAsset = adoptNS([[AVURLAsset alloc] initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    [[m_avAsset.get() resourceLoader] setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];
#endif

    // A new asset has not been checked for playability yet.
    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
654
// Creates the AVPlayer and wires up KVO; the observations added here are
// removed in cancelLoad().
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayer(%p)", this);

    setDelayCallbacks(true);

    m_avPlayer = adoptNS([[AVPlayer alloc] init]);
    [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"rate" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
#if ENABLE(IOS_AIRPLAY)
    [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"externalPlaybackActive" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:YES];
#endif

#if ENABLE(IOS_AIRPLAY)
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

    // If the player item was created first, attach it to the new player now.
    if (m_avPlayerItem)
        [m_avPlayer.get() replaceCurrentItemWithPlayerItem:m_avPlayerItem.get()];

    setDelayCallbacks(false);
}
683
// Creates the AVPlayerItem from the asset, registers for the end-of-playback
// notification and per-property KVO, and (where supported) attaches the
// legible output used for in-band captions.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem(%p)", this);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([[AVPlayerItem alloc] initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // Every key path in itemKVOProperties() is observed; cancelLoad() removes
    // them with the same list.
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    if (m_avPlayer)
        [m_avPlayer.get() replaceCurrentItemWithPlayerItem:m_avPlayerItem.get()];

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Deliver attributed caption strings up to 2 seconds ahead of playback.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    m_legibleOutput = adoptNS([[AVPlayerItemLegibleOutput alloc] initWithMediaSubtypesForNativeRepresentation:[NSArray array]]);
    // WebKit renders captions itself; keep AVFoundation from drawing them too.
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

    setDelayCallbacks(false);
}
719
// Issues the asynchronous "playable" query on the asset, exactly once.
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::checkPlayability(%p)", this);
    auto weakThis = createWeakPtr();

    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"playable"] completionHandler:^{
        // The completion handler may run on an arbitrary queue; bounce to the
        // main thread and bail out if the player was destroyed in the meantime.
        callOnMainThread([weakThis] {
            if (!weakThis)
                return;
            weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
736
// Kicks off asynchronous loading of the standard metadata keys; the observer
// is notified when loading completes.
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);
    // The previous bare -retain on the key array had no balancing -release and
    // leaked one reference per call; -loadValuesAsynchronouslyForKeys: does not
    // require the caller to keep the array alive, so pass it directly.
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{
        [m_objcObserver.get() metadataLoaded];
    }];
}
744
// Maps the cached AVPlayerItem status and buffering flags onto WebCore's
// ItemStatus enumeration.
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    // Without an AVPlayerItem there is nothing to report.
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    // Hard status values from AVFoundation take precedence over buffering state.
    if (m_cachedItemStatus == AVPlayerItemStatusUnknown)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    if (m_cachedItemStatus == AVPlayerItemStatusFailed)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;

    // Buffering state, checked most- to least-favorable.
    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
763
// Packages the native AVPlayer so clients can reach the platform object.
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformMedia(%p)", this);
    PlatformMedia result;
    result.type = PlatformMedia::AVFoundationMediaPlayerType;
    result.media.avfMediaPlayer = m_avPlayer.get();
    return result;
}
772
// Layer used for inline (compositor-driven) rendering; may be null before the
// video layer has been created.
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    return m_videoLayer.get();
}
777
778 #if PLATFORM(IOS)
// Reparents the video layer into (or out of) the fullscreen host layer.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer)
{
    if (m_videoFullscreenLayer == videoFullscreenLayer)
        return;

    // Detach the video layer from the previous fullscreen host, if any.
    if (m_videoFullscreenLayer)
       [m_videoLayer removeFromSuperlayer];

    m_videoFullscreenLayer = videoFullscreenLayer;

    // Nothing to attach when fullscreen was exited or there is no video layer.
    if (!m_videoFullscreenLayer || !m_videoLayer)
        return;

    CGRect layerBounds = CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height());
    [m_videoLayer setFrame:layerBounds];
    [m_videoFullscreenLayer addSublayer:m_videoLayer.get()];
}
795
// Records the fullscreen frame and resizes the video layer to fill it.
// The frame is remembered even when no layer exists yet; it is applied when
// the layer is attached in setVideoFullscreenLayer().
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenFrame = frame;
    if (!m_videoFullscreenLayer || !m_videoLayer)
        return;

    // The layer is positioned by its host; only its size matters here.
    [m_videoLayer setFrame:CGRectMake(0, 0, frame.width(), frame.height())];
}
804
// Maps WebCore's gravity enum onto AVFoundation's layer gravity strings and
// applies the result to the video layer.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;
    if (!m_videoLayer)
        return;

    NSString *videoGravity;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        videoGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        videoGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
        // Fall back to the aspect-preserving default the old code started with.
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    }

    [m_videoLayer setVideoGravity:videoGravity];
}
823 #endif
824
// Toggles the video layer's hidden flag inside a transaction with implicit
// animations disabled so visibility changes take effect immediately.
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    if (m_videoLayer)
        [m_videoLayer.get() setHidden:!isVisible];
    [CATransaction commit];
}
833     
// Starts playback by pushing the requested rate to the AVPlayer.
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPlay(%p)", this);
    if (!metaDataAvailable())
        return;

    // Suppress re-entrant KVO callbacks until the cached rate is in sync.
    setDelayCallbacks(true);
    float rate = requestedRate();
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
845
// Pauses playback by setting the AVPlayer rate to zero.
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPause(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = 0;
    // -[AVPlayer setRate:] takes a float; the previous code passed nil (a
    // pointer constant), which only worked by nil converting to 0. Pass the
    // numeric 0 explicitly, matching m_cachedRate above.
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
857
// Returns the media duration in seconds, invalidTime() when it cannot be
// determined, or +infinity for indefinite (live) content.
float MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will
    // compute the answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaPlayer::invalidTime();

    // Prefer the AVPlayerItem when it is ready to play; some assets never
    // report a duration of their own.
    CMTime cmDuration;
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        cmDuration = [m_avPlayerItem.get() duration];
    else
        cmDuration = [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return narrowPrecisionToFloat(CMTimeGetSeconds(cmDuration));

    if (CMTIME_IS_INDEFINITE(cmDuration))
        return std::numeric_limits<float>::infinity();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %.0f", this, MediaPlayer::invalidTime());
    return MediaPlayer::invalidTime();
}
883
// Returns the current playback position in seconds, clamped to be
// non-negative; 0 when the item is missing or reports a non-numeric time.
float MediaPlayerPrivateAVFoundationObjC::currentTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return 0;

    CMTime itemTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(itemTime))
        return 0;

    return std::max(narrowPrecisionToFloat(CMTimeGetSeconds(itemTime)), 0.0f);
}
895
// Seeks the AVPlayerItem to |time| seconds within the given tolerances and
// reports completion back on the main thread.
void MediaPlayerPrivateAVFoundationObjC::seekToTime(double time, double negativeTolerance, double positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // 600 is a timescale evenly divisible by common frame rates.
    CMTime cmTime = CMTimeMakeWithSeconds(time, 600);
    CMTime cmBefore = CMTimeMakeWithSeconds(negativeTolerance, 600);
    CMTime cmAfter = CMTimeMakeWithSeconds(positiveTolerance, 600);

    auto weakThis = createWeakPtr();

    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        // The handler can fire on any queue and the player may already be
        // destroyed; resolve the weak pointer on the main thread.
        callOnMainThread([weakThis, finished] {
            auto strongThis = weakThis.get();
            if (!strongThis)
                return;

            strongThis->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
919
// Forwards the volume (0..1 per MediaPlayer convention — TODO confirm range
// against callers) to the AVPlayer once metadata is available.
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
    if (!metaDataAvailable())
        return;

    [m_avPlayer.get() setVolume:volume];
}
927
// Logs the caption-visibility request. UNUSED_PARAM is required because the
// only use of the parameter below is inside LOG(), which compiles to nothing
// when logging is disabled.
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    // NOTE(review): actual caption toggling appears to be handled elsewhere
    // (media selection / legible output); confirm before relying on this.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - set to %s", this, boolString(closedCaptionsVisible));
}
937
// Pushes the currently requested rate to the AVPlayer, keeping the cached
// copy in sync and suppressing re-entrant KVO callbacks while doing so.
void MediaPlayerPrivateAVFoundationObjC::updateRate()
{
    setDelayCallbacks(true);
    float rate = requestedRate();
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
945
// Reports the cached playback rate rather than querying the AVPlayer
// synchronously; 0 before metadata is available.
float MediaPlayerPrivateAVFoundationObjC::rate() const
{
    return metaDataAvailable() ? m_cachedRate : 0;
}
953
// Converts the cached loadedTimeRanges into WebCore's PlatformTimeRanges,
// skipping invalid or empty CMTimeRanges.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto timeRanges = PlatformTimeRanges::create();

    // With no player item nothing is buffered; return the empty set.
    if (!m_avPlayerItem)
        return timeRanges;

    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        float rangeStart = narrowPrecisionToFloat(CMTimeGetSeconds(range.start));
        float rangeEnd = narrowPrecisionToFloat(CMTimeGetSeconds(CMTimeRangeGetEnd(range)));
        timeRanges->add(rangeStart, rangeEnd);
    }
    return timeRanges;
}
971
// Returns the earliest start among the valid, non-empty seekable ranges, or 0
// when there are none.
double MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return 0;

    double earliestStart = std::numeric_limits<double>::infinity();
    bool foundValidRange = false;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundValidRange = true;
        earliestStart = std::min(earliestStart, CMTimeGetSeconds(range.start));
    }
    return foundValidRange ? earliestStart : 0;
}
991
// Returns the latest end among the valid, non-empty seekable ranges.
double MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    // Lazily refresh the cache when no KVO update has populated it yet.
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    double latestEnd = 0;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latestEnd = std::max(latestEnd, CMTimeGetSeconds(CMTimeRangeGetEnd(range)));
    }
    return latestEnd;
}
1009
// Returns the latest end among the valid, non-empty loaded ranges.
float MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
#if !PLATFORM(IOS) && __MAC_OS_X_VERSION_MIN_REQUIRED <= 1080
    // AVFoundation on Mountain Lion will occasionally not send a KVO notification
    // when loadedTimeRanges changes when there is no video output. In that case
    // update the cached value explicitly.
    if (!hasLayerRenderer() && !hasContextRenderer())
        m_cachedLoadedRanges = [m_avPlayerItem loadedTimeRanges];
#endif

    if (!m_cachedLoadedRanges)
        return 0;

    float latestEnd = 0;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latestEnd = std::max(latestEnd, narrowPrecisionToFloat(CMTimeGetSeconds(CMTimeRangeGetEnd(range))));
    }

    return latestEnd;
}
1036
// Sums the total sample data length of every track in the player item.
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    long long totalMediaSize = 0;
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        totalMediaSize += [[track assetTrack] totalSampleDataLength];

    return totalMediaSize;
}
1048
// Stores the platform asset (an AVAsset) handed over by the loading machinery.
void MediaPlayerPrivateAVFoundationObjC::setAsset(id asset)
{
    m_avAsset = asset;
}
1053
// Folds the per-key load status of every metadata key into a single
// AssetStatus value, then distinguishes a playable asset from a merely
// loaded one.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    for (NSString *keyName in assetMetadataKeyNames()) {
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:nil];

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading; // At least one key is not loaded yet.

        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    // All keys loaded; "playable" decides between Playable and plain Loaded.
    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue])
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1077
// Paints the current video frame into a GraphicsContext, preferring the
// AVPlayerItemVideoOutput path when a frame is available and falling back to
// the image-generator path otherwise.
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext* context, const IntRect& rect)
{
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

    // Careful: the "else" below pairs with the "if" across the #endif, so the
    // image-generator call is the else-branch when video output is compiled
    // in, and the unconditional path when it is not.
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    // Remember that at least one frame has been painted.
    m_videoFrameHasDrawn = true;
}
1098
// Entry point for painting into a context; skipped when the compositor is
// already showing the video through a layer.
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext* context, const IntRect& rect)
{
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    // We can ignore the request if we are already rendering to a layer.
    if (currentRenderingMode() == MediaRenderingToLayer)
        return;

    paintCurrentFrameInContext(context, rect);
}
1110
// Paints a frame snapshot obtained from AVAssetImageGenerator into |context|.
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext* context, const IntRect& rect)
{
    RetainPtr<CGImageRef> image = createImageForTimeInRect(currentTime(), rect);
    if (!image)
        return;

    GraphicsContextStateSaver stateSaver(*context);

    // CGImages are flipped relative to the context's coordinate space, so
    // translate to the bottom of the target rect and flip vertically.
    context->translate(rect.x(), rect.y() + rect.height());
    context->scale(FloatSize(1.0f, -1.0f));
    context->setImageInterpolationQuality(InterpolationLow);

    IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context->platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
    // Removed the old explicit "image = 0": the RetainPtr releases its
    // CGImage automatically when it goes out of scope.
}
1124
// Builds (once) and returns the set of MIME types AVFoundation claims to
// support. The listing was missing the function's closing brace; restored.
static HashSet<String> mimeTypeCache()
{
    DEPRECATED_DEFINE_STATIC_LOCAL(HashSet<String>, cache, ());
    static bool typeListInitialized = false;

    if (typeListInitialized)
        return cache;
    typeListInitialized = true;

    NSArray *types = [AVURLAsset audiovisualMIMETypes];
    for (NSString *mimeType in types)
        cache.add(mimeType);

    return cache;
}
1139
1140
// Snapshots the frame nearest |time| via AVAssetImageGenerator, sized to
// |rect|, and converts it to the device RGB colorspace so it can be drawn
// directly into a GraphicsContext.
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const IntRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    // 600 is a timescale evenly divisible by common frame rates.
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), deviceRGBColorSpaceRef()));

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return image;
}
1162
// Hands back a copy of the lazily-built AVFoundation MIME type cache.
// The listing was missing the function's closing brace; restored.
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String>& supportedTypes)
{
    supportedTypes = mimeTypeCache();
}
1166
1167
1168 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// Only Apple FairPlay Streaming key-system identifiers are recognized.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringCase(keySystem, "com.apple.fps") || equalIgnoringCase(keySystem, "com.apple.fps.1_0");
}
1175 #endif
1176
// Answers canPlayType()-style support queries for this engine.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(ENCRYPTED_MEDIA)
    // From: <http://dvcs.w3.org/hg/html-media/raw-file/eme-v0.1b/encrypted-media/encrypted-media.html#dom-canplaytype>
    // In addition to the steps in the current specification, this method must run the following steps:

    // 1. Check whether the Key System is supported with the specified container and codec type(s) by following the steps for the first matching condition from the following list:
    //    If keySystem is null, continue to the next step.
    if (!parameters.keySystem.isNull() && !parameters.keySystem.isEmpty()) {
        // If keySystem contains an unrecognized or unsupported Key System, return the empty string
        if (!keySystemIsSupported(parameters.keySystem))
            return MediaPlayer::IsNotSupported;

        // If the Key System specified by keySystem does not support decrypting the container and/or codec specified in the rest of the type string.
        // (AVFoundation does not provide an API which would allow us to determine this, so this is a no-op)
    }

    // 2. Return "maybe" or "probably" as appropriate per the existing specification of canPlayType().
#endif

#if ENABLE(MEDIA_SOURCE)
    // This engine does not implement MSE; let the MSE-specific engine claim it.
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif

    if (!mimeTypeCache().contains(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    // Fixed: the return statement previously ended with a stray double semicolon.
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type, (NSString *)parameters.codecs];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1213
// Reports whether this engine can handle |keySystem| (optionally restricted
// to |mimeType|); always false when encrypted media is compiled out.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    // An empty key system is never supported.
    if (keySystem.isEmpty())
        return false;

    // The key system itself must be recognized...
    if (!keySystemIsSupported(keySystem))
        return false;

    // ...and when a container type is given, it must be one we can play.
    if (!mimeType.isEmpty() && !mimeTypeCache().contains(mimeType))
        return false;

    return true;
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
    return false;
#endif
}
1232
1233 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// AVAssetResourceLoader delegate hook: returns true when WebCore will service
// the request itself (either by surfacing a FairPlay key request to the page
// or by loading the resource through WebCore's loader).
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    // "skd" URLs are FairPlay key requests: report them as "key needed"
    // instead of loading them.
    if (scheme == "skd") {
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
        initDataView->set<uint32_t>(0, keyURISize, true);

        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, 4, keyURI.length());
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

        // Careful: the "if" selected by the preprocessor below shares one body
        // — the "return false" after the #endif.
#if ENABLE(ENCRYPTED_MEDIA)
        if (!player()->keyNeeded("com.apple.lskd", emptyString(), static_cast<const unsigned char*>(initDataBuffer->data()), initDataBuffer->byteLength()))
#elif ENABLE(ENCRYPTED_MEDIA_V2)
        RefPtr<Uint8Array> initData = Uint8Array::create(initDataBuffer, 0, initDataBuffer->byteLength());
        if (!player()->keyNeeded(initData.get()))
#endif
            return false;

        // Remember the request so the key can be attached when it arrives.
        m_keyURIToRequestMap.set(keyURI, avRequest);
        return true;
    }
#endif

    // All other schemes are serviced by WebCore's resource loader.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1269
// Forwards authentication challenges from the media stack to the client;
// unimplemented on CFNetwork-based builds.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForResponseToAuthenticationChallenge(NSURLAuthenticationChallenge* nsChallenge)
{
#if USE(CFNETWORK)
    UNUSED_PARAM(nsChallenge);
    // FIXME: <rdar://problem/15799844>
    return false;
#else
    // Wrap the NSURLAuthenticationChallenge and let the client decide.
    AuthenticationChallenge challenge(nsChallenge);

    return player()->shouldWaitForResponseToAuthenticationChallenge(challenge);
#endif
}
1282
// Stops the WebCore-side loader for a cancelled AVAssetResourceLoadingRequest.
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    // The old code also fetched the request URL's scheme into a local that was
    // never used; that dead work has been removed.
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest))
        resourceLoader->stopLoading();
}
1292
// The loader for this request is done (or torn down); drop the map entry.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
1297 #endif
1298
// The engine is usable only when both soft-linked frameworks are present.
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    return AVFoundationLibrary() && CoreMediaLibrary();
}
1303
// Currently an identity mapping in both branches; see the FIXME below.
float MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(float timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1312
// Applies aspect-ratio handling to the inline video layer inside a
// transaction with implicit animations disabled, so the change is immediate.
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    NSString* gravity = shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize;
    [m_videoLayer.get() setVideoGravity:gravity];
    [CATransaction commit];
}
1324
// Recomputes hasVideo/hasAudio/hasCaptions whenever the platform track
// collection changes, refreshes the WebCore-side track lists, updates the
// natural size, and fires characteristicsChanged() if the primary audio
// language changed.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Capture the old primary-audio language so a change can be detected at
    // the end; clear the cache so languageOfPrimaryAudioTrack() recomputes.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    // Batch any characteristic changes into a single notification.
    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        setHasVideo([[m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual] count]);
        setHasAudio([[m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible] count]);
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif
    } else {
        // Derive the flags from the enabled tracks of the player item.
        bool hasVideo = false;
        bool hasAudio = false;
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                if ([[assetTrack mediaType] isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([[assetTrack mediaType] isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                }
            }
        }
        setHasVideo(hasVideo);
        setHasAudio(hasAudio);


#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Prefer the media selection API for caption discovery when available.
    if (AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia()) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

    // Fall back to legacy CEA-608/708-style closed-caption tracks when the
    // legible-output path is unavailable.
#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    LOG(Media, "WebCoreAVFMovieObserver:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s",
        this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

    setDelayCharacteristicsChangedNotification(false);
}
1404
1405 #if ENABLE(VIDEO_TRACK)
// Diffs the platform track list (filtered to trackType) against the existing
// WebCore-side items in |oldItems|: surviving items are kept, vanished tracks'
// items are removed, and genuinely new tracks are wrapped via |itemFactory|.
// The MediaPlayer is notified of each removal and addition.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Current platform tracks of the requested media type.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    // Platform tracks currently wrapped by WebCore items.
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    typedef Vector<RefT> ItemVector;
    for (auto i = oldItems.begin(); i != oldItems.end(); ++i)
        [oldTracks addObject:(*i)->playerItemTrack()];

    // removedTracks = oldTracks - newTracks.
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    // addedTracks = newTracks - oldTracks.
    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    // Partition the old items into surviving and removed ones.
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto i = oldItems.begin(); i != oldItems.end(); ++i) {
        if ([removedTracks containsObject:(*i)->playerItemTrack()])
            removedItems.append(*i);
        else
            replacementItems.append(*i);
    }

    // Wrap each genuinely new platform track in a WebCore item.
    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    // Install the new item list before notifying the player.
    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    for (auto i = removedItems.begin(); i != removedItems.end(); ++i)
        (player->*removedFunction)(*i);

    for (auto i = addedItems.begin(); i != addedItems.end(); ++i)
        (player->*addedFunction)(*i);
}
1446
// Reconciles WebCore's audio track list with the current AVPlayerItem tracks.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
}
1451
// Reconciles WebCore's video track list with the current AVPlayerItem tracks.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
}
1456 #endif // ENABLE(VIDEO_TRACK)
1457
// Recomputes the media's natural size from its visual tracks (or the item's
// presentation size when tracks are not yet reported) and pushes it to the
// player via setNaturalSize().
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (!m_avAsset)
        return;

    // Some assets don't report track properties until they are completely ready to play, but we
    // want to report a size as early as possible so use presentationSize when an asset has no tracks.
    if (m_avPlayerItem && ![m_cachedTracks count]) {
        setNaturalSize(roundedIntSize(m_cachedPresentationSize));
        return;
    }

    // AVAsset's 'naturalSize' property only considers the movie's first video track, so we need to compute
    // the union of all visual track rects. Each track rect is mapped through
    // its preferred transform to account for rotation/flipping.
    CGRect trackUnionRect = CGRectZero;
    for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
        AVAssetTrack* assetTrack = [track assetTrack];
        CGSize trackSize = [assetTrack naturalSize];
        CGRect trackRect = CGRectMake(0, 0, trackSize.width, trackSize.height);
        trackUnionRect = CGRectUnion(trackUnionRect, CGRectApplyAffineTransform(trackRect, [assetTrack preferredTransform]));
    }

    // The movie is always displayed at 0,0 so move the track rect to the origin before using width and height.
    trackUnionRect = CGRectOffset(trackUnionRect, trackUnionRect.origin.x, trackUnionRect.origin.y);

    // Also look at the asset's preferred transform so we account for a movie matrix.
    CGSize naturalSize = CGSizeApplyAffineTransform(trackUnionRect.size, [m_avAsset.get() preferredTransform]);

    // Cache the natural size (setNaturalSize will notify the player if it has changed).
    setNaturalSize(IntSize(naturalSize));
}
1489
#if PLATFORM(IOS)
// FIXME: Implement for iOS in WebKit System Interface.
// Stub: always returns nil on iOS, so hasSingleSecurityOrigin() compares against a null URL.
static inline NSURL *wkAVAssetResolvedURL(AVAsset*)
{
    return nil;
}
#endif
1497
bool MediaPlayerPrivateAVFoundationObjC::hasSingleSecurityOrigin() const 
{
    if (!m_avAsset)
        return false;
    
    // Compare the origin the asset actually resolved to (e.g. after redirects) with the origin
    // of the URL originally requested; both must have the same scheme/host/port.
    RefPtr<SecurityOrigin> resolvedOrigin = SecurityOrigin::create(URL(wkAVAssetResolvedURL(m_avAsset.get())));
    RefPtr<SecurityOrigin> requestedOrigin = SecurityOrigin::createFromString(assetURL());
    return resolvedOrigin->isSameSchemeHostPort(requestedOrigin.get());
}
1507
1508 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Lazily creates the AVPlayerItemVideoOutput used to pull decoded frames for painting.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p)", this);

    // Nothing to do without a player item, or if an output already exists.
    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // With VideoToolbox available, request 4:2:2 and convert to BGRA later (see createPixelBuffer()).
    NSDictionary* attributes = @{ (NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_422YpCbCr8) };
#else
    // Otherwise ask the output for 32BGRA directly, which the CGImage wrapping code requires.
    NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                nil];
#endif
    m_videoOutput = adoptNS([[getAVPlayerItemVideoOutputClass() alloc] initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    // Media-data-change callbacks are delivered on the shared pull-delegate queue.
    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];

    // Block (up to 1 second) until the output reports it has media data, so the first paint can have a frame.
    waitForVideoOutputMediaDataWillChange();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p) - returning %p", this, m_videoOutput.get());
}
1533
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    // Detach the output from the player item (if the item still exists) before releasing it.
    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput(%p) - destroying  %p", this, m_videoOutput.get());

    m_videoOutput = 0;
}
1545
// Pulls the current frame from the video output as a CVPixelBuffer, converting it to
// 32BGRA via VideoToolbox when the output was configured for 4:2:2. Returns null when
// no new frame is available for the current item time.
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::createPixelBuffer()
{
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return 0;

    RetainPtr<CVPixelBufferRef> buffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
    if (!buffer)
        return 0;

#if USE(VIDEOTOOLBOX)
    // Create a VTPixelTransferSession, if necessary, as we cannot guarantee timely delivery of ARGB pixels.
    if (!m_pixelTransferSession) {
        VTPixelTransferSessionRef session = 0;
        VTPixelTransferSessionCreate(kCFAllocatorDefault, &session);
        m_pixelTransferSession = adoptCF(session);
    }

    // Initialize and check the destination buffer: previously outputBuffer was left
    // uninitialized and the CVPixelBufferCreate() result ignored, so a failed allocation
    // would hand a garbage pointer to VTPixelTransferSessionTransferImage().
    CVPixelBufferRef outputBuffer = 0;
    if (CVPixelBufferCreate(kCFAllocatorDefault, CVPixelBufferGetWidth(buffer.get()), CVPixelBufferGetHeight(buffer.get()), kCVPixelFormatType_32BGRA, 0, &outputBuffer) != kCVReturnSuccess || !outputBuffer)
        return 0;
    VTPixelTransferSessionTransferImage(m_pixelTransferSession.get(), buffer.get(), outputBuffer);
    buffer = adoptCF(outputBuffer);
#endif

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createPixelBuffer(%p) - creating buffer took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return buffer;
}
1586
// Returns whether a frame can be painted right now: either one was captured earlier,
// or the video output has a new pixel buffer for the current item time.
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    // No player item means there cannot be any frames yet.
    if (!m_avPlayerItem)
        return false;

    // A previously captured image still counts as an available frame.
    if (m_lastImage)
        return true;

    // Create the output lazily before querying it.
    if (!m_videoOutput)
        createVideoOutput();

    CMTime itemTime = [m_avPlayerItem currentTime];
    return [m_videoOutput hasNewPixelBufferForItemTime:itemTime];
}
1600
// CGDataProviderDirectCallbacks giving CoreGraphics direct, zero-copy access to a
// CVPixelBuffer's pixels. The buffer stays locked (read-only) between the byte-pointer
// get and release callbacks below.
static const void* CVPixelBufferGetBytePointerCallback(void* info)
{
    CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
    return CVPixelBufferGetBaseAddress(pixelBuffer);
}

static void CVPixelBufferReleaseBytePointerCallback(void* info, const void*)
{
    CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
}

// Called when the data provider is destroyed; balances the CFRetain() made in
// createImageFromPixelBuffer().
static void CVPixelBufferReleaseInfoCallback(void* info)
{
    CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(info);
    CFRelease(pixelBuffer);
}
1619
// Wraps a 32BGRA CVPixelBuffer in a CGImage without copying: the data provider reads the
// buffer's pixels directly via the callbacks above, and the buffer is retained for the
// provider's lifetime (released in CVPixelBufferReleaseInfoCallback).
static RetainPtr<CGImageRef> createImageFromPixelBuffer(CVPixelBufferRef pixelBuffer)
{
    // pixelBuffer will be of type kCVPixelFormatType_32BGRA.
    ASSERT(CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_32BGRA);

    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
    size_t byteLength = CVPixelBufferGetDataSize(pixelBuffer);
    // BGRA in memory == ARGB read as a 32-bit little-endian value.
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaFirst;

    CFRetain(pixelBuffer); // Balanced by CVPixelBufferReleaseInfoCallback in providerCallbacks.
    CGDataProviderDirectCallbacks providerCallbacks = { 0, CVPixelBufferGetBytePointerCallback, CVPixelBufferReleaseBytePointerCallback, 0, CVPixelBufferReleaseInfoCallback };
    RetainPtr<CGDataProviderRef> provider = adoptCF(CGDataProviderCreateDirect(pixelBuffer, byteLength, &providerCallbacks));

    return adoptCF(CGImageCreate(width, height, 8, 32, bytesPerRow, deviceRGBColorSpaceRef(), bitmapInfo, provider.get(), NULL, false, kCGRenderingIntentDefault));
}
1637
// Refreshes m_lastImage from the video output, keeping the previous image when no
// newer frame is available.
void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
{
    RetainPtr<CVPixelBufferRef> pixelBuffer = createPixelBuffer();

    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
    // for the requested time has already been retrieved. In this case, the last valid image (if any)
    // should be displayed.
    if (pixelBuffer)
        m_lastImage = createImageFromPixelBuffer(pixelBuffer.get());
}
1648
// Paints the most recent video frame (if any) from the video-output path into outputRect.
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext* context, const IntRect& outputRect)
{
    updateLastImage();
    if (!m_lastImage)
        return;

    GraphicsContextStateSaver stateSaver(*context);

    size_t imageWidth = CGImageGetWidth(m_lastImage.get());
    size_t imageHeight = CGImageGetHeight(m_lastImage.get());
    IntRect imageRect(0, 0, imageWidth, imageHeight);

    context->drawNativeImage(m_lastImage.get(), imageRect.size(), ColorSpaceDeviceRGB, outputRect, imageRect);

    // An AVAssetImageGenerator created in the past (because m_videoOutput had no available
    // video frame) is no longer needed once we are painting output frames; destroy it.
    if (m_imageGenerator)
        destroyImageGenerator();
}
1666
// Returns the CGImage for the current time, refreshing m_lastImage first.
PassNativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage.get();
}
1672
// Synchronously waits (bounded) until the video output reports it has media data;
// the semaphore is signaled from outputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    // Lazily create the semaphore the delegate callback signals.
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for 1 second.
    long result = dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(0, 1 * NSEC_PER_SEC));

    // dispatch_semaphore_wait() returns non-zero on timeout.
    if (result)
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange(%p) timed out", this);
}
1686
// Video-output delegate callback; wakes up waitForVideoOutputMediaDataWillChange().
// NOTE(review): presumably invoked on the pull-delegate queue set in createVideoOutput() — confirm.
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutput*)
{
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
1691 #endif
1692
1693 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// Parses the caller-provided (untrusted) initData blob into its three components.
// Returns false on any malformed or truncated input.
bool MediaPlayerPrivateAVFoundationObjC::extractKeyURIKeyIDAndCertificateFromInitData(Uint8Array* initData, String& keyURI, String& keyID, RefPtr<Uint8Array>& certificate)
{
    // initData should have the following layout:
    // [4 bytes: keyURI length][N bytes: keyURI][4 bytes: contentID length], [N bytes: contentID], [4 bytes: certificate length][N bytes: certificate]
    if (initData->byteLength() < 4)
        return false;

    RefPtr<ArrayBuffer> initDataBuffer = initData->buffer();

    // Use a DataView to read uint32 values from the buffer, as Uint32Array requires the reads be aligned on 4-byte boundaries. 
    RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
    uint32_t offset = 0;
    bool status = true;

    // Bounds checks below compare the length field against the remaining bytes instead of
    // computing "offset + length", which could wrap around for adversarial 32-bit lengths.
    uint32_t keyURILength = initDataView->get<uint32_t>(offset, true, &status);
    offset += 4;
    if (!status || offset > initData->length() || keyURILength > initData->length() - offset)
        return false;

    RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, offset, keyURILength);
    if (!keyURIArray)
        return false;

    // keyURILength is a byte count; the string holds keyURILength / 2 UTF-16 code units.
    keyURI = String(keyURIArray->data(), keyURILength / sizeof(unsigned short));
    offset += keyURILength;

    uint32_t keyIDLength = initDataView->get<uint32_t>(offset, true, &status);
    offset += 4;
    if (!status || offset > initData->length() || keyIDLength > initData->length() - offset)
        return false;

    RefPtr<Uint16Array> keyIDArray = Uint16Array::create(initDataBuffer, offset, keyIDLength);
    if (!keyIDArray)
        return false;

    keyID = String(keyIDArray->data(), keyIDLength / sizeof(unsigned short));
    offset += keyIDLength;

    uint32_t certificateLength = initDataView->get<uint32_t>(offset, true, &status);
    offset += 4;
    if (!status || offset > initData->length() || certificateLength > initData->length() - offset)
        return false;

    certificate = Uint8Array::create(initDataBuffer, offset, certificateLength);
    if (!certificate)
        return false;

    return true;
}
1743 #endif
1744
1745 #if ENABLE(ENCRYPTED_MEDIA)
// EME v1: builds a streaming content key request from the initData's certificate and asset ID,
// delivers it to the client as a key message, and moves the pending loading request into the
// per-session map so addKey()/cancelKeyRequest() can find it.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::generateKeyRequest(const String& keySystem, const unsigned char* initDataPtr, unsigned initDataLength)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    // Parse the keyURI/keyID/certificate triple out of the caller-provided initData blob.
    RefPtr<Uint8Array> initData = Uint8Array::create(initDataPtr, initDataLength);
    String keyURI;
    String keyID;
    RefPtr<Uint8Array> certificate;
    if (!extractKeyURIKeyIDAndCertificateFromInitData(initData.get(), keyURI, keyID, certificate))
        return MediaPlayer::InvalidPlayerState;

    // A resource-loading request for this key URI must already be pending.
    if (!m_keyURIToRequestMap.contains(keyURI))
        return MediaPlayer::InvalidPlayerState;

    String sessionID = createCanonicalUUIDString();

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_keyURIToRequestMap.get(keyURI);

    RetainPtr<NSData> certificateData = adoptNS([[NSData alloc] initWithBytes:certificate->baseAddress() length:certificate->byteLength()]);
    // keyID (a WTF::String) converts implicitly to NSString* here, then is encoded as UTF-8 bytes.
    NSString* assetStr = keyID;
    RetainPtr<NSData> assetID = [NSData dataWithBytes: [assetStr cStringUsingEncoding:NSUTF8StringEncoding] length:[assetStr lengthOfBytesUsingEncoding:NSUTF8StringEncoding]];
    NSError* error = 0;
    RetainPtr<NSData> keyRequest = [avRequest.get() streamingContentKeyRequestDataForApp:certificateData.get() contentIdentifier:assetID.get() options:nil error:&error];

    if (!keyRequest) {
        // Report the underlying error code to the client and bail without a key message.
        NSError* underlyingError = [[error userInfo] objectForKey:NSUnderlyingErrorKey];
        player()->keyError(keySystem, sessionID, MediaPlayerClient::DomainError, [underlyingError code]);
        return MediaPlayer::NoError;
    }

    // Hand the key request bytes to the client as a key message.
    RefPtr<ArrayBuffer> keyRequestBuffer = ArrayBuffer::create([keyRequest.get() bytes], [keyRequest.get() length]);
    RefPtr<Uint8Array> keyRequestArray = Uint8Array::create(keyRequestBuffer, 0, keyRequestBuffer->byteLength());
    player()->keyMessage(keySystem, sessionID, keyRequestArray->data(), keyRequestArray->byteLength(), URL());

    // Move ownership of the AVAssetResourceLoadingRequest from the keyIDToRequestMap to the sessionIDToRequestMap:
    m_sessionIDToRequestMap.set(sessionID, avRequest);
    m_keyURIToRequestMap.remove(keyURI);

    return MediaPlayer::NoError;
}
1787
// EME v1: completes the session's pending AVAssetResourceLoadingRequest with the key data.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::addKey(const String& keySystem, const unsigned char* keyPtr, unsigned keyLength, const unsigned char* initDataPtr, unsigned initDataLength, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    // The session must have a pending request created earlier by generateKeyRequest().
    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    // Feed the key bytes to the loading request, finish it, and drop it from the map.
    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_sessionIDToRequestMap.get(sessionID);
    RetainPtr<NSData> keyData = adoptNS([[NSData alloc] initWithBytes:keyPtr length:keyLength]);
    [[avRequest.get() dataRequest] respondWithData:keyData.get()];
    [avRequest.get() finishLoading];
    m_sessionIDToRequestMap.remove(sessionID);

    player()->keyAdded(keySystem, sessionID);

    // initData is not used when adding a key in this implementation.
    UNUSED_PARAM(initDataPtr);
    UNUSED_PARAM(initDataLength);
    return MediaPlayer::NoError;
}
1808
// EME v1: abandons the session's pending loading request, if any.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::cancelKeyRequest(const String& keySystem, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    // take() removes the entry and hands back the request (null when the session is
    // unknown), folding the previous contains()/remove() pair into one lookup.
    if (!m_sessionIDToRequestMap.take(sessionID))
        return MediaPlayer::InvalidPlayerState;

    return MediaPlayer::NoError;
}
1820 #endif
1821
1822 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Removes and returns the pending loading request for keyURI (null if none).
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
1827
// EME v2: creates a CDM session bound to this player, or null for unsupported key systems.
std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem)
{
    if (!keySystemIsSupported(keySystem))
        return nullptr;

    return std::make_unique<CDMSessionAVFoundationObjC>(this);
}
1835 #endif
1836
1837 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
// Syncs m_textTracks with the legacy closed-caption tracks on the current player item,
// creating wrappers for new CC tracks and collecting vanished ones for removal.
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    // Assume every current track was removed; tracks still present are taken back out
    // of this list as they are matched below.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            // Index into removedTextTracks, not m_textTracks: once an element has been
            // removed from removedTextTracks the two vectors no longer line up (the old
            // code read m_textTracks[i - 1]; the parallel loop in
            // processMediaSelectionOptions() already uses removedTextTracks[i - 1]).
            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(removedTextTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;
        
        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
1872 #endif
1873
1874 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Returns the asset's legible media selection group, or nil if the asset (or the
// availableMediaCharacteristicsWithMediaSelectionOptions key) is not loaded yet —
// asking earlier would block or fail.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    if (!m_avAsset)
        return nil;
    
    if ([m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] != AVKeyValueStatusLoaded)
        return nil;
    
    return [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible];
}
1885
// Syncs m_textTracks with the playable options of the legible media selection group,
// creating wrappers for new options and collecting vanished ones for removal.
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Assume every current track was removed; tracks whose option still exists are taken
    // back out of this list as they are matched below.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy CC tracks are not represented by selection options; skip them here.
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            // Fetch the existing track's selection option; out-of-band and in-band tracks
            // store it on different wrapper classes.
            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
1941
// Forwards legible-output attributed strings to the currently selected text track.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, double time)
{
    if (!m_currentTrack)
        return;

    m_currentTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), time);
}
1949
// Clears any partially accumulated cue state on the currently selected text track.
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::flushCues(%p)", this);

    if (!m_currentTrack)
        return;
    
    m_currentTrack->resetCueValues();
}
1959 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1960
// Selects the given text track (or deselects all tracks when nil), routing the selection
// through the mechanism matching the track's category: legacy closed captions via the
// player's CC display, everything else via the legible media selection group.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTrack == track)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setCurrentTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");
        
    m_currentTrack = track;

    if (track) {
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
        // Deselect: clear any media selection option and disable legacy CC display.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
    }

}
1989
// Determines (and caches in the mutable member m_languageOfPrimaryAudioTrack) the BCP-47-ish
// language of the primary audio track: the selected audible option when there is an audible
// selection group, otherwise the language of a sole audio track, otherwise the empty string.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    // Return the cached answer if we already computed it.
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        m_languageOfPrimaryAudioTrack = emptyString();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - %lu audio tracks, returning emptyString()", this, static_cast<unsigned long>(tracks ? [tracks count] : 0));
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

#if !LOG_DISABLED
    if (m_languageOfPrimaryAudioTrack == emptyString())
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
    else
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
#endif

    return m_languageOfPrimaryAudioTrack;
}
2031
2032 #if ENABLE(IOS_AIRPLAY)
// Reports whether the AVPlayer is currently routing video to an external (wireless) target.
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    if (!m_avPlayer)
        return false;

    bool isWireless = [m_avPlayer.get() isExternalPlaybackActive];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless(%p) - returning %s", this, boolString(isWireless));

    return isWireless;
}
2042
// Maps the WKSI external-device type of the AVPlayer onto MediaPlayer's target-type enum.
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

    switch (wkExernalDeviceTypeForPlayer(m_avPlayer.get())) {
    case wkExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case wkExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case wkExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    // All enum values are handled above; reaching here means WKSI returned something new.
    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;
}
2060
// Returns the display name of the current external playback device (via WKSI), if any.
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();
    
    String wirelessTargetName = wkExernalDeviceDisplayNameForPlayer(m_avPlayer.get());
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName(%p) - returning %s", this, wirelessTargetName.utf8().data());

    return wirelessTargetName;
}
2071
// Reports whether wireless (external) video playback is disabled. Both this getter and the
// setter below previously inverted AVPlayer.allowsExternalPlayback — setWirelessVideoPlaybackDisabled(true)
// actually ENABLED external playback on the player. Fixed so that the cached flag and the
// AVPlayer property agree: allowsExternalPlayback == wireless playback allowed.
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    // Without a player we can only report the cached value.
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;
    
    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled(%p) - returning %s", this, boolString(!m_allowsWirelessVideoPlayback));

    return !m_allowsWirelessVideoPlayback;
}

// Enables/disables wireless (external) video playback, caching the value for when no
// AVPlayer exists yet.
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(%p) - %s", this, boolString(disabled));
    m_allowsWirelessVideoPlayback = !disabled;
    if (!m_avPlayer)
        return;
    
    // Disabling wireless playback means external playback must NOT be allowed.
    [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
}
2092 #endif
2093
// KVO handler: caches the AVPlayerItem status and recomputes the player state.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;

    updateStates();
}

// The "WillChange" handlers below are invoked for prior (will-change) KVO notifications
// (see the NSKeyValueChangeNotificationIsPriorKey handling in observeValueForKeyPath:).
// They count in-flight changes so updateStates() runs only once every pending change
// has delivered its new value.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    m_pendingStatusChanges++;
}

void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    // Recompute state only when all will-change/did-change pairs have balanced out.
    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}

void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    m_pendingStatusChanges++;
}

void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}

void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    m_pendingStatusChanges++;
}

void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2142
// KVO handlers: each caches the newest observed value, then notifies the relevant
// change hook and recomputes player state.
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray> seekableRanges)
{
    m_cachedSeekableRanges = seekableRanges;

    seekableTimeRangesChanged();
    updateStates();
}

void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray> loadedRanges)
{
    m_cachedLoadedRanges = loadedRanges;

    loadedTimeRangesChanged();
    updateStates();
}

void MediaPlayerPrivateAVFoundationObjC::tracksDidChange(RetainPtr<NSArray> tracks)
{
    m_cachedTracks = tracks;

    tracksChanged();
    updateStates();
}

void MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange(bool hasEnabledAudio)
{
    m_cachedHasEnabledAudio = hasEnabledAudio;

    tracksChanged();
    updateStates();
}

void MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange(FloatSize size)
{
    m_cachedPresentationSize = size;

    sizeChanged();
    updateStates();
}

void MediaPlayerPrivateAVFoundationObjC::durationDidChange(double duration)
{
    m_cachedDuration = duration;

    invalidateCachedDuration();
}

void MediaPlayerPrivateAVFoundationObjC::rateDidChange(double rate)
{
    m_cachedRate = rate;

    updateStates();
    rateChanged();
}
2197     
2198 #if ENABLE(IOS_AIRPLAY)
// KVO handler: forwards wireless-target changes to the base-class notification hook.
void MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange()
{
    playbackTargetIsWirelessChanged();
}
2203 #endif
2204
// AVAsset property keys that are asynchronously loaded before the asset is used.
// The array is built once; dispatch_once makes the lazy initialization thread-safe
// (the previous unguarded "if (!keys)" check could race if called from two threads).
NSArray* assetMetadataKeyNames()
{
    static NSArray* keys;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        keys = [[NSArray alloc] initWithObjects:@"duration",
                    @"naturalSize",
                    @"preferredTransform",
                    @"preferredVolume",
                    @"preferredRate",
                    @"playable",
                    @"tracks",
                    @"availableMediaCharacteristicsWithMediaSelectionOptions",
                   nil];
    });
    return keys;
}
2221
// AVPlayerItem key paths observed via KVO (see observeValueForKeyPath: below).
// Built once; dispatch_once makes the lazy initialization thread-safe (the previous
// unguarded "if (!keys)" check could race if called from two threads).
NSArray* itemKVOProperties()
{
    static NSArray* keys;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        keys = [[NSArray alloc] initWithObjects:@"presentationSize",
                @"status",
                @"asset",
                @"tracks",
                @"seekableTimeRanges",
                @"loadedTimeRanges",
                @"playbackLikelyToKeepUp",
                @"playbackBufferFull",
                @"playbackBufferEmpty",
                @"duration",
                @"hasEnabledAudio",
                nil];
    });
    return keys;
}
2241
2242 } // namespace WebCore
2243
2244 @implementation WebCoreAVFMovieObserver
2245
// Designated initializer: stores a raw (non-owning) pointer to the player that
// receives forwarded notifications; cleared via -disconnect before the player dies.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}
2254
2255 - (void)disconnect
2256 {
2257     [NSObject cancelPreviousPerformRequestsWithTarget:self];
2258     m_callback = 0;
2259 }
2260
2261 - (void)metadataLoaded
2262 {
2263     if (!m_callback)
2264         return;
2265
2266     m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
2267 }
2268
2269 - (void)didEnd:(NSNotification *)unusedNotification
2270 {
2271     UNUSED_PARAM(unusedNotification);
2272     if (!m_callback)
2273         return;
2274     m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
2275 }
2276
2277 - (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context
2278 {
2279     UNUSED_PARAM(object);
2280     id newValue = [change valueForKey:NSKeyValueChangeNewKey];
2281
2282     LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - keyPath = %s", self, [keyPath UTF8String]);
2283
2284     if (!m_callback)
2285         return;
2286
2287     bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];
2288
2289     WTF::Function<void ()> function;
2290
2291     if (context == MediaPlayerAVFoundationObservationContextPlayerItem && willChange) {
2292         if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
2293             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange, m_callback);
2294         else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
2295             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange, m_callback);
2296         else if ([keyPath isEqualToString:@"playbackBufferFull"])
2297             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange, m_callback);
2298     }
2299
2300     if (context == MediaPlayerAVFoundationObservationContextPlayerItem && !willChange) {
2301         // A value changed for an AVPlayerItem
2302         if ([keyPath isEqualToString:@"status"])
2303             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange, m_callback, [newValue intValue]);
2304         else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
2305             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange, m_callback, [newValue boolValue]);
2306         else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
2307             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange, m_callback, [newValue boolValue]);
2308         else if ([keyPath isEqualToString:@"playbackBufferFull"])
2309             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange, m_callback, [newValue boolValue]);
2310         else if ([keyPath isEqualToString:@"asset"])
2311             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::setAsset, m_callback, RetainPtr<NSArray>(newValue));
2312         else if ([keyPath isEqualToString:@"loadedTimeRanges"])
2313             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
2314         else if ([keyPath isEqualToString:@"seekableTimeRanges"])
2315             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
2316         else if ([keyPath isEqualToString:@"tracks"])
2317             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::tracksDidChange, m_callback, RetainPtr<NSArray>(newValue));
2318         else if ([keyPath isEqualToString:@"hasEnabledAudio"])
2319             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange, m_callback, [newValue boolValue]);
2320         else if ([keyPath isEqualToString:@"presentationSize"])
2321             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange, m_callback, FloatSize([newValue sizeValue]));
2322         else if ([keyPath isEqualToString:@"duration"])
2323             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::durationDidChange, m_callback, CMTimeGetSeconds([newValue CMTimeValue]));
2324     }
2325
2326     if (context == MediaPlayerAVFoundationObservationContextPlayer && !willChange) {
2327         // A value changed for an AVPlayer.
2328         if ([keyPath isEqualToString:@"rate"])
2329             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::rateDidChange, m_callback, [newValue doubleValue]);
2330 #if ENABLE(IOS_AIRPLAY)
2331         else if ([keyPath isEqualToString:@"externalPlaybackActive"])
2332             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange, m_callback);
2333 #endif
2334     }
2335     
2336     if (function.isNull())
2337         return;
2338
2339     auto weakThis = m_callback->createWeakPtr();
2340     m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis, function]{
2341         // weakThis and function both refer to the same MediaPlayerPrivateAVFoundationObjC instance. If the WeakPtr has
2342         // been cleared, the underlying object has been destroyed, and it is unsafe to call function().
2343         if (!weakThis)
2344             return;
2345         function();
2346     }));
2347 }
2348
2349 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2350 - (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime
2351 {
2352     UNUSED_PARAM(output);
2353     UNUSED_PARAM(nativeSamples);
2354
2355     if (!m_callback)
2356         return;
2357
2358     RetainPtr<WebCoreAVFMovieObserver> strongSelf = self;
2359     RetainPtr<NSArray> strongStrings = strings;
2360     callOnMainThread([strongSelf, strongStrings, itemTime] {
2361         MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
2362         if (!callback)
2363             return;
2364         callback->processCue(strongStrings.get(), CMTimeGetSeconds(itemTime));
2365     });
2366 }
2367
2368 - (void)outputSequenceWasFlushed:(id)output
2369 {
2370     UNUSED_PARAM(output);
2371
2372     if (!m_callback)
2373         return;
2374     
2375     RetainPtr<WebCoreAVFMovieObserver> strongSelf = self;
2376     callOnMainThread([strongSelf] {
2377         if (MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback)
2378             callback->flushCues();
2379     });
2380 }
2381 #endif
2382
2383 @end
2384
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
@implementation WebCoreAVFLoaderDelegate

// Forwards AVAssetResourceLoader delegate callbacks — delivered on an
// AVFoundation work queue — to the owning MediaPlayerPrivateAVFoundationObjC
// on the main thread. m_callback is a raw back-pointer cleared via
// -setCallback: when the owner is torn down, so every hop re-checks it.

- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    if (!(self = [super init]))
        return nil;

    m_callback = callback;
    return self;
}

- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);

    if (!m_callback)
        return NO;

    // Keep self and the request alive across the thread hop; if the owner is
    // gone (or declines the request) by the time we get there, fail the load.
    RetainPtr<WebCoreAVFLoaderDelegate> protectedSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> protectedRequest = loadingRequest;
    callOnMainThread([protectedSelf, protectedRequest] {
        MediaPlayerPrivateAVFoundationObjC* player = protectedSelf->m_callback;
        bool shouldWait = player && player->shouldWaitForLoadingOfResource(protectedRequest.get());
        if (!shouldWait)
            [protectedRequest finishLoadingWithError:nil];
    });

    return YES;
}

- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForResponseToAuthenticationChallenge:(NSURLAuthenticationChallenge *)challenge
{
    UNUSED_PARAM(resourceLoader);

    if (!m_callback)
        return NO;

    // Server-trust evaluation is left to AVFoundation's default handling.
    if ([[[challenge protectionSpace] authenticationMethod] isEqualToString:NSURLAuthenticationMethodServerTrust])
        return NO;

    RetainPtr<WebCoreAVFLoaderDelegate> protectedSelf = self;
    RetainPtr<NSURLAuthenticationChallenge> protectedChallenge = challenge;
    callOnMainThread([protectedSelf, protectedChallenge] {
        // Cancel the challenge unless the (still-live) owner agrees to answer it.
        MediaPlayerPrivateAVFoundationObjC* player = protectedSelf->m_callback;
        bool shouldWait = player && player->shouldWaitForResponseToAuthenticationChallenge(protectedChallenge.get());
        if (!shouldWait)
            [[protectedChallenge sender] cancelAuthenticationChallenge:protectedChallenge.get()];
    });

    return YES;
}

- (void)resourceLoader:(AVAssetResourceLoader *)resourceLoader didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);

    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFLoaderDelegate> protectedSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> protectedRequest = loadingRequest;
    callOnMainThread([protectedSelf, protectedRequest] {
        if (MediaPlayerPrivateAVFoundationObjC* player = protectedSelf->m_callback)
            player->didCancelLoadingRequest(protectedRequest.get());
    });
}

- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    m_callback = callback;
}
@end
#endif
2465
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
@implementation WebCoreAVFPullDelegate

// AVPlayerItemOutputPullDelegate adapter: relays "new video frame available"
// notifications to the owning player. The back-pointer is reset through
// -setCallback: when the owner is destroyed.

- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    if ((self = [super init]))
        m_callback = callback;

    return self;
}

- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    m_callback = callback;
}

- (void)outputMediaDataWillChange:(AVPlayerItemVideoOutput *)output
{
    if (!m_callback)
        return;

    m_callback->outputMediaDataWillChange(output);
}

- (void)outputSequenceWasFlushed:(AVPlayerItemVideoOutput *)output
{
    UNUSED_PARAM(output);
    // Intentionally ignored: the player does not track video-output flushes.
}
@end
#endif
2494
2495 #endif