[iOS] Pass additional options to AVFoundation during playback.
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2014 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27
28 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
29
30 #import "MediaPlayerPrivateAVFoundationObjC.h"
31
32 #import "AVTrackPrivateAVFObjCImpl.h"
33 #import "AudioTrackPrivateAVFObjC.h"
34 #import "AuthenticationChallenge.h"
35 #import "BlockExceptions.h"
36 #import "CDMSessionAVFoundationObjC.h"
37 #import "ExceptionCodePlaceholder.h"
38 #import "FloatConversion.h"
39 #import "FloatConversion.h"
40 #import "FrameView.h"
41 #import "GraphicsContext.h"
42 #import "GraphicsContextCG.h"
43 #import "InbandTextTrackPrivateAVFObjC.h"
44 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
45 #import "OutOfBandTextTrackPrivateAVF.h"
46 #import "URL.h"
47 #import "Logging.h"
48 #import "PlatformTimeRanges.h"
49 #import "SecurityOrigin.h"
50 #import "SoftLinking.h"
51 #import "UUID.h"
52 #import "VideoTrackPrivateAVFObjC.h"
53 #import "WebCoreAVFResourceLoader.h"
54 #import "WebCoreSystemInterface.h"
55 #import <objc/runtime.h>
56 #import <runtime/DataView.h>
57 #import <runtime/JSCInlines.h>
58 #import <runtime/TypedArrayInlines.h>
59 #import <runtime/Uint16Array.h>
60 #import <runtime/Uint32Array.h>
61 #import <runtime/Uint8Array.h>
62 #import <wtf/CurrentTime.h>
63 #import <wtf/Functional.h>
64 #import <wtf/text/CString.h>
65 #import <wtf/text/StringBuilder.h>
66
67 #if ENABLE(AVF_CAPTIONS)
68 #include "TextTrack.h"
69 #endif
70
71 #import <AVFoundation/AVFoundation.h>
72 #if PLATFORM(IOS)
73 #import <CoreImage/CoreImage.h>
74 #else
75 #import <QuartzCore/CoreImage.h>
76 #endif
77 #import <CoreMedia/CoreMedia.h>
78
79 #if USE(VIDEOTOOLBOX)
80 #import <CoreVideo/CoreVideo.h>
81 #import <VideoToolbox/VideoToolbox.h>
82 #endif
83
84 #if ENABLE(AVF_CAPTIONS)
85 // Note: This must be defined before our SOFT_LINK macros:
86 @class AVMediaSelectionOption;
87 @interface AVMediaSelectionOption (OutOfBandExtensions)
88 @property (nonatomic, readonly) NSString* outOfBandSource;
89 @property (nonatomic, readonly) NSString* outOfBandIdentifier;
90 @end
91 #endif
92
93 #if PLATFORM(IOS)
94 @class AVPlayerItem;
95 @interface AVPlayerItem (WebKitExtensions)
96 @property (nonatomic, copy) NSString* dataYouTubeID;
97 @end
98 #endif
99
100 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
101 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreMedia)
102 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
103 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreVideo)
104
105 #if USE(VIDEOTOOLBOX)
106 SOFT_LINK_FRAMEWORK_OPTIONAL(VideoToolbox)
107 #endif
108
109 SOFT_LINK(CoreMedia, CMTimeCompare, int32_t, (CMTime time1, CMTime time2), (time1, time2))
110 SOFT_LINK(CoreMedia, CMTimeMakeWithSeconds, CMTime, (Float64 seconds, int32_t preferredTimeScale), (seconds, preferredTimeScale))
111 SOFT_LINK(CoreMedia, CMTimeGetSeconds, Float64, (CMTime time), (time))
112 SOFT_LINK(CoreMedia, CMTimeRangeGetEnd, CMTime, (CMTimeRange range), (range))
113
114 SOFT_LINK(CoreVideo, CVPixelBufferGetWidth, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
115 SOFT_LINK(CoreVideo, CVPixelBufferGetHeight, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
116 SOFT_LINK(CoreVideo, CVPixelBufferGetBaseAddress, void*, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
117 SOFT_LINK(CoreVideo, CVPixelBufferGetBytesPerRow, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
118 SOFT_LINK(CoreVideo, CVPixelBufferGetDataSize, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
119 SOFT_LINK(CoreVideo, CVPixelBufferGetPixelFormatType, OSType, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
120 SOFT_LINK(CoreVideo, CVPixelBufferLockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
121 SOFT_LINK(CoreVideo, CVPixelBufferUnlockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
122
123 #if USE(VIDEOTOOLBOX)
124 SOFT_LINK(VideoToolbox, VTPixelTransferSessionCreate, OSStatus, (CFAllocatorRef allocator, VTPixelTransferSessionRef *pixelTransferSessionOut), (allocator, pixelTransferSessionOut))
125 SOFT_LINK(VideoToolbox, VTPixelTransferSessionTransferImage, OSStatus, (VTPixelTransferSessionRef session, CVPixelBufferRef sourceBuffer, CVPixelBufferRef destinationBuffer), (session, sourceBuffer, destinationBuffer))
126 #endif
127
128 SOFT_LINK_CLASS(AVFoundation, AVPlayer)
129 SOFT_LINK_CLASS(AVFoundation, AVPlayerItem)
130 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemVideoOutput)
131 SOFT_LINK_CLASS(AVFoundation, AVPlayerLayer)
132 SOFT_LINK_CLASS(AVFoundation, AVURLAsset)
133 SOFT_LINK_CLASS(AVFoundation, AVAssetImageGenerator)
134 SOFT_LINK_CLASS(CoreImage, CIContext)
135 SOFT_LINK_CLASS(CoreImage, CIImage)
136
137 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
138 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
139 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
140 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
141 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
142 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
143 SOFT_LINK_POINTER(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
144 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
145 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
146 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
147 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
148 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
149 SOFT_LINK_POINTER(CoreVideo, kCVPixelBufferPixelFormatTypeKey, NSString *)
150
151 SOFT_LINK_CONSTANT(CoreMedia, kCMTimeZero, CMTime)
152
153 #define AVPlayer getAVPlayerClass()
154 #define AVPlayerItem getAVPlayerItemClass()
155 #define AVPlayerLayer getAVPlayerLayerClass()
156 #define AVURLAsset getAVURLAssetClass()
157 #define AVAssetImageGenerator getAVAssetImageGeneratorClass()
158
159 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
160 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
161 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
162 #define AVMediaTypeVideo getAVMediaTypeVideo()
163 #define AVMediaTypeAudio getAVMediaTypeAudio()
164 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
165 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
166 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
167 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
168 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
169 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
170 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
171 #define kCVPixelBufferPixelFormatTypeKey getkCVPixelBufferPixelFormatTypeKey()
172
173 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
174 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
175 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
176
177 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
178 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
179 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
180
181 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
182 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
183 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
184 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
185
186 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
187 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
188 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
189 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
190 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
191 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
192 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
193 #endif
194
195 #if ENABLE(AVF_CAPTIONS)
196 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
197 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
198 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
199 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
200 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
201 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
202 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
203 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
204 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
205
206 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
207 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
208 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
209 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
210 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
211 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
212 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
213 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
214 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
215 #endif
216
217 #define kCMTimeZero getkCMTimeZero()
218
219 using namespace WebCore;
220
// KVO context values passed to -addObserver:forKeyPath:options:context: so
// observeValueForKeyPath: can route a change to the right handler: one
// context for AVPlayerItem properties, one for AVPlayer properties.
enum MediaPlayerAVFoundationObservationContext {
    MediaPlayerAVFoundationObservationContextPlayerItem,
    MediaPlayerAVFoundationObservationContextPlayer
};
225
// Objective-C helper that receives AVFoundation notifications, KVO changes
// and (when legible output is supported) caption callbacks, and forwards
// them to the owning MediaPlayerPrivateAVFoundationObjC.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
#else
@interface WebCoreAVFMovieObserver : NSObject
#endif
{
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Non-owning back-pointer; cleared via -disconnect before the owner dies.
    int m_delayCallbacks;
}
-(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
-(void)disconnect;
-(void)metadataLoaded;
-(void)didEnd:(NSNotification *)notification;
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;
#endif
@end
245
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Resource-loader delegate that lets WebCore service AVAssetResourceLoader
// requests (e.g. custom-scheme media loads) on behalf of the private player.
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Non-owning; reset to 0 in the owner's destructor.
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
@end
#endif
255
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Pull delegate for AVPlayerItemVideoOutput: notified when new video frames
// become available so the owner can pull pixel buffers for painting.
@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    MediaPlayerPrivateAVFoundationObjC *m_callback; // Non-owning; reset to 0 in the owner's destructor.
    dispatch_semaphore_t m_semaphore;
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
@end
#endif
267
268 namespace WebCore {
269
270 static NSArray *assetMetadataKeyNames();
271 static NSArray *itemKVOProperties();
272
#if !LOG_DISABLED
// Render a bool as a C-string literal for LOG() messages.
static const char *boolString(bool val)
{
    if (val)
        return "true";
    return "false";
}
#endif
279
#if ENABLE(ENCRYPTED_MEDIA_V2)
// Global map from a MediaPlayer to its private AVFoundation implementation,
// populated by the constructor and cleared by the destructor so that
// encrypted-media (CDM) code can find the private player for a MediaPlayer.
typedef HashMap<MediaPlayer*, MediaPlayerPrivateAVFoundationObjC*> PlayerToPrivateMapType;
static PlayerToPrivateMapType& playerToPrivateMap()
{
    DEPRECATED_DEFINE_STATIC_LOCAL(PlayerToPrivateMapType, map, ());
    return map;
}
#endif
288
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Lazily create, once per process, the serial queue on which all
// WebCoreAVFLoaderDelegate callbacks are delivered.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t loaderQueue;
    static dispatch_once_t createQueueOnce;
    dispatch_once(&createQueueOnce, ^{
        loaderQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return loaderQueue;
}
#endif
300
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Lazily create, once per process, the serial queue on which all
// WebCoreAVFPullDelegate callbacks are delivered.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_queue_t pullDelegateQueue;
    static dispatch_once_t createQueueOnce;
    dispatch_once(&createQueueOnce, ^{
        pullDelegateQueue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return pullDelegateQueue;
}
#endif
312
// Factory passed to registerMediaEngine(); constructs a private player bound
// to the given MediaPlayer. (Fix: the opening brace of the function body was
// missing, which does not compile.)
PassOwnPtr<MediaPlayerPrivateInterface> MediaPlayerPrivateAVFoundationObjC::create(MediaPlayer* player)
{
    return adoptPtr(new MediaPlayerPrivateAVFoundationObjC(player));
}
317
// Register this engine with WebCore's media-engine registry, but only when
// the AVFoundation framework could actually be soft-linked at runtime.
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    registrar(create, getSupportedTypes, supportsType, 0, 0, 0, supportsKeySystem);
}
323
// Initialize base-class state and the Objective-C helper objects (KVO/movie
// observer, video-output pull delegate, resource-loader delegate) that bridge
// AVFoundation callbacks back into this C++ object. Cached playback values
// start in their "unknown/empty" states until KVO populates them.
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_weakPtrFactory(this)
#if PLATFORM(IOS)
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
#endif
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
    , m_videoOutputSemaphore(nullptr)
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
#endif
    , m_currentTrack(0)
    , m_cachedDuration(MediaPlayer::invalidTime())
    , m_cachedRate(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
#if ENABLE(IOS_AIRPLAY)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    // Register in the global map so encrypted-media code can find this
    // private player from its MediaPlayer; removed again in the destructor.
    playerToPrivateMap().set(player, this);
#endif
}
357
// Tear down: unregister from the global player map, sever all delegate
// back-pointers so no asynchronous callback can reach a dead object, release
// GCD resources, and cancel any in-flight loading.
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    playerToPrivateMap().remove(player());
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Break delegate -> this links before destruction.
    [m_loaderDelegate.get() setCallback:0];
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    [m_videoOutputDelegate setCallback:0];
    [m_videoOutput setDelegate:nil queue:0];
    // Manually created semaphore; this file is compiled without ARC for GCD objects.
    if (m_videoOutputSemaphore)
        dispatch_release(m_videoOutputSemaphore);
#endif
    cancelLoad();
}
375
// Abort any in-progress load and release the AVFoundation object graph
// (asset, player item, player), carefully unregistering every notification,
// KVO observation and output that was registered during creation.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::cancelLoad(%p)", this);
    tearDownVideoRendering();

    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Detach the legible (caption) output from the item before dropping it.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    // Balance every addObserver: made in createAVPlayerItem() before
    // releasing the item; AVFoundation requires matched add/remove pairs.
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];

        m_avPlayerItem = nil;
    }
    // Likewise remove the time observer and the player KVO registrations
    // added in createAVPlayer() before releasing the player.
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;
        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"rate"];
#if ENABLE(IOS_AIRPLAY)
        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"externalPlaybackActive"];
#endif
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedTracks = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = 0;

    setIgnoreLoadStateChanges(false);
}
430
// A layer renderer exists once createVideoLayer() has created m_videoLayer.
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_videoLayer;
}
435
// A context (software-paint) renderer exists if either the player-item video
// output (when available) or the asset image generator has been created.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    return m_videoOutput || m_imageGenerator;
#else
    return m_imageGenerator;
#endif
}
444
// Create the renderer used for painting into a graphics context: prefer the
// AVPlayerItemVideoOutput path when the SDK supports it, otherwise fall back
// to an AVAssetImageGenerator.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
453
// Lazily create the AVAssetImageGenerator used to produce still frames for
// context painting. No-op until an asset exists or once a generator is made.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p)", this);

    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    // Zero tolerance on both sides so images come from the exact requested
    // time rather than a nearby keyframe.
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
}
470
// Destroy whichever context renderer(s) exist; both destroy helpers are
// no-ops when their object was never created.
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
#endif
    destroyImageGenerator();
}
478
// Release the snapshot image generator, if one was ever created.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (m_imageGenerator) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator(%p) - destroying  %p", this, m_imageGenerator.get());
        m_imageGenerator = nil;
    }
}
488
// Lazily create the AVPlayerLayer used for hardware-composited rendering.
// Creation happens on the main thread; a weak pointer guards against this
// object being destroyed while the task is queued.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_videoLayer)
        return;

    auto weakThis = createWeakPtr();
    callOnMainThread([this, weakThis] {
        if (!weakThis)
            return;

        // Re-check: state may have changed while this task waited to run.
        if (!m_avPlayer || m_videoLayer)
            return;

        m_videoLayer = adoptNS([[AVPlayerLayer alloc] init]);
        [m_videoLayer.get() setPlayer:m_avPlayer.get()];
        [m_videoLayer.get() setBackgroundColor:cachedCGColor(Color::black, ColorSpaceDeviceRGB)];
#ifndef NDEBUG
        [m_videoLayer.get() setName:@"Video layer"];
#endif
        updateVideoLayerGravity();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoLayer(%p) - returning %p", this, m_videoLayer.get());

#if PLATFORM(IOS)
        // If fullscreen was entered before the layer existed, attach it now.
        if (m_videoFullscreenLayer) {
            [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
            [m_videoFullscreenLayer addSublayer:m_videoLayer.get()];
        }
#endif
        player()->mediaPlayerClient()->mediaPlayerRenderingModeChanged(player());
    });
}
520
// Detach the video layer from its player and release it, if one exists.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (m_videoLayer) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer(%p) - destroying %p", this, m_videoLayer.get());
        [m_videoLayer.get() setPlayer:nil];
        m_videoLayer = nil;
    }
}
532
// Report whether a displayable video frame exists for the current rendering
// mode: ask the layer when rendering to a layer, otherwise report whether a
// frame has already been painted into a context.
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    if (currentRenderingMode() != MediaRenderingToLayer)
        return m_videoFrameHasDrawn;

    return m_videoLayer && [m_videoLayer.get() isReadyForDisplay];
}
540
541 #if ENABLE(AVF_CAPTIONS)
// Translate a WebCore text-track kind into the AVFoundation media
// characteristics array used for out-of-band track descriptions. Unknown
// kinds fall through to the spoken-dialog characteristic.
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    // FIXME: Match these to correct types:
    switch (kind) {
    case PlatformTextTrack::Description:
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, nil];
    case PlatformTextTrack::Forced:
        return [NSArray arrayWithObjects: AVMediaCharacteristicContainsOnlyForcedSubtitles, nil];
    case PlatformTextTrack::Caption:
    case PlatformTextTrack::Subtitle:
    default:
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];
    }
}
559     
// Forward an out-of-band track mode change to the base class's handler.
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
564     
// Push the mode (hidden/disabled/showing) of each out-of-band track source
// held by the MediaPlayer onto the matching out-of-band text track, matching
// tracks by the unique ID embedded in the AVMediaSelectionOption's
// out-of-band identifier.
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();

    for (auto& textTrack : m_textTracks) {
        // Only out-of-band tracks are driven from the player's sources.
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;

        RefPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> currentOption = trackPrivate->mediaSelectionOption();

        for (auto& track : outOfBandTrackSources) {
            // The identifier was set to String::number(uniqueId()) when the
            // track dictionary was built in createAVAssetForURL().
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();

            if (![[currentOption.get() outOfBandIdentifier] isEqual: reinterpret_cast<const NSString*>(uniqueID.get())])
                continue;

            // Default to Hidden for any unrecognized platform mode.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;

            textTrack->setMode(mode);
            break;
        }
    }
}
595 #endif
596
// Create the AVURLAsset for the given URL, building the options dictionary:
// reference restrictions, Referer/User-Agent headers, the optional iTunes
// query-component inheritance key, and (when captions are enabled) the
// out-of-band alternate track descriptions.
// Fix: the out-of-band tracks NSMutableArray was alloc/init'ed and never
// released; this file uses manual retain/release, so wrap it in adoptNS like
// every other allocation here to avoid leaking it on each asset creation.
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
{
    if (m_avAsset)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(%p)", this);

    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);

    // Forbid local<->remote cross references for security.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject: [NSNumber numberWithBool: TRUE] forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

#if ENABLE(AVF_CAPTIONS)
    // Describe each out-of-band text track so AVFoundation exposes it as a
    // media selection option; the unique ID round-trips back to us in
    // synchronizeTextTrackState().
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:
                [NSDictionary dictionaryWithObjectsAndKeys:
                    reinterpret_cast<const NSString*>(label.get()), AVOutOfBandAlternateTrackDisplayNameKey,
                    reinterpret_cast<const NSString*>(language.get()), AVOutOfBandAlternateTrackExtendedLanguageTagKey,
                    [NSNumber numberWithBool: (trackSource->isDefault() ? YES : NO)], AVOutOfBandAlternateTrackIsDefaultKey,
                    reinterpret_cast<const NSString*>(uniqueID.get()), AVOutOfBandAlternateTrackIdentifierKey,
                    reinterpret_cast<const NSString*>(url.get()), AVOutOfBandAlternateTrackSourceKey,
                    mediaDescriptionForKind(trackSource->kind()), AVOutOfBandAlternateTrackMediaCharactersticsKey,
                    nil]];
        }

        [options.get() setObject: outOfBandTracks.get() forKey: AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

    NSURL *cocoaURL = URL(ParsedURLString, url);
    m_avAsset = adoptNS([[AVURLAsset alloc] initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    [[m_avAsset.get() resourceLoader] setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];
#endif

    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
661
// Lazily create the AVPlayer, register KVO for rate (and external playback
// when AirPlay is enabled), and attach any already-created player item.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayer(%p)", this);

    setDelayCallbacks(true);

    m_avPlayer = adoptNS([[AVPlayer alloc] init]);
    // These observations are balanced by removeObserver: calls in cancelLoad().
    [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"rate" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
#if ENABLE(IOS_AIRPLAY)
    [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"externalPlaybackActive" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:YES];
#endif

#if ENABLE(IOS_AIRPLAY)
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

    // Item may have been created first; connect it now.
    if (m_avPlayerItem)
        [m_avPlayer.get() replaceCurrentItemWithPlayerItem:m_avPlayerItem.get()];

    setDelayCallbacks(false);
}
690
// Lazily create the AVPlayerItem for the current asset, register the
// did-play-to-end notification and per-property KVO, attach the item to any
// existing player, and set up the legible (caption) output when supported.
// Fix: the PLATFORM(IOS) data-youtube-id block opened a brace that was never
// closed before #endif (and the #endif itself was mis-indented), which does
// not compile on iOS; restored the balanced single-statement form.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem(%p)", this);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([[AVPlayerItem alloc] initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // Balanced by removeObserver: over the same key list in cancelLoad().
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    if (m_avPlayer)
        [m_avPlayer.get() replaceCurrentItemWithPlayerItem:m_avPlayerItem.get()];

#if PLATFORM(IOS)
    AtomicString value;
    if (player()->doesHaveAttribute("data-youtube-id", &value))
        [m_avPlayerItem.get() setDataYouTubeID: value];
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    m_legibleOutput = adoptNS([[AVPlayerItemLegibleOutput alloc] initWithMediaSubtypesForNativeRepresentation:[NSArray array]]);
    // WebCore renders captions itself; stop the player from drawing them too.
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

    setDelayCallbacks(false);
}
732
// Kick off an asynchronous load of the asset's "playable" key. Runs at most
// once per player (guarded by m_haveCheckedPlayability); the result is
// reported back on the main thread as an AssetPlayabilityKnown notification.
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::checkPlayability(%p)", this);
    // Capture a weak pointer so the completion handler cannot dereference this
    // object after it has been destroyed.
    auto weakThis = createWeakPtr();

    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"playable"] completionHandler:^{
        // The handler may run on an arbitrary queue; bounce to the main thread.
        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
749
// Begin asynchronous loading of the asset's metadata keys; the observer is
// told (possibly off the main thread) once every key has finished loading.
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);
    // FIX: the original sent an extra -retain to the key array, an unbalanced
    // reference-count bump with no matching release; pass the array directly.
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{
        [m_objcObserver.get() metadataLoaded];
    }];
}
757
// Map the cached AVPlayerItem state onto the cross-platform ItemStatus enum.
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    switch (m_cachedItemStatus) {
    case AVPlayerItemStatusUnknown:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    case AVPlayerItemStatusFailed:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    default:
        break;
    }

    // Buffering flags take precedence over the generic ready state, in this
    // order: likely-to-keep-up, buffer full, buffer empty.
    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
776
// Expose the underlying AVPlayer to callers that need the platform object.
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformMedia(%p)", this);
    PlatformMedia result;
    result.type = PlatformMedia::AVFoundationMediaPlayerType;
    result.media.avfMediaPlayer = m_avPlayer.get();
    return result;
}
785
// The compositing layer used for inline video rendering, or null when no
// video layer has been created yet.
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    return m_videoLayer.get();
}
790
791 #if PLATFORM(IOS)
// Reparent the video layer into (or out of) the fullscreen host layer.
// Passing null detaches the video layer from fullscreen presentation.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer)
{
    if (m_videoFullscreenLayer == videoFullscreenLayer)
        return;

    // Leaving an old fullscreen layer: detach the video layer from it first.
    if (m_videoFullscreenLayer)
       [m_videoLayer removeFromSuperlayer];

    m_videoFullscreenLayer = videoFullscreenLayer;

    // Nothing to attach if either layer is missing.
    if (!m_videoFullscreenLayer || !m_videoLayer)
        return;

    // Size the video layer to the previously-recorded fullscreen frame.
    [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
    [m_videoFullscreenLayer addSublayer:m_videoLayer.get()];
}
808
// Record the fullscreen frame and, when both layers exist, resize the video
// layer to match (origin is always 0,0 within the fullscreen layer).
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenFrame = frame;
    if (!m_videoFullscreenLayer || !m_videoLayer)
        return;
    
    [m_videoLayer setFrame:CGRectMake(0, 0, frame.width(), frame.height())];
}
817
// Translate the cross-platform gravity value to the AVFoundation constant and
// apply it to the video layer. The value is cached even when no layer exists.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;
    if (!m_videoLayer)
        return;

    // Default matches the original's initial value (aspect-preserving fit).
    NSString *layerGravity = AVLayerVideoGravityResizeAspect;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        layerGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        layerGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        layerGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
        break;
    }

    [m_videoLayer setVideoGravity:layerGravity];
}
836 #endif
837
// Show or hide the video layer without triggering implicit CA animations.
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    if (m_videoLayer)
        [m_videoLayer setHidden:!isVisible];
    [CATransaction commit];
}
846     
// Start playback by applying the requested rate to the AVPlayer. The rate is
// cached first so rate() answers correctly before the "rate" KVO callback.
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPlay(%p)", this);
    if (!metaDataAvailable())
        return;

    // setRate: fires KVO callbacks; delay them until we are done.
    setDelayCallbacks(true);
    m_cachedRate = requestedRate();
    [m_avPlayer.get() setRate:requestedRate()];
    setDelayCallbacks(false);
}
858
// Pause playback by driving the AVPlayer rate to 0. The cached rate is
// updated first so rate() reflects the pause before the KVO callback arrives.
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPause(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = 0;
    // FIX: -setRate: takes a float; the original passed nil (a pointer
    // constant), which only worked because nil converts to 0.
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
870
// Duration in seconds; MediaPlayer::invalidTime() when unknown and +infinity
// for indefinite (e.g. live) streams.
float MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will fetch the
    // answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaPlayer::invalidTime();

    // Prefer the player item's duration once it is ready to play; some assets
    // never report a duration of their own.
    CMTime cmDuration = (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        ? [m_avPlayerItem.get() duration]
        : [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return narrowPrecisionToFloat(CMTimeGetSeconds(cmDuration));

    if (CMTIME_IS_INDEFINITE(cmDuration))
        return std::numeric_limits<float>::infinity();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %.0f", this, MediaPlayer::invalidTime());
    return MediaPlayer::invalidTime();
}
896
// Current playback position in seconds, clamped to be non-negative. Returns 0
// until metadata is available and a player item exists.
float MediaPlayerPrivateAVFoundationObjC::currentTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return 0;

    CMTime playerTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(playerTime))
        return 0;

    return std::max(narrowPrecisionToFloat(CMTimeGetSeconds(playerTime)), 0.0f);
}
908
// Seek the player item to `time` (seconds), allowing AVFoundation the given
// tolerances on either side. Completion is reported asynchronously via
// seekCompleted() on the main thread.
void MediaPlayerPrivateAVFoundationObjC::seekToTime(double time, double negativeTolerance, double positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // 600 is a common timescale that exactly represents typical frame rates.
    CMTime cmTime = CMTimeMakeWithSeconds(time, 600);
    CMTime cmBefore = CMTimeMakeWithSeconds(negativeTolerance, 600);
    CMTime cmAfter = CMTimeMakeWithSeconds(positiveTolerance, 600);

    // Weak pointer: the completion handler may outlive this object.
    auto weakThis = createWeakPtr();

    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
932
// Forward the volume (0..1) to the AVPlayer; ignored until metadata loads.
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
    if (!metaDataAvailable())
        return;

    [m_avPlayer.get() setVolume:volume];
}
940
// Caption visibility is handled elsewhere (selection groups / legible output);
// this override only logs the request.
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    // The parameter is only read by LOG below, which compiles to nothing in
    // release builds; UNUSED_PARAM keeps those builds warning-free.
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - set to %s", this, boolString(closedCaptionsVisible));
}
950
// Push the rate requested by the cross-platform layer to the AVPlayer,
// caching it so rate() can answer without querying AVFoundation.
void MediaPlayerPrivateAVFoundationObjC::updateRate()
{
    // setRate: fires KVO callbacks; delay them until the cache is consistent.
    setDelayCallbacks(true);
    m_cachedRate = requestedRate();
    [m_avPlayer.get() setRate:requestedRate()];
    setDelayCallbacks(false);
}
958
// The most recent rate sent to the AVPlayer; 0 before metadata is available.
float MediaPlayerPrivateAVFoundationObjC::rate() const
{
    return metaDataAvailable() ? m_cachedRate : 0;
}
966
// Convert the cached AVPlayerItem loadedTimeRanges into PlatformTimeRanges,
// skipping any invalid or empty CMTimeRange.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto bufferedRanges = PlatformTimeRanges::create();

    if (!m_avPlayerItem)
        return bufferedRanges;

    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        float rangeStart = narrowPrecisionToFloat(CMTimeGetSeconds(range.start));
        float rangeEnd = narrowPrecisionToFloat(CMTimeGetSeconds(CMTimeRangeGetEnd(range)));
        bufferedRanges->add(rangeStart, rangeEnd);
    }
    return bufferedRanges;
}
984
// Earliest seekable time across all valid cached seekable ranges, or 0 when
// no valid range exists.
double MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return 0;

    bool foundValidRange = false;
    double earliest = std::numeric_limits<double>::infinity();
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundValidRange = true;
        earliest = std::min(earliest, CMTimeGetSeconds(range.start));
    }
    return foundValidRange ? earliest : 0;
}
1004
// Latest seekable time across all valid seekable ranges. Refreshes the cache
// directly from the player item when KVO has not populated it yet.
double MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    double latest = 0;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latest = std::max(latest, CMTimeGetSeconds(CMTimeRangeGetEnd(range)));
    }
    return latest;
}
1022
// Latest buffered time across all valid loaded ranges, or 0 when nothing has
// been cached yet.
float MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
#if !PLATFORM(IOS) && __MAC_OS_X_VERSION_MIN_REQUIRED <= 1080
    // AVFoundation on Mountain Lion will occasionally not send a KVO notification
    // when loadedTimeRanges changes when there is no video output. In that case
    // update the cached value explicitly.
    if (!hasLayerRenderer() && !hasContextRenderer())
        m_cachedLoadedRanges = [m_avPlayerItem loadedTimeRanges];
#endif

    if (!m_cachedLoadedRanges)
        return 0;

    float maxTimeLoaded = 0;
    for (NSValue *thisRangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(timeRange) || CMTIMERANGE_IS_EMPTY(timeRange))
            continue;
        
        // Track the furthest end point seen over all valid ranges.
        float endOfRange = narrowPrecisionToFloat(CMTimeGetSeconds(CMTimeRangeGetEnd(timeRange)));
        if (maxTimeLoaded < endOfRange)
            maxTimeLoaded = endOfRange;
    }

    return maxTimeLoaded;   
}
1049
// Sum of totalSampleDataLength over every cached track — an estimate of the
// media's total size in bytes. Returns 0 before metadata is available.
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    unsigned long long byteTotal = 0;
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        byteTotal += [[track assetTrack] totalSampleDataLength];

    return byteTotal;
}
1061
// Adopt the given AVAsset (retained by the RetainPtr member on assignment).
void MediaPlayerPrivateAVFoundationObjC::setAsset(id asset)
{
    m_avAsset = asset;
}
1066
// Aggregate status of the asynchronous metadata-key loads: "loading" while
// any key is still pending, failed/cancelled if any key ended that way, and
// otherwise playable or merely loaded depending on the asset's "playable" key.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    for (NSString *keyName in assetMetadataKeyNames()) {
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:nil];

        // At least one key has not finished loading yet.
        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading;

        // At least one key could not be loaded.
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed;

        // Loading of at least one key was cancelled.
        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled;
    }

    return [[m_avAsset.get() valueForKey:@"playable"] boolValue] ? MediaPlayerAVAssetStatusPlayable : MediaPlayerAVAssetStatusLoaded;
}
1090
// Paint the current video frame into the graphics context, preferring the
// video output path when a decoded frame is available and falling back to
// the AVAssetImageGenerator path otherwise.
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext* context, const IntRect& rect)
{
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    // Painting may trigger AVFoundation callbacks and can raise ObjC
    // exceptions; delay the former and trap the latter.
    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    // Remember that at least one frame has been drawn.
    m_videoFrameHasDrawn = true;
}
1111
// Software-paint entry point. Skips work when painting is disabled or when
// the video is already being composited into a layer.
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext* context, const IntRect& rect)
{
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    // We can ignore the request if we are already rendering to a layer.
    if (currentRenderingMode() == MediaRenderingToLayer)
        return;

    paintCurrentFrameInContext(context, rect);
}
1123
// Draw the frame for the current time using AVAssetImageGenerator output.
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext* context, const IntRect& rect)
{
    RetainPtr<CGImageRef> image = createImageForTimeInRect(currentTime(), rect);
    if (!image)
        return;

    GraphicsContextStateSaver stateSaver(*context);
    // Flip the context vertically before drawing; presumably the generated
    // CGImage and the context disagree on y-axis direction — the original did
    // the same translate/scale dance.
    context->translate(rect.x(), rect.y() + rect.height());
    context->scale(FloatSize(1.0f, -1.0f));
    context->setImageInterpolationQuality(InterpolationLow);
    IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context->platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
    // The original explicitly nulled `image` here; RetainPtr releases the
    // image at scope exit, so that statement was redundant and is removed.
}
1137
// Lazily build (exactly once) and return the set of MIME types AVFoundation
// reports as playable.
// FIX: the original function body was missing its closing brace, which cannot
// compile; restored here.
static HashSet<String> mimeTypeCache()
{
    DEPRECATED_DEFINE_STATIC_LOCAL(HashSet<String>, cache, ());
    static bool typeListInitialized = false;

    if (typeListInitialized)
        return cache;
    typeListInitialized = true;

    NSArray *types = [AVURLAsset audiovisualMIMETypes];
    for (NSString *mimeType in types)
        cache.add(mimeType);

    return cache;
}
1153
// Synchronously generate a CGImage for `time` (seconds) sized for `rect`,
// converted into the device RGB color space. Creates the generator on demand.
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const IntRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    // Cap the generated image size to the paint rect, then snapshot at `time`
    // (timescale 600 exactly represents common frame rates).
    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), deviceRGBColorSpaceRef()));

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return image;
}
1175
// Report every MIME type this engine can handle.
// FIX: the original function was missing its closing brace, which cannot
// compile; restored here.
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String>& supportedTypes)
{
    supportedTypes = mimeTypeCache();
}
1180
1181 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// FairPlay Streaming identifiers are the only key systems this engine knows.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringCase(keySystem, "com.apple.fps") || equalIgnoringCase(keySystem, "com.apple.fps.1_0");
}
1188 #endif
1189
// Answer a canPlayType()-style query for the given container/codec (and,
// when EME is enabled, key system) parameters.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(ENCRYPTED_MEDIA)
    // From: <http://dvcs.w3.org/hg/html-media/raw-file/eme-v0.1b/encrypted-media/encrypted-media.html#dom-canplaytype>
    // In addition to the steps in the current specification, this method must run the following steps:

    // 1. Check whether the Key System is supported with the specified container and codec type(s) by following the steps for the first matching condition from the following list:
    //    If keySystem is null, continue to the next step.
    // (isEmpty() is true for null strings, so the original's extra isNull()
    // check was redundant and has been dropped.)
    if (!parameters.keySystem.isEmpty()) {
        // If keySystem contains an unrecognized or unsupported Key System, return the empty string
        if (!keySystemIsSupported(parameters.keySystem))
            return MediaPlayer::IsNotSupported;

        // If the Key System specified by keySystem does not support decrypting the container and/or codec specified in the rest of the type string.
        // (AVFoundation does not provide an API which would allow us to determine this, so this is a no-op)
    }

    // 2. Return "maybe" or "probably" as appropriate per the existing specification of canPlayType().
#endif

#if ENABLE(MEDIA_SOURCE)
    // Media Source playback is handled by a different engine.
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif

    if (!mimeTypeCache().contains(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    // Ask AVFoundation about the full "type; codecs=..." string; a definitive
    // answer upgrades the result to IsSupported. (Also removes the original's
    // stray double semicolon.)
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type, (NSString *)parameters.codecs];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1226
// A key system is supported only when it is recognized and, if a MIME type is
// also supplied, that type is one AVFoundation can play. Always false when
// neither EME variant is compiled in.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (keySystem.isEmpty())
        return false;

    if (!keySystemIsSupported(keySystem))
        return false;

    if (!mimeType.isEmpty() && !mimeTypeCache().contains(mimeType))
        return false;

    return true;
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
    return false;
#endif
}
1245
1246 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Decide whether WebCore will service this AVFoundation resource-loading
// request. "skd" (FairPlay key) URLs are turned into an EME keyNeeded event;
// everything else is handed to a WebCoreAVFResourceLoader.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (scheme == "skd") {
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
        // Size prefix is written little-endian (third argument).
        initDataView->set<uint32_t>(0, keyURISize, true);

        // NOTE(review): the length below is divided by sizeof(unsigned char),
        // which is always 1, so it equals keyURI.length(); confirm the intended
        // unit (UTF-16 code units) before changing.
        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, 4, keyURI.length());
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

#if ENABLE(ENCRYPTED_MEDIA)
        if (!player()->keyNeeded("com.apple.lskd", emptyString(), static_cast<const unsigned char*>(initDataBuffer->data()), initDataBuffer->byteLength()))
#elif ENABLE(ENCRYPTED_MEDIA_V2)
        RefPtr<Uint8Array> initData = Uint8Array::create(initDataBuffer, 0, initDataBuffer->byteLength());
        if (!player()->keyNeeded(initData.get()))
#endif
            return false;

        // Remember the request so the CDM session can answer it later.
        m_keyURIToRequestMap.set(keyURI, avRequest);
        return true;
    }
#endif

    // Non-key resources: start a WebCore loader and keep it alive in the map
    // until didStopLoadingRequest removes it.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1282
// Forward an NSURLAuthenticationChallenge from AVFoundation to the player
// client; returns whether the caller should wait for an async response.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForResponseToAuthenticationChallenge(NSURLAuthenticationChallenge* nsChallenge)
{
#if USE(CFNETWORK)
    UNUSED_PARAM(nsChallenge);
    // FIXME: <rdar://problem/15799844>
    return false;
#else
    // Wrap the NS challenge in WebCore's cross-platform type before forwarding.
    AuthenticationChallenge challenge(nsChallenge);

    return player()->shouldWaitForResponseToAuthenticationChallenge(challenge);
#endif
}
1295
// AVFoundation cancelled a resource-loading request; stop the matching
// WebCore loader if one is still registered. (Removal from the map happens in
// didStopLoadingRequest.)
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    // The original also computed the request's URL scheme here but never used
    // it; the dead local has been removed.
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest))
        resourceLoader->stopLoading();
}
1305
// A loader finished or was torn down; drop its map entry (and thus the
// reference keeping the WebCoreAVFResourceLoader alive).
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
1310 #endif
1311
// The engine is usable only when both the AVFoundation and CoreMedia
// frameworks can be soft-linked at runtime.
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    if (!AVFoundationLibrary())
        return false;
    return CoreMediaLibrary() != nullptr;
}
1316
// Map a presentation time value onto the media timeline. Currently an
// identity mapping in both branches (see the FIXME); the early return exists
// so behavior is explicit once the radar is resolved.
float MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(float timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1325
// Apply the current aspect-ratio policy to the video layer without implicit
// Core Animation transitions.
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

    NSString *gravity = shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize;

    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    [m_videoLayer.get() setVideoGravity:gravity];
    [CATransaction commit];
}
1337
// React to a change in the item's tracks collection: refresh the cached
// hasVideo/hasAudio/hasCaptions flags, update the track lists, and fire
// characteristicsChanged() when the primary audio language changed.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the old primary audio language so we can detect a change below.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    // Coalesce the multiple notifications this method can generate.
    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        setHasVideo([[m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual] count]);
        setHasAudio([[m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible] count]);
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif
    } else {
        bool hasVideo = false;
        bool hasAudio = false;
        // Only enabled tracks count toward the cached characteristics.
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                if ([[assetTrack mediaType] isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([[assetTrack mediaType] isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                }
            }
        }
        setHasVideo(hasVideo);
        setHasAudio(hasAudio);


#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // With media selection groups, captions come from the legible group rather
    // than from closed-caption tracks.
    if (AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia()) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    LOG(Media, "WebCoreAVFMovieObserver:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s",
        this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

    setDelayCharacteristicsChangedNotification(false);
}
1417
1418 #if ENABLE(VIDEO_TRACK)
// Diff the platform track list against our existing track items:
// - tracks of `trackType` present in `tracks` but not in `oldItems` are added
//   (created via itemFactory and reported through addedFunction);
// - items whose platform track disappeared are removed (removedFunction);
// - surviving items are kept. `oldItems` is updated in place to the new set.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // New set: platform tracks of the requested media type.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    // Old set: the platform tracks backing our current items.
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    typedef Vector<RefT> ItemVector;
    for (auto i = oldItems.begin(); i != oldItems.end(); ++i)
        [oldTracks addObject:(*i)->playerItemTrack()];

    // removed = old - new; added = new - old.
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    // Partition the existing items into removed vs. surviving.
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto i = oldItems.begin(); i != oldItems.end(); ++i) {
        if ([removedTracks containsObject:(*i)->playerItemTrack()])
            removedItems.append(*i);
        else
            replacementItems.append(*i);
    }

    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    // Install the new item list before notifying the player.
    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    for (auto i = removedItems.begin(); i != removedItems.end(); ++i)
        (player->*removedFunction)(*i);

    for (auto i = addedItems.begin(); i != addedItems.end(); ++i)
        (player->*addedFunction)(*i);
}
1459
// Reconcile m_audioTracks with the item's current audio tracks.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
}
1464
// Reconcile m_videoTracks with the item's current video tracks.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
}
1469 #endif // ENABLE(VIDEO_TRACK)
1470
// Recompute and cache the natural (presentation) size of the media from the
// union of all track rects, transformed by the asset's preferred transform.
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (!m_avAsset)
        return;

    // Some assets don't report track properties until they are completely ready to play, but we
    // want to report a size as early as possible so use presentationSize when an asset has no tracks.
    if (m_avPlayerItem && ![m_cachedTracks count]) {
        setNaturalSize(roundedIntSize(m_cachedPresentationSize));
        return;
    }

    // AVAsset's 'naturalSize' property only considers the movie's first video track, so we need to compute
    // the union of all visual track rects.
    CGRect trackUnionRect = CGRectZero;
    for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
        AVAssetTrack* assetTrack = [track assetTrack];
        CGSize trackSize = [assetTrack naturalSize];
        CGRect trackRect = CGRectMake(0, 0, trackSize.width, trackSize.height);
        trackUnionRect = CGRectUnion(trackUnionRect, CGRectApplyAffineTransform(trackRect, [assetTrack preferredTransform]));
    }

    // The movie is always displayed at 0,0 so move the track rect to the origin before using width and height.
    // NOTE(review): offsetting by +origin actually moves the rect *away* from
    // the origin; only trackUnionRect.size is used below, so the origin value
    // is irrelevant here — confirm before ever reusing the rect's origin.
    trackUnionRect = CGRectOffset(trackUnionRect, trackUnionRect.origin.x, trackUnionRect.origin.y);
    
    // Also look at the asset's preferred transform so we account for a movie matrix.
    CGSize naturalSize = CGSizeApplyAffineTransform(trackUnionRect.size, [m_avAsset.get() preferredTransform]);

    // Cache the natural size (setNaturalSize will notify the player if it has changed).
    setNaturalSize(IntSize(naturalSize));
}
1502
#if PLATFORM(IOS)
// FIXME: Implement for iOS in WebKit System Interface.
// Stub: no WKSI helper exists on iOS yet, so hasSingleSecurityOrigin() below builds its
// resolved origin from a nil URL on this platform.
static inline NSURL *wkAVAssetResolvedURL(AVAsset*)
{
    return nil;
}
#endif
1510
// Returns true when all media data came from the origin that was originally requested: compares
// the origin of the asset's resolved (post-redirect) URL against the requested URL's origin.
bool MediaPlayerPrivateAVFoundationObjC::hasSingleSecurityOrigin() const 
{
    if (!m_avAsset)
        return false;
    
    // NOTE(review): on iOS wkAVAssetResolvedURL() is a stub returning nil, so resolvedOrigin is
    // built from a null URL there — confirm that the resulting comparison is the intended behavior.
    RefPtr<SecurityOrigin> resolvedOrigin = SecurityOrigin::create(URL(wkAVAssetResolvedURL(m_avAsset.get())));
    RefPtr<SecurityOrigin> requestedOrigin = SecurityOrigin::createFromString(assetURL());
    return resolvedOrigin->isSameSchemeHostPort(requestedOrigin.get());
}
1520
1521 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Lazily creates the AVPlayerItemVideoOutput used to pull decoded frames for painting, attaches
// it to the current player item, and blocks briefly until the first frame becomes available.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // With VideoToolbox available, request 4:2:2 output; createPixelBuffer() converts to BGRA later.
    NSDictionary* attributes = @{ (NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_422YpCbCr8) };
#else
    // Without VideoToolbox, ask the output for 32BGRA directly so CoreGraphics can consume it.
    NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                nil];
#endif
    m_videoOutput = adoptNS([[getAVPlayerItemVideoOutputClass() alloc] initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    // Media-data-ready callbacks are delivered on the shared pull delegate queue
    // (see outputMediaDataWillChange()).
    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];

    // Synchronously wait (up to one second) for the output to report incoming media data.
    waitForVideoOutputMediaDataWillChange();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p) - returning %p", this, m_videoOutput.get());
}
1546
// Detaches the AVPlayerItemVideoOutput created by createVideoOutput() from the player item
// (if any) and releases it.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput(%p) - destroying  %p", this, m_videoOutput.get());

    m_videoOutput = 0;
}
1558
// Pulls the pixel buffer for the current item time out of the video output (creating the output
// on demand). Returns null when no new frame is available or when conversion fails.
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::createPixelBuffer()
{
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return 0;

    RetainPtr<CVPixelBufferRef> buffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
    if (!buffer)
        return 0;

#if USE(VIDEOTOOLBOX)
    // Create a VTPixelTransferSession, if necessary, as we cannot guarantee timely delivery of ARGB pixels.
    if (!m_pixelTransferSession) {
        VTPixelTransferSessionRef session = 0;
        VTPixelTransferSessionCreate(kCFAllocatorDefault, &session);
        m_pixelTransferSession = adoptCF(session);
    }

    // Convert the decoded 4:2:2 frame into 32BGRA for CoreGraphics. Initialize outputBuffer and
    // check the creation result: previously a failed CVPixelBufferCreate() left outputBuffer
    // uninitialized, and we would transfer into — and adopt — a garbage pointer.
    CVPixelBufferRef outputBuffer = nullptr;
    if (CVPixelBufferCreate(kCFAllocatorDefault, CVPixelBufferGetWidth(buffer.get()), CVPixelBufferGetHeight(buffer.get()), kCVPixelFormatType_32BGRA, 0, &outputBuffer) != kCVReturnSuccess)
        return 0;
    VTPixelTransferSessionTransferImage(m_pixelTransferSession.get(), buffer.get(), outputBuffer);
    buffer = adoptCF(outputBuffer);
#endif

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createPixelBuffer(%p) - creating buffer took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return buffer;
}
1599
// Returns true when a frame can be painted: either an image was already captured into
// m_lastImage, or the video output reports a fresh pixel buffer for the current item time.
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;

    // A previously captured image is still paintable.
    if (m_lastImage)
        return true;

    // Lazily create the output before querying it.
    if (!m_videoOutput)
        createVideoOutput();

    CMTime itemTime = [m_avPlayerItem currentTime];
    return [m_videoOutput hasNewPixelBufferForItemTime:itemTime];
}
1613
// CGDataProviderDirectCallbacks "getBytePointer" hook: lock the pixel buffer for read-only
// access and hand CoreGraphics its base address. The lock is balanced by the release-byte-pointer
// callback.
static const void* CVPixelBufferGetBytePointerCallback(void* info)
{
    auto buffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferLockBaseAddress(buffer, kCVPixelBufferLock_ReadOnly);
    return CVPixelBufferGetBaseAddress(buffer);
}
1620
// CGDataProviderDirectCallbacks "releaseBytePointer" hook: undo the read-only lock taken by the
// get-byte-pointer callback.
static void CVPixelBufferReleaseBytePointerCallback(void* info, const void*)
{
    auto buffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferUnlockBaseAddress(buffer, kCVPixelBufferLock_ReadOnly);
}
1626
// CGDataProviderDirectCallbacks "releaseInfo" hook: drops the reference taken by the CFRetain in
// createImageFromPixelBuffer() when the provider is destroyed.
static void CVPixelBufferReleaseInfoCallback(void* info)
{
    auto buffer = static_cast<CVPixelBufferRef>(info);
    CFRelease(buffer);
}
1632
// Wraps a 32BGRA CVPixelBuffer in a CGImage without copying: the image's data provider locks the
// buffer's base address on demand and releases the buffer when the image is destroyed.
static RetainPtr<CGImageRef> createImageFromPixelBuffer(CVPixelBufferRef pixelBuffer)
{
    // pixelBuffer will be of type kCVPixelFormatType_32BGRA.
    ASSERT(CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_32BGRA);

    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
    size_t byteLength = CVPixelBufferGetDataSize(pixelBuffer);
    // 32-bit little-endian with alpha first maps BGRA memory order onto CG's ARGB layout.
    // NOTE(review): kCGImageAlphaFirst means non-premultiplied alpha; video frames are typically
    // opaque — confirm if transparent video is ever routed through here.
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaFirst;

    CFRetain(pixelBuffer); // Balanced by CVPixelBufferReleaseInfoCallback in providerCallbacks.
    CGDataProviderDirectCallbacks providerCallbacks = { 0, CVPixelBufferGetBytePointerCallback, CVPixelBufferReleaseBytePointerCallback, 0, CVPixelBufferReleaseInfoCallback };
    RetainPtr<CGDataProviderRef> provider = adoptCF(CGDataProviderCreateDirect(pixelBuffer, byteLength, &providerCallbacks));

    return adoptCF(CGImageCreate(width, height, 8, 32, bytesPerRow, deviceRGBColorSpaceRef(), bitmapInfo, provider.get(), NULL, false, kCGRenderingIntentDefault));
}
1650
// Refreshes m_lastImage from the video output's current pixel buffer.
void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
{
    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
    // for the requested time has already been retrieved. In this case, the last valid image (if any)
    // should be displayed.
    if (RetainPtr<CVPixelBufferRef> pixelBuffer = createPixelBuffer())
        m_lastImage = createImageFromPixelBuffer(pixelBuffer.get());
}
1661
// Refreshes m_lastImage from the video output, then blits it into the graphics context.
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext* context, const IntRect& outputRect)
{
    updateLastImage();
    if (!m_lastImage)
        return;

    GraphicsContextStateSaver stateSaver(*context);

    IntRect srcRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    context->drawNativeImage(m_lastImage.get(), srcRect.size(), ColorSpaceDeviceRGB, outputRect, srcRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();
}
1679
// Returns a CGImage for the current media time, refreshing m_lastImage first; may return null
// if no frame has ever been captured.
PassNativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage.get();
}
1685
// Blocks the calling thread (for at most one second) until the video output signals, via
// outputMediaDataWillChange(), that new media data is about to become available.
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    // The semaphore is created lazily and kept for the lifetime of the player.
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for 1 second.
    long result = dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(0, 1 * NSEC_PER_SEC));

    // dispatch_semaphore_wait() returns non-zero on timeout.
    if (result)
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange(%p) timed out", this);
}
1699
// Called on the pull delegate queue when the video output has new media data imminent; wakes up
// waitForVideoOutputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutput*)
{
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
1704 #endif
1705
1706 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// Parses the EME initData blob into its three components. Returns false on any malformed input,
// leaving the out-parameters in an unspecified state. initData originates from untrusted media
// content, so every length field is validated with overflow-safe comparisons before use.
bool MediaPlayerPrivateAVFoundationObjC::extractKeyURIKeyIDAndCertificateFromInitData(Uint8Array* initData, String& keyURI, String& keyID, RefPtr<Uint8Array>& certificate)
{
    // initData should have the following layout:
    // [4 bytes: keyURI length][N bytes: keyURI][4 bytes: contentID length], [N bytes: contentID], [4 bytes: certificate length][N bytes: certificate]
    if (initData->byteLength() < 4)
        return false;

    RefPtr<ArrayBuffer> initDataBuffer = initData->buffer();

    // Use a DataView to read uint32 values from the buffer, as Uint32Array requires the reads be aligned on 4-byte boundaries. 
    RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
    uint32_t offset = 0;
    bool status = true;

    uint32_t keyURILength = initDataView->get<uint32_t>(offset, true, &status);
    offset += 4;
    // A successful get<> guarantees offset <= length, so "length - offset" cannot underflow.
    // Comparing "field > length - offset" instead of "offset + field > length" avoids uint32
    // wrap-around when a hostile length field is close to UINT32_MAX.
    if (!status || keyURILength > initData->length() - offset)
        return false;

    // NOTE(review): keyURILength is passed to Uint16Array::create() as an element count here but
    // treated as a byte count in the String construction below — confirm which is intended.
    RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, offset, keyURILength);
    if (!keyURIArray)
        return false;

    keyURI = String(keyURIArray->data(), keyURILength / sizeof(unsigned short));
    offset += keyURILength;

    uint32_t keyIDLength = initDataView->get<uint32_t>(offset, true, &status);
    offset += 4;
    if (!status || keyIDLength > initData->length() - offset)
        return false;

    RefPtr<Uint16Array> keyIDArray = Uint16Array::create(initDataBuffer, offset, keyIDLength);
    if (!keyIDArray)
        return false;

    keyID = String(keyIDArray->data(), keyIDLength / sizeof(unsigned short));
    offset += keyIDLength;

    uint32_t certificateLength = initDataView->get<uint32_t>(offset, true, &status);
    offset += 4;
    if (!status || certificateLength > initData->length() - offset)
        return false;

    certificate = Uint8Array::create(initDataBuffer, offset, certificateLength);
    if (!certificate)
        return false;

    return true;
}
1756 #endif
1757
1758 #if ENABLE(ENCRYPTED_MEDIA)
// EME v1: builds a streaming content key request for the given key system and init data, and
// delivers the request bytes to the page via MediaPlayer::keyMessage().
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::generateKeyRequest(const String& keySystem, const unsigned char* initDataPtr, unsigned initDataLength)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    RefPtr<Uint8Array> initData = Uint8Array::create(initDataPtr, initDataLength);
    String keyURI;
    String keyID;
    RefPtr<Uint8Array> certificate;
    if (!extractKeyURIKeyIDAndCertificateFromInitData(initData.get(), keyURI, keyID, certificate))
        return MediaPlayer::InvalidPlayerState;

    // The key URI must match a pending AVAssetResourceLoadingRequest captured earlier by the
    // resource-loader delegate; otherwise there is nothing to answer.
    if (!m_keyURIToRequestMap.contains(keyURI))
        return MediaPlayer::InvalidPlayerState;

    String sessionID = createCanonicalUUIDString();

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_keyURIToRequestMap.get(keyURI);

    RetainPtr<NSData> certificateData = adoptNS([[NSData alloc] initWithBytes:certificate->baseAddress() length:certificate->byteLength()]);
    NSString* assetStr = keyID;
    RetainPtr<NSData> assetID = [NSData dataWithBytes: [assetStr cStringUsingEncoding:NSUTF8StringEncoding] length:[assetStr lengthOfBytesUsingEncoding:NSUTF8StringEncoding]];
    NSError* error = 0;
    RetainPtr<NSData> keyRequest = [avRequest.get() streamingContentKeyRequestDataForApp:certificateData.get() contentIdentifier:assetID.get() options:nil error:&error];

    if (!keyRequest) {
        // Surface AVFoundation's underlying error code to the page as a key error.
        NSError* underlyingError = [[error userInfo] objectForKey:NSUnderlyingErrorKey];
        player()->keyError(keySystem, sessionID, MediaPlayerClient::DomainError, [underlyingError code]);
        return MediaPlayer::NoError;
    }

    RefPtr<ArrayBuffer> keyRequestBuffer = ArrayBuffer::create([keyRequest.get() bytes], [keyRequest.get() length]);
    RefPtr<Uint8Array> keyRequestArray = Uint8Array::create(keyRequestBuffer, 0, keyRequestBuffer->byteLength());
    player()->keyMessage(keySystem, sessionID, keyRequestArray->data(), keyRequestArray->byteLength(), URL());

    // Move ownership of the AVAssetResourceLoadingRequest from the keyURIToRequestMap to the
    // sessionIDToRequestMap so addKey()/cancelKeyRequest() can find it by session:
    m_sessionIDToRequestMap.set(sessionID, avRequest);
    m_keyURIToRequestMap.remove(keyURI);

    return MediaPlayer::NoError;
}
1800
// EME v1: delivers the key/license obtained by the page to the pending
// AVAssetResourceLoadingRequest associated with sessionID, completing the resource load.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::addKey(const String& keySystem, const unsigned char* keyPtr, unsigned keyLength, const unsigned char* initDataPtr, unsigned initDataLength, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_sessionIDToRequestMap.get(sessionID);
    RetainPtr<NSData> keyData = adoptNS([[NSData alloc] initWithBytes:keyPtr length:keyLength]);
    [[avRequest.get() dataRequest] respondWithData:keyData.get()];
    [avRequest.get() finishLoading];
    m_sessionIDToRequestMap.remove(sessionID);

    player()->keyAdded(keySystem, sessionID);

    // initData is not needed to complete an AVFoundation key request.
    UNUSED_PARAM(initDataPtr);
    UNUSED_PARAM(initDataLength);
    return MediaPlayer::NoError;
}
1821
// EME v1: abandons the pending key request for sessionID, dropping the stored
// AVAssetResourceLoadingRequest without responding to it.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::cancelKeyRequest(const String& keySystem, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    m_sessionIDToRequestMap.remove(sessionID);
    return MediaPlayer::NoError;
}
1833 #endif
1834
1835 #if ENABLE(ENCRYPTED_MEDIA_V2)
// EME v2: transfers ownership of the pending resource-loading request for keyURI to the caller,
// removing it from the map; returns a null RetainPtr when no request is pending.
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
1840
// EME v2: creates a CDM session bound to this player, or null for unsupported key systems.
std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem)
{
    if (!keySystemIsSupported(keySystem))
        return nullptr;

    return std::make_unique<CDMSessionAVFoundationObjC>(this);
}
1848 #endif
1849
1850 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
// Synchronizes m_textTracks with the item's legacy (CEA-608-style) closed-caption tracks:
// tracks still present are kept, vanished tracks are removed, new ones are appended.
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    // Start by assuming every existing track was removed; matches found below are taken back out.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        // Iterate backwards so remove(i - 1) cannot skip elements.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            // Index into removedTextTracks, not m_textTracks: once an element has been removed from
            // removedTextTracks (or a new track appended to m_textTracks by this loop), the two
            // vectors no longer share indices, so m_textTracks[i - 1] could inspect the wrong track.
            // processMediaSelectionOptions() already does it this way.
            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(removedTextTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;
        
        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
1885 #endif
1886
1887 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Returns the asset's legible (caption/subtitle) media selection group, or nil when the asset is
// missing or its selection options have not finished loading — "safe" because it never triggers
// synchronous property loading.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    if (!m_avAsset)
        return nil;
    
    if ([m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] != AVKeyValueStatusLoaded)
        return nil;
    
    return [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible];
}
1898
// Synchronizes m_textTracks with the asset's current legible media selection options: options
// already represented keep their track, options that disappeared have their tracks removed, and
// brand-new options get fresh track objects.
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Start by assuming every existing track was removed; matches found below are taken back out.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        // Iterate backwards so remove(i - 1) cannot skip elements.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy closed-caption tracks are managed by processLegacyClosedCaptionsTracks().
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
1954
// Forwards a batch of attributed strings from the legible output to the currently selected text
// track; `time` is the media time in seconds the cues apply to. No-op when no track is selected.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, double time)
{
    if (!m_currentTrack)
        return;

    m_currentTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), time);
}
1962
// Discards any partially accumulated cue state on the selected text track (e.g. after a seek).
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::flushCues(%p)", this);

    if (!m_currentTrack)
        return;
    
    m_currentTrack->resetCueValues();
}
1972 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1973
// Makes `track` the active text track: legacy closed-caption tracks are enabled via the player's
// closed-caption display flag, all others via the legible media selection group. Passing null
// deselects captions entirely.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTrack == track)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setCurrentTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");
        
    m_currentTrack = track;

    if (track) {
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
    }

}
2002
// Returns (and caches in m_languageOfPrimaryAudioTrack) the BCP-47-style language of the primary
// audio track: first from the selected audible media selection option, otherwise from a sole
// audio track; empty when neither is determinable.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        m_languageOfPrimaryAudioTrack = emptyString();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - %lu audio tracks, returning emptyString()", this, static_cast<unsigned long>(tracks ? [tracks count] : 0));
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

#if !LOG_DISABLED
    if (m_languageOfPrimaryAudioTrack == emptyString())
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
    else
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
#endif

    return m_languageOfPrimaryAudioTrack;
}
2044
2045 #if ENABLE(IOS_AIRPLAY)
// Returns whether the AVPlayer is currently routing video to an external (AirPlay/TV-out) target.
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    if (!m_avPlayer)
        return false;

    bool wirelessTarget = [m_avPlayer.get() isExternalPlaybackActive];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless(%p) - returning %s", this, boolString(wirelessTarget));
    return wirelessTarget;
}
2055
// Maps the WKSI external-device type of the AVPlayer onto MediaPlayer's target-type enum.
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

    // Deliberately no default: so the compiler flags any new wkExternalPlaybackType value.
    switch (wkExernalDeviceTypeForPlayer(m_avPlayer.get())) {
    case wkExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case wkExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case wkExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;
}
2073
// Returns the display name of the external playback device (e.g. the Apple TV's name), or the
// empty string when there is no player.
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();
    
    String wirelessTargetName = wkExernalDeviceDisplayNameForPlayer(m_avPlayer.get());
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName(%p) - returning %s", this, wirelessTargetName.utf8().data());

    return wirelessTargetName;
}
2084
// Returns whether wireless (external) video playback is disabled. Refreshes the cached
// m_allowsWirelessVideoPlayback from the AVPlayer when one exists.
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;
    
    // Wireless playback is disabled exactly when the player does NOT allow external playback.
    // The previous code negated allowsExternalPlayback when caching it into an "allows" member,
    // which inverted this method's result.
    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled(%p) - returning %s", this, boolString(!m_allowsWirelessVideoPlayback));

    return !m_allowsWirelessVideoPlayback;
}
2095
// Enables/disables wireless (external) video playback, caching the state for when no AVPlayer
// exists yet.
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(%p) - %s", this, boolString(disabled));
    m_allowsWirelessVideoPlayback = !disabled;
    if (!m_avPlayer)
        return;
    
    // allowsExternalPlayback is the inverse of "disabled"; the previous code passed `disabled`
    // directly, enabling external playback when the caller asked to disable it.
    [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
}
2105 #endif
2106
// KVO: AVPlayerItem.status changed; cache it and re-evaluate ready/network state.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;

    updateStates();
}
2113
// KVO prior-notification for playbackLikelyToKeepUp: defer state recomputation until the paired
// ...DidChange callback balances this counter.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    m_pendingStatusChanges++;
}
2118
// KVO: caches the new likely-to-keep-up value; recomputes state only once every outstanding
// will/did pair has balanced, coalescing simultaneous status changes.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2127
// KVO prior-notification for playbackBufferEmpty: defer state recomputation until the paired
// ...DidChange callback balances this counter.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    m_pendingStatusChanges++;
}
2132
// KVO: caches the new buffer-empty value; recomputes state only once every outstanding
// will/did pair has balanced.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2141
// KVO prior-notification for playbackBufferFull: defer state recomputation until the paired
// ...DidChange callback balances this counter.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    m_pendingStatusChanges++;
}
2146
// KVO: caches the new buffer-full value; recomputes state only once every outstanding
// will/did pair has balanced.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2155
// KVO: AVPlayerItem.seekableTimeRanges changed; cache the NSValue array and notify.
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray> seekableRanges)
{
    m_cachedSeekableRanges = seekableRanges;

    seekableTimeRangesChanged();
    updateStates();
}
2163
// KVO: AVPlayerItem.loadedTimeRanges changed; cache the NSValue array and notify.
void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray> loadedRanges)
{
    m_cachedLoadedRanges = loadedRanges;

    loadedTimeRangesChanged();
    updateStates();
}
2171
// KVO: AVPlayerItem.tracks changed; cache the AVPlayerItemTrack array and notify.
void MediaPlayerPrivateAVFoundationObjC::tracksDidChange(RetainPtr<NSArray> tracks)
{
    m_cachedTracks = tracks;

    tracksChanged();
    updateStates();
}
2179
// KVO: the item's hasEnabledAudio flag changed; cache it and re-run track processing.
void MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange(bool hasEnabledAudio)
{
    m_cachedHasEnabledAudio = hasEnabledAudio;

    tracksChanged();
    updateStates();
}
2187
// KVO: AVPlayerItem.presentationSize changed; cache it and recompute the natural size.
void MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange(FloatSize size)
{
    m_cachedPresentationSize = size;

    sizeChanged();
    updateStates();
}
2195
// KVO: AVPlayerItem.duration changed; cache the new value (seconds) and discard any previously
// cached duration.
void MediaPlayerPrivateAVFoundationObjC::durationDidChange(double duration)
{
    m_cachedDuration = duration;

    invalidateCachedDuration();
}
2202
// KVO: AVPlayer.rate changed; cache it and propagate the play/pause state change.
void MediaPlayerPrivateAVFoundationObjC::rateDidChange(double rate)
{
    m_cachedRate = rate;

    updateStates();
    rateChanged();
}
2210     
#if ENABLE(IOS_AIRPLAY)
// KVO: the player's external-playback-active state changed; forward to the base class.
void MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange()
{
    playbackTargetIsWirelessChanged();
}
#endif
2217
// AVAsset keys that must finish asynchronous loading before metadata-dependent properties are
// interrogated. Built once on first use and intentionally never released.
NSArray* assetMetadataKeyNames()
{
    static NSArray* keys;
    if (!keys) {
        keys = [@[@"duration",
                  @"naturalSize",
                  @"preferredTransform",
                  @"preferredVolume",
                  @"preferredRate",
                  @"playable",
                  @"tracks",
                  @"availableMediaCharacteristicsWithMediaSelectionOptions"] copy];
    }
    return keys;
}
2234
// AVPlayerItem key paths observed via KVO by WebCoreAVFMovieObserver. Built once on first use
// and intentionally never released.
NSArray* itemKVOProperties()
{
    static NSArray* keys;
    if (!keys) {
        keys = [@[@"presentationSize",
                  @"status",
                  @"asset",
                  @"tracks",
                  @"seekableTimeRanges",
                  @"loadedTimeRanges",
                  @"playbackLikelyToKeepUp",
                  @"playbackBufferFull",
                  @"playbackBufferEmpty",
                  @"duration",
                  @"hasEnabledAudio"] copy];
    }
    return keys;
}
2254
2255 } // namespace WebCore
2256
@implementation WebCoreAVFMovieObserver

// Bridges AVFoundation KVO and notification callbacks — which may arrive on
// arbitrary threads — back to the owning MediaPlayerPrivateAVFoundationObjC
// on the main thread. m_callback is a raw pointer; the owner must call
// -disconnect before it is destroyed.

- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

// Severs the back-pointer to the player (called during its teardown) and
// cancels any pending performSelector requests targeting this observer.
- (void)disconnect
{
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    m_callback = 0;
}

// Invoked when the asynchronous loading of the asset's metadata keys
// (assetMetadataKeyNames()) completes.
- (void)metadataLoaded
{
    if (!m_callback)
        return;

    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
}

// AVPlayerItemDidPlayToEndTimeNotification handler.
- (void)didEnd:(NSNotification *)unusedNotification
{
    UNUSED_PARAM(unusedNotification);
    if (!m_callback)
        return;
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
}

// Central KVO dispatch. Maps each observed key path to the matching
// MediaPlayerPrivateAVFoundationObjC member, binds the new value, and
// schedules the bound call on the main thread. Note keyPath is implicitly
// typed id (no explicit type in the selector declaration).
- (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context
{
    UNUSED_PARAM(object);
    id newValue = [change valueForKey:NSKeyValueChangeNewKey];

    LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - keyPath = %s", self, [keyPath UTF8String]);

    if (!m_callback)
        return;

    // willChange is true for "prior" notifications (NSKeyValueObservingOptionPrior),
    // delivered before the value actually changes.
    bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];

    WTF::Function<void ()> function;

    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && willChange) {
        if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange, m_callback);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && !willChange) {
        // A value changed for an AVPlayerItem
        if ([keyPath isEqualToString:@"status"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange, m_callback, [newValue intValue]);
        else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"asset"])
            // FIXME(review): newValue here is an AVAsset, not an NSArray; the
            // RetainPtr<NSArray> template argument is misleading even though the
            // retain/release behavior is the same — confirm against setAsset's
            // parameter type.
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::setAsset, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"loadedTimeRanges"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"seekableTimeRanges"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"tracks"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::tracksDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"hasEnabledAudio"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"presentationSize"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange, m_callback, FloatSize([newValue sizeValue]));
        else if ([keyPath isEqualToString:@"duration"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::durationDidChange, m_callback, CMTimeGetSeconds([newValue CMTimeValue]));
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayer && !willChange) {
        // A value changed for an AVPlayer.
        if ([keyPath isEqualToString:@"rate"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::rateDidChange, m_callback, [newValue doubleValue]);
#if ENABLE(IOS_AIRPLAY)
        else if ([keyPath isEqualToString:@"externalPlaybackActive"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange, m_callback);
#endif
    }
    
    if (function.isNull())
        return;

    auto weakThis = m_callback->createWeakPtr();
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis, function]{
        // weakThis and function both refer to the same MediaPlayerPrivateAVFoundationObjC instance. If the WeakPtr has
        // been cleared, the underlying object has been destroyed, and it is unsafe to call function().
        if (!weakThis)
            return;
        function();
    }));
}

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// AVPlayerItemLegibleOutput delegate: delivers in-band caption/subtitle cues.
// May be called off the main thread, so the strings are retained and the
// processing is bounced to the main thread.
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime
{
    UNUSED_PARAM(output);
    UNUSED_PARAM(nativeSamples);

    if (!m_callback)
        return;

    // strongSelf keeps the observer alive until the main-thread lambda runs;
    // m_callback is re-checked there because -disconnect may race with delivery.
    RetainPtr<WebCoreAVFMovieObserver> strongSelf = self;
    RetainPtr<NSArray> strongStrings = strings;
    callOnMainThread([strongSelf, strongStrings, itemTime] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (!callback)
            return;
        callback->processCue(strongStrings.get(), CMTimeGetSeconds(itemTime));
    });
}

// AVPlayerItemLegibleOutput delegate: the output discarded queued cues
// (e.g. after a seek); flush any cues we have built from it.
- (void)outputSequenceWasFlushed:(id)output
{
    UNUSED_PARAM(output);

    if (!m_callback)
        return;
    
    RetainPtr<WebCoreAVFMovieObserver> strongSelf = self;
    callOnMainThread([strongSelf] {
        if (MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback)
            callback->flushCues();
    });
}
#endif

@end
2397
2398 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
@implementation WebCoreAVFLoaderDelegate

// AVAssetResourceLoaderDelegate adapter. AVFoundation invokes these on its
// own serial queue; each request is bounced to the main thread where the
// player (m_callback) decides how to service it. m_callback is a raw pointer
// cleared via -setCallback: when the player goes away, so every main-thread
// lambda re-checks it.

- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

// Returning YES tells AVFoundation we will satisfy the request asynchronously;
// the actual decision happens on the main thread. If the player is gone by
// then, the request is failed (finishLoadingWithError:nil) so AVFoundation
// does not wait forever.
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    RetainPtr<WebCoreAVFLoaderDelegate> strongSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> strongRequest = loadingRequest;
    callOnMainThread([strongSelf, strongRequest] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (!callback) {
            [strongRequest finishLoadingWithError:nil];
            return;
        }

        if (!callback->shouldWaitForLoadingOfResource(strongRequest.get()))
            [strongRequest finishLoadingWithError:nil];
    });

    return YES;
}

// Same deferred-decision pattern for authentication challenges. Server-trust
// challenges are declined outright (handled elsewhere / default handling);
// if the player is gone when the main-thread lambda runs, the challenge is
// cancelled rather than left pending.
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForResponseToAuthenticationChallenge:(NSURLAuthenticationChallenge *)challenge
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    if ([[[challenge protectionSpace] authenticationMethod] isEqualToString:NSURLAuthenticationMethodServerTrust])
        return NO;

    RetainPtr<WebCoreAVFLoaderDelegate> strongSelf = self;
    RetainPtr<NSURLAuthenticationChallenge> strongChallenge = challenge;
    callOnMainThread([strongSelf, strongChallenge] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (!callback) {
            [[strongChallenge sender] cancelAuthenticationChallenge:strongChallenge.get()];
            return;
        }

        if (!callback->shouldWaitForResponseToAuthenticationChallenge(strongChallenge.get()))
            [[strongChallenge sender] cancelAuthenticationChallenge:strongChallenge.get()];
    });

    return YES;
}

// AVFoundation no longer needs this resource; forward the cancellation so the
// player can tear down the in-flight load.
- (void)resourceLoader:(AVAssetResourceLoader *)resourceLoader didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFLoaderDelegate> strongSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> strongRequest = loadingRequest;
    callOnMainThread([strongSelf, strongRequest] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (callback)
            callback->didCancelLoadingRequest(strongRequest.get());
    });
}

// Used by the player to attach/detach itself (pass null to detach).
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    m_callback = callback;
}
@end
2477 #endif
2478
2479 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
@implementation WebCoreAVFPullDelegate

// Thin AVPlayerItemOutputPullDelegate adapter: forwards video-output
// readiness callbacks to the owning MediaPlayerPrivateAVFoundationObjC.
// m_callback is a raw pointer managed through -setCallback:.

- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

// Attach or detach the owning player (pass null to detach).
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    m_callback = callback;
}

// New pixel buffers will become available from the video output; notify the
// player unless it has already detached.
- (void)outputMediaDataWillChange:(AVPlayerItemVideoOutput *)output
{
    if (!m_callback)
        return;
    m_callback->outputMediaDataWillChange(output);
}

// The output discarded its queued media data; nothing to do here.
- (void)outputSequenceWasFlushed:(AVPlayerItemVideoOutput *)output
{
    UNUSED_PARAM(output);
    // No-op.
}
@end
2506 #endif
2507
2508 #endif