Don't short circuit seeking
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27
28 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
29 #import "MediaPlayerPrivateAVFoundationObjC.h"
30
31 #import "AVFoundationSPI.h"
32 #import "AVTrackPrivateAVFObjCImpl.h"
33 #import "AudioSourceProviderAVFObjC.h"
34 #import "AudioTrackPrivateAVFObjC.h"
35 #import "AuthenticationChallenge.h"
36 #import "BlockExceptions.h"
37 #import "CDMSessionAVFoundationObjC.h"
38 #import "Cookie.h"
39 #import "ExceptionCodePlaceholder.h"
40 #import "FloatConversion.h"
41 #import "FloatConversion.h"
42 #import "GraphicsContext.h"
43 #import "GraphicsContextCG.h"
44 #import "InbandMetadataTextTrackPrivateAVF.h"
45 #import "InbandTextTrackPrivateAVFObjC.h"
46 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
47 #import "OutOfBandTextTrackPrivateAVF.h"
48 #import "URL.h"
49 #import "Logging.h"
50 #import "MediaPlaybackTargetMac.h"
51 #import "MediaSelectionGroupAVFObjC.h"
52 #import "MediaTimeAVFoundation.h"
53 #import "PlatformTimeRanges.h"
54 #import "QuartzCoreSPI.h"
55 #import "SecurityOrigin.h"
56 #import "SerializedPlatformRepresentationMac.h"
57 #import "TextEncoding.h"
58 #import "TextTrackRepresentation.h"
59 #import "UUID.h"
60 #import "VideoTrackPrivateAVFObjC.h"
61 #import "WebCoreAVFResourceLoader.h"
62 #import "WebCoreCALayerExtras.h"
63 #import "WebCoreSystemInterface.h"
64 #import <functional>
65 #import <objc/runtime.h>
66 #import <runtime/DataView.h>
67 #import <runtime/JSCInlines.h>
68 #import <runtime/TypedArrayInlines.h>
69 #import <runtime/Uint16Array.h>
70 #import <runtime/Uint32Array.h>
71 #import <runtime/Uint8Array.h>
72 #import <wtf/CurrentTime.h>
73 #import <wtf/ListHashSet.h>
74 #import <wtf/NeverDestroyed.h>
75 #import <wtf/text/CString.h>
76 #import <wtf/text/StringBuilder.h>
77
78 #if ENABLE(AVF_CAPTIONS)
79 #include "TextTrack.h"
80 #endif
81
82 #import <AVFoundation/AVFoundation.h>
83 #if PLATFORM(IOS)
84 #import "WAKAppKitStubs.h"
85 #import <CoreImage/CoreImage.h>
86 #import <mach/mach_port.h>
87 #else
88 #import <Foundation/NSGeometry.h>
89 #import <QuartzCore/CoreImage.h>
90 #endif
91
92 #if USE(VIDEOTOOLBOX)
93 #import <CoreVideo/CoreVideo.h>
94 #import <VideoToolbox/VideoToolbox.h>
95 #endif
96
97 #if USE(CFNETWORK)
98 #include "CFNSURLConnectionSPI.h"
99 #endif
100
// Specialize std::iterator_traits for WTF::HashSet iterators over media
// selection options so standard algorithms can consume them.
// NOTE(review): presumably required because HashSet iterators do not expose
// the usual traits typedefs — confirm against the WTF headers.
101 namespace std {
102 template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
103     typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
104 };
105 }
106
// Container layer that keeps its sublayers (the hosted AVPlayerLayer) sized
// and positioned to match its own geometry.
107 @interface WebVideoContainerLayer : CALayer
108 @end
109
110 @implementation WebVideoContainerLayer
111
112 - (void)setBounds:(CGRect)bounds
113 {
114     [super setBounds:bounds];
    // Propagate the new bounds to every sublayer so the video fills the container.
115     for (CALayer* layer in self.sublayers)
116         layer.frame = bounds;
117 }
118
119 - (void)setPosition:(CGPoint)position
120 {
121     if (!CATransform3DIsIdentity(self.transform)) {
122         // Pre-apply the transform added in the WebProcess to fix <rdar://problem/18316542> to the position.
123         position = CGPointApplyAffineTransform(position, CATransform3DGetAffineTransform(self.transform));
124     }
125     [super setPosition:position];
126 }
127 @end
128
129 #if ENABLE(AVF_CAPTIONS)
130 // Note: This must be defined before our SOFT_LINK macros:
131 @class AVMediaSelectionOption;
132 @interface AVMediaSelectionOption (OutOfBandExtensions)
133 @property (nonatomic, readonly) NSString* outOfBandSource;
134 @property (nonatomic, readonly) NSString* outOfBandIdentifier;
135 @end
136 #endif
137
138 @interface AVURLAsset (WebKitExtensions)
139 @property (nonatomic, readonly) NSURL *resolvedURL;
140 @end
141
142 typedef AVPlayer AVPlayerType;
143 typedef AVPlayerItem AVPlayerItemType;
144 typedef AVPlayerItemLegibleOutput AVPlayerItemLegibleOutputType;
145 typedef AVPlayerItemVideoOutput AVPlayerItemVideoOutputType;
146 typedef AVMetadataItem AVMetadataItemType;
147 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
148 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
149
150 #pragma mark - Soft Linking
151
152 // Soft-linking headers must be included last since they #define functions, constants, etc.
153 #import "CoreMediaSoftLink.h"
154
155 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
156 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
157 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreVideo)
158
159 #if USE(VIDEOTOOLBOX)
160 SOFT_LINK_FRAMEWORK_OPTIONAL(VideoToolbox)
161 #endif
162
163 SOFT_LINK(CoreVideo, CVPixelBufferGetWidth, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
164 SOFT_LINK(CoreVideo, CVPixelBufferGetHeight, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
165 SOFT_LINK(CoreVideo, CVPixelBufferGetBaseAddress, void*, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
166 SOFT_LINK(CoreVideo, CVPixelBufferGetBytesPerRow, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
167 SOFT_LINK(CoreVideo, CVPixelBufferGetDataSize, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
168 SOFT_LINK(CoreVideo, CVPixelBufferGetPixelFormatType, OSType, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
169 SOFT_LINK(CoreVideo, CVPixelBufferLockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
170 SOFT_LINK(CoreVideo, CVPixelBufferUnlockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
171
172 #if USE(VIDEOTOOLBOX)
173 SOFT_LINK(VideoToolbox, VTPixelTransferSessionCreate, OSStatus, (CFAllocatorRef allocator, VTPixelTransferSessionRef *pixelTransferSessionOut), (allocator, pixelTransferSessionOut))
174 SOFT_LINK(VideoToolbox, VTPixelTransferSessionTransferImage, OSStatus, (VTPixelTransferSessionRef session, CVPixelBufferRef sourceBuffer, CVPixelBufferRef destinationBuffer), (session, sourceBuffer, destinationBuffer))
175 #endif
176
177 SOFT_LINK_CLASS(AVFoundation, AVPlayer)
178 SOFT_LINK_CLASS(AVFoundation, AVPlayerItem)
179 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemVideoOutput)
180 SOFT_LINK_CLASS(AVFoundation, AVPlayerLayer)
181 SOFT_LINK_CLASS(AVFoundation, AVURLAsset)
182 SOFT_LINK_CLASS(AVFoundation, AVAssetImageGenerator)
183 SOFT_LINK_CLASS(AVFoundation, AVMetadataItem)
184
185 SOFT_LINK_CLASS(CoreImage, CIContext)
186 SOFT_LINK_CLASS(CoreImage, CIImage)
187
188 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString*)
189 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString*)
190 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
191 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
192 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
193 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
194 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
195 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeMetadata, NSString *)
196 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
197 SOFT_LINK_POINTER(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
198 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
199 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
200 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
201 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
202 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
203 SOFT_LINK_POINTER(CoreVideo, kCVPixelBufferPixelFormatTypeKey, NSString *)
204
205 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
206
207 #define AVPlayer getAVPlayerClass()
208 #define AVPlayerItem getAVPlayerItemClass()
209 #define AVPlayerLayer getAVPlayerLayerClass()
210 #define AVURLAsset getAVURLAssetClass()
211 #define AVAssetImageGenerator getAVAssetImageGeneratorClass()
212 #define AVMetadataItem getAVMetadataItemClass()
213
214 #define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
215 #define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
216 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
217 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
218 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
219 #define AVMediaTypeVideo getAVMediaTypeVideo()
220 #define AVMediaTypeAudio getAVMediaTypeAudio()
221 #define AVMediaTypeMetadata getAVMediaTypeMetadata()
222 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
223 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
224 #define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
225 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
226 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
227 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
228 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
229 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
230 #define kCVPixelBufferPixelFormatTypeKey getkCVPixelBufferPixelFormatTypeKey()
231
232 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
233 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
234 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
235
236 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
237 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
238 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
239
240 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
241 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
242 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
243 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
244
245 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
246 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
247 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
248 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
249 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
250 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
251 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
252 #endif
253
254 #if ENABLE(AVF_CAPTIONS)
255 SOFT_LINK_POINTER(AVFoundation, AVURLAssetHTTPCookiesKey, NSString*)
256 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
257 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
258 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
259 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
260 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
261 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
262 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
263 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
264 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
265
266 #define AVURLAssetHTTPCookiesKey getAVURLAssetHTTPCookiesKey()
267 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
268 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
269 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
270 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
271 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
272 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
273 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
274 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
275 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
276 #endif
277
278 #if ENABLE(DATACUE_VALUE)
279 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString*)
280 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMetadataKeySpaceISOUserData, NSString*)
281 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString*)
282 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceiTunes, NSString*)
283 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceID3, NSString*)
284
285 #define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
286 #define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
287 #define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
288 #define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
289 #define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
290 #endif
291
292 #if PLATFORM(IOS)
293 SOFT_LINK_POINTER(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
294 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
295 #endif
296
297 using namespace WebCore;
298
299 enum MediaPlayerAVFoundationObservationContext {
300     MediaPlayerAVFoundationObservationContextPlayerItem,
301     MediaPlayerAVFoundationObservationContextPlayerItemTrack,
302     MediaPlayerAVFoundationObservationContextPlayer,
303     MediaPlayerAVFoundationObservationContextAVPlayerLayer,
304 };
305
// Observer that receives AVFoundation notifications and KVO change events and
// forwards them to the owning MediaPlayerPrivateAVFoundationObjC. The owner
// calls -disconnect during cancelLoad() before dropping its reference.
306 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
307 @interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
308 #else
309 @interface WebCoreAVFMovieObserver : NSObject
310 #endif
311 {
    // Raw back-pointer to the C++ player; not retained.
312     MediaPlayerPrivateAVFoundationObjC* m_callback;
    // NOTE(review): presumably a callback-deferral depth counter — confirm at the @implementation (not in this chunk).
313     int m_delayCallbacks;
314 }
315 -(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
316 -(void)disconnect;
317 -(void)metadataLoaded;
318 -(void)didEnd:(NSNotification *)notification;
319 -(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
320 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
321 - (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
322 - (void)outputSequenceWasFlushed:(id)output;
323 #endif
324 @end
325
326 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// AVAssetResourceLoader delegate that routes resource-loading requests back to
// the C++ player. The owner resets the callback with -setCallback:0 in its
// destructor before the delegate can outlive it.
327 @interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    // Raw back-pointer to the C++ player; not retained.
328     MediaPlayerPrivateAVFoundationObjC* m_callback;
329 }
330 - (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
331 - (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
332 - (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
333 @end
334 #endif
335
336 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Pull delegate for the AVPlayerItemVideoOutput; notifies the C++ player when
// new video frames become available. The owner resets the callback with
// -setCallback:0 in its destructor.
337 @interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    // Raw back-pointer to the C++ player; not retained.
338     MediaPlayerPrivateAVFoundationObjC *m_callback;
    // NOTE(review): presumably used to block until a frame is produced — confirm at the @implementation (not in this chunk).
339     dispatch_semaphore_t m_semaphore;
340 }
341 - (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
342 - (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
343 - (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
344 - (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
345 @end
346 #endif
347
348 namespace WebCore {
349
350 static NSArray *assetMetadataKeyNames();
351 static NSArray *itemKVOProperties();
352 static NSArray *assetTrackMetadataKeyNames();
353 static NSArray *playerKVOProperties();
354 static AVAssetTrack* firstEnabledTrack(NSArray* tracks);
355
356 #if !LOG_DISABLED
// Render a bool as a C string for log messages.
static const char *boolString(bool val)
{
    if (val)
        return "true";
    return "false";
}
361 #endif
362
363 #if ENABLE(ENCRYPTED_MEDIA_V2)
364 typedef HashMap<MediaPlayer*, MediaPlayerPrivateAVFoundationObjC*> PlayerToPrivateMapType;
// Process-wide map from public MediaPlayer to its AVFoundation-private
// implementation. Entries are added in the constructor and removed in the
// destructor of MediaPlayerPrivateAVFoundationObjC (see below).
365 static PlayerToPrivateMapType& playerToPrivateMap()
366 {
367     DEPRECATED_DEFINE_STATIC_LOCAL(PlayerToPrivateMapType, map, ());
368     return map;
369 };
370 #endif
371
372 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Lazily creates, once per process, the serial queue on which the
// AVAssetResourceLoader delegate callbacks are delivered.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t queue;
    static dispatch_once_t token;
    dispatch_once(&token, ^{
        queue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return queue;
}
382 #endif
383
384 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Lazily creates, once per process, the serial queue on which the
// AVPlayerItemVideoOutput pull-delegate callbacks are delivered.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_queue_t queue;
    static dispatch_once_t token;
    dispatch_once(&token, ^{
        queue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return queue;
}
394 #endif
395
396 #if USE(CFNETWORK)
// Adapts WebCore's AuthenticationClient interface onto an
// NSURLAuthenticationChallenge by forwarding each decision to the challenge's
// sender. Only built on the CFNetwork path.
397 class WebCoreNSURLAuthenticationChallengeClient : public RefCounted<WebCoreNSURLAuthenticationChallengeClient>, public AuthenticationClient {
398 public:
399     static RefPtr<WebCoreNSURLAuthenticationChallengeClient> create(NSURLAuthenticationChallenge *challenge)
400     {
401         return adoptRef(new WebCoreNSURLAuthenticationChallengeClient(challenge));
402     }
403
404     using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::ref;
405     using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::deref;
406
407 private:
408     WebCoreNSURLAuthenticationChallengeClient(NSURLAuthenticationChallenge *challenge)
409         : m_challenge(challenge)
410     {
411         ASSERT(m_challenge);
412     }
413
    // AuthenticationClient lifetime is delegated to this object's RefCounted base.
414     virtual void refAuthenticationClient() override { ref(); }
415     virtual void derefAuthenticationClient() override { deref(); }
416
417     virtual void receivedCredential(const AuthenticationChallenge&, const Credential& credential) override
418     {
419         [[m_challenge sender] useCredential:credential.nsCredential() forAuthenticationChallenge:m_challenge.get()];
420     }
421
422     virtual void receivedRequestToContinueWithoutCredential(const AuthenticationChallenge&) override
423     {
424         [[m_challenge sender] continueWithoutCredentialForAuthenticationChallenge:m_challenge.get()];
425     }
426
427     virtual void receivedCancellation(const AuthenticationChallenge&) override
428     {
429         [[m_challenge sender] cancelAuthenticationChallenge:m_challenge.get()];
430     }
431
    // The two selectors below are optional on NSURLAuthenticationChallengeSender,
    // so probe with respondsToSelector: before messaging.
432     virtual void receivedRequestToPerformDefaultHandling(const AuthenticationChallenge&) override
433     {
434         if ([[m_challenge sender] respondsToSelector:@selector(performDefaultHandlingForAuthenticationChallenge:)])
435             [[m_challenge sender] performDefaultHandlingForAuthenticationChallenge:m_challenge.get()];
436     }
437
438     virtual void receivedChallengeRejection(const AuthenticationChallenge&) override
439     {
440         if ([[m_challenge sender] respondsToSelector:@selector(rejectProtectionSpaceAndContinueWithChallenge:)])
441             [[m_challenge sender] rejectProtectionSpaceAndContinueWithChallenge:m_challenge.get()];
442     }
443
444     RetainPtr<NSURLAuthenticationChallenge> m_challenge;
445 };
446 #endif
447
// Registers this engine with the MediaPlayer factory, but only when the
// soft-linked AVFoundation framework is actually available on this system.
448 void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
449 {
450     if (isAvailable())
451         registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player); },
452             getSupportedTypes, supportsType, 0, 0, 0, supportsKeySystem);
453 }
454
// Constructor: creates the Objective-C observer/delegate helpers that forward
// AVFoundation callbacks to this object, and zeroes the cached-state members
// that mirror AVPlayer/AVPlayerItem properties.
455 MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
456     : MediaPlayerPrivateAVFoundation(player)
457     , m_weakPtrFactory(this)
458 #if PLATFORM(IOS)
459     , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
460 #endif
461     , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
462     , m_videoFrameHasDrawn(false)
463     , m_haveCheckedPlayability(false)
464 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
465     , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
466     , m_videoOutputSemaphore(nullptr)
467 #endif
468 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
469     , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
470 #endif
471     , m_currentTextTrack(0)
472     , m_cachedRate(0)
473     , m_cachedTotalBytes(0)
474     , m_pendingStatusChanges(0)
475     , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
476     , m_cachedLikelyToKeepUp(false)
477     , m_cachedBufferEmpty(false)
478     , m_cachedBufferFull(false)
479     , m_cachedHasEnabledAudio(false)
480     , m_shouldBufferData(true)
481     , m_cachedIsReadyForDisplay(false)
482     , m_haveBeenAskedToCreateLayer(false)
483 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
484     , m_allowsWirelessVideoPlayback(true)
485 #endif
486 {
487 #if ENABLE(ENCRYPTED_MEDIA_V2)
    // Record the player -> private mapping; removed again in the destructor.
488     playerToPrivateMap().set(player, this);
489 #endif
490 }
491
// Destructor: detaches the Objective-C helpers from this (soon-dead) object
// before tearing down the layer and cancelling any in-flight loading.
492 MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
493 {
494 #if ENABLE(ENCRYPTED_MEDIA_V2)
495     playerToPrivateMap().remove(player());
496 #endif
497 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Clear the raw back-pointer first so late delegate callbacks are no-ops.
498     [m_loaderDelegate.get() setCallback:0];
499     [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];
500
501     for (auto& pair : m_resourceLoaderMap)
502         pair.value->invalidate();
503 #endif
504 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
505     [m_videoOutputDelegate setCallback:0];
506     [m_videoOutput setDelegate:nil queue:0];
    // dispatch_semaphore_t is manually retained here (pre-ARC dispatch objects).
507     if (m_videoOutputSemaphore)
508         dispatch_release(m_videoOutputSemaphore);
509 #endif
510
511     if (m_videoLayer)
512         destroyVideoLayer();
513
514     cancelLoad();
515 }
516
// Stops loading and detaches from the AVPlayer/AVPlayerItem: removes all KVO
// observers and notification registrations, drops the AV objects, and resets
// every cached property to its "no media" value. Also run from the destructor.
517 void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
518 {
519     LOG(Media, "MediaPlayerPrivateAVFoundationObjC::cancelLoad(%p)", this);
520     tearDownVideoRendering();
521
522     [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
523     [m_objcObserver.get() disconnect];
524
525     // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
526     setIgnoreLoadStateChanges(true);
527     if (m_avAsset) {
528         [m_avAsset.get() cancelLoading];
529         m_avAsset = nil;
530     }
531
532     clearTextTracks();
533
534 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
535     if (m_legibleOutput) {
536         if (m_avPlayerItem)
537             [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
538         m_legibleOutput = nil;
539     }
540 #endif
541
    // Every key path added at item/player creation must be removed before the
    // observed object is released, or AVFoundation raises on teardown.
542     if (m_avPlayerItem) {
543         for (NSString *keyName in itemKVOProperties())
544             [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
545         
546         m_avPlayerItem = nil;
547     }
548     if (m_avPlayerItem) {
549         if (m_timeObserver)
550             [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
551         m_timeObserver = nil;
552
553         for (NSString *keyName in playerKVOProperties())
554             [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
555         m_avPlayer = nil;
556     }
557
558     // Reset cached properties
559     m_pendingStatusChanges = 0;
560     m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
561     m_cachedSeekableRanges = nullptr;
562     m_cachedLoadedRanges = nullptr;
563     m_cachedHasEnabledAudio = false;
564     m_cachedPresentationSize = FloatSize();
565     m_cachedDuration = MediaTime::zeroTime();
566
567     for (AVPlayerItemTrack *track in m_cachedTracks.get())
568         [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
569     m_cachedTracks = nullptr;
570
571 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
572     if (m_provider) {
573         m_provider->setPlayerItem(nullptr);
574         m_provider->setAudioTrack(nullptr);
575     }
576 #endif
577
    // Resume normal load-state reporting now that teardown is complete.
578     setIgnoreLoadStateChanges(false);
579 }
580
// True once createVideoLayer() has committed to building a layer, even though
// the layer itself is created asynchronously on the main thread.
581 bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
582 {
583     return m_haveBeenAskedToCreateLayer;
584 }
585
// Whether we can paint frames into a GraphicsContext: either a video output
// (preferred, when available) or a snapshot image generator exists.
586 bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
587 {
588 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
589     if (m_videoOutput)
590         return true;
591 #endif
592     return m_imageGenerator;
593 }
594
// Creates whichever context renderer this build supports: an
// AVPlayerItemVideoOutput when available, otherwise an image generator.
595 void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
596 {
597 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
598     createVideoOutput();
599 #else
600     createImageGenerator();
601 #endif
602 }
603
// Creates the AVAssetImageGenerator used to snapshot frames for painting.
// No-op until an asset exists, and idempotent once the generator is created.
604 void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
605 {
606     LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p)", this);
607
608     if (!m_avAsset || m_imageGenerator)
609         return;
610
611     m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];
612
613     [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
614     [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    // Zero tolerances: request the exact frame at the requested time, not the
    // nearest keyframe.
615     [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
616     [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];
617
618     LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
619 }
620
// Tears down both possible context renderers (video output and image
// generator); each destroy function is a no-op if its renderer was never made.
621 void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
622 {
623 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
624     destroyVideoOutput();
625 #endif
626     destroyImageGenerator();
627 }
628
// Releases the AVAssetImageGenerator used for software painting, if any.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (!m_imageGenerator)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator(%p) - destroying %p", this, m_imageGenerator.get());

    // Reset with nil for consistency with the other RetainPtr resets in this
    // file (see cancelLoad()); the old code used a bare 0.
    m_imageGenerator = nil;
}
638
// Schedules creation of the AVPlayerLayer on the main thread. The player/layer
// checks are repeated inside the lambda because state can change between
// scheduling and execution; weakThis guards against this object being deleted
// in the meantime.
639 void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
640 {
641     if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
642         return;
643
644     auto weakThis = createWeakPtr();
645     callOnMainThread([this, weakThis] {
646         if (!weakThis)
647             return;
648
649         if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
650             return;
651         m_haveBeenAskedToCreateLayer = true;
652
653         if (!m_videoLayer)
654             createAVPlayerLayer();
655
656 #if USE(VIDEOTOOLBOX) && (!defined(__MAC_OS_X_VERSION_MIN_REQUIRED) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 101000)
657         if (!m_videoOutput)
658             createVideoOutput();
659 #endif
660
    // Let the client know painting switched from context to layer rendering.
661         player()->client().mediaPlayerRenderingModeChanged(player());
662     });
663 }
664
// Builds the AVPlayerLayer, attaches it to the player, registers the
// readyForDisplay KVO observation, and parents it either in the fullscreen
// layer or the inline container layer (iOS) / directly (Mac).
665 void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
666 {
667     if (!m_avPlayer)
668         return;
669
670     m_videoLayer = adoptNS([allocAVPlayerLayerInstance() init]);
671     [m_videoLayer setPlayer:m_avPlayer.get()];
672     [m_videoLayer setBackgroundColor:cachedCGColor(Color::black, ColorSpaceDeviceRGB)];
673 #ifndef NDEBUG
674     [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
675 #endif
    // Balanced by the removeObserver: call in destroyVideoLayer().
676     [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
677     updateVideoLayerGravity();
678     [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
679     IntSize defaultSize = player()->client().mediaPlayerContentBoxRect().pixelSnappedSize();
680     LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoLayer(%p) - returning %p", this, m_videoLayer.get());
681
682 #if PLATFORM(IOS)
683     [m_videoLayer web_disableAllActions];
684     m_videoInlineLayer = adoptNS([[WebVideoContainerLayer alloc] init]);
685 #ifndef NDEBUG
686     [m_videoInlineLayer setName:@"WebVideoContainerLayer"];
687 #endif
688     [m_videoInlineLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
689     if (m_videoFullscreenLayer) {
690         [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
691         [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
692     } else {
693         [m_videoInlineLayer insertSublayer:m_videoLayer.get() atIndex:0];
694         [m_videoLayer setFrame:m_videoInlineLayer.get().bounds];
695     }
    // -setPIPModeEnabled: is not present on all OS versions, so probe first.
696     if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
697         [m_videoLayer setPIPModeEnabled:(player()->fullscreenMode() & MediaPlayer::VideoFullscreenModePictureInPicture)];
698 #else
699     [m_videoLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
700 #endif
701 }
702
// Detaches and releases the AVPlayerLayer; reverses the observation and
// parenting done in createAVPlayerLayer(). No-op when no layer exists.
703 void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
704 {
705     if (!m_videoLayer)
706         return;
707
708     LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer(%p) - destroying %p", this, m_videoLayer.get());
709
710     [m_videoLayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
711     [m_videoLayer.get() setPlayer:nil];
712
713 #if PLATFORM(IOS)
714     if (m_videoFullscreenLayer)
715         [m_videoLayer removeFromSuperlayer];
716     m_videoInlineLayer = nil;
717 #endif
718
719     m_videoLayer = nil;
720 }
721
// Computes the stream's start date in milliseconds since the epoch by taking
// the item's current date and subtracting the current playback offset.
// Returns an invalid time when the media has no start date.
722 MediaTime MediaPlayerPrivateAVFoundationObjC::getStartDate() const
723 {
724     // Date changes as the track's playback position changes. Must subtract currentTime (offset in seconds) from date offset to get date beginning
725     double date = [[m_avPlayerItem currentDate] timeIntervalSince1970] * 1000;
726
727     // No live streams were made during the epoch (1970). AVFoundation returns 0 if the media file doesn't have a start date
728     if (!date)
729         return MediaTime::invalidTime();
730
    // Playback offset, also converted to milliseconds to match `date`.
731     double currentTime = CMTimeGetSeconds([m_avPlayerItem currentTime]) * 1000;
732
733     // Rounding due to second offset error when subtracting.
734     return MediaTime::createWithDouble(round(date - currentTime));
735 }
736
// A frame is available either when the layer reports readyForDisplay (layer
// rendering) or once at least one frame has been painted (context rendering).
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    return currentRenderingMode() == MediaRenderingToLayer ? m_cachedIsReadyForDisplay : m_videoFrameHasDrawn;
}
744
745 #if ENABLE(AVF_CAPTIONS)
// Maps a WebCore text track kind to the AVFoundation media characteristics
// used when registering an out-of-band alternate track.
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    // FIXME: Match these to correct types:
    switch (kind) {
    case PlatformTextTrack::Caption:
    case PlatformTextTrack::Subtitle:
        return @[AVMediaCharacteristicTranscribesSpokenDialogForAccessibility];
    case PlatformTextTrack::Description:
        return @[AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility];
    case PlatformTextTrack::Forced:
        return @[AVMediaCharacteristicContainsOnlyForcedSubtitles];
    default:
        return @[AVMediaCharacteristicTranscribesSpokenDialogForAccessibility];
    }
}
763     
// Entry point for the client to report that an out-of-band track's mode
// changed; forwards to the base class so track selection is re-synchronized.
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
768     
// Pushes the client-requested mode of each out-of-band track source down to
// the matching platform text track, matching by unique identifier.
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    const Vector<RefPtr<PlatformTextTrack>>& trackSources = player()->outOfBandTrackSources();

    for (auto& textTrack : m_textTracks) {
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;

        auto* outOfBandTrack = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> selectionOption = outOfBandTrack->mediaSelectionOption();

        for (auto& source : trackSources) {
            // Out-of-band options are identified by the stringified unique id.
            RetainPtr<CFStringRef> identifier = String::number(source->uniqueId()).createCFString();
            if (![[selectionOption.get() outOfBandIdentifier] isEqual: reinterpret_cast<const NSString*>(identifier.get())])
                continue;

            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            switch (source->mode()) {
            case PlatformTextTrack::Hidden:
                mode = InbandTextTrackPrivate::Hidden;
                break;
            case PlatformTextTrack::Disabled:
                mode = InbandTextTrackPrivate::Disabled;
                break;
            case PlatformTextTrack::Showing:
                mode = InbandTextTrackPrivate::Showing;
                break;
            default:
                break;
            }

            textTrack->setMode(mode);
            break;
        }
    }
}
799 #endif
800
801
// Returns the canonical form of |url| as computed by registered NSURLProtocol
// handlers, falling back to the plain Cocoa URL whenever canonicalization is
// not possible (empty URL, request creation failure, no canonical request).
static NSURL *canonicalURL(const String& url)
{
    NSURL *cocoaURL = URL(ParsedURLString, url);
    if (url.isEmpty())
        return cocoaURL;

    RetainPtr<NSURLRequest> request = adoptNS([[NSURLRequest alloc] initWithURL:cocoaURL]);
    if (!request)
        return cocoaURL;

    NSURLRequest *canonicalRequest = [NSURLProtocol canonicalRequestForRequest:request.get()];
    return canonicalRequest ? [canonicalRequest URL] : cocoaURL;
}
818
819 #if PLATFORM(IOS)
// Converts a WebCore Cookie into an NSHTTPCookie suitable for handing to
// AVURLAsset via AVURLAssetHTTPCookiesKey.
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    NSMutableDictionary *properties = [NSMutableDictionary dictionaryWithDictionary:@{
        NSHTTPCookieName: cookie.name,
        NSHTTPCookieValue: cookie.value,
        NSHTTPCookieDomain: cookie.domain,
        NSHTTPCookiePath: cookie.path,
        // Cookie expiry is in milliseconds; NSDate wants seconds.
        NSHTTPCookieExpires: [NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)],
    }];
    if (cookie.secure)
        properties[NSHTTPCookieSecure] = @YES;
    if (cookie.session)
        properties[NSHTTPCookieDiscard] = @YES;

    return [NSHTTPCookie cookieWithProperties:properties];
}
837 #endif
838
// Lazily creates the AVURLAsset for |url|, building the options dictionary
// from player state: reference restrictions, HTTP headers (Referer /
// User-Agent), iTunes query inheritance, the client bundle identifier and
// bound network interface (iOS), out-of-band caption tracks, and raw cookies
// (iOS). Also attaches the resource loader delegate where supported.
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
{
    if (m_avAsset)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(%p) - url = %s", this, url.utf8().data());

    // Defer observer callbacks until the asset is fully configured.
    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);    

    // Disallow local<->remote cross references inside the asset.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject: [NSNumber numberWithBool: TRUE] forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

#if PLATFORM(IOS)
    // FIXME: rdar://problem/20354688
    String identifier = player()->sourceApplicationIdentifier();
    // The SPI key may be null on older systems; guard before using it.
    if (!identifier.isEmpty() && AVURLAssetClientBundleIdentifierKey)
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];
#endif

#if ENABLE(AVF_CAPTIONS)
    // Register any out-of-band caption tracks so AVFoundation exposes them as
    // media selection options.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: reinterpret_cast<const NSString*>(label.get()),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: reinterpret_cast<const NSString*>(language.get()),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: reinterpret_cast<const NSString*>(uniqueID.get()),
                AVOutOfBandAlternateTrackSourceKey: reinterpret_cast<const NSString*>(url.get()),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS)
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS)
    // Hand the document's cookies to AVFoundation, which performs its own
    // media loads outside WebCore's network stack.
    Vector<Cookie> cookies;
    if (player()->getRawCookies(URL(ParsedURLString, url), cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([allocAVURLAssetInstance() initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    [[m_avAsset.get() resourceLoader] setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];
#endif

    // A fresh asset has not had its playability checked yet.
    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
926
// Installs |item| as the AVPlayer's current item. AVPlayer requires this to
// happen on the main thread, so off-main-thread callers are bounced there.
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItemType *item)
{
    if (!m_avPlayer)
        return;

    if (!pthread_main_np()) {
        // Keep the player and item alive until the replacement has happened.
        RetainPtr<AVPlayerType> protectedPlayer = m_avPlayer;
        RetainPtr<AVPlayerItemType> protectedItem = item;
        dispatch_async(dispatch_get_main_queue(), [protectedPlayer, protectedItem] {
            [protectedPlayer replaceCurrentItemWithPlayerItem:protectedItem.get()];
        });
        return;
    }

    [m_avPlayer replaceCurrentItemWithPlayerItem:item];
}
943
// Lazily creates the AVPlayer, registers KVO for all player-level properties,
// applies external-playback and media-selection configuration, creates the
// video layer when needed, and attaches any already-created player item.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayer(%p)", this);

    // Defer observer callbacks so partially-configured state is not reported.
    setDelayCallbacks(true);

    m_avPlayer = adoptNS([allocAVPlayerInstance() init]);
    for (NSString *keyName in playerKVOProperties())
        [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // WebCore drives media selection itself.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET) && !PLATFORM(IOS)
    // Re-apply a playback target that was requested before the player existed.
    if (m_shouldPlayToPlaybackTarget)
        setShouldPlayToPlaybackTarget(true);
#endif

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
979
// Lazily creates the AVPlayerItem from the asset, registers the end-of-play
// notification and item-level KVO, configures pitch correction, attaches the
// item to the player, and (where supported) sets up the legible output used
// for caption rendering plus the Web Audio source provider.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem(%p)", this);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([allocAVPlayerItemInstance() initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // Prior notifications let the observer see will-change as well as
    // did-change for each item property.
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    // WebVTT is the native caption representation WebCore consumes.
    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([allocAVPlayerItemLegibleOutputInstance() initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    // WebCore renders captions itself; the player must not draw them.
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    if (m_provider) {
        m_provider->setPlayerItem(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }
#endif

    setDelayCallbacks(false);
}
1025
// Asynchronously loads the asset's "playable" key (at most once per asset)
// and schedules an AssetPlayabilityKnown notification on the main thread when
// the load resolves.
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::checkPlayability(%p)", this);
    auto weakThis = createWeakPtr();

    // The completion handler runs on an AVFoundation queue; only the weak
    // pointer is captured so a destroyed player is never touched.
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"playable"] completionHandler:^{
        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
1042
// Starts asynchronous loading of asset-level metadata keys and, once the
// asset's tracks are known, the per-track metadata keys. A dispatch group
// joins all of the loads; when the group drains, metadataLoaded is delivered
// to the observer on the main thread.
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);

    dispatch_group_t metadataLoadingGroup = dispatch_group_create();
    // Balanced by the leave at the end of the main-thread lambda below.
    dispatch_group_enter(metadataLoadingGroup);
    auto weakThis = createWeakPtr();
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            // Fan out to per-track loads only if the player is still alive and
            // the "tracks" key actually loaded.
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup);
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup);
                    }];
                }
            }
            dispatch_group_leave(metadataLoadingGroup);
        });
    }];

    dispatch_group_notify(metadataLoadingGroup, dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });

        // The group was created with +1 ownership; release it once no further
        // notifications can fire.
        dispatch_release(metadataLoadingGroup);
    });
}
1074
// Maps the cached AVPlayerItem status plus buffering flags onto the
// cross-platform ItemStatus enumeration, from strongest signal to weakest.
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    switch (m_cachedItemStatus) {
    case AVPlayerItemStatusUnknown:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    case AVPlayerItemStatusFailed:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    default:
        break;
    }

    // Buffering state, in priority order.
    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1093
// Exposes the underlying AVPlayer to clients that need the platform object.
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformMedia(%p)", this);
    PlatformMedia media;
    media.type = PlatformMedia::AVFoundationMediaPlayerType;
    media.media.avfMediaPlayer = m_avPlayer.get();
    return media;
}
1102
// Returns the layer to composite into the page, or null until a layer has
// been requested. On iOS the inline container layer is handed out so the
// video layer itself can be reparented for fullscreen.
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    if (!m_haveBeenAskedToCreateLayer)
        return nullptr;

#if PLATFORM(IOS)
    return m_videoInlineLayer.get();
#else
    return m_videoLayer.get();
#endif
}
1111
1112 #if PLATFORM(IOS)
// Moves the video layer between the inline and fullscreen layer trees inside
// a single implicit-animation-free transaction. When the layer crosses
// CAContexts, a fence port synchronizes both contexts' commits so the switch
// does not flash. The text track representation follows into fullscreen.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer)
{
    if (m_videoFullscreenLayer == videoFullscreenLayer)
        return;

    m_videoFullscreenLayer = videoFullscreenLayer;

    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    
    CAContext *oldContext = [m_videoLayer context];
    CAContext *newContext = nil;
    
    if (m_videoFullscreenLayer && m_videoLayer) {
        // Entering fullscreen: reparent and size to the fullscreen frame.
        [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
        [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
        newContext = [m_videoFullscreenLayer context];
    } else if (m_videoInlineLayer && m_videoLayer) {
        // Returning inline: restore the inline bounds before reparenting.
        [m_videoLayer setFrame:[m_videoInlineLayer bounds]];
        [m_videoLayer removeFromSuperlayer];
        [m_videoInlineLayer insertSublayer:m_videoLayer.get() atIndex:0];
        newContext = [m_videoInlineLayer context];
    } else if (m_videoLayer)
        [m_videoLayer removeFromSuperlayer];

    // Fence the old and new contexts so their commits land together.
    if (oldContext && newContext && oldContext != newContext) {
        mach_port_t fencePort = [oldContext createFencePort];
        [newContext setFencePort:fencePort];
        mach_port_deallocate(mach_task_self(), fencePort);
    }
    [CATransaction commit];

    if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
    }

    updateDisableExternalPlayback();
}
1152
// Records the fullscreen frame and, when fullscreen is active, resizes the
// video layer and the caption representation to match.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenFrame = frame;
    if (!m_videoFullscreenLayer)
        return;

    if (m_videoLayer)
        [m_videoLayer setFrame:CGRectMake(0, 0, frame.width(), frame.height())];

    syncTextTrackBounds();
}
1164
// Translates WebCore's video gravity into the AVPlayerLayer gravity string
// and applies it, keeping the caption bounds in sync.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;
    if (!m_videoLayer)
        return;

    NSString *videoGravity;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        videoGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        videoGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    }

    // The gravity strings are framework constants, so pointer comparison is
    // sufficient to detect "no change".
    if ([m_videoLayer videoGravity] == videoGravity)
        return;

    [m_videoLayer setVideoGravity:videoGravity];
    syncTextTrackBounds();
}
1187
// Tells the video layer whether picture-in-picture is active, when the
// (SPI) selector is available on this system.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenMode(MediaPlayer::VideoFullscreenMode mode)
{
    if (!m_videoLayer)
        return;
    if (![m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        return;

    [m_videoLayer setPIPModeEnabled:(mode & MediaPlayer::VideoFullscreenModePictureInPicture)];
}
1193
// Returns the most recently cached timed metadata, or nil when none has been
// received (RetainPtr::get() yields nil for a null pointer).
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    return m_currentMetaData.get();
}
1200
// Returns the player item's access log in its extended textual form, or the
// empty string when no item exists.
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    if (!m_avPlayerItem)
        return emptyString();
    
    AVPlayerItemAccessLog *accessLog = [m_avPlayerItem.get() accessLog];
    RetainPtr<NSString> logText = adoptNS([[NSString alloc] initWithData:[accessLog extendedLogData] encoding:[accessLog extendedLogDataStringEncoding]]);

    return logText.get();
}
1211
// Returns the player item's error log in its extended textual form, or the
// empty string when no item exists.
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *errorLog = [m_avPlayerItem.get() errorLog];
    RetainPtr<NSString> logText = adoptNS([[NSString alloc] initWithData:[errorLog extendedLogData] encoding:[errorLog extendedLogDataStringEncoding]]);

    return logText.get();
}
1222 #endif
1223
// Shows or hides the video layer inside a transaction with implicit
// animations disabled so the change takes effect immediately.
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    [CATransaction begin];
    [CATransaction setDisableActions:YES];

    if (m_videoLayer)
        [m_videoLayer setHidden:!isVisible];

    [CATransaction commit];
}
1232     
// Starts playback by applying the requested rate to the AVPlayer, caching it
// so rate() can answer before the KVO round trip completes.
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPlay(%p)", this);
    if (!metaDataAvailable())
        return;

    double rate = requestedRate();

    setDelayCallbacks(true);
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1244
// Pauses playback by zeroing the AVPlayer's rate, caching the value so rate()
// reflects the pause immediately.
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPause(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = 0;
    [m_avPlayer setRate:0];
    setDelayCallbacks(false);
}
1256
// Returns the media duration, preferring the player item's value when it is
// ready (some assets never report a duration of their own). Indefinite
// durations (live streams) map to positive infinity.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will
    // fetch the answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();

    bool itemIsReady = m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay;
    CMTime cmDuration = itemIsReady ? [m_avPlayerItem.get() duration] : [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return toMediaTime(cmDuration);

    if (CMTIME_IS_INDEFINITE(cmDuration))
        return MediaTime::positiveInfiniteTime();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %s", this, toString(MediaTime::invalidTime()).utf8().data());
    return MediaTime::invalidTime();
}
1281
// Returns the current playback position, clamped to zero for non-numeric or
// slightly-negative times reported by AVFoundation.
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime itemTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(itemTime))
        return MediaTime::zeroTime();

    return std::max(toMediaTime(itemTime), MediaTime::zeroTime());
}
1293
// Performs a tolerance-bounded seek on the player item. The completion
// handler runs on an AVFoundation queue and forwards the result to the main
// thread, where a weak pointer guards against the player having been
// destroyed in the meantime.
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = toCMTime(time);
    CMTime cmBefore = toCMTime(negativeTolerance);
    CMTime cmAfter = toCMTime(positiveTolerance);

    auto weakThis = createWeakPtr();

    // Capture a strong reference to the item for the completion handler:
    // reading m_avPlayerItem there would implicitly capture the raw |this|
    // pointer, which may already have been destroyed by the time the handler
    // runs on AVFoundation's queue.
    RetainPtr<AVPlayerItemType> playerItem = m_avPlayerItem;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::seekToTime(%p) - calling seekToTime", this);

    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        double currentTime = CMTimeGetSeconds([playerItem currentTime]);
        callOnMainThread([weakThis, finished, currentTime] {
            UNUSED_PARAM(currentTime);
            auto _this = weakThis.get();
            LOG(Media, "MediaPlayerPrivateAVFoundationObjC::seekToTime(%p) - completion handler called, currentTime = %f", _this, currentTime);
            if (!_this)
                return;

            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1325
// Applies the requested volume to the AVPlayer. On iOS the system owns the
// output volume, so the request is ignored.
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS)
    UNUSED_PARAM(volume);
#else
    if (metaDataAvailable())
        [m_avPlayer.get() setVolume:volume];
#endif
}
1338
// Caption visibility is handled through the text track machinery; this
// override only logs the request once metadata is available.
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    UNUSED_PARAM(closedCaptionsVisible);

    if (metaDataAvailable())
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - set to %s", this, boolString(closedCaptionsVisible));
}
1348
// Sets the playback rate on the AVPlayer, caching the requested value so
// rate() can answer without waiting for the KVO round trip.
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    setDelayCallbacks(true);
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1356
// Returns the last rate set (or observed via KVO), or 0 before metadata is
// available.
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    return metaDataAvailable() ? m_cachedRate : 0;
}
1364
// Selects the time-pitch algorithm: spectral when pitch must be preserved
// across rate changes, varispeed otherwise.
void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
{
    if (!m_avPlayerItem)
        return;

    [m_avPlayerItem setAudioTimePitchAlgorithm:(preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];
}
1370
// Converts the cached AVPlayerItem loaded ranges into PlatformTimeRanges,
// skipping invalid or empty ranges.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto bufferedRanges = std::make_unique<PlatformTimeRanges>();

    if (!m_avPlayerItem)
        return bufferedRanges;

    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;
        bufferedRanges->add(toMediaTime(range.start), toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return bufferedRanges;
}
1385
// Returns the earliest start across all valid seekable ranges, or zero when
// no valid range exists.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    MediaTime earliestStart = MediaTime::positiveInfiniteTime();
    bool foundValidRange = false;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundValidRange = true;
        earliestStart = std::min(earliestStart, toMediaTime(range.start));
    }
    return foundValidRange ? earliestStart : MediaTime::zeroTime();
}
1405
// Returns the latest end across all valid seekable ranges, fetching the
// ranges lazily if KVO has not delivered them yet.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    MediaTime latestEnd;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latestEnd = std::max(latestEnd, toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return latestEnd;
}
1423
// Returns the latest end across all valid loaded (buffered) ranges, or zero
// when nothing has been buffered.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    MediaTime latestEnd;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latestEnd = std::max(latestEnd, toMediaTime(CMTimeRangeGetEnd(range)));
    }

    return latestEnd;
}
1442
// Returns the total sample data length across all tracks, computed on first
// use and cached thereafter (the cache member is mutable).
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    if (!m_cachedTotalBytes) {
        for (AVPlayerItemTrack *track in m_cachedTracks.get())
            m_cachedTotalBytes += [[track assetTrack] totalSampleDataLength];
    }

    return m_cachedTotalBytes;
}
1456
// Adopts |asset| as the current AVAsset. The parameter is taken by value, so
// move it into place instead of copying to avoid an extra retain/release.
void MediaPlayerPrivateAVFoundationObjC::setAsset(RetainPtr<id> asset)
{
    m_avAsset = std::move(asset);
}
1461
// Maps the per-key AVKeyValueStatus of the asset's metadata keys onto the
// cross-platform AssetStatus. Any key still loading, failed, or cancelled
// determines the overall status; otherwise the asset is Loaded, upgraded to
// Playable when the "playable" key reports true.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];
#if !LOG_DISABLED
        if (error)
            LOG(Media, "MediaPlayerPrivateAVFoundation::assetStatus - statusOfValueForKey failed for %s, error = %s", [keyName UTF8String], [[error localizedDescription] UTF8String]);
#endif

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading;// At least one key is not loaded yet.
        
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue])
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1490
// Returns the NSError code explaining why the asset's "playable" key failed
// to load, or 0 when there is no asset or no error (messaging nil returns 0).
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    if (!m_avAsset)
        return 0;

    NSError *error = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&error];
    return [error code];
}
1500
// Draws the current video frame into the given graphics context, preferring
// the video-output path when it has a decoded frame and falling back to the
// image generator otherwise. Records that a frame has been drawn.
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext* context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    // Delay observer callbacks while painting so AVFoundation notifications
    // cannot re-enter the player mid-paint.
    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    m_videoFrameHasDrawn = true;
}
1521
// Best-effort paint entry point: draws the current frame only when software
// painting makes sense and a context renderer already exists.
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext* context, const FloatRect& rect)
{
    // Nothing to draw before metadata arrives or when painting is disabled.
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    // Rendering to a layer already presents frames, so a 2D paint is
    // redundant; and paint() is best effort, so bail unless an image
    // generator or video output is already available.
    if (currentRenderingMode() == MediaRenderingToLayer || !hasContextRenderer())
        return;

    paintCurrentFrameInContext(context, rect);
}
1537
// Paints a snapshot of the current time produced by the AVAssetImageGenerator.
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext* context, const FloatRect& rect)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(%p)", this);

    RetainPtr<CGImageRef> image = createImageForTimeInRect(currentTime(), rect);
    if (!image)
        return;

    GraphicsContextStateSaver stateSaver(*context);
    // Flip the coordinate system so the CG image draws right side up.
    context->translate(rect.x(), rect.y() + rect.height());
    context->scale(FloatSize(1.0f, -1.0f));
    context->setImageInterpolationQuality(InterpolationLow);
    IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context->platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
}
1553
// Returns the lower-cased set of MIME types AVFoundation reports as
// playable. Computed lazily once and cached for the life of the process.
static const HashSet<String>& avfMIMETypes()
{
    static NeverDestroyed<HashSet<String>> cache = [] () {
        HashSet<String> types;
        for (NSString *mimeType in [AVURLAsset audiovisualMIMETypes])
            types.add([mimeType lowercaseString]);
        return types;
    }();

    return cache;
}
1569
// Produces a CGImage snapshot of the media at the given time, sized to the
// requested rect and converted to the device RGB color space.
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
{
    // Lazily create the generator on first use.
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    RetainPtr<CGImageRef> snapshot = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    // Convert to device RGB, the color space the compositor expects.
    RetainPtr<CGImageRef> convertedImage = adoptCF(CGImageCreateCopyWithColorSpace(snapshot.get(), deviceRGBColorSpaceRef()));

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return convertedImage;
}
1591
// Reports the cached, lower-cased AVFoundation MIME type list.
// Fixed: the function body was missing its closing brace, which would
// swallow the following declarations into this function and break the build.
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String>& supportedTypes)
{
    supportedTypes = avfMIMETypes();
}
1597 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// FairPlay Streaming (both spellings) and Clear Key are the only key
// systems this engine understands.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringASCIICase(keySystem, "com.apple.fps")
        || equalIgnoringASCIICase(keySystem, "com.apple.fps.1_0")
        || equalIgnoringASCIICase(keySystem, "org.w3c.clearkey");
}
1604 #endif
1605
// Implements canPlayType() for this engine: validates the optional key
// system, rejects content handled by other engines (MSE/MediaStream),
// checks the MIME type against the AVFoundation-supported set, and asks
// AVFoundation about the full type-plus-codecs string when codecs are given.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(ENCRYPTED_MEDIA)
    // From: <http://dvcs.w3.org/hg/html-media/raw-file/eme-v0.1b/encrypted-media/encrypted-media.html#dom-canplaytype>
    // In addition to the steps in the current specification, this method must run the following steps:

    // 1. Check whether the Key System is supported with the specified container and codec type(s) by following the steps for the first matching condition from the following list:
    //    If keySystem is null, continue to the next step.
    if (!parameters.keySystem.isNull() && !parameters.keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        if (equalIgnoringASCIICase(parameters.keySystem, "org.w3c.clearkey") && !parameters.type.isEmpty() && !equalIgnoringASCIICase(parameters.type, "application/x-mpegurl"))
            return MediaPlayer::IsNotSupported;

        // If keySystem contains an unrecognized or unsupported Key System, return the empty string
        if (!keySystemIsSupported(parameters.keySystem))
            return MediaPlayer::IsNotSupported;

        // If the Key System specified by keySystem does not support decrypting the container and/or codec specified in the rest of the type string.
        // (AVFoundation does not provide an API which would allow us to determine this, so this is a no-op)
    }

    // 2. Return "maybe" or "probably" as appropriate per the existing specification of canPlayType().
#endif

    // Media Source and MediaStream content is handled by dedicated engines.
#if ENABLE(MEDIA_SOURCE)
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif
#if ENABLE(MEDIA_STREAM)
    if (parameters.isMediaStream)
        return MediaPlayer::IsNotSupported;
#endif
    if (isUnsupportedMIMEType(parameters.type))
        return MediaPlayer::IsNotSupported;

    if (!staticMIMETypeList().contains(parameters.type) && !avfMIMETypes().contains(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type, (NSString *)parameters.codecs];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1652
// Answers whether this engine supports the given key system (optionally
// constrained by a MIME type). Returns false when EME is compiled out.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (!keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        // Fixed: previously returned MediaPlayer::IsNotSupported (an enum
        // constant) from a bool-returning function; it only behaved correctly
        // because that enumerator happens to be zero.
        if (equalIgnoringASCIICase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringASCIICase(mimeType, "application/x-mpegurl"))
            return false;

        if (!keySystemIsSupported(keySystem))
            return false;

        if (!mimeType.isEmpty() && isUnsupportedMIMEType(mimeType))
            return false;

        if (!mimeType.isEmpty() && !staticMIMETypeList().contains(mimeType) && !avfMIMETypes().contains(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1678
1679 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1680 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Satisfies an AVAssetResourceLoadingRequest with bytes from the given key
// data, honoring the request's byte range, then marks the request finished.
static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
{
    // Answer the content-information portion of the request, when present.
    if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
        [infoRequest setContentLength:keyData->byteLength()];
        [infoRequest setByteRangeAccessSupported:YES];
    }

    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
        long long start = [dataRequest currentOffset];
        // Clamp the end of the requested range to the available key data.
        long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);

        // A range that starts outside the key data cannot be satisfied.
        if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
            [request finishLoadingWithError:nil];
            return;
        }

        ASSERT(start <= std::numeric_limits<int>::max());
        ASSERT(end <= std::numeric_limits<int>::max());
        RefPtr<ArrayBuffer> requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
        RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
        [dataRequest respondWithData:nsData.get()];
    }

    [request finishLoading];
}
1706 #endif
1707
// AVAssetResourceLoaderDelegate hook: decides whether WebKit will service
// this loading request. Key requests ("skd"/"clearkey" schemes) are routed
// through the EME key-needed machinery; everything else is handed to a
// WebCoreAVFResourceLoader. Returns true when the request will be serviced.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (scheme == "skd") {
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
        initDataView->set<uint32_t>(0, keyURISize, true);

        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, 4, keyURI.length());
        // NOTE(review): sizeof(unsigned char) is 1, so the division below is
        // a no-op; this looks like it was meant to be keyURI.length() — confirm.
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

#if ENABLE(ENCRYPTED_MEDIA)
        if (!player()->keyNeeded("com.apple.lskd", emptyString(), static_cast<const unsigned char*>(initDataBuffer->data()), initDataBuffer->byteLength()))
#elif ENABLE(ENCRYPTED_MEDIA_V2)
        RefPtr<Uint8Array> initData = Uint8Array::create(initDataBuffer, 0, initDataBuffer->byteLength());
        if (!player()->keyNeeded(initData.get()))
#endif
            return false;

        // Remember the request so the session can fulfill it once a key arrives.
        m_keyURIToRequestMap.set(keyURI, avRequest);
        return true;
#if ENABLE(ENCRYPTED_MEDIA_V2)
    } else if (scheme == "clearkey") {
        String keyID = [[[avRequest request] URL] resourceSpecifier];
        StringView keyIDView(keyID);
        CString utf8EncodedKeyId = UTF8Encoding().encode(keyIDView, URLEncodedEntitiesForUnencodables);

        RefPtr<Uint8Array> initData = Uint8Array::create(utf8EncodedKeyId.length());
        initData->setRange((JSC::Uint8Adaptor::Type*)utf8EncodedKeyId.data(), utf8EncodedKeyId.length(), 0);

        // Answer immediately from the key cache when possible.
        auto keyData = player()->cachedKeyForKeyId(keyID);
        if (keyData) {
            fulfillRequestWithKeyData(avRequest, keyData.get());
            return false;
        }

        if (!player()->keyNeeded(initData.get()))
            return false;

        m_keyURIToRequestMap.set(keyID, avRequest);
        return true;
#endif
    }
#endif

    // All other requests are delegated to a WebCore resource loader.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1764
// Converts the platform authentication challenge into a WebCore
// AuthenticationChallenge (via CFNetwork types when that network stack is
// in use) and lets the MediaPlayer client decide how to respond.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForResponseToAuthenticationChallenge(NSURLAuthenticationChallenge* nsChallenge)
{
#if USE(CFNETWORK)
    RefPtr<WebCoreNSURLAuthenticationChallengeClient> client = WebCoreNSURLAuthenticationChallengeClient::create(nsChallenge);
    RetainPtr<CFURLAuthChallengeRef> cfChallenge = adoptCF([nsChallenge _createCFAuthChallenge]);
    AuthenticationChallenge challenge(cfChallenge.get(), client.get());
#else
    AuthenticationChallenge challenge(nsChallenge);
#endif

    return player()->shouldWaitForResponseToAuthenticationChallenge(challenge);
}
1777
// Stops the WebCore-side loader associated with a cancelled AVFoundation
// loading request, if one exists.
// Fixed: removed an unused local that computed the request URL's scheme.
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest))
        resourceLoader->stopLoading();
}
1787
// Drops the bookkeeping entry for a loading request that has finished or
// been torn down; the mapped WebCoreAVFResourceLoader is released with it.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
1792 #endif
1793
// This engine is usable only when both the AVFoundation and Core Media
// frameworks can be loaded at runtime (they are soft-linked).
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    return AVFoundationLibrary() && isCoreMediaFrameworkAvailable();
}
1798
// Maps a requested time to the nearest time the media can actually seek to.
// Currently an identity mapping in both branches; the metaDataAvailable()
// check is kept for when the FIXME below is resolved.
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1807
// How long (in seconds) a cached currentTime value may be reused. Newer
// systems report time cheaply, so no caching is needed there; older Mac
// systems cache for up to five seconds.
double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 1010
    return 0;
#else
    return 5;
#endif
}
1816
// Updates the AVPlayerLayer's gravity to match the aspect-ratio policy.
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

#if PLATFORM(IOS)
    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayer)
        return;
#endif

    // Apply the gravity inside a transaction with actions disabled so the
    // change is not implicitly animated.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    [m_videoLayer.get() setVideoGravity:(shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize)];
    [CATransaction commit];
}
1835
// Returns the first track in the array whose isEnabled flag is set, or nil
// when no track is enabled.
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    NSUInteger matchIndex = [tracks indexOfObjectPassingTest:^(id obj, NSUInteger, BOOL *) {
        return [static_cast<AVAssetTrack*>(obj) isEnabled];
    }];
    return matchIndex == NSNotFound ? nil : [tracks objectAtIndex:matchIndex];
}
1845
// Reacts to a change in the asset's/player item's track collection: caches
// hasVideo/hasAudio/hasClosedCaptions, refreshes the VIDEO_TRACK lists,
// processes caption/metadata tracks, and raises characteristicsChanged()
// when the primary audio language changed.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the previous primary-audio language so a change can be
    // reported via characteristicsChanged() at the end.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    // Coalesce characteristic-change notifications until this update is done.
    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif

        presentationSizeDidChange(firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize());
    } else {
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        // Classify every enabled track in the player item by media type.
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // Selection-group options count as tracks even without an
        // AVPlayerItemTrack backing them.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
#endif
#endif

        // Always says we have video if the AVPlayerLayer is ready for display to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    LOG(Media, "MediaPlayerPrivateAVFoundation:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s",
        this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio provider pointed at the current audible track.
    if (m_provider)
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
#endif

    setDelayCharacteristicsChangedNotification(false);
}
1951
1952 #if ENABLE(VIDEO_TRACK)
// Diffs the AVPlayerItemTracks of a given media type against the previously
// known wrapper items: creates wrappers for newly appeared tracks, drops
// wrappers whose track disappeared, and notifies the MediaPlayer of each
// removal and addition. oldItems is updated in place to the new list.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Only consider tracks of the requested media type.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Partition the old items into survivors and removals.
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    // The new item list is the surviving items plus the newly created ones.
    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
1996
1997 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Media-selection-group overload of the diffing helper above: refreshes the
// group's options, diffs them against the previously known wrapper items,
// and notifies the MediaPlayer of removals and additions. oldItems is
// updated in place to the new list.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, const Vector<String>& characteristics, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions(characteristics);

    // Only add selection options which do not have an associated persistant track.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOptionType* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        if (![avOption respondsToSelector:@selector(track)] || ![avOption performSelector:@selector(track)])
            newSelectionOptions.add(option);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Partition the old items into survivors and removals.
    for (auto& oldItem : oldItems) {
        if (oldItem->mediaSelectionOption() && removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    // The new item list is the surviving items plus the newly created ones.
    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
2057 #endif
2058
// Rebuilds the audio track list from the player item's tracks and, when
// available, the audible media-selection group, then refreshes each
// wrapper's cached properties.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

    // Sync m_audioTracks with the player item's AVPlayerItemTracks.
    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Also account for audio exposed only via the audible selection group.
    Vector<String> characteristics = player()->preferredAudioCharacteristics();
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, characteristics);
    }

    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, characteristics, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
#endif

    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateAudioTracks(%p) - audio track count was %lu, is %lu", this, count, m_audioTracks.size());
#endif
}
2085
// Rebuilds the video track list from the player item's tracks and, when
// available, the visual media-selection group, then refreshes each
// wrapper's cached properties.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (!m_visualGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, Vector<String>());
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, Vector<String>(), &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
#endif

    // Fixed: this loop previously iterated m_audioTracks (a copy-paste from
    // updateAudioTracks), leaving the video track wrappers stale.
    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateVideoTracks(%p) - video track count was %lu, is %lu", this, count, m_videoTracks.size());
#endif
}
2111
// A rendered text-track representation is only needed on iOS while in the
// fullscreen layer, where WebCore-rendered captions are not composited.
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
#if PLATFORM(IOS)
    if (m_videoFullscreenLayer)
        return true;
#endif
    return false;
}
2120
// Keeps the caption layer aligned with the video: uses the video layer's
// videoRect when available, otherwise the fullscreen frame. iOS-only.
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
#if PLATFORM(IOS)
    if (!m_videoFullscreenLayer || !m_textTrackRepresentationLayer)
        return;

    CGRect textFrame = m_videoLayer ? [m_videoLayer videoRect] : CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height());
    [m_textTrackRepresentationLayer setFrame:textFrame];
#endif
}
2131
// Installs (or removes) the layer that renders text-track cues, re-parenting
// it under the fullscreen layer and syncing its bounds. iOS-only; a no-op
// elsewhere.
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
#if PLATFORM(IOS)
    PlatformLayer* representationLayer = representation ? representation->platformLayer() : nil;
    // Same layer as before: just make sure its frame is up to date.
    if (representationLayer == m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        return;
    }

    if (m_textTrackRepresentationLayer)
        [m_textTrackRepresentationLayer removeFromSuperlayer];

    m_textTrackRepresentationLayer = representationLayer;

    // Attach the new layer only while a fullscreen layer exists to host it.
    if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
    }

#else
    UNUSED_PARAM(representation);
#endif
}
2155 #endif // ENABLE(VIDEO_TRACK)
2156
2157 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
// Lazily creates the Web Audio source provider and points it at the first
// enabled audible track before returning it.
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    if (m_provider)
        return m_provider.get();

    m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
    m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    return m_provider.get();
}
2167 #endif
2168
// Propagates the cached presentation size as the natural size once an
// asset exists.
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (!m_avAsset)
        return;

    setNaturalSize(m_cachedPresentationSize);
}
2176     
// True when the URL the media actually resolved to shares scheme, host and
// port with the URL that was requested (i.e. no cross-origin redirect).
// Requires the asset's "resolvedURL" key to have finished loading.
bool MediaPlayerPrivateAVFoundationObjC::hasSingleSecurityOrigin() const 
{
    if (!m_avAsset || [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] != AVKeyValueStatusLoaded)
        return false;
    
    Ref<SecurityOrigin> resolvedOrigin(SecurityOrigin::create(resolvedURL()));
    Ref<SecurityOrigin> requestedOrigin(SecurityOrigin::createFromString(assetURL()));
    return resolvedOrigin.get().isSameSchemeHostPort(&requestedOrigin.get());
}
2186
2187 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Creates the AVPlayerItemVideoOutput used to pull decoded frames, attaches
// it to the player item, and hooks up the pull delegate. No-op when there is
// no player item yet or the output already exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX) && (!defined(__MAC_OS_X_VERSION_MIN_REQUIRED) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 101000)
    // With VideoToolbox available, accept the decoder's native format and
    // convert later; otherwise request 32BGRA directly.
    NSDictionary* attributes = nil;
#else
    NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                nil];
#endif
    m_videoOutput = adoptNS([allocAVPlayerItemVideoOutputInstance() initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];

#if defined(__MAC_OS_X_VERSION_MIN_REQUIRED) && __MAC_OS_X_VERSION_MIN_REQUIRED < 101000
    waitForVideoOutputMediaDataWillChange();
#endif

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p) - returning %p", this, m_videoOutput.get());
}
2214
// Detaches the video output from the player item (when one still exists)
// and releases it.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput(%p) - destroying  %p", this, m_videoOutput.get());

    m_videoOutput = nullptr;
}
2226
// Pulls the pixel buffer for the item's current time from the video output,
// converting it to 32BGRA via VideoToolbox when that path is compiled in.
// Returns null when no new frame is available.
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::createPixelBuffer()
{
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    // Bail when the output has no frame newer than the one already delivered.
    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return 0;

    RetainPtr<CVPixelBufferRef> buffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
    if (!buffer)
        return 0;

#if USE(VIDEOTOOLBOX)
    // Create a VTPixelTransferSession, if necessary, as we cannot guarantee timely delivery of ARGB pixels.
    if (!m_pixelTransferSession) {
        VTPixelTransferSessionRef session = 0;
        VTPixelTransferSessionCreate(kCFAllocatorDefault, &session);
        m_pixelTransferSession = adoptCF(session);
    }

    // NOTE(review): the results of CVPixelBufferCreate and
    // VTPixelTransferSessionTransferImage are not checked; on failure
    // outputBuffer could be null or uninitialized — confirm acceptability.
    CVPixelBufferRef outputBuffer;
    CVPixelBufferCreate(kCFAllocatorDefault, CVPixelBufferGetWidth(buffer.get()), CVPixelBufferGetHeight(buffer.get()), kCVPixelFormatType_32BGRA, 0, &outputBuffer);
    VTPixelTransferSessionTransferImage(m_pixelTransferSession.get(), buffer.get(), outputBuffer);
    buffer = adoptCF(outputBuffer);
#endif

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createPixelBuffer(%p) - creating buffer took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return buffer;
}
2267
// Returns true when a frame can be painted right now: either we already hold a
// previously-decoded image, or the (lazily created) video output has a new pixel
// buffer for the item's current time.
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    // No player item means nothing to decode from.
    if (!m_avPlayerItem)
        return false;

    // A previously captured image counts as an available frame; it can be redisplayed.
    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    return [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]];
}
2281
// CGDataProvider direct-access callback: lock the pixel buffer's base address for
// read-only access and hand CoreGraphics the raw pixel bytes.
static const void* CVPixelBufferGetBytePointerCallback(void* info)
{
    auto pixelBuffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
    return CVPixelBufferGetBaseAddress(pixelBuffer);
}
2288
// Balances the read-only lock taken in CVPixelBufferGetBytePointerCallback.
static void CVPixelBufferReleaseBytePointerCallback(void* info, const void*)
{
    CVPixelBufferUnlockBaseAddress(static_cast<CVPixelBufferRef>(info), kCVPixelBufferLock_ReadOnly);
}
2294
// Balances the CFRetain made on the pixel buffer when the CGDataProvider was created.
static void CVPixelBufferReleaseInfoCallback(void* info)
{
    CFRelease(static_cast<CVPixelBufferRef>(info));
}
2300
// Wraps a 32BGRA CVPixelBuffer in a CGImage without copying the pixel data: a direct
// CGDataProvider reads straight from the (locked) buffer, and the buffer is retained
// for the provider's lifetime.
static RetainPtr<CGImageRef> createImageFromPixelBuffer(CVPixelBufferRef pixelBuffer)
{
    // pixelBuffer will be of type kCVPixelFormatType_32BGRA.
    ASSERT(CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_32BGRA);

    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
    size_t byteLength = CVPixelBufferGetDataSize(pixelBuffer);
    // Little-endian + alpha-first matches BGRA byte order for CoreGraphics.
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaFirst;

    CFRetain(pixelBuffer); // Balanced by CVPixelBufferReleaseInfoCallback in providerCallbacks.
    CGDataProviderDirectCallbacks providerCallbacks = { 0, CVPixelBufferGetBytePointerCallback, CVPixelBufferReleaseBytePointerCallback, 0, CVPixelBufferReleaseInfoCallback };
    RetainPtr<CGDataProviderRef> provider = adoptCF(CGDataProviderCreateDirect(pixelBuffer, byteLength, &providerCallbacks));

    return adoptCF(CGImageCreate(width, height, 8, 32, bytesPerRow, deviceRGBColorSpaceRef(), bitmapInfo, provider.get(), NULL, false, kCGRenderingIntentDefault));
}
2318
void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
{
    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel
    // buffer for the requested time has already been retrieved. In that case keep the
    // previous image (if any) so it can still be displayed.
    if (RetainPtr<CVPixelBufferRef> pixelBuffer = createPixelBuffer())
        m_lastImage = createImageFromPixelBuffer(pixelBuffer.get());
}
2329
// Paints the current video frame (via the AVPlayerItemVideoOutput path) into the given
// graphics context, applying the video track's preferred transform.
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext* context, const FloatRect& outputRect)
{
#if (!defined(__MAC_OS_X_VERSION_MIN_REQUIRED) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 101000)
    // If the output exists but has no frame yet, block (up to the semaphore timeout)
    // for the media-data-will-change notification before sampling.
    if (m_videoOutput && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();
#endif

    updateLastImage();

    if (!m_lastImage)
        return;

    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(%p)", this);

    GraphicsContextStateSaver stateSaver(*context);
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
    // Map the destination rect through the inverse transform so that concatenating the
    // transform below lands the image in the requested outputRect.
    FloatRect transformedOutputRect = videoTransform.inverse().mapRect(outputRect);

    context->concatCTM(videoTransform);
    context->drawNativeImage(m_lastImage.get(), imageRect.size(), ColorSpaceDeviceRGB, transformedOutputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();

}
2362
// Refreshes and returns the CGImage for the current playback time; may be null if no
// frame has ever been captured.
PassNativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage.get();
}
2368
// Blocks the calling thread until the video output reports new media data, or until a
// one-second timeout elapses. The semaphore is signaled from outputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    // Created lazily; reused for the lifetime of this player.
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for 1 second.
    long result = dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(0, 1 * NSEC_PER_SEC));

    // dispatch_semaphore_wait returns non-zero on timeout.
    if (result)
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange(%p) timed out", this);
}
2382
// Video-output delegate callback; wakes any thread parked in
// waitForVideoOutputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutput*)
{
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
2387 #endif
2388
2389 #if ENABLE(ENCRYPTED_MEDIA)
// ENCRYPTED_MEDIA (EME v1) entry point: extracts key URI/ID/certificate from initData,
// asks AVFoundation for a streaming content key request, and forwards it to the client
// via keyMessage(). Errors surface either as a MediaKeyException return value or via
// player()->keyError().
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::generateKeyRequest(const String& keySystem, const unsigned char* initDataPtr, unsigned initDataLength)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    RefPtr<Uint8Array> initData = Uint8Array::create(initDataPtr, initDataLength);
    String keyURI;
    String keyID;
    RefPtr<Uint8Array> certificate;
    if (!extractKeyURIKeyIDAndCertificateFromInitData(initData.get(), keyURI, keyID, certificate))
        return MediaPlayer::InvalidPlayerState;

    // A resource-loading request for this key URI must already be pending.
    if (!m_keyURIToRequestMap.contains(keyURI))
        return MediaPlayer::InvalidPlayerState;

    String sessionID = createCanonicalUUIDString();

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_keyURIToRequestMap.get(keyURI);

    RetainPtr<NSData> certificateData = adoptNS([[NSData alloc] initWithBytes:certificate->baseAddress() length:certificate->byteLength()]);
    NSString* assetStr = keyID;
    RetainPtr<NSData> assetID = [NSData dataWithBytes: [assetStr cStringUsingEncoding:NSUTF8StringEncoding] length:[assetStr lengthOfBytesUsingEncoding:NSUTF8StringEncoding]];
    NSError* error = 0;
    RetainPtr<NSData> keyRequest = [avRequest.get() streamingContentKeyRequestDataForApp:certificateData.get() contentIdentifier:assetID.get() options:nil error:&error];

    if (!keyRequest) {
        // Report the underlying error code to the client; messaging a nil error is a no-op.
        NSError* underlyingError = [[error userInfo] objectForKey:NSUnderlyingErrorKey];
        player()->keyError(keySystem, sessionID, MediaPlayerClient::DomainError, [underlyingError code]);
        return MediaPlayer::NoError;
    }

    RefPtr<ArrayBuffer> keyRequestBuffer = ArrayBuffer::create([keyRequest.get() bytes], [keyRequest.get() length]);
    RefPtr<Uint8Array> keyRequestArray = Uint8Array::create(keyRequestBuffer, 0, keyRequestBuffer->byteLength());
    player()->keyMessage(keySystem, sessionID, keyRequestArray->data(), keyRequestArray->byteLength(), URL());

    // Move ownership of the AVAssetResourceLoadingRequest from the keyIDToRequestMap to the sessionIDToRequestMap:
    m_sessionIDToRequestMap.set(sessionID, avRequest);
    m_keyURIToRequestMap.remove(keyURI);

    return MediaPlayer::NoError;
}
2431
// EME v1: delivers key bytes to the pending AVAssetResourceLoadingRequest associated
// with sessionID, completes that request, and notifies the client via keyAdded().
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::addKey(const String& keySystem, const unsigned char* keyPtr, unsigned keyLength, const unsigned char* initDataPtr, unsigned initDataLength, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    // The session must have been created by a prior generateKeyRequest() call.
    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_sessionIDToRequestMap.get(sessionID);
    RetainPtr<NSData> keyData = adoptNS([[NSData alloc] initWithBytes:keyPtr length:keyLength]);
    [[avRequest.get() dataRequest] respondWithData:keyData.get()];
    [avRequest.get() finishLoading];
    m_sessionIDToRequestMap.remove(sessionID);

    player()->keyAdded(keySystem, sessionID);

    // initData is not needed on this path but is part of the MediaPlayerPrivate interface.
    UNUSED_PARAM(initDataPtr);
    UNUSED_PARAM(initDataLength);
    return MediaPlayer::NoError;
}
2452
// EME v1: drops the pending loading request for the given session. Note the underlying
// AVAssetResourceLoadingRequest is simply released, not explicitly cancelled.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::cancelKeyRequest(const String& keySystem, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    m_sessionIDToRequestMap.remove(sessionID);
    return MediaPlayer::NoError;
}
2464 #endif
2465
2466 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Removes and returns the pending loading request for keyURI (null if none).
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
2471
// Fulfills every pending AVAssetResourceLoadingRequest whose key is now present in the
// player's key cache. Removal from the map is deferred until after iteration so the map
// is not mutated while being enumerated.
void MediaPlayerPrivateAVFoundationObjC::keyAdded()
{
    Vector<String> handledKeyIds;

    for (auto& entry : m_keyURIToRequestMap) {
        auto cachedKey = player()->cachedKeyForKeyId(entry.key);
        if (!cachedKey)
            continue;

        fulfillRequestWithKeyData(entry.value.get(), cachedKey.get());
        handledKeyIds.append(entry.key);
    }

    for (auto& handledKeyId : handledKeyIds)
        m_keyURIToRequestMap.remove(handledKeyId);
}
2491
// Creates a CDM session for a supported key system; null for anything else.
std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem)
{
    if (keySystemIsSupported(keySystem))
        return std::make_unique<CDMSessionAVFoundationObjC>(this);

    return nullptr;
}
2499 #endif
2500
2501 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
// Rebuilds the set of legacy (pre-legible-output) closed-caption text tracks from the
// player item's current tracks, then reports additions/removals to the client.
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    // Start by assuming every known text track was removed; each one we can still match
    // to a live player item track is taken back out of this list.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            // Cast removedTextTracks[i - 1], not m_textTracks[i - 1]: once entries have
            // been removed from removedTextTracks the two vectors no longer share
            // indices, so indexing m_textTracks here could examine the wrong track.
            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(removedTextTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;

        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2536 #endif
2537
// Returns the asset's audible tracks, or nil until the asset's "tracks" property has
// finished loading (querying earlier is unsafe).
NSArray* MediaPlayerPrivateAVFoundationObjC::safeAVAssetTracksForAudibleMedia()
{
    if (!m_avAsset || [m_avAsset.get() statusOfValueForKey:@"tracks" error:NULL] != AVKeyValueStatusLoaded)
        return nil;

    return [m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible];
}
2548
2549 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// True once the asset's media-selection metadata has finished loading; the selection
// groups must not be queried before then.
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    return m_avAsset && [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2560
// The legible (caption/subtitle) selection group, or nil until selection metadata loads.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible] : nil;
}
2568
// The audible selection group, or nil until selection metadata loads.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible] : nil;
}
2576
// The visual selection group, or nil until selection metadata loads.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual] : nil;
}
2584
// Synchronizes our text-track list with the legible media-selection options reported by
// AVFoundation, then reports additions/removals to the client.
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Tracks not matched against a current option below are considered removed.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy closed-caption tracks are handled elsewhere and never match a selection option.
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2640
// Lazily creates the single in-band metadata (data-cue) track and registers it with the
// client. Subsequent calls are no-ops.
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    if (m_metadataTrack)
        return;

    m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
    m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
    player()->addTextTrack(m_metadataTrack);
}
2650
// Forwards legible-output cue data to the currently selected text track; cues arriving
// while no track is selected are dropped.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    if (m_currentTextTrack)
        m_currentTextTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), reinterpret_cast<CFArrayRef>(nativeSamples), time);
}
2658
// Resets the selected text track's cue state (e.g. after a discontinuity); a no-op when
// no track is selected.
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::flushCues(%p)", this);

    if (m_currentTextTrack)
        m_currentTextTrack->resetCueValues();
}
2668 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2669
// Switches the active text track, routing the selection to the matching AVFoundation
// mechanism: legacy closed-caption display, an out-of-band media selection option, or
// an in-band media selection option. Passing null deselects everything.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");
        
    m_currentTextTrack = track;

    if (track) {
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
        // Deselecting: clear both the legible selection and legacy caption display.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
    }

}
2698
// Determines the language of the primary audio track, preferring the currently selected
// audible media-selection option, then falling back to a lone audio track's language.
// The result is cached in m_languageOfPrimaryAudioTrack (NOTE(review): presumably
// invalidated when tracks change — the invalidation site is not in this chunk).
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        // Zero or multiple audio tracks: no single "primary" language exists.
        m_languageOfPrimaryAudioTrack = emptyString();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - %lu audio tracks, returning emptyString()", this, static_cast<unsigned long>(tracks ? [tracks count] : 0));
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

#if !LOG_DISABLED
    if (m_languageOfPrimaryAudioTrack == emptyString())
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
    else
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
#endif

    return m_languageOfPrimaryAudioTrack;
}
2740
2741 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// True while AVFoundation is routing playback to an external (wireless) device.
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    if (!m_avPlayer)
        return false;

    bool isWireless = m_avPlayer.get().externalPlaybackActive;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless(%p) - returning %s", this, boolString(isWireless));

    return isWireless;
}
2752
// Maps the platform's external-playback type to the cross-platform enum. On iOS this is
// queried from the player; elsewhere only AirPlay targets exist.
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

#if PLATFORM(IOS)
    switch (wkExernalDeviceTypeForPlayer(m_avPlayer.get())) {
    case wkExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case wkExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case wkExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    // All enum values are handled above; reaching here means a new type was added.
    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;

#else
    return MediaPlayer::TargetTypeAirPlay;
#endif
}
2775
// Human-readable name of the current wireless playback target (empty when unknown).
// Mac reads the output context's device name; iOS asks the WebKitSystemInterface shim.
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();

    String wirelessTargetName;
#if !PLATFORM(IOS)
    if (m_outputContext)
        wirelessTargetName = m_outputContext.get().deviceName;
#else
    wirelessTargetName = wkExernalDeviceDisplayNameForPlayer(m_avPlayer.get());
#endif
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName(%p) - returning %s", this, wirelessTargetName.utf8().data());

    return wirelessTargetName;
}
2792
// Whether wireless (external) video playback is disabled. Refreshes the cached
// m_allowsWirelessVideoPlayback from the player when one exists; otherwise answers
// from the cache.
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;

    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled(%p) - returning %s", this, boolString(!m_allowsWirelessVideoPlayback));

    return !m_allowsWirelessVideoPlayback;
}
2803
// Records the desired external-playback permission and pushes it to the AVPlayer if one
// exists (the cached value is applied later otherwise).
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(%p) - %s", this, boolString(disabled));
    m_allowsWirelessVideoPlayback = !disabled;
    if (!m_avPlayer)
        return;

    // Delay callbacks so KVO notifications triggered by this change are batched.
    setDelayCallbacks(true);
    [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
    setDelayCallbacks(false);
}
2815
2816 #if !PLATFORM(IOS)
// Adopts the AVOutputContext from the given Mac playback target. If the target has no
// usable context or device name, playback to the target is turned off.
void MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
{
    MediaPlaybackTargetMac* macTarget = toMediaPlaybackTargetMac(&target.get());

    m_outputContext = macTarget->outputContext();
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(%p) - target = %p, device name = %s", this, m_outputContext.get(), [m_outputContext.get().deviceName UTF8String]);

    if (!m_outputContext || !m_outputContext.get().deviceName)
        setShouldPlayToPlaybackTarget(false);
}
2827
// Enables/disables routing playback to the stored output context by assigning (or
// clearing) the AVPlayer's outputContext. Skips the assignment when the player already
// has the desired context.
void MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(bool shouldPlay)
{
    m_shouldPlayToPlaybackTarget = shouldPlay;

    AVOutputContext *newContext = shouldPlay ? m_outputContext.get() : nil;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(%p) - target = %p, shouldPlay = %s", this, newContext, boolString(shouldPlay));

    if (!m_avPlayer)
        return;

    // No-op when the player's context already matches (both nil, or isEqual:).
    RetainPtr<AVOutputContext> currentContext = m_avPlayer.get().outputContext;
    if ((!newContext && !currentContext.get()) || [currentContext.get() isEqual:newContext])
        return;

    // Delay callbacks so KVO notifications triggered by this change are batched.
    setDelayCallbacks(true);
    m_avPlayer.get().outputContext = newContext;
    setDelayCallbacks(false);
}
2846 #endif // !PLATFORM(IOS)
2847
// iOS: keep external playback enabled only while the element is in fullscreen
// (i.e. has a fullscreen video layer). No-op elsewhere.
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
    if (!m_avPlayer)
        return;

#if PLATFORM(IOS)
    [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:m_videoFullscreenLayer != nil];
#endif
}
2857 #endif
2858
// KVO sink: caches the AVPlayerItem status and re-evaluates the player's ready/network state.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;

    updateStates();
}
2865
// A "will change" KVO notification is in flight; count it so state updates are deferred
// until the matching DidChange arrives.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    m_pendingStatusChanges++;
}
2870
// Caches the new likelyToKeepUp value; updates states only once every pending
// will/did-change pair has completed.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2879
// A "will change" KVO notification is in flight; count it so state updates are deferred
// until the matching DidChange arrives.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    m_pendingStatusChanges++;
}
2884
// Caches the new bufferEmpty value; updates states only once every pending
// will/did-change pair has completed.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2893
// A "will change" KVO notification is in flight; count it so state updates are deferred
// until the matching DidChange arrives.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    m_pendingStatusChanges++;
}
2898
// Caches the new bufferFull value; updates states only once every pending
// will/did-change pair has completed.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2907
// KVO sink: caches the item's seekable ranges and notifies the generic layer.
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray> seekableRanges)
{
    m_cachedSeekableRanges = seekableRanges;

    seekableTimeRangesChanged();
    updateStates();
}
2915
// KVO sink: caches the item's loaded (buffered) ranges and notifies the generic layer.
void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray> loadedRanges)
{
    m_cachedLoadedRanges = loadedRanges;

    loadedTimeRangesChanged();
    updateStates();
}
2923
// KVO sink for isReadyForDisplay. If a frame became ready before any video track was
// reported, re-examine the tracks so hasVideo() can flip to true.
void MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange(bool isReady)
{
    m_cachedIsReadyForDisplay = isReady;
    if (!hasVideo() && isReady)
        tracksChanged();
    updateStates();
}
2931
// KVO sink for a track's "enabled" property; re-processes tracks and state.
void MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange(bool)
{
    tracksChanged();
    updateStates();
}
2937
// Controls whether AVFoundation may buffer media data: attaching the item to the player
// allows buffering, detaching it stops buffering.
void MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(bool shouldBuffer)
{
    // Fixed the LOG message to report the actual function name ("setShouldBufferData");
    // it previously said "shouldBufferData", which made the log hard to grep.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(%p) - %s", this, boolString(shouldBuffer));
    if (m_shouldBufferData == shouldBuffer)
        return;

    m_shouldBufferData = shouldBuffer;

    if (!m_avPlayer)
        return;

    setAVPlayerItem(shouldBuffer ? m_avPlayerItem.get() : nil);
}
2951
2952 #if ENABLE(DATACUE_VALUE)
// Maps an AVFoundation metadata key space to the type string attached to DataCues.
// Returns emptyAtom for unrecognized key spaces.
static const AtomicString& metadataType(NSString *avMetadataKeySpace)
{
    static NeverDestroyed<const AtomicString> quickTimeUserData("com.apple.quicktime.udta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> isoUserData("org.mp4ra", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> quickTimeMetadata("com.apple.quicktime.mdta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> iTunesMetadata("com.apple.itunes", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> id3Metadata("org.id3", AtomicString::ConstructFromLiteral);

    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeUserData])
        return quickTimeUserData;
    // AVMetadataKeySpaceISOUserData is soft-linked and may be null on older systems.
    if (AVMetadataKeySpaceISOUserData && [avMetadataKeySpace isEqualToString:AVMetadataKeySpaceISOUserData])
        return isoUserData;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeMetadata])
        return quickTimeMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceiTunes])
        return iTunesMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceID3])
        return id3Metadata;

    return emptyAtom;
}
2974 #endif
2975
// KVO sink for timed metadata. Caches the items and, when DATACUE_VALUE is enabled,
// closes out any still-open cues and adds one data cue per metadata item.
void MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(RetainPtr<NSArray> metadata, const MediaTime& mediaTime)
{
    // An NSNull payload means "no metadata"; normalize it to nil.
    m_currentMetaData = metadata && ![metadata isKindOfClass:[NSNull class]] ? metadata : nil;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(%p) - adding %i cues at time %s", this, m_currentMetaData ? static_cast<int>([m_currentMetaData.get() count]) : 0, toString(mediaTime).utf8().data());

#if ENABLE(DATACUE_VALUE)
    // Metadata observed during a seek is stale; skip cue generation.
    if (seeking())
        return;

    if (!m_metadataTrack)
        processMetadataTrack();

    // No metadata at this time: just close out the pending cues.
    if (!metadata || [metadata isKindOfClass:[NSNull class]]) {
        m_metadataTrack->updatePendingCueEndTimes(mediaTime);
        return;
    }

    // Set the duration of all incomplete cues before adding new ones.
    MediaTime earliestStartTime = MediaTime::positiveInfiniteTime();
    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = toMediaTime(item.time);
        if (start < earliestStartTime)
            earliestStartTime = start;
    }
    m_metadataTrack->updatePendingCueEndTimes(earliestStartTime);

    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = toMediaTime(item.time);
        // Items without a valid duration become open-ended cues, closed by a later batch.
        MediaTime end = MediaTime::positiveInfiniteTime();
        if (CMTIME_IS_VALID(item.duration))
            end = start + toMediaTime(item.duration);

        AtomicString type = nullAtom;
        if (item.keySpace)
            type = metadataType(item.keySpace);

        m_metadataTrack->addDataCue(start, end, SerializedPlatformRepresentationMac::create(item), type);
    }
#endif
}
3017
// Handles KVO on the player item's "tracks" property. Rebuilds m_cachedTracks,
// keeping the "enabled" observers registered on each cached track balanced,
// then notifies the generic player layers.
void MediaPlayerPrivateAVFoundationObjC::tracksDidChange(RetainPtr<NSArray> tracks)
{
    // Stop observing the tracks we are about to discard; must stay balanced
    // with the addObserver calls below.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];

    NSArray *assetTracks = [m_avAsset tracks];

    // Keep a track if it belongs to the asset, or if it is a streaming track
    // that is NOT represented by a media selection group (tracks covered by a
    // selection group are surfaced through the selection-group code paths).
    m_cachedTracks = [tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id obj, NSUInteger, BOOL*) {
        AVAssetTrack* assetTrack = [obj assetTrack];

        if ([assetTracks containsObject:assetTrack])
            return YES;

        // Track is a streaming track. Omit if it belongs to a valid AVMediaSelectionGroup.
        if (!hasLoadedMediaSelectionGroups())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicAudible] && safeMediaSelectionGroupForAudibleMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicVisual] && safeMediaSelectionGroupForVisualMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible] && safeMediaSelectionGroupForLegibleMedia())
            return NO;

        return YES;
    }]];

    // Begin observing "enabled" on the new set of tracks.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track addObserver:m_objcObserver.get() forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayerItemTrack];

    // The track list changed, so any cached total byte count is stale.
    m_cachedTotalBytes = 0;

    tracksChanged();
    updateStates();
}
3055
// KVO handler for the player item's "hasEnabledAudio" property: cache the new
// value, then let the generic layers re-evaluate track and readiness state.
void MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange(bool hasEnabledAudio)
{
    m_cachedHasEnabledAudio = hasEnabledAudio;

    tracksChanged();
    updateStates();
}
3063
// KVO handler for the player item's "presentationSize" property: cache the
// new size and propagate the change to the generic layers.
void MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange(FloatSize size)
{
    m_cachedPresentationSize = size;

    sizeChanged();
    updateStates();
}
3071
// KVO handler for the player item's "duration" property: cache the new
// duration and invalidate the base class's cached duration so callers see it.
void MediaPlayerPrivateAVFoundationObjC::durationDidChange(const MediaTime& duration)
{
    m_cachedDuration = duration;

    invalidateCachedDuration();
}
3078
// KVO handler for the AVPlayer's "rate" property: cache the new rate, update
// state, then notify that the effective playback rate changed.
void MediaPlayerPrivateAVFoundationObjC::rateDidChange(double rate)
{
    m_cachedRate = rate;

    updateStates();
    rateChanged();
}
3086     
3087 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// KVO handler for the AVPlayer's external-playback properties; forwards to
// the shared wireless-target-changed notification path.
void MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange()
{
    playbackTargetIsWirelessChanged();
}
3092 #endif
3093
// KVO handler for the item's "canPlayFastForward" property; cache only.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange(bool newValue)
{
    m_cachedCanPlayFastForward = newValue;
}
3098
// KVO handler for the item's "canPlayFastReverse" property; cache only.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange(bool newValue)
{
    m_cachedCanPlayFastReverse = newValue;
}
3103
URL MediaPlayerPrivateAVFoundationObjC::resolvedURL() const
{
    // Only trust the asset's resolvedURL once AVFoundation reports that key
    // as loaded; otherwise fall back to the base class's notion of the URL.
    if (m_avAsset && [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] == AVKeyValueStatusLoaded)
        return URL([m_avAsset resolvedURL]);

    return MediaPlayerPrivateAVFoundation::resolvedURL();
}
3111
// Keys whose values are preloaded on the AVAsset before it is interrogated.
// Use a function-local static with a dynamic initializer (a C++ "magic
// static") so initialization is one-time and thread-safe, matching the
// pattern already used by assetTrackMetadataKeyNames() and
// playerKVOProperties(); the previous checked-lazy-init was racy if two
// threads called this concurrently.
NSArray* assetMetadataKeyNames()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"duration",
                @"naturalSize",
                @"preferredTransform",
                @"preferredVolume",
                @"preferredRate",
                @"playable",
                @"resolvedURL",
                @"tracks",
                @"availableMediaCharacteristicsWithMediaSelectionOptions",
               nil];
    return keys;
}
3129
// Key paths observed via KVO on the AVPlayerItem (see the
// observeValueForKeyPath: dispatch in WebCoreAVFMovieObserver).
// Use a function-local static with a dynamic initializer (a C++ "magic
// static") so initialization is one-time and thread-safe, matching the
// pattern already used by assetTrackMetadataKeyNames() and
// playerKVOProperties(); the previous checked-lazy-init was racy if two
// threads called this concurrently.
NSArray* itemKVOProperties()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"presentationSize",
            @"status",
            @"asset",
            @"tracks",
            @"seekableTimeRanges",
            @"loadedTimeRanges",
            @"playbackLikelyToKeepUp",
            @"playbackBufferFull",
            @"playbackBufferEmpty",
            @"duration",
            @"hasEnabledAudio",
            @"timedMetadata",
            @"canPlayFastForward",
            @"canPlayFastReverse",
            nil];
    return keys;
}
3152
// Keys whose values are preloaded on each AVAssetTrack before the track is
// interrogated. The function-local static has a dynamic initializer, so C++
// guarantees one-time, thread-safe construction.
NSArray* assetTrackMetadataKeyNames()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"totalSampleDataLength", @"mediaType", @"enabled", @"preferredTransform", @"naturalSize", nil];
    return keys;
}
3158
// Key paths observed via KVO on the AVPlayer itself. The function-local
// static has a dynamic initializer, so C++ guarantees one-time, thread-safe
// construction.
NSArray* playerKVOProperties()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"rate",
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
                            @"externalPlaybackActive", @"allowsExternalPlayback",
#endif
                            nil];
    return keys;
}
3168 } // namespace WebCore
3169
@implementation WebCoreAVFMovieObserver

// The observer keeps a raw pointer back to the owning player; the player
// calls -disconnect before it is destroyed, so the pointer is cleared rather
// than left dangling.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

// Severs the link to the player and cancels any pending performSelector
// requests targeting this observer. After this, every callback below becomes
// a no-op.
- (void)disconnect
{
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    m_callback = 0;
}

// Forwards asset-metadata-loaded to the player as a main-thread notification.
- (void)metadataLoaded
{
    if (!m_callback)
        return;

    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
}

// Handler for the item's did-play-to-end notification; marshalled to the
// main thread via scheduleMainThreadNotification.
- (void)didEnd:(NSNotification *)unusedNotification
{
    UNUSED_PARAM(unusedNotification);
    if (!m_callback)
        return;
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
}

// Central KVO dispatch for the observed AVPlayer, AVPlayerItem,
// AVPlayerItemTrack, and AVPlayerLayer key paths. Each change is translated
// into a std::function bound to a MediaPlayerPrivateAVFoundationObjC member
// and scheduled on the main thread; nothing is invoked synchronously here.
- (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context
{
    UNUSED_PARAM(object);
    id newValue = [change valueForKey:NSKeyValueChangeNewKey];

    if (!m_callback)
        return;

    // True for "prior" (will-change) notifications, delivered for key paths
    // registered with NSKeyValueObservingOptionPrior.
    bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];

#if !LOG_DISABLED
    if (willChange)
        LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - will change, keyPath = %s", self, [keyPath UTF8String]);
    else {
        RetainPtr<NSString> valueString = adoptNS([[NSString alloc] initWithFormat:@"%@", newValue]);
        LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - did change, keyPath = %s, value = %s", self, [keyPath UTF8String], [valueString.get() UTF8String]);
    }
#endif

    std::function<void ()> function;

    // AVPlayerLayer: readiness of the first video frame.
    if (context == MediaPlayerAVFoundationObservationContextAVPlayerLayer) {
        if ([keyPath isEqualToString:@"readyForDisplay"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange, m_callback, [newValue boolValue]);
    }

    // AVPlayerItemTrack: a track was enabled or disabled.
    if (context == MediaPlayerAVFoundationObservationContextPlayerItemTrack) {
        if ([keyPath isEqualToString:@"enabled"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange, m_callback, [newValue boolValue]);
    }

    // AVPlayerItem, will-change (prior) notifications for buffering state.
    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && willChange) {
        if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange, m_callback);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && !willChange) {
        // A value changed for an AVPlayerItem
        if ([keyPath isEqualToString:@"status"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange, m_callback, [newValue intValue]);
        else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"asset"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::setAsset, m_callback, RetainPtr<id>(newValue));
        else if ([keyPath isEqualToString:@"loadedTimeRanges"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"seekableTimeRanges"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"tracks"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::tracksDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"hasEnabledAudio"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"presentationSize"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange, m_callback, FloatSize([newValue sizeValue]));
        else if ([keyPath isEqualToString:@"duration"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::durationDidChange, m_callback, toMediaTime([newValue CMTimeValue]));
        else if ([keyPath isEqualToString:@"timedMetadata"] && newValue) {
            // Stamp the metadata with the item's current time, clamped to
            // zero when the time is numeric; otherwise a zero MediaTime.
            MediaTime now;
            CMTime itemTime = [(AVPlayerItemType *)object currentTime];
            if (CMTIME_IS_NUMERIC(itemTime))
                now = std::max(toMediaTime(itemTime), MediaTime::zeroTime());
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::metadataDidArrive, m_callback, RetainPtr<NSArray>(newValue), now);
        } else if ([keyPath isEqualToString:@"canPlayFastReverse"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"canPlayFastForward"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange, m_callback, [newValue boolValue]);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayer && !willChange) {
        // A value changed for an AVPlayer.
        if ([keyPath isEqualToString:@"rate"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::rateDidChange, m_callback, [newValue doubleValue]);
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
        else if ([keyPath isEqualToString:@"externalPlaybackActive"] || [keyPath isEqualToString:@"allowsExternalPlayback"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange, m_callback);
#endif
    }

    if (!function)
        return;

    auto weakThis = m_callback->createWeakPtr();
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis, function]{
        // weakThis and function both refer to the same MediaPlayerPrivateAVFoundationObjC instance. If the WeakPtr has
        // been cleared, the underlying object has been destroyed, and it is unsafe to call function().
        if (!weakThis)
            return;
        function();
    }));
}

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// AVPlayerItemLegibleOutput delegate: forwards attributed caption strings and
// native samples to processCue on the main thread. RetainPtrs keep self and
// the arrays alive across the thread hop; m_callback is re-checked on the
// main thread because -disconnect may have run in between.
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime
{
    UNUSED_PARAM(output);
    UNUSED_PARAM(nativeSamples);

    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFMovieObserver> strongSelf = self;
    RetainPtr<NSArray> strongStrings = strings;
    RetainPtr<NSArray> strongSamples = nativeSamples;
    callOnMainThread([strongSelf, strongStrings, strongSamples, itemTime] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (!callback)
            return;
        callback->processCue(strongStrings.get(), strongSamples.get(), toMediaTime(itemTime));
    });
}

// Legible output was flushed (e.g. after a seek): discard queued cues on the
// main thread.
- (void)outputSequenceWasFlushed:(id)output
{
    UNUSED_PARAM(output);

    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFMovieObserver> strongSelf = self;
    callOnMainThread([strongSelf] {
        if (MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback)
            callback->flushCues();
    });
}
#endif

@end
3338
3339 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
@implementation WebCoreAVFLoaderDelegate

// The delegate holds a raw pointer to the player; the player resets it via
// -setCallback: before destruction.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

// AVAssetResourceLoader delegate: asks the player (on the main thread)
// whether it will service this loading request. Returning YES tells
// AVFoundation to wait; the request is finished with an error if the player
// is gone or declines by the time the main-thread hop completes.
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    RetainPtr<WebCoreAVFLoaderDelegate> strongSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> strongRequest = loadingRequest;
    callOnMainThread([strongSelf, strongRequest] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (!callback) {
            [strongRequest finishLoadingWithError:nil];
            return;
        }

        if (!callback->shouldWaitForLoadingOfResource(strongRequest.get()))
            [strongRequest finishLoadingWithError:nil];
    });

    return YES;
}

// Authentication challenges are forwarded to the player on the main thread.
// Server-trust challenges are declined outright (handled elsewhere); a
// challenge whose player has disappeared, or that the player declines, is
// cancelled via its sender.
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForResponseToAuthenticationChallenge:(NSURLAuthenticationChallenge *)challenge
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    if ([[[challenge protectionSpace] authenticationMethod] isEqualToString:NSURLAuthenticationMethodServerTrust])
        return NO;

    RetainPtr<WebCoreAVFLoaderDelegate> strongSelf = self;
    RetainPtr<NSURLAuthenticationChallenge> strongChallenge = challenge;
    callOnMainThread([strongSelf, strongChallenge] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (!callback) {
            [[strongChallenge sender] cancelAuthenticationChallenge:strongChallenge.get()];
            return;
        }

        if (!callback->shouldWaitForResponseToAuthenticationChallenge(strongChallenge.get()))
            [[strongChallenge sender] cancelAuthenticationChallenge:strongChallenge.get()];
    });

    return YES;
}

// AVFoundation cancelled an in-flight loading request; tell the player on the
// main thread so it can abort the matching resource load.
- (void)resourceLoader:(AVAssetResourceLoader *)resourceLoader didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFLoaderDelegate> strongSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> strongRequest = loadingRequest;
    callOnMainThread([strongSelf, strongRequest] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (callback)
            callback->didCancelLoadingRequest(strongRequest.get());
    });
}

// Re-points (or clears, with null) the raw back-pointer to the player.
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    m_callback = callback;
}
@end
3418 #endif
3419
3420 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
@implementation WebCoreAVFPullDelegate

// The delegate holds a raw pointer to the player; the player clears it with
// -setCallback: before it goes away.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

// Re-points (or clears, with null) the raw back-pointer to the player.
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    m_callback = callback;
}

// AVPlayerItemOutputPullDelegate: new video output data is about to be
// available; forward to the player if it is still alive.
- (void)outputMediaDataWillChange:(AVPlayerItemVideoOutput *)output
{
    if (!m_callback)
        return;
    m_callback->outputMediaDataWillChange(output);
}

// AVPlayerItemOutputPullDelegate: the output sequence was flushed.
// Intentionally a no-op.
- (void)outputSequenceWasFlushed:(AVPlayerItemVideoOutput *)output
{
    UNUSED_PARAM(output);
}
@end
3447 #endif
3448
3449 #endif