dispatch_group_t objects may not be retained properly when not backed by Objective-C objects
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27
28 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
29 #import "MediaPlayerPrivateAVFoundationObjC.h"
30
31 #import "AVFoundationSPI.h"
32 #import "AVTrackPrivateAVFObjCImpl.h"
33 #import "AudioSourceProviderAVFObjC.h"
34 #import "AudioTrackPrivateAVFObjC.h"
35 #import "AuthenticationChallenge.h"
36 #import "BlockExceptions.h"
37 #import "CDMSessionAVFoundationObjC.h"
38 #import "Cookie.h"
39 #import "ExceptionCodePlaceholder.h"
40 #import "FloatConversion.h"
41 #import "FloatConversion.h"
42 #import "GraphicsContext.h"
43 #import "GraphicsContextCG.h"
44 #import "InbandMetadataTextTrackPrivateAVF.h"
45 #import "InbandTextTrackPrivateAVFObjC.h"
46 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
47 #import "OutOfBandTextTrackPrivateAVF.h"
48 #import "URL.h"
49 #import "Logging.h"
50 #import "MediaPlaybackTargetMac.h"
51 #import "MediaSelectionGroupAVFObjC.h"
52 #import "MediaTimeAVFoundation.h"
53 #import "PlatformTimeRanges.h"
54 #import "QuartzCoreSPI.h"
55 #import "SecurityOrigin.h"
56 #import "SerializedPlatformRepresentationMac.h"
57 #import "TextEncoding.h"
58 #import "TextTrackRepresentation.h"
59 #import "UUID.h"
60 #import "VideoTrackPrivateAVFObjC.h"
61 #import "WebCoreAVFResourceLoader.h"
62 #import "WebCoreCALayerExtras.h"
63 #import "WebCoreSystemInterface.h"
64 #import <functional>
65 #import <objc/runtime.h>
66 #import <runtime/DataView.h>
67 #import <runtime/JSCInlines.h>
68 #import <runtime/TypedArrayInlines.h>
69 #import <runtime/Uint16Array.h>
70 #import <runtime/Uint32Array.h>
71 #import <runtime/Uint8Array.h>
72 #import <wtf/CurrentTime.h>
73 #import <wtf/ListHashSet.h>
74 #import <wtf/NeverDestroyed.h>
75 #import <wtf/OSObjectPtr.h>
76 #import <wtf/text/CString.h>
77 #import <wtf/text/StringBuilder.h>
78
79 #if ENABLE(AVF_CAPTIONS)
80 #include "TextTrack.h"
81 #endif
82
83 #import <AVFoundation/AVFoundation.h>
84 #if PLATFORM(IOS)
85 #import "WAKAppKitStubs.h"
86 #import <CoreImage/CoreImage.h>
87 #import <mach/mach_port.h>
88 #else
89 #import <Foundation/NSGeometry.h>
90 #import <QuartzCore/CoreImage.h>
91 #endif
92
93 #if USE(VIDEOTOOLBOX)
94 #import <CoreVideo/CoreVideo.h>
95 #import <VideoToolbox/VideoToolbox.h>
96 #endif
97
98 #if USE(CFNETWORK)
99 #include "CFNSURLConnectionSPI.h"
100 #endif
101
namespace std {
// Minimal iterator_traits specialization so std algorithms can be used with
// HashSet<RefPtr<MediaSelectionOptionAVFObjC>> iterators, which do not expose
// the usual nested typedefs. Only value_type is needed by callers in this file.
template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
    typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
};
}
107
// Container layer that keeps its sublayers (the AVPlayerLayer) sized to match
// its own bounds, and compensates for any transform applied to it.
@interface WebVideoContainerLayer : CALayer
@end

@implementation WebVideoContainerLayer

- (void)setBounds:(CGRect)bounds
{
    [super setBounds:bounds];
    // Keep every sublayer exactly coextensive with this container.
    for (CALayer* sublayer in self.sublayers)
        sublayer.frame = bounds;
}

- (void)setPosition:(CGPoint)position
{
    if (!CATransform3DIsIdentity(self.transform)) {
        // Pre-apply the transform added in the WebProcess to fix <rdar://problem/18316542> to the position.
        CGAffineTransform affineTransform = CATransform3DGetAffineTransform(self.transform);
        position = CGPointApplyAffineTransform(position, affineTransform);
    }
    [super setPosition:position];
}
@end
129
130 #if ENABLE(AVF_CAPTIONS)
131 // Note: This must be defined before our SOFT_LINK macros:
132 @class AVMediaSelectionOption;
133 @interface AVMediaSelectionOption (OutOfBandExtensions)
134 @property (nonatomic, readonly) NSString* outOfBandSource;
135 @property (nonatomic, readonly) NSString* outOfBandIdentifier;
136 @end
137 #endif
138
139 @interface AVURLAsset (WebKitExtensions)
140 @property (nonatomic, readonly) NSURL *resolvedURL;
141 @end
142
143 typedef AVPlayer AVPlayerType;
144 typedef AVPlayerItem AVPlayerItemType;
145 typedef AVPlayerItemLegibleOutput AVPlayerItemLegibleOutputType;
146 typedef AVPlayerItemVideoOutput AVPlayerItemVideoOutputType;
147 typedef AVMetadataItem AVMetadataItemType;
148 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
149 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
150
151 #pragma mark - Soft Linking
152
153 // Soft-linking headers must be included last since they #define functions, constants, etc.
154 #import "CoreMediaSoftLink.h"
155
156 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
157 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
158 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreVideo)
159
160 #if USE(VIDEOTOOLBOX)
161 SOFT_LINK_FRAMEWORK_OPTIONAL(VideoToolbox)
162 #endif
163
164 SOFT_LINK(CoreVideo, CVPixelBufferGetWidth, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
165 SOFT_LINK(CoreVideo, CVPixelBufferGetHeight, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
166 SOFT_LINK(CoreVideo, CVPixelBufferGetBaseAddress, void*, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
167 SOFT_LINK(CoreVideo, CVPixelBufferGetBytesPerRow, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
168 SOFT_LINK(CoreVideo, CVPixelBufferGetDataSize, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
169 SOFT_LINK(CoreVideo, CVPixelBufferGetPixelFormatType, OSType, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
170 SOFT_LINK(CoreVideo, CVPixelBufferLockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
171 SOFT_LINK(CoreVideo, CVPixelBufferUnlockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
172
173 #if USE(VIDEOTOOLBOX)
174 SOFT_LINK(VideoToolbox, VTPixelTransferSessionCreate, OSStatus, (CFAllocatorRef allocator, VTPixelTransferSessionRef *pixelTransferSessionOut), (allocator, pixelTransferSessionOut))
175 SOFT_LINK(VideoToolbox, VTPixelTransferSessionTransferImage, OSStatus, (VTPixelTransferSessionRef session, CVPixelBufferRef sourceBuffer, CVPixelBufferRef destinationBuffer), (session, sourceBuffer, destinationBuffer))
176 #endif
177
178 SOFT_LINK_CLASS(AVFoundation, AVPlayer)
179 SOFT_LINK_CLASS(AVFoundation, AVPlayerItem)
180 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemVideoOutput)
181 SOFT_LINK_CLASS(AVFoundation, AVPlayerLayer)
182 SOFT_LINK_CLASS(AVFoundation, AVURLAsset)
183 SOFT_LINK_CLASS(AVFoundation, AVAssetImageGenerator)
184 SOFT_LINK_CLASS(AVFoundation, AVMetadataItem)
185
186 SOFT_LINK_CLASS(CoreImage, CIContext)
187 SOFT_LINK_CLASS(CoreImage, CIImage)
188
189 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString*)
190 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString*)
191 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
192 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
193 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
194 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
195 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
196 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeMetadata, NSString *)
197 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
198 SOFT_LINK_POINTER(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
199 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
200 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
201 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
202 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
203 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
204 SOFT_LINK_POINTER(CoreVideo, kCVPixelBufferPixelFormatTypeKey, NSString *)
205
206 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
207
208 #define AVPlayer getAVPlayerClass()
209 #define AVPlayerItem getAVPlayerItemClass()
210 #define AVPlayerLayer getAVPlayerLayerClass()
211 #define AVURLAsset getAVURLAssetClass()
212 #define AVAssetImageGenerator getAVAssetImageGeneratorClass()
213 #define AVMetadataItem getAVMetadataItemClass()
214
215 #define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
216 #define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
217 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
218 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
219 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
220 #define AVMediaTypeVideo getAVMediaTypeVideo()
221 #define AVMediaTypeAudio getAVMediaTypeAudio()
222 #define AVMediaTypeMetadata getAVMediaTypeMetadata()
223 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
224 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
225 #define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
226 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
227 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
228 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
229 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
230 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
231 #define kCVPixelBufferPixelFormatTypeKey getkCVPixelBufferPixelFormatTypeKey()
232
233 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
234 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
235 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
236
237 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
238 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
239 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
240
241 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
242 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
243 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
244 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
245
246 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
247 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
248 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
249 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
250 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
251 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
252 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
253 #endif
254
255 #if ENABLE(AVF_CAPTIONS)
256 SOFT_LINK_POINTER(AVFoundation, AVURLAssetHTTPCookiesKey, NSString*)
257 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
258 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
259 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
260 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
261 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
262 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
263 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
264 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
265 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
266
267 #define AVURLAssetHTTPCookiesKey getAVURLAssetHTTPCookiesKey()
268 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
269 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
270 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
271 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
272 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
273 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
274 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
275 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
276 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
277 #endif
278
279 #if ENABLE(DATACUE_VALUE)
280 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString*)
281 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMetadataKeySpaceISOUserData, NSString*)
282 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString*)
283 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceiTunes, NSString*)
284 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceID3, NSString*)
285
286 #define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
287 #define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
288 #define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
289 #define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
290 #define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
291 #endif
292
293 #if PLATFORM(IOS)
294 SOFT_LINK_POINTER(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
295 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
296 #endif
297
298 using namespace WebCore;
299
// Context values passed to -addObserver:forKeyPath:options:context: so the
// observer can tell which kind of object a KVO change came from.
enum MediaPlayerAVFoundationObservationContext {
    MediaPlayerAVFoundationObservationContextPlayerItem,
    MediaPlayerAVFoundationObservationContextPlayerItemTrack,
    MediaPlayerAVFoundationObservationContextPlayer,
    MediaPlayerAVFoundationObservationContextAVPlayerLayer,
};
306
// KVO / notification observer bridging AVFoundation callbacks back into the
// C++ MediaPlayerPrivateAVFoundationObjC. The legible-output delegate protocol
// is adopted only when legible output support is compiled in.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
#else
@interface WebCoreAVFMovieObserver : NSObject
#endif
{
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Back-pointer; cleared via -disconnect during teardown.
    int m_delayCallbacks;
}
-(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
-(void)disconnect;
-(void)metadataLoaded;
-(void)didEnd:(NSNotification *)notification;
// NOTE(review): keyPath is implicitly typed id and context is declared as the
// enum rather than void*, so this does not exactly match NSObject's KVO
// signature -- confirm this is intentional.
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;
#endif
@end
326
327 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// AVAssetResourceLoader delegate that forwards resource-loading requests to the
// owning MediaPlayerPrivateAVFoundationObjC.
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Back-pointer; cleared via -setCallback:0 in the player's destructor.
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
@end
335 #endif
336
337 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Pull delegate for the AVPlayerItemVideoOutput; notifies the owning player
// when new video frames become available.
@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    MediaPlayerPrivateAVFoundationObjC *m_callback; // Back-pointer; cleared via -setCallback:0 in the player's destructor.
    dispatch_semaphore_t m_semaphore;
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
@end
347 #endif
348
349 namespace WebCore {
350
351 static NSArray *assetMetadataKeyNames();
352 static NSArray *itemKVOProperties();
353 static NSArray *assetTrackMetadataKeyNames();
354 static NSArray *playerKVOProperties();
355 static AVAssetTrack* firstEnabledTrack(NSArray* tracks);
356
357 #if !LOG_DISABLED
// Formats a bool for LOG() output.
static const char *boolString(bool val)
{
    if (val)
        return "true";
    return "false";
}
362 #endif
363
364 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Maps a public MediaPlayer to its private implementation; entries are added in
// the constructor and removed in the destructor so CDM code can find the
// private player for a given public one.
typedef HashMap<MediaPlayer*, MediaPlayerPrivateAVFoundationObjC*> PlayerToPrivateMapType;
static PlayerToPrivateMapType& playerToPrivateMap()
{
    // Intentionally never destroyed; lives for the duration of the process.
    DEPRECATED_DEFINE_STATIC_LOCAL(PlayerToPrivateMapType, map, ());
    return map;
};
371 #endif
372
373 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Shared serial queue on which all WebCoreAVFLoaderDelegate callbacks run.
// Created once and deliberately never released.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t loaderQueue;
    static dispatch_once_t creationOnce;
    dispatch_once(&creationOnce, ^{
        loaderQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return loaderQueue;
}
383 #endif
384
385 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Shared serial queue on which all WebCoreAVFPullDelegate callbacks run.
// Created once and deliberately never released.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_queue_t pullQueue;
    static dispatch_once_t creationOnce;
    dispatch_once(&creationOnce, ^{
        pullQueue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return pullQueue;
}
395 #endif
396
397 #if USE(CFNETWORK)
// Bridges an NSURLAuthenticationChallenge to WebCore's AuthenticationClient
// interface: each AuthenticationClient callback is routed back to the
// challenge's NSURL sender.
class WebCoreNSURLAuthenticationChallengeClient : public RefCounted<WebCoreNSURLAuthenticationChallengeClient>, public AuthenticationClient {
public:
    static RefPtr<WebCoreNSURLAuthenticationChallengeClient> create(NSURLAuthenticationChallenge *challenge)
    {
        return adoptRef(new WebCoreNSURLAuthenticationChallengeClient(challenge));
    }

    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::ref;
    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::deref;

private:
    WebCoreNSURLAuthenticationChallengeClient(NSURLAuthenticationChallenge *challenge)
        : m_challenge(challenge)
    {
        ASSERT(m_challenge);
    }

    // AuthenticationClient's ref-counting is forwarded to RefCounted.
    virtual void refAuthenticationClient() override { ref(); }
    virtual void derefAuthenticationClient() override { deref(); }

    virtual void receivedCredential(const AuthenticationChallenge&, const Credential& credential) override
    {
        [[m_challenge sender] useCredential:credential.nsCredential() forAuthenticationChallenge:m_challenge.get()];
    }

    virtual void receivedRequestToContinueWithoutCredential(const AuthenticationChallenge&) override
    {
        [[m_challenge sender] continueWithoutCredentialForAuthenticationChallenge:m_challenge.get()];
    }

    virtual void receivedCancellation(const AuthenticationChallenge&) override
    {
        [[m_challenge sender] cancelAuthenticationChallenge:m_challenge.get()];
    }

    // The next two sender methods are optional protocol members; check before messaging.
    virtual void receivedRequestToPerformDefaultHandling(const AuthenticationChallenge&) override
    {
        if ([[m_challenge sender] respondsToSelector:@selector(performDefaultHandlingForAuthenticationChallenge:)])
            [[m_challenge sender] performDefaultHandlingForAuthenticationChallenge:m_challenge.get()];
    }

    virtual void receivedChallengeRejection(const AuthenticationChallenge&) override
    {
        if ([[m_challenge sender] respondsToSelector:@selector(rejectProtectionSpaceAndContinueWithChallenge:)])
            [[m_challenge sender] rejectProtectionSpaceAndContinueWithChallenge:m_challenge.get()];
    }

    RetainPtr<NSURLAuthenticationChallenge> m_challenge;
};
447 #endif
448
// Registers this engine with the MediaPlayer factory, but only when the
// required AVFoundation / CoreMedia frameworks could be soft-linked.
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player); },
        getSupportedTypes, supportsType, 0, 0, 0, supportsKeySystem);
}
455
// Constructs the private player. Note: the initializer list order must match
// the member declaration order in the header; Objective-C helper objects
// (observer, delegates) are created here and disconnected in the destructor.
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_weakPtrFactory(this)
#if PLATFORM(IOS)
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
#endif
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
    , m_videoOutputSemaphore(nullptr)
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
#endif
    , m_currentTextTrack(0)
    , m_cachedRate(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_shouldBufferData(true)
    , m_cachedIsReadyForDisplay(false)
    , m_haveBeenAskedToCreateLayer(false)
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    // Register so CDM session code can map the public player back to us.
    playerToPrivateMap().set(player, this);
#endif
}
492
// Tears down all AVFoundation objects, delegates, and observers. Callbacks are
// disconnected first so no delegate fires into a half-destroyed object.
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    playerToPrivateMap().remove(player());
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    [m_loaderDelegate.get() setCallback:0];
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];

    // Cancel any resource loads still in flight.
    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    [m_videoOutputDelegate setCallback:0];
    [m_videoOutput setDelegate:nil queue:0];
    // NOTE(review): manual dispatch_release of a raw dispatch_semaphore_t; when
    // dispatch objects are Objective-C-backed this manual ownership is fragile --
    // consider holding the semaphore in an OSObjectPtr (already #imported above).
    if (m_videoOutputSemaphore)
        dispatch_release(m_videoOutputSemaphore);
#endif

    if (m_videoLayer)
        destroyVideoLayer();

    // cancelLoad() removes KVO observers and releases the player/item/asset.
    cancelLoad();
}
517
// Cancels any in-progress load and releases the AVAsset, AVPlayerItem and
// AVPlayer, removing every KVO observer first. The teardown order matters:
// observers must be detached before the observed objects are released.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::cancelLoad(%p)", this);
    tearDownVideoRendering();

    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Detach the legible (caption) output from the item before releasing it.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    // Remove all KVO observers registered on the item, then the player.
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];

        m_avPlayerItem = nil;
    }
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;

        for (NSString *keyName in playerKVOProperties())
            [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = MediaTime::zeroTime();

    // Each cached track was observed for "enabled"; detach before dropping them.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    if (m_provider) {
        m_provider->setPlayerItem(nullptr);
        m_provider->setAudioTrack(nullptr);
    }
#endif

    setIgnoreLoadStateChanges(false);
}
581
// True once layer creation has been requested; the flag is set on the main
// thread inside createVideoLayer().
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_haveBeenAskedToCreateLayer;
}
586
// A context renderer exists when either the video output (preferred) or the
// fallback image generator has been created.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    return m_videoOutput || m_imageGenerator;
#else
    return m_imageGenerator;
#endif
}
595
// Creates whichever context-painting renderer this build supports: an
// AVPlayerItemVideoOutput when available, otherwise an AVAssetImageGenerator.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
604
// Lazily creates the AVAssetImageGenerator used to paint frames into a
// graphics context when no video output is available.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p)", this);

    // Needs a loaded asset; only one generator is ever created.
    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    // Exact-time snapshots: no tolerance before or after the requested time.
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
}
621
// Tears down both context-rendering paths; each destroy call is a no-op when
// the corresponding renderer was never created.
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
#endif
    destroyImageGenerator();
}
629
// Releases the image generator, if one was ever created.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (m_imageGenerator) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator(%p) - destroying  %p", this, m_imageGenerator.get());
        m_imageGenerator = 0;
    }
}
639
// Requests creation of the AVPlayerLayer. The actual work is bounced to the
// main thread; state is re-checked after the hop because it may have changed
// while the task was queued, and a weak pointer guards against destruction.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
        return;

    auto weakThis = createWeakPtr();
    callOnMainThread([this, weakThis] {
        // The player may have been destroyed before this task ran.
        if (!weakThis)
            return;

        if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
            return;
        m_haveBeenAskedToCreateLayer = true;

        if (!m_videoLayer)
            createAVPlayerLayer();

#if USE(VIDEOTOOLBOX) && (!defined(__MAC_OS_X_VERSION_MIN_REQUIRED) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 101000)
        if (!m_videoOutput)
            createVideoOutput();
#endif

        // Let the client know rendering switched from context to layer mode.
        player()->client().mediaPlayerRenderingModeChanged(player());
    });
}
665
// Creates the AVPlayerLayer attached to m_avPlayer, registers KVO on its
// readyForDisplay property, and (on iOS) wraps it in a container layer or
// parents it into the fullscreen layer when one exists.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
{
    if (!m_avPlayer)
        return;

    m_videoLayer = adoptNS([allocAVPlayerLayerInstance() init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];
    [m_videoLayer setBackgroundColor:cachedCGColor(Color::black, ColorSpaceDeviceRGB)];
#ifndef NDEBUG
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
    // readyForDisplay feeds hasAvailableVideoFrame() via the KVO observer.
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
    IntSize defaultSize = snappedIntRect(player()->client().mediaPlayerContentBoxRect()).size();
    // Fixed: this log previously named createVideoLayer() and claimed a return
    // value even though this function is createAVPlayerLayer() and returns void.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer(%p) - created %p", this, m_videoLayer.get());

#if PLATFORM(IOS)
    [m_videoLayer web_disableAllActions];
    m_videoInlineLayer = adoptNS([[WebVideoContainerLayer alloc] init]);
#ifndef NDEBUG
    [m_videoInlineLayer setName:@"WebVideoContainerLayer"];
#endif
    [m_videoInlineLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
    if (m_videoFullscreenLayer) {
        // Already fullscreen: size to the fullscreen frame and parent there.
        [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
        [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
    } else {
        [m_videoInlineLayer insertSublayer:m_videoLayer.get() atIndex:0];
        [m_videoLayer setFrame:m_videoInlineLayer.get().bounds];
    }
    if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(player()->fullscreenMode() & MediaPlayer::VideoFullscreenModePictureInPicture)];
#else
    [m_videoLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
#endif
}
703
// Detaches and releases the AVPlayerLayer; the KVO observer added in
// createAVPlayerLayer() is removed first.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer(%p) - destroying %p", this, m_videoLayer.get());

    [m_videoLayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer.get() setPlayer:nil];

#if PLATFORM(IOS)
    if (m_videoFullscreenLayer)
        [m_videoLayer removeFromSuperlayer];
    m_videoInlineLayer = nil;
#endif

    m_videoLayer = nil;
}
722
// Returns the stream's start date as a MediaTime in milliseconds since the
// epoch (both operands below are scaled by 1000), or invalidTime() when the
// media has no start date.
MediaTime MediaPlayerPrivateAVFoundationObjC::getStartDate() const
{
    // Date changes as the track's playback position changes. Must subtract currentTime (offset in seconds) from date offset to get date beginning
    double date = [[m_avPlayerItem currentDate] timeIntervalSince1970] * 1000;

    // No live streams were made during the epoch (1970). AVFoundation returns 0 if the media file doesn't have a start date
    if (!date)
        return MediaTime::invalidTime();

    double currentTime = CMTimeGetSeconds([m_avPlayerItem currentTime]) * 1000;

    // Rounding due to second offset error when subtracting.
    return MediaTime::createWithDouble(round(date - currentTime));
}
737
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    // When rendering through a layer, the cached "readyForDisplay" KVO value is the
    // authority; otherwise report whether we have painted a frame ourselves.
    bool renderingToLayer = currentRenderingMode() == MediaRenderingToLayer;
    return renderingToLayer ? m_cachedIsReadyForDisplay : m_videoFrameHasDrawn;
}
745
746 #if ENABLE(AVF_CAPTIONS)
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    // Map a WebCore text track kind onto the AVFoundation media characteristics that
    // describe it in an out-of-band alternate-track dictionary.
    // FIXME: Match these to correct types:
    switch (kind) {
    case PlatformTextTrack::Description:
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, nil];
    case PlatformTextTrack::Forced:
        return [NSArray arrayWithObjects: AVMediaCharacteristicContainsOnlyForcedSubtitles, nil];
    case PlatformTextTrack::Caption:
    case PlatformTextTrack::Subtitle:
    default:
        // Captions, subtitles, and any unrecognized kind share the same characteristic.
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];
    }
}
764     
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    // Forward out-of-band track mode changes to the shared base-class handler.
    trackModeChanged();
}
769     
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    // Push the mode (hidden/disabled/showing) of each out-of-band track source from the
    // player down onto the matching private text track object.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    
    for (auto& textTrack : m_textTracks) {
        // Only out-of-band tracks are synchronized here; in-band tracks are managed elsewhere.
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;
        
        RetainPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> currentOption = trackPrivate->mediaSelectionOption();
        
        for (auto& track : outOfBandTrackSources) {
            // Track sources and selection options are correlated by the unique ID stored
            // in the option's outOfBandIdentifier (see createAVAssetForURL).
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();
            
            if (![[currentOption.get() outOfBandIdentifier] isEqual: reinterpret_cast<const NSString*>(uniqueID.get())])
                continue;
            
            // Translate the platform mode enum into the inband-track mode enum.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;
            
            textTrack->setMode(mode);
            break;
        }
    }
}
800 #endif
801
802
static NSURL *canonicalURL(const String& url)
{
    // Run the URL through NSURLProtocol's request canonicalization so it matches the
    // form the loader will ultimately use. On any failure (or an empty URL string),
    // fall back to the straight conversion.
    NSURL *cocoaURL = URL(ParsedURLString, url);
    if (url.isEmpty())
        return cocoaURL;

    RetainPtr<NSURLRequest> request = adoptNS([[NSURLRequest alloc] initWithURL:cocoaURL]);
    if (request) {
        if (NSURLRequest *canonicalRequest = [NSURLProtocol canonicalRequestForRequest:request.get()])
            return [canonicalRequest URL];
    }
    return cocoaURL;
}
819
820 #if PLATFORM(IOS)
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    // Map a WebCore Cookie onto the property dictionary NSHTTPCookie understands.
    // cookie.expires is in milliseconds; NSDate wants seconds.
    NSDictionary *requiredProperties = @{
        NSHTTPCookieName: cookie.name,
        NSHTTPCookieValue: cookie.value,
        NSHTTPCookieDomain: cookie.domain,
        NSHTTPCookiePath: cookie.path,
        NSHTTPCookieExpires: [NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)],
    };
    RetainPtr<NSMutableDictionary> properties = adoptNS([requiredProperties mutableCopy]);

    // The boolean attributes are only added when set, matching NSHTTPCookie conventions.
    if (cookie.secure)
        [properties setObject:@YES forKey:NSHTTPCookieSecure];
    if (cookie.session)
        [properties setObject:@YES forKey:NSHTTPCookieDiscard];

    return [NSHTTPCookie cookieWithProperties:properties.get()];
}
838 #endif
839
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
{
    // Create the AVURLAsset for `url`, configuring reference restrictions, HTTP headers,
    // out-of-band text tracks, cookies (iOS), and the resource-loader delegate.
    if (m_avAsset)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(%p) - url = %s", this, url.utf8().data());

    // Suppress re-entrant callbacks while player state is being mutated.
    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);    

    // Forbid the media file from mixing local and remote sub-references.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    // Forward the page's Referer and User-Agent so AVFoundation's loads match other page loads.
    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject: [NSNumber numberWithBool: TRUE] forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

#if PLATFORM(IOS)
    // FIXME: rdar://problem/20354688
    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty() && AVURLAssetClientBundleIdentifierKey)
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];
#endif

#if ENABLE(AVF_CAPTIONS)
    // Describe each out-of-band text track source so AVFoundation exposes it as an
    // alternate media selection option; the unique ID correlates options back to
    // sources in synchronizeTextTrackState().
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: reinterpret_cast<const NSString*>(label.get()),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: reinterpret_cast<const NSString*>(language.get()),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: reinterpret_cast<const NSString*>(uniqueID.get()),
                AVOutOfBandAlternateTrackSourceKey: reinterpret_cast<const NSString*>(url.get()),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS)
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS)
    // Hand the document's cookies for this URL to AVFoundation, which performs the
    // media loads itself.
    Vector<Cookie> cookies;
    if (player()->getRawCookies(URL(ParsedURLString, url), cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([allocAVURLAssetInstance() initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Route resource requests (e.g. keys) through our loader delegate.
    [[m_avAsset.get() resourceLoader] setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];
#endif

    // A new asset needs a fresh playability check (see checkPlayability()).
    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
927
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItemType *item)
{
    // Install `item` as the player's current item, hopping to the main thread when
    // necessary since AVPlayer item replacement is done there.
    if (!m_avPlayer)
        return;

    if (pthread_main_np()) {
        [m_avPlayer replaceCurrentItemWithPlayerItem:item];
        return;
    }

    // Retain both objects so they stay alive until the asynchronous block runs.
    RetainPtr<AVPlayerType> strongPlayer = m_avPlayer.get();
    RetainPtr<AVPlayerItemType> strongItem = item;
    dispatch_async(dispatch_get_main_queue(), [strongPlayer, strongItem] {
        [strongPlayer replaceCurrentItemWithPlayerItem:strongItem.get()];
    });
}
944
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    // Create the AVPlayer, register KVO observers for every cached player property,
    // and attach the layer and any existing player item.
    if (m_avPlayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayer(%p)", this);

    setDelayCallbacks(true);

    m_avPlayer = adoptNS([allocAVPlayerInstance() init]);
    for (NSString *keyName in playerKVOProperties())
        [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // WebCore drives media selection itself rather than letting AVFoundation choose.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET) && !PLATFORM(IOS)
    // Re-apply a playback target that was chosen before the player existed.
    if (m_shouldPlayToPlaybackTarget)
        setShouldPlayToPlaybackTarget(true);
#endif

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
980
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    // Create the AVPlayerItem for the current asset and wire up the end-of-playback
    // notification, per-item KVO, legible (caption) output, and the Web Audio provider.
    if (m_avPlayerItem)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem(%p)", this);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([allocAVPlayerItemInstance() initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // NSKeyValueObservingOptionPrior gives the observer will-change callbacks as well.
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Deliver WebVTT cues to our delegate ahead of their display time, and suppress
    // AVFoundation's own caption rendering so WebCore can render them instead.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([allocAVPlayerItemLegibleOutputInstance() initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the audio source provider pointed at the new item and its first enabled track.
    if (m_provider) {
        m_provider->setPlayerItem(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }
#endif

    setDelayCallbacks(false);
}
1026
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    // Kick off asynchronous loading of the asset's "playable" key, at most once per asset
    // (the flag is reset in createAVAssetForURL).
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::checkPlayability(%p)", this);
    auto weakThis = createWeakPtr();

    // The completion handler may fire after this object is destroyed; the weak pointer
    // guards the main-thread notification.
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"playable"] completionHandler:^{
        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
1043
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);

    // A dispatch group joins the asset-level key load with the per-track key loads below.
    // OSObjectPtr keeps the group retained across the asynchronous blocks even when
    // dispatch objects are not backed by Objective-C objects.
    OSObjectPtr<dispatch_group_t> metadataLoadingGroup = adoptOSObject(dispatch_group_create());
    dispatch_group_enter(metadataLoadingGroup.get());
    auto weakThis = createWeakPtr();
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            // Once the asset's tracks are loaded, load each track's metadata keys too,
            // entering the group once per track.
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup.get());
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup.get());
                    }];
                }
            }
            // Balance the enter made before the asynchronous asset load began.
            dispatch_group_leave(metadataLoadingGroup.get());
        });
    }];

    // Fires once every enter has been balanced, i.e. all metadata loads completed.
    dispatch_group_notify(metadataLoadingGroup.get(), dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });
    });
}
1073
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    // Translate the cached AVPlayerItem KVO state into the engine-neutral status enum.
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    // Unknown and Failed take priority over any buffering state.
    if (m_cachedItemStatus == AVPlayerItemStatusUnknown)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;

    if (m_cachedItemStatus == AVPlayerItemStatusFailed)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;

    // Buffering state, checked in priority order: likely-to-keep-up, then buffer full,
    // then buffer empty; otherwise the item is simply ready to play.
    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;

    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;

    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1092
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformMedia(%p)", this);
    // Hand the embedder a tagged union identifying our AVPlayer instance.
    PlatformMedia platformMedia;
    platformMedia.type = PlatformMedia::AVFoundationMediaPlayerType;
    platformMedia.media.avfMediaPlayer = m_avPlayer.get();
    return platformMedia;
}
1101
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    // On iOS the video layer is hosted inside a WebVideoContainerLayer, so expose the
    // container; elsewhere expose the AVPlayerLayer directly. Null until a layer has
    // actually been requested.
#if PLATFORM(IOS)
    return m_haveBeenAskedToCreateLayer ? m_videoInlineLayer.get() : nullptr;
#else
    return m_haveBeenAskedToCreateLayer ? m_videoLayer.get() : nullptr;
#endif
}
1110
1111 #if PLATFORM(IOS)
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer)
{
    // Reparent the video layer into (or out of) the fullscreen layer, using a CAContext
    // fence so commits in the two contexts land in the same frame.
    if (m_videoFullscreenLayer == videoFullscreenLayer)
        return;

    m_videoFullscreenLayer = videoFullscreenLayer;

    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    
    CAContext *oldContext = [m_videoLayer context];
    CAContext *newContext = nil;
    
    if (m_videoFullscreenLayer && m_videoLayer) {
        // Entering fullscreen: host the video layer inside the fullscreen layer.
        [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
        [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
        newContext = [m_videoFullscreenLayer context];
    } else if (m_videoInlineLayer && m_videoLayer) {
        // Leaving fullscreen: return the video layer to the inline container.
        [m_videoLayer setFrame:[m_videoInlineLayer bounds]];
        [m_videoLayer removeFromSuperlayer];
        [m_videoInlineLayer insertSublayer:m_videoLayer.get() atIndex:0];
        newContext = [m_videoInlineLayer context];
    } else if (m_videoLayer)
        [m_videoLayer removeFromSuperlayer];

    // If the layer moved between CAContexts, fence them; the port must be deallocated
    // once handed to the new context.
    if (oldContext && newContext && oldContext != newContext) {
        mach_port_t fencePort = [oldContext createFencePort];
        [newContext setFencePort:fencePort];
        mach_port_deallocate(mach_task_self(), fencePort);
    }
    [CATransaction commit];

    // The caption representation layer follows the video into fullscreen.
    if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
    }

    updateDisableExternalPlayback();
}
1151
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    // Remember the fullscreen frame even while no fullscreen layer exists; it is
    // re-applied when one is installed (see setVideoFullscreenLayer in this file).
    m_videoFullscreenFrame = frame;
    if (!m_videoFullscreenLayer)
        return;

    if (m_videoLayer)
        [m_videoLayer setFrame:CGRectMake(0, 0, frame.width(), frame.height())];

    syncTextTrackBounds();
}
1163
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    // Map the cross-platform gravity value onto AVFoundation's layer gravity string and
    // apply it, avoiding redundant layer updates when the value is unchanged.
    m_videoFullscreenGravity = gravity;
    if (!m_videoLayer)
        return;

    NSString *videoGravity;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        videoGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        videoGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    }

    if ([m_videoLayer videoGravity] == videoGravity)
        return;

    [m_videoLayer setVideoGravity:videoGravity];
    syncTextTrackBounds();
}
1186
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenMode(MediaPlayer::VideoFullscreenMode mode)
{
    // Enable/disable PiP on the video layer; guarded because setPIPModeEnabled: is a
    // non-public selector that may be absent.
    if (m_videoLayer && [m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(mode & MediaPlayer::VideoFullscreenModePictureInPicture)];
}
1192
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    // Expose the most recently cached timed metadata payload, or nil when none exists.
    return m_currentMetaData ? m_currentMetaData.get() : nil;
}
1199
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    // Return AVFoundation's extended access (playback statistics) log as text, or the
    // empty string when there is no player item yet.
    if (!m_avPlayerItem)
        return emptyString();
    
    // `log` may be nil; messaging nil yields nil data, which produces a null String.
    AVPlayerItemAccessLog *log = [m_avPlayerItem.get() accessLog];
    RetainPtr<NSString> logString = adoptNS([[NSString alloc] initWithData:[log extendedLogData] encoding:[log extendedLogDataStringEncoding]]);

    return logString.get();
}
1210
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    // Return AVFoundation's extended error log as text, or the empty string when there
    // is no player item yet.
    if (!m_avPlayerItem)
        return emptyString();

    // `log` may be nil; messaging nil yields nil data, which produces a null String.
    AVPlayerItemErrorLog *log = [m_avPlayerItem.get() errorLog];
    RetainPtr<NSString> logString = adoptNS([[NSString alloc] initWithData:[log extendedLogData] encoding:[log extendedLogDataStringEncoding]]);

    return logString.get();
}
1221 #endif
1222
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    // Toggle the video layer's hidden flag without implicit CA animations.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    if (m_videoLayer)
        [m_videoLayer.get() setHidden:!isVisible];
    [CATransaction commit];
}
1231     
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPlay(%p)", this);
    if (!metaDataAvailable())
        return;

    // Setting a non-zero rate starts playback. Cache the rate so rate() can report it,
    // and delay callbacks so the resulting KVO notifications arrive after our state is
    // consistent.
    setDelayCallbacks(true);
    m_cachedRate = requestedRate();
    [m_avPlayer.get() setRate:requestedRate()];
    setDelayCallbacks(false);
}
1243
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPause(%p)", this);
    if (!metaDataAvailable())
        return;

    // A rate of zero pauses the player; keep the cached rate in sync for rate().
    setDelayCallbacks(true);
    m_cachedRate = 0;
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
1255
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will fetch the
    // answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();
    
    CMTime cmDuration;
    
    // Check the AVItem if we have one and it has loaded duration, some assets never report duration.
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        cmDuration = [m_avPlayerItem.get() duration];
    else
        cmDuration = [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return toMediaTime(cmDuration);

    // An indefinite duration is reported for live streams.
    if (CMTIME_IS_INDEFINITE(cmDuration))
        return MediaTime::positiveInfiniteTime();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %s", this, toString(MediaTime::invalidTime()).utf8().data());
    return MediaTime::invalidTime();
}
1280
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    // Without loaded metadata or a player item there is no meaningful position.
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    // Treat non-numeric times as zero, and clamp any negative position to zero.
    CMTime itemTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(itemTime))
        return MediaTime::zeroTime();

    return std::max(toMediaTime(itemTime), MediaTime::zeroTime());
}
1292
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // Cues assembled from partial data are stale once playback jumps elsewhere.
    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = toCMTime(time);
    CMTime cmBefore = toCMTime(negativeTolerance);
    CMTime cmAfter = toCMTime(positiveTolerance);

    auto weakThis = createWeakPtr();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::seekToTime(%p) - calling seekToTime", this);

    // The completion handler can fire after this object is destroyed; the weak pointer
    // guards the main-thread completion callback.
    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1321
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS)
    // Intentionally ignored on iOS; per-player volume is not applied there.
    UNUSED_PARAM(volume);
    return;
#else
    if (!metaDataAvailable())
        return;

    [m_avPlayer.get() setVolume:volume];
#endif
}
1334
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    // Intentionally only logs: caption visibility in this engine is driven through the
    // text track mode machinery rather than a player-level switch.
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - set to %s", this, boolString(closedCaptionsVisible));
}
1344
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    // Cache the requested rate (rate() reports the cached value) and forward it to the
    // player, delaying callbacks so resulting KVO notifications arrive afterwards.
    setDelayCallbacks(true);
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1352
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    // Report the rate cached by platformPlay/platformPause/setRateDouble; zero until
    // metadata has loaded.
    return metaDataAvailable() ? m_cachedRate : 0;
}
1360
void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
{
    // Spectral keeps audio pitch constant across rate changes; Varispeed lets it shift.
    if (!m_avPlayerItem)
        return;

    [m_avPlayerItem setAudioTimePitchAlgorithm:(preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];
}
1366
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    // Convert the cached loadedTimeRanges KVO snapshot into PlatformTimeRanges,
    // skipping invalid or empty ranges. Empty when no player item exists.
    auto timeRanges = std::make_unique<PlatformTimeRanges>();

    if (!m_avPlayerItem)
        return timeRanges;

    for (NSValue *thisRangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (CMTIMERANGE_IS_VALID(timeRange) && !CMTIMERANGE_IS_EMPTY(timeRange))
            timeRanges->add(toMediaTime(timeRange.start), toMediaTime(CMTimeRangeGetEnd(timeRange)));
    }
    return timeRanges;
}
1381
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    // The minimum seekable time is the earliest start among the valid, non-empty
    // cached seekable ranges; zero when there are none.
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    MediaTime earliestStart = MediaTime::positiveInfiniteTime();
    bool foundValidRange = false;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundValidRange = true;
        MediaTime rangeStart = toMediaTime(range.start);
        if (rangeStart < earliestStart)
            earliestStart = rangeStart;
    }
    return foundValidRange ? earliestStart : MediaTime::zeroTime();
}
1401
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    // Lazily fetch the seekable ranges if no KVO update has populated the cache yet
    // (m_cachedSeekableRanges is mutable, so this is allowed in a const method).
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    // The maximum seekable time is the largest end over all valid, non-empty ranges;
    // zero (default-constructed MediaTime) when there are none.
    MediaTime maxTimeSeekable;
    for (NSValue *thisRangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(timeRange) || CMTIMERANGE_IS_EMPTY(timeRange))
            continue;
        
        MediaTime endOfRange = toMediaTime(CMTimeRangeGetEnd(timeRange));
        if (maxTimeSeekable < endOfRange)
            maxTimeSeekable = endOfRange;
    }
    return maxTimeSeekable;
}
1419
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    // The furthest loaded position is the maximum end point over all valid, non-empty
    // cached buffered ranges; zero when nothing is cached.
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    MediaTime furthestEnd;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        MediaTime rangeEnd = toMediaTime(CMTimeRangeGetEnd(range));
        if (furthestEnd < rangeEnd)
            furthestEnd = rangeEnd;
    }

    return furthestEnd;
}
1438
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    // Sum the sample data length of every cached track, memoizing the result in
    // m_cachedTotalBytes (presumably reset when the track list changes — not visible here).
    if (!metaDataAvailable())
        return 0;

    if (m_cachedTotalBytes)
        return m_cachedTotalBytes;

    for (AVPlayerItemTrack *thisTrack in m_cachedTracks.get())
        m_cachedTotalBytes += [[thisTrack assetTrack] totalSampleDataLength];

    return m_cachedTotalBytes;
}
1452
void MediaPlayerPrivateAVFoundationObjC::setAsset(RetainPtr<id> asset)
{
    // Install an externally created asset directly, bypassing createAVAssetForURL().
    m_avAsset = asset;
}
1457
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    // Aggregate the load status of every metadata key into a single engine-neutral
    // status: loading/failed/cancelled win over loaded; "playable" upgrades loaded.
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];
#if !LOG_DISABLED
        if (error)
            LOG(Media, "MediaPlayerPrivateAVFoundation::assetStatus - statusOfValueForKey failed for %s, error = %s", [keyName UTF8String], [[error localizedDescription] UTF8String]);
#endif

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading;// At least one key is not loaded yet.
        
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    // Everything loaded; distinguish playable from merely loaded.
    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue])
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1486
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    // Report the NSError code recorded for the asset's "playable" key, or 0 when there
    // is no asset or no error (messaging a nil error returns 0).
    if (!m_avAsset)
        return 0;

    NSError *error = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&error];
    return [error code];
}
1496
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext* context, const FloatRect& rect)
{
    // Paint the current video frame into the context, preferring the video-output path
    // when a frame is available there and falling back to the image generator.
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    // Remember that a frame has been painted so hasAvailableVideoFrame() can report it.
    m_videoFrameHasDrawn = true;
}
1517
// Best-effort paint entry point: bails out when rendering to a layer (the
// layer already shows the video) or when no context renderer exists yet.
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext* context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    // We can ignore the request if we are already rendering to a layer.
    if (currentRenderingMode() == MediaRenderingToLayer)
        return;

    // paint() is best effort, so only paint if we already have an image generator or video output available.
    if (!hasContextRenderer())
        return;

    paintCurrentFrameInContext(context, rect);
}
1533
// Paints the frame at the current time into |context| using an
// AVAssetImageGenerator snapshot. This is the fallback path used when no
// AVPlayerItemVideoOutput frame is available (see paintCurrentFrameInContext()).
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext* context, const FloatRect& rect)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(%p)", this);

    RetainPtr<CGImageRef> image = createImageForTimeInRect(currentTime(), rect);
    if (!image)
        return;

    GraphicsContextStateSaver stateSaver(*context);
    // CGImages are flipped relative to WebCore's coordinate space, so flip the
    // context vertically before drawing.
    context->translate(rect.x(), rect.y() + rect.height());
    context->scale(FloatSize(1.0f, -1.0f));
    context->setImageInterpolationQuality(InterpolationLow);
    IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context->platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
    // Note: the RetainPtr releases the image at scope exit; the original
    // explicit "image = 0;" was redundant and has been removed.
}
1549
// Lazily-built, process-lifetime cache of the MIME types AVFoundation reports
// it can play, lower-cased for case-insensitive lookup.
static const HashSet<String>& avfMIMETypes()
{
    static NeverDestroyed<HashSet<String>> supportedTypes = [] {
        HashSet<String> set;
        for (NSString *mimeType in [AVURLAsset audiovisualMIMETypes])
            set.add([mimeType lowercaseString]);
        return set;
    }();

    return supportedTypes;
}
1565
// Snapshots the frame at |time| via AVAssetImageGenerator, sized to fit |rect|,
// and copies it into the device RGB color space before returning it.
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    // Timescale 600 is the conventional QuickTime timescale for second-based times.
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    // Re-wrap the image in the device RGB color space so drawing does not
    // require a color conversion at paint time.
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), deviceRGBColorSpaceRef()));

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return image;
}
1587
// Copies the set of MIME types supported by AVFoundation into |supportedTypes|.
// Fix: the function body was missing its closing brace, which would have made
// the following #if block part of this function and broken compilation.
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String>& supportedTypes)
{
    supportedTypes = avfMIMETypes();
}
1592
1593 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// Returns true for the key systems this engine can handle: Apple FairPlay
// Streaming ("com.apple.fps", "com.apple.fps.1_0") and W3C Clear Key.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringASCIICase(keySystem, "com.apple.fps")
        || equalIgnoringASCIICase(keySystem, "com.apple.fps.1_0")
        || equalIgnoringASCIICase(keySystem, "org.w3c.clearkey");
}
1600 #endif
1601
// Implements the engine's canPlayType() support check: first applies the EME
// key-system restrictions (when enabled), then rejects MSE/MediaStream and
// unsupported MIME types, and finally consults AVFoundation for the full
// "type; codecs" string.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(ENCRYPTED_MEDIA)
    // From: <http://dvcs.w3.org/hg/html-media/raw-file/eme-v0.1b/encrypted-media/encrypted-media.html#dom-canplaytype>
    // In addition to the steps in the current specification, this method must run the following steps:

    // 1. Check whether the Key System is supported with the specified container and codec type(s) by following the steps for the first matching condition from the following list:
    //    If keySystem is null, continue to the next step.
    if (!parameters.keySystem.isNull() && !parameters.keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        if (equalIgnoringASCIICase(parameters.keySystem, "org.w3c.clearkey") && !parameters.type.isEmpty() && !equalIgnoringASCIICase(parameters.type, "application/x-mpegurl"))
            return MediaPlayer::IsNotSupported;

        // If keySystem contains an unrecognized or unsupported Key System, return the empty string
        if (!keySystemIsSupported(parameters.keySystem))
            return MediaPlayer::IsNotSupported;

        // If the Key System specified by keySystem does not support decrypting the container and/or codec specified in the rest of the type string.
        // (AVFoundation does not provide an API which would allow us to determine this, so this is a no-op)
    }

    // 2. Return "maybe" or "probably" as appropriate per the existing specification of canPlayType().
#endif

#if ENABLE(MEDIA_SOURCE)
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif
#if ENABLE(MEDIA_STREAM)
    if (parameters.isMediaStream)
        return MediaPlayer::IsNotSupported;
#endif
    if (isUnsupportedMIMEType(parameters.type))
        return MediaPlayer::IsNotSupported;

    if (!staticMIMETypeList().contains(parameters.type) && !avfMIMETypes().contains(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    // With codecs present, ask AVFoundation directly about the extended MIME type.
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type, (NSString *)parameters.codecs];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1648
// Returns whether the given EME key system (optionally restricted by MIME
// type) is supported by this engine. An empty keySystem is never supported.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (!keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        // Fix: this bool-returning function previously returned the enum value
        // MediaPlayer::IsNotSupported here, which only worked because that
        // enumerator happens to convert to false.
        if (equalIgnoringASCIICase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringASCIICase(mimeType, "application/x-mpegurl"))
            return false;

        if (!keySystemIsSupported(keySystem))
            return false;

        if (!mimeType.isEmpty() && isUnsupportedMIMEType(mimeType))
            return false;

        if (!mimeType.isEmpty() && !staticMIMETypeList().contains(mimeType) && !avfMIMETypes().contains(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1674
1675 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1676 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Answers an AVAssetResourceLoadingRequest directly from in-memory key data:
// fills in the content-information request, then serves the byte range the
// data request asked for (clamped to the key's length) and finishes loading.
static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
{
    if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
        [infoRequest setContentLength:keyData->byteLength()];
        [infoRequest setByteRangeAccessSupported:YES];
    }

    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
        long long start = [dataRequest currentOffset];
        // Clamp the requested end to the available key data.
        long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);

        // A negative or out-of-range request cannot be satisfied; fail the load.
        if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
            [request finishLoadingWithError:nil];
            return;
        }

        ASSERT(start <= std::numeric_limits<int>::max());
        ASSERT(end <= std::numeric_limits<int>::max());
        RefPtr<ArrayBuffer> requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
        RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
        [dataRequest respondWithData:nsData.get()];
    }

    [request finishLoading];
}
1702 #endif
1703
// AVAssetResourceLoader delegate hook. Returns true when WebCore takes
// responsibility for loading the resource: "skd" (FairPlay) and "clearkey"
// URLs are routed through the EME keyNeeded machinery, everything else is
// handed to a WebCoreAVFResourceLoader.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (scheme == "skd") {
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
        // Write the size prefix little-endian (third argument == littleEndian).
        initDataView->set<uint32_t>(0, keyURISize, true);

        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, 4, keyURI.length());
        // NOTE(review): the length divisor sizeof(unsigned char) == 1, so this
        // copies keyURI.length() UChars; the divisor looks like it was meant to
        // be something else — confirm intent before changing.
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

#if ENABLE(ENCRYPTED_MEDIA)
        if (!player()->keyNeeded("com.apple.lskd", emptyString(), static_cast<const unsigned char*>(initDataBuffer->data()), initDataBuffer->byteLength()))
#elif ENABLE(ENCRYPTED_MEDIA_V2)
        RefPtr<Uint8Array> initData = Uint8Array::create(initDataBuffer, 0, initDataBuffer->byteLength());
        if (!player()->keyNeeded(initData.get()))
#endif
            return false;

        // Remember the request so the session can fulfill it once a key arrives.
        m_keyURIToRequestMap.set(keyURI, avRequest);
        return true;
#if ENABLE(ENCRYPTED_MEDIA_V2)
    } else if (scheme == "clearkey") {
        String keyID = [[[avRequest request] URL] resourceSpecifier];
        StringView keyIDView(keyID);
        CString utf8EncodedKeyId = UTF8Encoding().encode(keyIDView, URLEncodedEntitiesForUnencodables);

        RefPtr<Uint8Array> initData = Uint8Array::create(utf8EncodedKeyId.length());
        initData->setRange((JSC::Uint8Adaptor::Type*)utf8EncodedKeyId.data(), utf8EncodedKeyId.length(), 0);

        // If the key is already cached, answer the request synchronously.
        auto keyData = player()->cachedKeyForKeyId(keyID);
        if (keyData) {
            fulfillRequestWithKeyData(avRequest, keyData.get());
            return false;
        }

        if (!player()->keyNeeded(initData.get()))
            return false;

        m_keyURIToRequestMap.set(keyID, avRequest);
        return true;
#endif
    }
#endif

    // Non-key resources are loaded through WebCore's loader so normal
    // networking policy applies.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1760
// Wraps the platform NSURLAuthenticationChallenge in a WebCore
// AuthenticationChallenge (via CFNetwork when that backend is in use) and
// forwards the decision to the MediaPlayer client.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForResponseToAuthenticationChallenge(NSURLAuthenticationChallenge* nsChallenge)
{
#if USE(CFNETWORK)
    RefPtr<WebCoreNSURLAuthenticationChallengeClient> client = WebCoreNSURLAuthenticationChallengeClient::create(nsChallenge);
    RetainPtr<CFURLAuthChallengeRef> cfChallenge = adoptCF([nsChallenge _createCFAuthChallenge]);
    AuthenticationChallenge challenge(cfChallenge.get(), client.get());
#else
    AuthenticationChallenge challenge(nsChallenge);
#endif

    return player()->shouldWaitForResponseToAuthenticationChallenge(challenge);
}
1773
// Stops the WebCore resource loader associated with a cancelled
// AVAssetResourceLoadingRequest, if one exists. (The unused local that
// computed the request's URL scheme has been removed.)
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest))
        resourceLoader->stopLoading();
}
1783
// Drops the bookkeeping entry for a loading request whose loader has stopped;
// removing it from the map releases our reference to the loader.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
1788 #endif
1789
// This engine is usable only when both the AVFoundation and CoreMedia
// frameworks can be soft-linked at runtime.
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    return AVFoundationLibrary() && isCoreMediaFrameworkAvailable();
}
1794
// Intended to map a wall-clock time value onto the media timeline; currently
// an identity mapping in both branches (see the FIXME) so the metadata check
// has no observable effect yet.
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1803
// How long (in seconds) a cached currentTime value may be trusted. On iOS and
// OS X 10.10+ the framework reports time accurately enough that no caching
// window is needed; older OS X builds cache for 5 seconds.
double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 1010
    return 0;
#else
    return 5;
#endif
}
1812
// Applies the aspect-ratio policy to the AVPlayerLayer's videoGravity inside a
// CATransaction with implicit animations disabled, so the change is not animated.
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

#if PLATFORM(IOS)
    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayer)
        return;
#endif

    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    NSString* gravity = shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize;
    [m_videoLayer.get() setVideoGravity:gravity];
    [CATransaction commit];
}
1831
// Returns the first track in |tracks| whose isEnabled flag is set, or nil if
// every track is disabled (or the array is empty/nil).
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    for (AVAssetTrack* track in tracks) {
        if ([track isEnabled])
            return track;
    }
    return nil;
}
1841
// Recomputes all cached track-derived state (hasVideo/hasAudio/captions,
// presentation size, track lists, primary audio language) whenever the track
// collection changes. Characteristic-changed notifications are batched for the
// duration of the update.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the previous primary-language answer so we only fire
    // characteristicsChanged() when it actually changes.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif

        // Report the natural size with the track's preferred transform applied
        // (e.g. rotation metadata).
        presentationSizeDidChange(firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize());
    } else {
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // Selection-group options may provide audio/video even when no
        // AVPlayerItemTrack does.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
#endif
#endif

        // Always say we have video if the AVPlayerLayer is ready for display to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    LOG(Media, "MediaPlayerPrivateAVFoundation:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s",
        this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    if (m_provider)
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
#endif

    setDelayCharacteristicsChangedNotification(false);
}
1947
1948 #if ENABLE(VIDEO_TRACK)
// Diffs the AVPlayerItemTracks of a given media type against the existing
// WebCore track objects in |oldItems|: removed tracks are reported through
// |removedFunction|, newly-appearing ones are wrapped via |itemFactory| and
// reported through |addedFunction|, and |oldItems| is rewritten to the
// surviving + added set.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Restrict the incoming track list to the requested media type.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Partition the old items into removed vs. surviving.
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the client only after oldItems reflects the final state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
1992
1993 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Media-selection-group overload of the diffing helper above: refreshes the
// group's options for the given characteristics, then diffs the options that
// have no backing persistent track against |oldItems|, notifying the player of
// removed and added items and rewriting |oldItems| in place.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, const Vector<String>& characteristics, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions(characteristics);

    // Only add selection options which do not have an associated persistant track.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOptionType* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        // -track is not available on all OS versions, hence the selector check.
        if (![avOption respondsToSelector:@selector(track)] || ![avOption performSelector:@selector(track)])
            newSelectionOptions.add(option);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Partition the old items into removed vs. surviving.
    for (auto& oldItem : oldItems) {
        if (oldItem->mediaSelectionOption() && removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);
    
    // Notify the client only after oldItems reflects the final state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);
    
    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
2053 #endif
2054
// Synchronizes m_audioTracks with the current AVPlayerItemTracks and (when
// available) the audible media-selection group, then refreshes each track's
// cached properties.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    Vector<String> characteristics = player()->preferredAudioCharacteristics();
    // Lazily create the audible selection group the first time it is needed.
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, characteristics);
    }

    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, characteristics, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
#endif

    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateAudioTracks(%p) - audio track count was %lu, is %lu", this, count, m_audioTracks.size());
#endif
}
2081
// Synchronizes m_videoTracks with the current AVPlayerItemTracks and (when
// available) the visual media-selection group, then refreshes each track's
// cached properties.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create the visual selection group the first time it is needed.
    if (!m_visualGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, Vector<String>());
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, Vector<String>(), &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
#endif

    // Fix: this loop previously iterated m_audioTracks — a copy/paste error
    // from updateAudioTracks() — so video track properties were never reset.
    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateVideoTracks(%p) - video track count was %lu, is %lu", this, count, m_videoTracks.size());
#endif
}
2107
// A WebCore-provided text track representation is required only on iOS and
// only while a fullscreen layer is active (see setTextTrackRepresentation()).
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
#if PLATFORM(IOS)
    if (m_videoFullscreenLayer)
        return true;
#endif
    return false;
}
2116
// iOS only: resizes the text-track representation layer to match the video's
// on-screen rect (the AVPlayerLayer's videoRect when available, otherwise the
// full fullscreen frame).
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
#if PLATFORM(IOS)
    if (!m_videoFullscreenLayer || !m_textTrackRepresentationLayer)
        return;
    
    CGRect textFrame = m_videoLayer ? [m_videoLayer videoRect] : CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height());
    [m_textTrackRepresentationLayer setFrame:textFrame];
#endif
}
2127
// iOS only: installs (or removes) the caption-rendering layer as a sublayer of
// the fullscreen layer, keeping its bounds in sync with the video rect. A no-op
// when the same layer is re-set, aside from re-syncing bounds.
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
#if PLATFORM(IOS)
    PlatformLayer* representationLayer = representation ? representation->platformLayer() : nil;
    if (representationLayer == m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        return;
    }

    // Detach the previous representation layer before swapping in the new one.
    if (m_textTrackRepresentationLayer)
        [m_textTrackRepresentationLayer removeFromSuperlayer];

    m_textTrackRepresentationLayer = representationLayer;

    if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
    }

#else
    UNUSED_PARAM(representation);
#endif
}
2151 #endif // ENABLE(VIDEO_TRACK)
2152
2153 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
// Lazily creates the Web Audio source provider for the current player item and
// points it at the first enabled audible track.
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    if (!m_provider) {
        m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }

    return m_provider.get();
}
2163 #endif
2164
// Publishes the cached presentation size as the media's natural size, but only
// once an asset exists.
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (m_avAsset)
        setNaturalSize(m_cachedPresentationSize);
}
2172     
// Returns true only when the asset's resolved URL is known to share a
// scheme/host/port origin with the URL originally requested (i.e. no
// cross-origin redirect occurred). Conservatively false while "resolvedURL"
// has not finished loading.
bool MediaPlayerPrivateAVFoundationObjC::hasSingleSecurityOrigin() const 
{
    if (!m_avAsset || [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] != AVKeyValueStatusLoaded)
        return false;
    
    Ref<SecurityOrigin> resolvedOrigin(SecurityOrigin::create(resolvedURL()));
    Ref<SecurityOrigin> requestedOrigin(SecurityOrigin::createFromString(assetURL()));
    return resolvedOrigin.get().isSameSchemeHostPort(&requestedOrigin.get());
}
2182
2183 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Creates the AVPlayerItemVideoOutput used for frame capture and attaches it
// to the player item. With VideoToolbox on 10.10+ no pixel format is pinned
// (format conversion happens later in createPixelBuffer()); otherwise 32BGRA
// is requested up front.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX) && (!defined(__MAC_OS_X_VERSION_MIN_REQUIRED) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 101000)
    NSDictionary* attributes = nil;
#else
    NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                nil];
#endif
    m_videoOutput = adoptNS([allocAVPlayerItemVideoOutputInstance() initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];

#if defined(__MAC_OS_X_VERSION_MIN_REQUIRED) && __MAC_OS_X_VERSION_MIN_REQUIRED < 101000
    // On older OS X, block until the output reports that media data will change,
    // otherwise the first pull may return no frame.
    waitForVideoOutputMediaDataWillChange();
#endif

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p) - returning %p", this, m_videoOutput.get());
}
2210
// Detaches the video output from the player item (when one still exists) and
// releases it. (Cleared with nullptr rather than the literal 0 for clarity.)
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput(%p) - destroying  %p", this, m_videoOutput.get());

    m_videoOutput = nullptr;
}
2222
// Pulls the current frame from the AVPlayerItemVideoOutput as a CVPixelBuffer,
// converting it to 32BGRA via VideoToolbox when that path is enabled. Returns
// null when no new frame is available.
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::createPixelBuffer()
{
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return 0;

    RetainPtr<CVPixelBufferRef> buffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
    if (!buffer)
        return 0;

#if USE(VIDEOTOOLBOX)
    // Create a VTPixelTransferSession, if necessary, as we cannot guarantee timely delivery of ARGB pixels.
    if (!m_pixelTransferSession) {
        VTPixelTransferSessionRef session = 0;
        VTPixelTransferSessionCreate(kCFAllocatorDefault, &session);
        m_pixelTransferSession = adoptCF(session);
    }

    // Fix: initialize outputBuffer and check the result of CVPixelBufferCreate.
    // The original code passed an uninitialized pointer to
    // VTPixelTransferSessionTransferImage when buffer creation failed.
    CVPixelBufferRef outputBuffer = nullptr;
    if (CVPixelBufferCreate(kCFAllocatorDefault, CVPixelBufferGetWidth(buffer.get()), CVPixelBufferGetHeight(buffer.get()), kCVPixelFormatType_32BGRA, 0, &outputBuffer) == kCVReturnSuccess && outputBuffer) {
        VTPixelTransferSessionTransferImage(m_pixelTransferSession.get(), buffer.get(), outputBuffer);
        buffer = adoptCF(outputBuffer);
    }
#endif

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createPixelBuffer(%p) - creating buffer took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return buffer;
}
2263
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    // No player item means no frames at all.
    if (!m_avPlayerItem)
        return false;

    // A previously captured frame still counts as available.
    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    // Otherwise, ask the video output whether it has a fresh buffer for now.
    CMTime itemTime = [m_avPlayerItem currentTime];
    return [m_videoOutput hasNewPixelBufferForItemTime:itemTime];
}
2277
static const void* CVPixelBufferGetBytePointerCallback(void* info)
{
    // The data provider's info pointer is the retained CVPixelBuffer itself.
    // Lock its base address for reading; unlocked again in
    // CVPixelBufferReleaseBytePointerCallback.
    CVPixelBufferRef buffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferLockBaseAddress(buffer, kCVPixelBufferLock_ReadOnly);
    return CVPixelBufferGetBaseAddress(buffer);
}
2284
static void CVPixelBufferReleaseBytePointerCallback(void* info, const void*)
{
    // Balances the lock taken in CVPixelBufferGetBytePointerCallback.
    CVPixelBufferRef buffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferUnlockBaseAddress(buffer, kCVPixelBufferLock_ReadOnly);
}
2290
static void CVPixelBufferReleaseInfoCallback(void* info)
{
    // Balances the CFRetain performed in createImageFromPixelBuffer when the
    // data provider was created.
    CVPixelBufferRef buffer = static_cast<CVPixelBufferRef>(info);
    CFRelease(buffer);
}
2296
// Wraps a 32BGRA pixel buffer in a CGImage without copying the pixels: a
// direct-access CGDataProvider reads straight out of the (locked) buffer.
static RetainPtr<CGImageRef> createImageFromPixelBuffer(CVPixelBufferRef pixelBuffer)
{
    // pixelBuffer will be of type kCVPixelFormatType_32BGRA.
    ASSERT(CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_32BGRA);

    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
    size_t byteLength = CVPixelBufferGetDataSize(pixelBuffer);
    // 32BGRA in memory reads as ARGB through a little-endian 32-bit word,
    // hence AlphaFirst combined with ByteOrder32Little.
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaFirst;

    CFRetain(pixelBuffer); // Balanced by CVPixelBufferReleaseInfoCallback in providerCallbacks.
    // The provider locks the buffer's base address on demand and unlocks it when
    // CoreGraphics is done with the bytes (see the callbacks above).
    CGDataProviderDirectCallbacks providerCallbacks = { 0, CVPixelBufferGetBytePointerCallback, CVPixelBufferReleaseBytePointerCallback, 0, CVPixelBufferReleaseInfoCallback };
    RetainPtr<CGDataProviderRef> provider = adoptCF(CGDataProviderCreateDirect(pixelBuffer, byteLength, &providerCallbacks));

    return adoptCF(CGImageCreate(width, height, 8, 32, bytesPerRow, deviceRGBColorSpaceRef(), bitmapInfo, provider.get(), NULL, false, kCGRenderingIntentDefault));
}
2314
void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
{
    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the
    // pixel buffer for the requested time has already been retrieved. In that case
    // keep m_lastImage as-is so the last valid frame (if any) is still displayed.
    if (RetainPtr<CVPixelBufferRef> pixelBuffer = createPixelBuffer())
        m_lastImage = createImageFromPixelBuffer(pixelBuffer.get());
}
2325
// Paints the current video frame from the AVPlayerItemVideoOutput into the
// graphics context, honoring the video track's preferred transform.
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext* context, const FloatRect& outputRect)
{
#if (!defined(__MAC_OS_X_VERSION_MIN_REQUIRED) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 101000)
    // Block (bounded by the 1s timeout in waitForVideoOutputMediaDataWillChange)
    // until the output has a first frame, so early paints are not dropped.
    if (m_videoOutput && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();
#endif

    updateLastImage();

    if (!m_lastImage)
        return;

    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(%p)", this);

    GraphicsContextStateSaver stateSaver(*context);
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    // Concat the track's preferred transform onto the CTM, and map the destination
    // rect through its inverse so the net on-screen placement stays outputRect.
    AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
    FloatRect transformedOutputRect = videoTransform.inverse().mapRect(outputRect);

    context->concatCTM(videoTransform);
    context->drawNativeImage(m_lastImage.get(), imageRect.size(), ColorSpaceDeviceRGB, transformedOutputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();

}
2358
// Refreshes m_lastImage from the video output and returns it; may be null when
// no frame has ever been produced.
PassNativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage.get();
}
2364
// Blocks the calling thread (up to 1 second) until the video output reports that
// media data will change; woken by outputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    // FIXME(review): m_videoOutputSemaphore is created here but never released,
    // and dispatch objects may not be retained correctly when they are not backed
    // by Objective-C objects — consider holding it in a smart pointer. Confirm
    // against the member's declaration in the header.
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for 1 second.
    long result = dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(0, 1 * NSEC_PER_SEC));

    // dispatch_semaphore_wait returns non-zero on timeout.
    if (result)
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange(%p) timed out", this);
}
2378
// AVPlayerItemOutput delegate callback: wakes waitForVideoOutputMediaDataWillChange().
// NOTE(review): assumes the semaphore already exists — the notification is only
// requested from waitForVideoOutputMediaDataWillChange(), which creates it first;
// confirm no other code path requests the notification.
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutput*)
{
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
2383 #endif
2384
2385 #if ENABLE(ENCRYPTED_MEDIA)
// ENCRYPTED_MEDIA (EME v1) entry point: parses the init data, asks AVFoundation
// for a streaming content key request, and forwards it to the page as a key
// message. Returns a MediaKeyException status code.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::generateKeyRequest(const String& keySystem, const unsigned char* initDataPtr, unsigned initDataLength)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    // The init data carries the key URI, key ID and application certificate.
    RefPtr<Uint8Array> initData = Uint8Array::create(initDataPtr, initDataLength);
    String keyURI;
    String keyID;
    RefPtr<Uint8Array> certificate;
    if (!extractKeyURIKeyIDAndCertificateFromInitData(initData.get(), keyURI, keyID, certificate))
        return MediaPlayer::InvalidPlayerState;

    // There must be a resource-loading request already parked for this key URI.
    if (!m_keyURIToRequestMap.contains(keyURI))
        return MediaPlayer::InvalidPlayerState;

    String sessionID = createCanonicalUUIDString();

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_keyURIToRequestMap.get(keyURI);

    RetainPtr<NSData> certificateData = adoptNS([[NSData alloc] initWithBytes:certificate->baseAddress() length:certificate->byteLength()]);
    NSString* assetStr = keyID;
    RetainPtr<NSData> assetID = [NSData dataWithBytes: [assetStr cStringUsingEncoding:NSUTF8StringEncoding] length:[assetStr lengthOfBytesUsingEncoding:NSUTF8StringEncoding]];
    NSError* error = 0;
    RetainPtr<NSData> keyRequest = [avRequest.get() streamingContentKeyRequestDataForApp:certificateData.get() contentIdentifier:assetID.get() options:nil error:&error];

    // On failure, report the underlying error code to the page but return NoError:
    // the exception channel is for synchronous failures only.
    if (!keyRequest) {
        NSError* underlyingError = [[error userInfo] objectForKey:NSUnderlyingErrorKey];
        player()->keyError(keySystem, sessionID, MediaPlayerClient::DomainError, [underlyingError code]);
        return MediaPlayer::NoError;
    }

    RefPtr<ArrayBuffer> keyRequestBuffer = ArrayBuffer::create([keyRequest.get() bytes], [keyRequest.get() length]);
    RefPtr<Uint8Array> keyRequestArray = Uint8Array::create(keyRequestBuffer, 0, keyRequestBuffer->byteLength());
    player()->keyMessage(keySystem, sessionID, keyRequestArray->data(), keyRequestArray->byteLength(), URL());

    // Move ownership of the AVAssetResourceLoadingRequest from the keyURIToRequestMap to the sessionIDToRequestMap:
    m_sessionIDToRequestMap.set(sessionID, avRequest);
    m_keyURIToRequestMap.remove(keyURI);

    return MediaPlayer::NoError;
}
2427
// EME v1: delivers key bytes to the loading request associated with sessionID
// and completes that request. initData is unused for this key system.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::addKey(const String& keySystem, const unsigned char* keyPtr, unsigned keyLength, const unsigned char* initDataPtr, unsigned initDataLength, const String& sessionID)
{
    UNUSED_PARAM(initDataPtr);
    UNUSED_PARAM(initDataLength);

    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    // Hand the key to the waiting resource-loading request, then let it finish.
    RetainPtr<AVAssetResourceLoadingRequest> request = m_sessionIDToRequestMap.get(sessionID);
    RetainPtr<NSData> responseData = adoptNS([[NSData alloc] initWithBytes:keyPtr length:keyLength]);
    [[request.get() dataRequest] respondWithData:responseData.get()];
    [request.get() finishLoading];
    m_sessionIDToRequestMap.remove(sessionID);

    player()->keyAdded(keySystem, sessionID);

    return MediaPlayer::NoError;
}
2448
// EME v1: abandons the outstanding key exchange for sessionID by dropping its
// stored loading request.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::cancelKeyRequest(const String& keySystem, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    // HashMap::remove() returns false when the key was absent, which is the
    // same condition the contains() check used to detect.
    if (!m_sessionIDToRequestMap.remove(sessionID))
        return MediaPlayer::InvalidPlayerState;

    return MediaPlayer::NoError;
}
2460 #endif
2461
2462 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Transfers ownership of the pending loading request for keyURI to the caller;
// returns a null RetainPtr when no request is pending for that URI.
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
2467
void MediaPlayerPrivateAVFoundationObjC::keyAdded()
{
    // Satisfy every pending loading request whose key is now available in the
    // player's key cache.
    Vector<String> satisfiedKeyIds;

    for (auto& requestPair : m_keyURIToRequestMap) {
        auto keyData = player()->cachedKeyForKeyId(requestPair.key);
        if (!keyData)
            continue;

        fulfillRequestWithKeyData(requestPair.value.get(), keyData.get());
        satisfiedKeyIds.append(requestPair.key);
    }

    // Remove fulfilled entries after iteration; mutating the map inside the
    // range-for above would invalidate its iterators.
    for (auto& keyId : satisfiedKeyIds)
        m_keyURIToRequestMap.remove(keyId);
}
2487
// EME v2: creates a CDM session, but only for key systems this player supports.
std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem)
{
    return keySystemIsSupported(keySystem) ? std::make_unique<CDMSessionAVFoundationObjC>(this) : nullptr;
}
2495 #endif
2496
2497 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
// Reconciles the legacy (pre-legible-output) closed-caption tracks exposed by
// the AVPlayerItem with our m_textTracks list, then reports adds/removes.
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    // Start by assuming every current track was removed; tracks matched against
    // the player item's tracks below are rescued from this list.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            // Index into removedTextTracks, not m_textTracks: once an entry has
            // been removed from removedTextTracks (and as m_textTracks grows via
            // the append below) the two vectors no longer line up, so the old
            // m_textTracks[i - 1] lookup could cast the wrong track. This also
            // matches how processMediaSelectionOptions indexes its copy.
            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(removedTextTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;

        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2532 #endif
2533
NSArray* MediaPlayerPrivateAVFoundationObjC::safeAVAssetTracksForAudibleMedia()
{
    // Querying tracks before the asset's "tracks" key has loaded could block on
    // synchronous loading, so answer nil until the key is ready.
    if (!m_avAsset || [m_avAsset.get() statusOfValueForKey:@"tracks" error:NULL] != AVKeyValueStatusLoaded)
        return nil;

    return [m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible];
}
2544
2545 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    // Media selection groups are only usable once the corresponding asset key
    // has finished loading.
    if (!m_avAsset)
        return false;

    return [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2556
// Returns the legible (caption/subtitle) selection group, or nil until the
// asset's media selection data has loaded.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible] : nil;
}
2564
// Returns the audible selection group, or nil until the asset's media
// selection data has loaded.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible] : nil;
}
2572
// Returns the visual selection group, or nil until the asset's media
// selection data has loaded.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual] : nil;
}
2580
// Reconciles the asset's legible media-selection options with m_textTracks,
// creating tracks for new options and reporting ones that disappeared.
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Assume every existing track was removed; options matched below rescue
    // their track from this list.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        // Iterate backwards so remove(i - 1) does not disturb unvisited indices.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy closed-caption tracks are not backed by selection options.
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }

            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2636
// Lazily creates the single in-band metadata text track and registers it with
// the player; subsequent calls are no-ops.
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    if (m_metadataTrack)
        return;

    m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
    m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
    player()->addTextTrack(m_metadataTrack);
}
2646
// Forwards legible-output cue data to the currently selected text track.
// Cues arriving while no track is selected are dropped.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    if (m_currentTextTrack)
        m_currentTextTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), reinterpret_cast<CFArrayRef>(nativeSamples), time);
}
2654
// Clears any partially accumulated cue state on the selected text track.
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::flushCues(%p)", this);

    if (m_currentTextTrack)
        m_currentTextTrack->resetCueValues();
}
2664 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2665
// Selects (or, with null, deselects) the text track that should receive cues,
// routing the selection through the mechanism appropriate to the track kind.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");
        
    m_currentTextTrack = track;

    if (track) {
        // Legacy CC tracks use the player-level closed-caption switch; selection-
        // group-backed tracks are chosen via their AVMediaSelectionOption.
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
        // Deselecting: clear both the media-selection choice and the legacy switch.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
    }

}
2694
// Determines (and caches in the mutable m_languageOfPrimaryAudioTrack) the
// language of the primary audio track, preferring the currently selected
// audible media-selection option when one exists.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    // Return the cached answer once computed; a null String means "not yet".
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        // Zero or multiple audio tracks: no single primary language to report.
        m_languageOfPrimaryAudioTrack = emptyString();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - %lu audio tracks, returning emptyString()", this, static_cast<unsigned long>(tracks ? [tracks count] : 0));
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

#if !LOG_DISABLED
    if (m_languageOfPrimaryAudioTrack == emptyString())
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
    else
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
#endif

    return m_languageOfPrimaryAudioTrack;
}
2736
2737 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// Reports whether AVFoundation is currently routing playback to an external
// (e.g. AirPlay) target.
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    if (!m_avPlayer)
        return false;

    bool isWireless = m_avPlayer.get().externalPlaybackActive;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless(%p) - returning %s", this, boolString(isWireless));

    return isWireless;
}
2748
// Maps the platform's notion of the external playback device onto the
// MediaPlayer wireless-target enum.
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

#if PLATFORM(IOS)
    // On iOS, WebKitSystemInterface reports the concrete external device type.
    switch (wkExernalDeviceTypeForPlayer(m_avPlayer.get())) {
    case wkExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case wkExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case wkExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;

#else
    // On Mac, the only wireless target kind supported here is AirPlay.
    return MediaPlayer::TargetTypeAirPlay;
#endif
}
2771
// Returns the human-readable name of the current wireless playback device,
// or the empty string when there is no player or no target.
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();

    String wirelessTargetName;
#if !PLATFORM(IOS)
    // Mac: the name comes from the AVOutputContext set via setWirelessPlaybackTarget().
    if (m_outputContext)
        wirelessTargetName = m_outputContext.get().deviceName;
#else
    // iOS: WebKitSystemInterface knows the external device's display name.
    wirelessTargetName = wkExernalDeviceDisplayNameForPlayer(m_avPlayer.get());
#endif
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName(%p) - returning %s", this, wirelessTargetName.utf8().data());

    return wirelessTargetName;
}
2788
// Reports whether wireless (external) video playback is disabled. Note the
// inverted sense: the cached member stores "allows", the return value is "disabled".
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    // Without a player, answer from the cached policy set via the setter.
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;

    // m_allowsWirelessVideoPlayback is mutable: refresh the cache from AVFoundation.
    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled(%p) - returning %s", this, boolString(!m_allowsWirelessVideoPlayback));

    return !m_allowsWirelessVideoPlayback;
}
2799
// Updates the wireless-video policy, caching it so it can be applied when an
// AVPlayer is created later, and pushing it to an existing player immediately.
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(%p) - %s", this, boolString(disabled));
    m_allowsWirelessVideoPlayback = !disabled;
    if (!m_avPlayer)
        return;

    // Suppress re-entrant callbacks while poking AVFoundation.
    setDelayCallbacks(true);
    [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
    setDelayCallbacks(false);
}
2811
2812 #if !PLATFORM(IOS)
// Mac-only: records the AVOutputContext for the chosen playback target. A
// target with no context or device name cannot be routed to, so routing is
// turned off in that case.
void MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
{
    MediaPlaybackTargetMac* macTarget = toMediaPlaybackTargetMac(&target.get());

    m_outputContext = macTarget->outputContext();
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(%p) - target = %p, device name = %s", this, m_outputContext.get(), [m_outputContext.get().deviceName UTF8String]);

    if (!m_outputContext || !m_outputContext.get().deviceName)
        setShouldPlayToPlaybackTarget(false);
}
2823
// Mac-only: starts or stops routing playback to the stored AVOutputContext.
void MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(bool shouldPlay)
{
    m_shouldPlayToPlaybackTarget = shouldPlay;

    // Routing off is expressed by assigning a nil output context to the player.
    AVOutputContext *newContext = shouldPlay ? m_outputContext.get() : nil;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(%p) - target = %p, shouldPlay = %s", this, newContext, boolString(shouldPlay));

    if (!m_avPlayer)
        return;

    // Avoid touching AVFoundation when the routing state would not change.
    RetainPtr<AVOutputContext> currentContext = m_avPlayer.get().outputContext;
    if ((!newContext && !currentContext.get()) || [currentContext.get() isEqual:newContext])
        return;

    // Suppress re-entrant callbacks while the route switches.
    setDelayCallbacks(true);
    m_avPlayer.get().outputContext = newContext;
    setDelayCallbacks(false);
}
2842 #endif // !PLATFORM(IOS)
2843
// iOS: enables external playback mirroring only while the element is in
// fullscreen (i.e. a video fullscreen layer exists).
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
    if (!m_avPlayer)
        return;

#if PLATFORM(IOS)
    [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:m_videoFullscreenLayer != nil];
#endif
}
2853 #endif
2854
// KVO sink: caches the AVPlayerItem status value and re-evaluates player state.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;

    updateStates();
}
2861
// KVO "will change" half of the pair: balanced by the decrement in
// playbackLikelyToKeepUpDidChange(). While the count is non-zero, state
// updates are deferred until all outstanding change pairs complete.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    m_pendingStatusChanges++;
}
2866
// KVO "did change" half: caches the new value and, once every outstanding
// will/did pair has completed, re-evaluates player state.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    ASSERT(m_pendingStatusChanges);
    --m_pendingStatusChanges;
    if (!m_pendingStatusChanges)
        updateStates();
}
2875
// KVO "will change" half: balanced by the decrement in
// playbackBufferEmptyDidChange().
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    m_pendingStatusChanges++;
}
2880
// KVO "did change" half: caches the buffer-empty flag and updates state once
// every outstanding will/did pair has completed.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    ASSERT(m_pendingStatusChanges);
    --m_pendingStatusChanges;
    if (!m_pendingStatusChanges)
        updateStates();
}
2889
// KVO "will change" half: balanced by the decrement in
// playbackBufferFullDidChange().
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    m_pendingStatusChanges++;
}
2894
// KVO "did change" half: caches the buffer-full flag and updates state once
// every outstanding will/did pair has completed.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    ASSERT(m_pendingStatusChanges);
    --m_pendingStatusChanges;
    if (!m_pendingStatusChanges)
        updateStates();
}
2903
// KVO sink: caches the item's seekable time ranges and notifies the generic layers.
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray> seekableRanges)
{
    m_cachedSeekableRanges = seekableRanges;

    seekableTimeRangesChanged();
    updateStates();
}
2911
// KVO sink: caches the item's loaded (buffered) time ranges and notifies the
// generic layers.
void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray> loadedRanges)
{
    m_cachedLoadedRanges = loadedRanges;

    loadedTimeRangesChanged();
    updateStates();
}
2919
// KVO sink for isReadyForDisplay: becoming ready can reveal a video track that
// track processing has not seen yet, so re-run it in that case.
void MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange(bool isReady)
{
    m_cachedIsReadyForDisplay = isReady;
    if (isReady && !hasVideo())
        tracksChanged();
    updateStates();
}
2927
// KVO sink for a track's "enabled" flag; the value itself is re-read during
// track processing, so just re-run it and refresh state.
void MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange(bool)
{
    tracksChanged();
    updateStates();
}
2933
// Starts or stops AVFoundation buffering by attaching or detaching the player
// item from the player.
void MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(bool shouldBuffer)
{
    // Log under the actual method name (was mistakenly "shouldBufferData").
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(%p) - %s", this, boolString(shouldBuffer));
    if (m_shouldBufferData == shouldBuffer)
        return;

    m_shouldBufferData = shouldBuffer;
    
    if (!m_avPlayer)
        return;

    // A nil item stops AVFoundation from buffering; reattaching resumes it.
    setAVPlayerItem(shouldBuffer ? m_avPlayerItem.get() : nil);
}
2947
2948 #if ENABLE(DATACUE_VALUE)
// Maps an AVFoundation metadata key space onto the corresponding DataCue type
// string; returns the empty atom for unrecognized key spaces.
static const AtomicString& metadataType(NSString *avMetadataKeySpace)
{
    static NeverDestroyed<const AtomicString> quickTimeUserData("com.apple.quicktime.udta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> isoUserData("org.mp4ra", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> quickTimeMetadata("com.apple.quicktime.mdta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> iTunesMetadata("com.apple.itunes", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> id3Metadata("org.id3", AtomicString::ConstructFromLiteral);

    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeUserData])
        return quickTimeUserData;
    // AVMetadataKeySpaceISOUserData is a weakly-linked constant; guard for nil.
    if (AVMetadataKeySpaceISOUserData && [avMetadataKeySpace isEqualToString:AVMetadataKeySpaceISOUserData])
        return isoUserData;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeMetadata])
        return quickTimeMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceiTunes])
        return iTunesMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceID3])
        return id3Metadata;

    return emptyAtom;
}
2970 #endif
2971
// Handles a batch of timed metadata items: closes out still-open cues on the
// metadata track, then adds a data cue per incoming item.
void MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(RetainPtr<NSArray> metadata, const MediaTime& mediaTime)
{
    // KVO can deliver NSNull when the item's timedMetadata becomes nil.
    m_currentMetaData = metadata && ![metadata isKindOfClass:[NSNull class]] ? metadata : nil;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(%p) - adding %i cues at time %s", this, m_currentMetaData ? static_cast<int>([m_currentMetaData.get() count]) : 0, toString(mediaTime).utf8().data());

#if ENABLE(DATACUE_VALUE)
    // Metadata seen during a seek belongs to a time we are leaving; skip it.
    if (seeking())
        return;

    if (!m_metadataTrack)
        processMetadataTrack();

    // An empty batch just means "no metadata from here on": close open cues.
    if (!metadata || [metadata isKindOfClass:[NSNull class]]) {
        m_metadataTrack->updatePendingCueEndTimes(mediaTime);
        return;
    }

    // Set the duration of all incomplete cues before adding new ones.
    MediaTime earliestStartTime = MediaTime::positiveInfiniteTime();
    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = toMediaTime(item.time);
        if (start < earliestStartTime)
            earliestStartTime = start;
    }
    m_metadataTrack->updatePendingCueEndTimes(earliestStartTime);

    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = toMediaTime(item.time);
        // Items without a valid duration become open-ended cues, to be closed by
        // a later batch via updatePendingCueEndTimes().
        MediaTime end = MediaTime::positiveInfiniteTime();
        if (CMTIME_IS_VALID(item.duration))
            end = start + toMediaTime(item.duration);

        AtomicString type = nullAtom;
        if (item.keySpace)
            type = metadataType(item.keySpace);

        m_metadataTrack->addDataCue(start, end, SerializedPlatformRepresentationMac::create(item), type);
    }
#endif
}
3013
void MediaPlayerPrivateAVFoundationObjC::tracksDidChange(RetainPtr<NSArray> tracks)
{
    // KVO handler for AVPlayerItem "tracks". Stop observing "enabled" on the
    // previously cached tracks before replacing the cache, so observer add/remove
    // calls stay balanced.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];

    NSArray *assetTracks = [m_avAsset tracks];

    // Keep a player-item track when its asset track belongs to the asset itself.
    // Streaming tracks are kept only when they do NOT belong to a valid
    // AVMediaSelectionGroup, since those are surfaced via media selection instead.
    m_cachedTracks = [tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id obj, NSUInteger, BOOL*) {
        AVAssetTrack* assetTrack = [obj assetTrack];

        if ([assetTracks containsObject:assetTrack])
            return YES;

        // Track is a streaming track. Omit if it belongs to a valid AVMediaSelectionGroup.
        if (!hasLoadedMediaSelectionGroups())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicAudible] && safeMediaSelectionGroupForAudibleMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicVisual] && safeMediaSelectionGroupForVisualMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible] && safeMediaSelectionGroupForLegibleMedia())
            return NO;

        return YES;
    }]];

    // Begin observing "enabled" on the new cached track set.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track addObserver:m_objcObserver.get() forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayerItemTrack];

    // The track set changed, so any cached byte count for the item is stale.
    m_cachedTotalBytes = 0;

    tracksChanged();
    updateStates();
}
3051
// KVO handler for AVPlayerItem "hasEnabledAudio". The cache is updated before the
// notifications so tracksChanged()/updateStates() observe the fresh value.
void MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange(bool hasEnabledAudio)
{
    m_cachedHasEnabledAudio = hasEnabledAudio;

    tracksChanged();
    updateStates();
}
3059
// KVO handler for AVPlayerItem "presentationSize". The cache is updated before the
// notifications so sizeChanged()/updateStates() observe the fresh value.
void MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange(FloatSize size)
{
    m_cachedPresentationSize = size;

    sizeChanged();
    updateStates();
}
3067
// KVO handler for AVPlayerItem "duration". Caches the new value, then invalidates
// the superclass's cached duration so the next query recomputes from it.
void MediaPlayerPrivateAVFoundationObjC::durationDidChange(const MediaTime& duration)
{
    m_cachedDuration = duration;

    invalidateCachedDuration();
}
3074
// KVO handler for AVPlayer "rate". The cache is updated before the notifications
// so updateStates()/rateChanged() observe the fresh value.
void MediaPlayerPrivateAVFoundationObjC::rateDidChange(double rate)
{
    m_cachedRate = rate;

    updateStates();
    rateChanged();
}
3082     
3083 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// KVO handler for AVPlayer "externalPlaybackActive"/"allowsExternalPlayback"
// (see playerKVOProperties()); forwards to the shared wireless-target handling.
void MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange()
{
    playbackTargetIsWirelessChanged();
}
3088 #endif
3089
// KVO handler for AVPlayerItem "canPlayFastForward". Only updates the cache; no
// notification is issued here.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange(bool newValue)
{
    m_cachedCanPlayFastForward = newValue;
}
3094
// KVO handler for AVPlayerItem "canPlayFastReverse". Only updates the cache; no
// notification is issued here.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange(bool newValue)
{
    m_cachedCanPlayFastReverse = newValue;
}
3099
URL MediaPlayerPrivateAVFoundationObjC::resolvedURL() const
{
    // Prefer the URL AVFoundation resolved for the asset once that key has
    // finished loading; otherwise fall back to the base class's resolved URL.
    if (m_avAsset && [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] == AVKeyValueStatusLoaded)
        return URL([m_avAsset resolvedURL]);

    return MediaPlayerPrivateAVFoundation::resolvedURL();
}
3107
NSArray* assetMetadataKeyNames()
{
    // AVAsset keys whose values must be loaded before the asset is inspected.
    // A function-local static with a dynamic initializer is initialized exactly
    // once and thread-safely under C++11, unlike the previous lazily-assigned
    // pointer (which could race on first use), and matches the pattern already
    // used by assetTrackMetadataKeyNames() and playerKVOProperties() below.
    // The array is intentionally never released.
    static NSArray* keys = [[NSArray alloc] initWithObjects:
        @"duration",
        @"naturalSize",
        @"preferredTransform",
        @"preferredVolume",
        @"preferredRate",
        @"playable",
        @"resolvedURL",
        @"tracks",
        @"availableMediaCharacteristicsWithMediaSelectionOptions",
        nil];
    return keys;
}
3125
NSArray* itemKVOProperties()
{
    // AVPlayerItem key paths observed via KVO; changes are routed to
    // -[WebCoreAVFMovieObserver observeValueForKeyPath:...] with the
    // MediaPlayerAVFoundationObservationContextPlayerItem context.
    // A function-local static with a dynamic initializer is initialized exactly
    // once and thread-safely under C++11, unlike the previous lazily-assigned
    // pointer (which could race on first use), and matches the pattern already
    // used by assetTrackMetadataKeyNames() and playerKVOProperties() below.
    // The array is intentionally never released.
    static NSArray* keys = [[NSArray alloc] initWithObjects:
        @"presentationSize",
        @"status",
        @"asset",
        @"tracks",
        @"seekableTimeRanges",
        @"loadedTimeRanges",
        @"playbackLikelyToKeepUp",
        @"playbackBufferFull",
        @"playbackBufferEmpty",
        @"duration",
        @"hasEnabledAudio",
        @"timedMetadata",
        @"canPlayFastForward",
        @"canPlayFastReverse",
        nil];
    return keys;
}
3148
// AVAssetTrack keys to preload for each track. The C++11 function-local static
// gives thread-safe one-time initialization; the array is intentionally never
// released.
NSArray* assetTrackMetadataKeyNames()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"totalSampleDataLength", @"mediaType", @"enabled", @"preferredTransform", @"naturalSize", nil];
    return keys;
}
3154
// AVPlayer key paths observed via KVO (context
// MediaPlayerAVFoundationObservationContextPlayer). The C++11 function-local
// static gives thread-safe one-time initialization; the array is intentionally
// never released.
NSArray* playerKVOProperties()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"rate",
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
                            @"externalPlaybackActive", @"allowsExternalPlayback",
#endif
                            nil];
    return keys;
}
3164 } // namespace WebCore
3165
@implementation WebCoreAVFMovieObserver

// The observer holds a raw pointer back to its owning player private. The owner
// must call -disconnect before it is destroyed; asynchronous work additionally
// re-checks liveness (see the WeakPtr guard in -observeValueForKeyPath:... and
// the m_callback re-checks inside main-thread lambdas below).
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

// Severs the link to the owner and cancels any pending perform requests so no
// queued selector can reach a destroyed player.
- (void)disconnect
{
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    m_callback = 0;
}

- (void)metadataLoaded
{
    if (!m_callback)
        return;

    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
}

// Notification handler for item playback reaching the end. NOTE(review): the
// registration site is outside this chunk — presumably
// AVPlayerItemDidPlayToEndTimeNotification; confirm where the observer is added.
- (void)didEnd:(NSNotification *)unusedNotification
{
    UNUSED_PARAM(unusedNotification);
    if (!m_callback)
        return;
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
}

// Central KVO dispatch. Builds a std::function bound to the matching
// MediaPlayerPrivateAVFoundationObjC handler based on (context, keyPath,
// willChange), then schedules it to run on the main thread guarded by a WeakPtr.
- (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context
{
    UNUSED_PARAM(object);
    id newValue = [change valueForKey:NSKeyValueChangeNewKey];

    if (!m_callback)
        return;

    // Prior notifications (NSKeyValueObservingOptionPrior) are routed to the
    // "...WillChange" handlers below; regular ones to the "...DidChange" handlers.
    bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];

#if !LOG_DISABLED
    if (willChange)
        LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - will change, keyPath = %s", self, [keyPath UTF8String]);
    else {
        RetainPtr<NSString> valueString = adoptNS([[NSString alloc] initWithFormat:@"%@", newValue]);
        LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - did change, keyPath = %s, value = %s", self, [keyPath UTF8String], [valueString.get() UTF8String]);
    }
#endif

    std::function<void ()> function;

    // AVPlayerLayer observations.
    if (context == MediaPlayerAVFoundationObservationContextAVPlayerLayer) {
        if ([keyPath isEqualToString:@"readyForDisplay"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange, m_callback, [newValue boolValue]);
    }

    // AVPlayerItemTrack observations (registered in tracksDidChange()).
    if (context == MediaPlayerAVFoundationObservationContextPlayerItemTrack) {
        if ([keyPath isEqualToString:@"enabled"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange, m_callback, [newValue boolValue]);
    }

    // AVPlayerItem prior ("will change") observations.
    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && willChange) {
        if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange, m_callback);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && !willChange) {
        // A value changed for an AVPlayerItem
        if ([keyPath isEqualToString:@"status"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange, m_callback, [newValue intValue]);
        else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"asset"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::setAsset, m_callback, RetainPtr<id>(newValue));
        else if ([keyPath isEqualToString:@"loadedTimeRanges"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"seekableTimeRanges"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"tracks"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::tracksDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"hasEnabledAudio"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"presentationSize"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange, m_callback, FloatSize([newValue sizeValue]));
        else if ([keyPath isEqualToString:@"duration"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::durationDidChange, m_callback, toMediaTime([newValue CMTimeValue]));
        else if ([keyPath isEqualToString:@"timedMetadata"] && newValue) {
            // Capture the item's current time (clamped to zero when numeric) so
            // metadataDidArrive() can close out open cues at the right position.
            MediaTime now;
            CMTime itemTime = [(AVPlayerItemType *)object currentTime];
            if (CMTIME_IS_NUMERIC(itemTime))
                now = std::max(toMediaTime(itemTime), MediaTime::zeroTime());
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::metadataDidArrive, m_callback, RetainPtr<NSArray>(newValue), now);
        } else if ([keyPath isEqualToString:@"canPlayFastReverse"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"canPlayFastForward"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange, m_callback, [newValue boolValue]);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayer && !willChange) {
        // A value changed for an AVPlayer.
        if ([keyPath isEqualToString:@"rate"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::rateDidChange, m_callback, [newValue doubleValue]);
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
        else if ([keyPath isEqualToString:@"externalPlaybackActive"] || [keyPath isEqualToString:@"allowsExternalPlayback"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange, m_callback);
#endif
    }
    
    // Unhandled (context, keyPath) combinations fall through with no work.
    if (!function)
        return;

    auto weakThis = m_callback->createWeakPtr();
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis, function]{
        // weakThis and function both refer to the same MediaPlayerPrivateAVFoundationObjC instance. If the WeakPtr has
        // been cleared, the underlying object has been destroyed, and it is unsafe to call function().
        if (!weakThis)
            return;
        function();
    }));
}

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// AVPlayerItemLegibleOutput delegate: forwards attributed caption strings to the
// player on the main thread. strongSelf keeps this observer alive across the hop;
// m_callback is re-checked on the main thread in case the player was torn down.
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime
{
    UNUSED_PARAM(output);
    UNUSED_PARAM(nativeSamples);

    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFMovieObserver> strongSelf = self;
    RetainPtr<NSArray> strongStrings = strings;
    RetainPtr<NSArray> strongSamples = nativeSamples;
    callOnMainThread([strongSelf, strongStrings, strongSamples, itemTime] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (!callback)
            return;
        callback->processCue(strongStrings.get(), strongSamples.get(), toMediaTime(itemTime));
    });
}

// Legible output was flushed (e.g. after a seek): drop queued cues on the main thread.
- (void)outputSequenceWasFlushed:(id)output
{
    UNUSED_PARAM(output);

    if (!m_callback)
        return;
    
    RetainPtr<WebCoreAVFMovieObserver> strongSelf = self;
    callOnMainThread([strongSelf] {
        if (MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback)
            callback->flushCues();
    });
}
#endif

@end
3334
3335 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
@implementation WebCoreAVFLoaderDelegate

// AVAssetResourceLoader delegate that routes resource-loading requests to the
// owning MediaPlayerPrivateAVFoundationObjC on the main thread. The owner must
// clear m_callback (via -setCallback:) before it is destroyed; each main-thread
// lambda re-checks m_callback and fails the request gracefully if it is gone.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

// Delegate callbacks may arrive off the main thread (hence the callOnMainThread
// hop). Returning YES tells AVFoundation to wait; the request is completed (or
// failed) later from the main thread.
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    RetainPtr<WebCoreAVFLoaderDelegate> strongSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> strongRequest = loadingRequest;
    callOnMainThread([strongSelf, strongRequest] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (!callback) {
            // Owner is gone: finish the request so AVFoundation doesn't wait forever.
            [strongRequest finishLoadingWithError:nil];
            return;
        }

        if (!callback->shouldWaitForLoadingOfResource(strongRequest.get()))
            [strongRequest finishLoadingWithError:nil];
    });

    return YES;
}

- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForResponseToAuthenticationChallenge:(NSURLAuthenticationChallenge *)challenge
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    // Server-trust evaluation is not handled here; let AVFoundation deal with it.
    if ([[[challenge protectionSpace] authenticationMethod] isEqualToString:NSURLAuthenticationMethodServerTrust])
        return NO;

    RetainPtr<WebCoreAVFLoaderDelegate> strongSelf = self;
    RetainPtr<NSURLAuthenticationChallenge> strongChallenge = challenge;
    callOnMainThread([strongSelf, strongChallenge] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (!callback) {
            // Owner is gone: cancel so the challenge isn't left pending.
            [[strongChallenge sender] cancelAuthenticationChallenge:strongChallenge.get()];
            return;
        }

        if (!callback->shouldWaitForResponseToAuthenticationChallenge(strongChallenge.get()))
            [[strongChallenge sender] cancelAuthenticationChallenge:strongChallenge.get()];
    });

    return YES;
}

- (void)resourceLoader:(AVAssetResourceLoader *)resourceLoader didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFLoaderDelegate> strongSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> strongRequest = loadingRequest;
    callOnMainThread([strongSelf, strongRequest] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (callback)
            callback->didCancelLoadingRequest(strongRequest.get());
    });
}

// Re-points (or clears, with nullptr) the owner; called during player teardown.
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    m_callback = callback;
}
@end
3414 #endif
3415
3416 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
@implementation WebCoreAVFPullDelegate

// Thin AVPlayerItemOutputPullDelegate that forwards video-output notifications
// to the owning MediaPlayerPrivateAVFoundationObjC. The owner clears the link
// via -setCallback: before teardown.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    self = [super init];
    if (!self)
        return nil;

    m_callback = callback;
    return self;
}

// Re-points (or clears, with nullptr) the owner.
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    m_callback = callback;
}

- (void)outputMediaDataWillChange:(AVPlayerItemVideoOutput *)output
{
    // After the owner disconnects this becomes a no-op.
    if (!m_callback)
        return;

    m_callback->outputMediaDataWillChange(output);
}

- (void)outputSequenceWasFlushed:(AVPlayerItemVideoOutput *)output
{
    UNUSED_PARAM(output);
    // Intentionally empty: flushed sequences require no handling here.
}
@end
3443 #endif
3444
3445 #endif