Switch remaining CoreMedia soft-linking in WebCore over to CoreMediaSoftLink.{cpp,h}
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27
28 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
29 #import "MediaPlayerPrivateAVFoundationObjC.h"
30
31 #import "AVTrackPrivateAVFObjCImpl.h"
32 #import "AudioSourceProviderAVFObjC.h"
33 #import "AudioTrackPrivateAVFObjC.h"
34 #import "AuthenticationChallenge.h"
35 #import "BlockExceptions.h"
36 #import "CDMSessionAVFoundationObjC.h"
37 #import "Cookie.h"
38 #import "ExceptionCodePlaceholder.h"
39 #import "FloatConversion.h"
40 #import "FloatConversion.h"
41 #import "GraphicsContext.h"
42 #import "GraphicsContextCG.h"
43 #import "InbandMetadataTextTrackPrivateAVF.h"
44 #import "InbandTextTrackPrivateAVFObjC.h"
45 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
46 #import "OutOfBandTextTrackPrivateAVF.h"
47 #import "URL.h"
48 #import "Logging.h"
49 #import "MediaSelectionGroupAVFObjC.h"
50 #import "MediaTimeAVFoundation.h"
51 #import "PlatformTimeRanges.h"
52 #import "QuartzCoreSPI.h"
53 #import "SecurityOrigin.h"
54 #import "SerializedPlatformRepresentationMac.h"
55 #import "TextEncoding.h"
56 #import "TextTrackRepresentation.h"
57 #import "UUID.h"
58 #import "VideoTrackPrivateAVFObjC.h"
59 #import "WebCoreAVFResourceLoader.h"
60 #import "WebCoreCALayerExtras.h"
61 #import "WebCoreSystemInterface.h"
62 #import <objc/runtime.h>
63 #import <runtime/DataView.h>
64 #import <runtime/JSCInlines.h>
65 #import <runtime/TypedArrayInlines.h>
66 #import <runtime/Uint16Array.h>
67 #import <runtime/Uint32Array.h>
68 #import <runtime/Uint8Array.h>
69 #import <wtf/CurrentTime.h>
70 #import <wtf/Functional.h>
71 #import <wtf/ListHashSet.h>
72 #import <wtf/NeverDestroyed.h>
73 #import <wtf/text/CString.h>
74 #import <wtf/text/StringBuilder.h>
75
76 #if ENABLE(AVF_CAPTIONS)
77 #include "TextTrack.h"
78 #endif
79
80 #import <AVFoundation/AVFoundation.h>
81 #if PLATFORM(IOS)
82 #import "WAKAppKitStubs.h"
83 #import <CoreImage/CoreImage.h>
84 #import <mach/mach_port.h>
85 #else
86 #import <Foundation/NSGeometry.h>
87 #import <QuartzCore/CoreImage.h>
88 #endif
89
90 #if USE(VIDEOTOOLBOX)
91 #import <CoreVideo/CoreVideo.h>
92 #import <VideoToolbox/VideoToolbox.h>
93 #endif
94
95 #if USE(CFNETWORK)
96 #include "CFNSURLConnectionSPI.h"
97 #endif
98
namespace std {
// Specializes std::iterator_traits for WTF::HashSet's iterator, which does not
// provide the standard traits typedefs itself — presumably needed so the
// iterator can be used with <algorithm>-style helpers elsewhere in this file
// (NOTE(review): confirm against the actual caller).
template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
    typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
};
}
104
@interface WebVideoContainerLayer : CALayer
@end

@implementation WebVideoContainerLayer

// Keeps every sublayer's frame pinned to this layer's bounds whenever the
// bounds change, so a hosted video layer always fills its container.
- (void)setBounds:(CGRect)bounds
{
    [super setBounds:bounds];
    for (CALayer* sublayer in self.sublayers) {
        [sublayer setFrame:bounds];
    }
}
@end
117
118 #if ENABLE(AVF_CAPTIONS)
119 // Note: This must be defined before our SOFT_LINK macros:
120 @class AVMediaSelectionOption;
121 @interface AVMediaSelectionOption (OutOfBandExtensions)
122 @property (nonatomic, readonly) NSString* outOfBandSource;
123 @property (nonatomic, readonly) NSString* outOfBandIdentifier;
124 @end
125 #endif
126
127 #if PLATFORM(IOS)
128 @class AVPlayerItem;
129 @interface AVPlayerItem (WebKitExtensions)
130 @property (nonatomic, copy) NSString* dataYouTubeID;
131 @end
132 #endif
133
134 @interface AVURLAsset (WebKitExtensions)
135 @property (nonatomic, readonly) NSURL *resolvedURL;
136 @end
137
138 typedef AVPlayer AVPlayerType;
139 typedef AVPlayerItem AVPlayerItemType;
140 typedef AVPlayerItemLegibleOutput AVPlayerItemLegibleOutputType;
141 typedef AVPlayerItemVideoOutput AVPlayerItemVideoOutputType;
142 typedef AVMetadataItem AVMetadataItemType;
143 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
144 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
145
146 #pragma mark - Soft Linking
147
148 // Soft-linking headers must be included last since they #define functions, constants, etc.
149 #import "CoreMediaSoftLink.h"
150
151 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
152 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
153 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreVideo)
154
155 #if USE(VIDEOTOOLBOX)
156 SOFT_LINK_FRAMEWORK_OPTIONAL(VideoToolbox)
157 #endif
158
159 SOFT_LINK(CoreVideo, CVPixelBufferGetWidth, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
160 SOFT_LINK(CoreVideo, CVPixelBufferGetHeight, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
161 SOFT_LINK(CoreVideo, CVPixelBufferGetBaseAddress, void*, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
162 SOFT_LINK(CoreVideo, CVPixelBufferGetBytesPerRow, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
163 SOFT_LINK(CoreVideo, CVPixelBufferGetDataSize, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
164 SOFT_LINK(CoreVideo, CVPixelBufferGetPixelFormatType, OSType, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
165 SOFT_LINK(CoreVideo, CVPixelBufferLockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
166 SOFT_LINK(CoreVideo, CVPixelBufferUnlockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
167
168 #if USE(VIDEOTOOLBOX)
169 SOFT_LINK(VideoToolbox, VTPixelTransferSessionCreate, OSStatus, (CFAllocatorRef allocator, VTPixelTransferSessionRef *pixelTransferSessionOut), (allocator, pixelTransferSessionOut))
170 SOFT_LINK(VideoToolbox, VTPixelTransferSessionTransferImage, OSStatus, (VTPixelTransferSessionRef session, CVPixelBufferRef sourceBuffer, CVPixelBufferRef destinationBuffer), (session, sourceBuffer, destinationBuffer))
171 #endif
172
173 SOFT_LINK_CLASS(AVFoundation, AVPlayer)
174 SOFT_LINK_CLASS(AVFoundation, AVPlayerItem)
175 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemVideoOutput)
176 SOFT_LINK_CLASS(AVFoundation, AVPlayerLayer)
177 SOFT_LINK_CLASS(AVFoundation, AVURLAsset)
178 SOFT_LINK_CLASS(AVFoundation, AVAssetImageGenerator)
179 SOFT_LINK_CLASS(AVFoundation, AVMetadataItem)
180
181 SOFT_LINK_CLASS(CoreImage, CIContext)
182 SOFT_LINK_CLASS(CoreImage, CIImage)
183
184 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString*)
185 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString*)
186 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
187 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
188 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
189 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
190 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
191 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeMetadata, NSString *)
192 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
193 SOFT_LINK_POINTER(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
194 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
195 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
196 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
197 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
198 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
199 SOFT_LINK_POINTER(CoreVideo, kCVPixelBufferPixelFormatTypeKey, NSString *)
200
201 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
202
203 #define AVPlayer getAVPlayerClass()
204 #define AVPlayerItem getAVPlayerItemClass()
205 #define AVPlayerLayer getAVPlayerLayerClass()
206 #define AVURLAsset getAVURLAssetClass()
207 #define AVAssetImageGenerator getAVAssetImageGeneratorClass()
208 #define AVMetadataItem getAVMetadataItemClass()
209
210 #define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
211 #define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
212 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
213 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
214 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
215 #define AVMediaTypeVideo getAVMediaTypeVideo()
216 #define AVMediaTypeAudio getAVMediaTypeAudio()
217 #define AVMediaTypeMetadata getAVMediaTypeMetadata()
218 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
219 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
220 #define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
221 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
222 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
223 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
224 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
225 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
226 #define kCVPixelBufferPixelFormatTypeKey getkCVPixelBufferPixelFormatTypeKey()
227
228 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
229 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
230 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
231
232 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
233 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
234 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
235
236 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
237 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
238 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
239 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
240
241 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
242 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
243 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
244 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
245 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
246 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
247 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
248 #endif
249
250 #if ENABLE(AVF_CAPTIONS)
251 SOFT_LINK_POINTER(AVFoundation, AVURLAssetHTTPCookiesKey, NSString*)
252 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
253 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
254 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
255 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
256 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
257 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
258 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
259 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
260 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
261
262 #define AVURLAssetHTTPCookiesKey getAVURLAssetHTTPCookiesKey()
263 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
264 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
265 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
266 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
267 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
268 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
269 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
270 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
271 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
272 #endif
273
274 #if ENABLE(DATACUE_VALUE)
275 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString*)
276 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMetadataKeySpaceISOUserData, NSString*)
277 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString*)
278 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceiTunes, NSString*)
279 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceID3, NSString*)
280
281 #define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
282 #define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
283 #define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
284 #define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
285 #define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
286 #endif
287
288 #if PLATFORM(IOS)
289 SOFT_LINK_POINTER(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
290 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
291 #endif
292
293 using namespace WebCore;
294
// Context values passed to -addObserver:forKeyPath:options:context: so the KVO
// callback can tell which kind of observed object a change came from.
enum MediaPlayerAVFoundationObservationContext {
    MediaPlayerAVFoundationObservationContextPlayerItem,
    MediaPlayerAVFoundationObservationContextPlayerItemTrack,
    MediaPlayerAVFoundationObservationContextPlayer,
    MediaPlayerAVFoundationObservationContextAVPlayerLayer,
};
301
302 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
303 @interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
304 #else
305 @interface WebCoreAVFMovieObserver : NSObject
306 #endif
307 {
308     MediaPlayerPrivateAVFoundationObjC* m_callback;
309     int m_delayCallbacks;
310 }
311 -(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
312 -(void)disconnect;
313 -(void)metadataLoaded;
314 -(void)didEnd:(NSNotification *)notification;
315 -(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
316 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
317 - (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
318 - (void)outputSequenceWasFlushed:(id)output;
319 #endif
320 @end
321
322 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
323 @interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
324     MediaPlayerPrivateAVFoundationObjC* m_callback;
325 }
326 - (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
327 - (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
328 - (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
329 @end
330 #endif
331
332 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
333 @interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
334     MediaPlayerPrivateAVFoundationObjC *m_callback;
335     dispatch_semaphore_t m_semaphore;
336 }
337 - (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
338 - (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
339 - (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
340 - (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
341 @end
342 #endif
343
344 namespace WebCore {
345
346 static NSArray *assetMetadataKeyNames();
347 static NSArray *itemKVOProperties();
348 static NSArray* assetTrackMetadataKeyNames();
349
350 #if !LOG_DISABLED
// Returns a C-string form of a boolean, for use in log messages.
static const char *boolString(bool val)
{
    if (val)
        return "true";
    return "false";
}
355 #endif
356
357 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Maps a MediaPlayer to its private AVFoundation implementation so encrypted
// media (CDM session) code can find the private object for a given player.
// Entries are added in the constructor and removed in the destructor.
typedef HashMap<MediaPlayer*, MediaPlayerPrivateAVFoundationObjC*> PlayerToPrivateMapType;
static PlayerToPrivateMapType& playerToPrivateMap()
{
    DEPRECATED_DEFINE_STATIC_LOCAL(PlayerToPrivateMapType, map, ());
    return map;
}
364 #endif
365
366 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Returns the lazily-created serial queue on which AVAssetResourceLoader
// delegate callbacks are delivered.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t loaderQueue;
    static dispatch_once_t creationToken;
    dispatch_once(&creationToken, ^{
        loaderQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return loaderQueue;
}
376 #endif
377
378 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Returns the lazily-created serial queue on which AVPlayerItemOutput pull
// delegate callbacks are delivered.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_queue_t pullQueue;
    static dispatch_once_t creationToken;
    dispatch_once(&creationToken, ^{
        pullQueue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return pullQueue;
}
388 #endif
389
390 #if USE(CFNETWORK)
// Adapts an NSURLAuthenticationChallenge to WebCore's AuthenticationClient
// interface: each response is forwarded to the challenge's sender.
class WebCoreNSURLAuthenticationChallengeClient : public RefCounted<WebCoreNSURLAuthenticationChallengeClient>, public AuthenticationClient {
public:
    static RefPtr<WebCoreNSURLAuthenticationChallengeClient> create(NSURLAuthenticationChallenge *challenge)
    {
        return adoptRef(new WebCoreNSURLAuthenticationChallengeClient(challenge));
    }

    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::ref;
    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::deref;

private:
    WebCoreNSURLAuthenticationChallengeClient(NSURLAuthenticationChallenge *challenge)
        : m_challenge(challenge)
    {
        ASSERT(m_challenge);
    }

    virtual void refAuthenticationClient() override { ref(); }
    virtual void derefAuthenticationClient() override { deref(); }

    // `override` added below for consistency with the two overrides above and
    // so signature drift against AuthenticationClient is caught at compile time.
    virtual void receivedCredential(const AuthenticationChallenge&, const Credential& credential) override
    {
        [[m_challenge sender] useCredential:credential.nsCredential() forAuthenticationChallenge:m_challenge.get()];
    }

    virtual void receivedRequestToContinueWithoutCredential(const AuthenticationChallenge&) override
    {
        [[m_challenge sender] continueWithoutCredentialForAuthenticationChallenge:m_challenge.get()];
    }

    virtual void receivedCancellation(const AuthenticationChallenge&) override
    {
        [[m_challenge sender] cancelAuthenticationChallenge:m_challenge.get()];
    }

    // The two handlers below are optional on the sender protocol, so check
    // before messaging.
    virtual void receivedRequestToPerformDefaultHandling(const AuthenticationChallenge&) override
    {
        if ([[m_challenge sender] respondsToSelector:@selector(performDefaultHandlingForAuthenticationChallenge:)])
            [[m_challenge sender] performDefaultHandlingForAuthenticationChallenge:m_challenge.get()];
    }

    virtual void receivedChallengeRejection(const AuthenticationChallenge&) override
    {
        if ([[m_challenge sender] respondsToSelector:@selector(rejectProtectionSpaceAndContinueWithChallenge:)])
            [[m_challenge sender] rejectProtectionSpaceAndContinueWithChallenge:m_challenge.get()];
    }

    RetainPtr<NSURLAuthenticationChallenge> m_challenge;
};
440 #endif
441
// Registers this engine with the MediaPlayer factory, but only if AVFoundation
// is actually present at runtime (it is soft-linked above).
// NOTE(review): the three zero arguments are unused registrar callbacks —
// confirm their meaning against MediaEngineRegistrar's declaration.
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (isAvailable())
        registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player); },
            getSupportedTypes, supportsType, 0, 0, 0, supportsKeySystem);
}
448
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_weakPtrFactory(this)
#if PLATFORM(IOS)
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
#endif
    // Objective-C observer that receives KVO/notification callbacks for us.
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Delegate for AVPlayerItemVideoOutput pull callbacks; the semaphore is
    // created lazily when first needed.
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
    , m_videoOutputSemaphore(nullptr)
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
#endif
    , m_currentTextTrack(0)
    , m_cachedRate(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_shouldBufferData(true)
    , m_cachedIsReadyForDisplay(false)
    , m_haveBeenAskedToCreateLayer(false)
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    // Register in the global map so CDM session code can find us by player.
    playerToPrivateMap().set(player, this);
#endif
}
485
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    playerToPrivateMap().remove(player());
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Detach the resource-loader delegate first so no loading callbacks can
    // reach this object while it is being destroyed.
    [m_loaderDelegate.get() setCallback:0];
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];

    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Likewise detach the video-output delegate before releasing its semaphore.
    [m_videoOutputDelegate setCallback:0];
    [m_videoOutput setDelegate:nil queue:0];
    if (m_videoOutputSemaphore)
        dispatch_release(m_videoOutputSemaphore);
#endif

    if (m_videoLayer)
        destroyVideoLayer();

    // Tears down the remaining AVFoundation objects and cached state.
    cancelLoad();
}
510
// Cancels any in-flight loading and tears down the player item, player, and
// all cached state. Safe to call repeatedly; also invoked from the destructor.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::cancelLoad(%p)", this);
    tearDownVideoRendering();

    // Stop all notification/KVO delivery to our observer before mutating state.
    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // The legible output must be removed from the item before the item is
    // released below.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    if (m_avPlayerItem) {
        // Remove every KVO registration added when the item was created.
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        
        m_avPlayerItem = nil;
    }
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;
        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"rate"];
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"externalPlaybackActive"];
#endif
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = MediaTime::zeroTime();

    // Each cached track was KVO-observed for "enabled"; unregister before
    // dropping the collection.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    if (m_provider)
        m_provider->setPlayerItem(nullptr);
#endif

    setIgnoreLoadStateChanges(false);
}
573
// A layer renderer exists once createVideoLayer() has been asked to create one
// (the layer itself may still be created asynchronously on the main thread).
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_haveBeenAskedToCreateLayer;
}
578
// A context renderer exists if we can paint frames into a graphics context,
// either via an AVPlayerItemVideoOutput (preferred, when available) or via an
// AVAssetImageGenerator.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (m_videoOutput)
        return true;
#endif
    return m_imageGenerator;
}
587
// Creates whichever context renderer this build supports: a video output when
// AVPlayerItemVideoOutput is available, otherwise an image generator.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
596
// Lazily creates an AVAssetImageGenerator used to produce still frames for
// painting. No-op until an asset exists or if a generator was already made.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p)", this);

    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    // Zero tolerance in both directions so a generated image corresponds to
    // exactly the requested time, not a nearby keyframe.
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
}
613
// Tears down both possible context renderers; each destroy function is a no-op
// if its renderer was never created.
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
#endif
    destroyImageGenerator();
}
621
// Releases the image generator, if any.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (!m_imageGenerator)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator(%p) - destroying  %p", this, m_imageGenerator.get());

    // Clear with nullptr rather than 0, matching how the other RetainPtr
    // members in this file are reset (e.g. m_cachedTracks in cancelLoad()).
    m_imageGenerator = nullptr;
}
631
// Creates the rendering layer asynchronously on the main thread.
// m_haveBeenAskedToCreateLayer is set inside the main-thread block, after the
// preconditions are re-checked, so the flag only flips once the hop runs.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
        return;

    auto weakThis = createWeakPtr();
    callOnMainThread([this, weakThis] {
        // Bail out if this object was destroyed before the hop ran.
        if (!weakThis)
            return;

        // Re-check the preconditions: state may have changed in the interim.
        if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
            return;
        m_haveBeenAskedToCreateLayer = true;

        if (!m_videoLayer)
            createAVPlayerLayer();

#if USE(VIDEOTOOLBOX)
        if (!m_videoOutput)
            createVideoOutput();
#endif

        // Let the client know we can now render into a layer.
        player()->client().mediaPlayerRenderingModeChanged(player());
    });
}
657
// Creates the AVPlayerLayer, attaches it to the player, and (on iOS) wraps it
// in a WebVideoContainerLayer for inline display or parents it directly into
// the fullscreen layer when one exists.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
{
    if (!m_avPlayer)
        return;

    m_videoLayer = adoptNS([allocAVPlayerLayerInstance() init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];
    [m_videoLayer setBackgroundColor:cachedCGColor(Color::black, ColorSpaceDeviceRGB)];
#ifndef NDEBUG
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
    // Observe readiness so hasAvailableVideoFrame() can report layer state;
    // balanced by the removeObserver: call in destroyVideoLayer().
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
    IntSize defaultSize = player()->client().mediaPlayerContentBoxRect().pixelSnappedSize();
    // NOTE(review): log message says createVideoLayer but this function is
    // createAVPlayerLayer — likely left over from a rename.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoLayer(%p) - returning %p", this, m_videoLayer.get());

#if PLATFORM(IOS)
    [m_videoLayer web_disableAllActions];
    m_videoInlineLayer = adoptNS([[WebVideoContainerLayer alloc] init]);
#ifndef NDEBUG
    [m_videoInlineLayer setName:@"WebVideoContainerLayer"];
#endif
    [m_videoInlineLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
    if (m_videoFullscreenLayer) {
        // Fullscreen takes priority: size to the fullscreen frame and insert
        // beneath any existing fullscreen content.
        [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
        [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
    } else {
        [m_videoInlineLayer insertSublayer:m_videoLayer.get() atIndex:0];
        [m_videoLayer setFrame:m_videoInlineLayer.get().bounds];
    }
#else
    [m_videoLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
#endif
}
693
// Detaches and releases the AVPlayerLayer created by createAVPlayerLayer().
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer(%p) - destroying %p", this, m_videoLayer.get());

    // Stop KVO and detach the layer from the player before releasing it.
    [m_videoLayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer.get() setPlayer:nil];

#if PLATFORM(IOS)
    if (m_videoFullscreenLayer)
        [m_videoLayer removeFromSuperlayer];
    m_videoInlineLayer = nil;
#endif

    m_videoLayer = nil;
}
712
// When rendering to a layer, report the layer's cached readyForDisplay state;
// otherwise report whether a frame has already been painted into a context.
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    bool renderingToLayer = currentRenderingMode() == MediaRenderingToLayer;
    return renderingToLayer ? m_cachedIsReadyForDisplay : m_videoFrameHasDrawn;
}
720
721 #if ENABLE(AVF_CAPTIONS)
// Maps a PlatformTextTrack kind to the AVFoundation media characteristics used
// to describe an out-of-band track of that kind.
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    // FIXME: Match these to correct types:
    switch (kind) {
    case PlatformTextTrack::Description:
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, nil];
    case PlatformTextTrack::Forced:
        return [NSArray arrayWithObjects: AVMediaCharacteristicContainsOnlyForcedSubtitles, nil];
    case PlatformTextTrack::Caption:
    case PlatformTextTrack::Subtitle:
    default:
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];
    }
}
739     
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    // Forward out-of-band track mode changes to the shared AVFoundation base class.
    trackModeChanged();
}
744     
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    // Push the mode of each out-of-band track source onto the matching private
    // track, matching them up via the unique identifier stamped on the
    // AVMediaSelectionOption.
    auto& trackSources = player()->outOfBandTrackSources();

    for (auto& privateTrack : m_textTracks) {
        if (privateTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;

        RefPtr<OutOfBandTextTrackPrivateAVF> outOfBandTrack = static_cast<OutOfBandTextTrackPrivateAVF*>(privateTrack.get());
        RetainPtr<AVMediaSelectionOptionType> selectionOption = outOfBandTrack->mediaSelectionOption();

        for (auto& source : trackSources) {
            RetainPtr<CFStringRef> uniqueID = String::number(source->uniqueId()).createCFString();

            if (![[selectionOption.get() outOfBandIdentifier] isEqual: reinterpret_cast<const NSString*>(uniqueID.get())])
                continue;

            // Hidden is the default when the platform mode is unrecognized.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (source->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (source->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;

            privateTrack->setMode(mode);
            break;
        }
    }
}
775 #endif
776
777
// Run the URL through NSURLProtocol canonicalization so it matches what the
// URL loading system would ultimately request; fall back to the plain
// conversion whenever any step fails or the string is empty.
static NSURL *canonicalURL(const String& url)
{
    NSURL *cocoaURL = URL(ParsedURLString, url);
    if (url.isEmpty())
        return cocoaURL;

    RetainPtr<NSURLRequest> request = adoptNS([[NSURLRequest alloc] initWithURL:cocoaURL]);
    if (!request)
        return cocoaURL;

    if (NSURLRequest *canonicalRequest = [NSURLProtocol canonicalRequestForRequest:request.get()])
        return [canonicalRequest URL];

    return cocoaURL;
}
794
795 #if PLATFORM(IOS)
// Convert a WebCore Cookie into the NSHTTPCookie AVFoundation expects.
// Cookie::expires is in milliseconds since the epoch; NSDate wants seconds.
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    RetainPtr<NSMutableDictionary> properties = adoptNS([@{
        NSHTTPCookieName: cookie.name,
        NSHTTPCookieValue: cookie.value,
        NSHTTPCookieDomain: cookie.domain,
        NSHTTPCookiePath: cookie.path,
        NSHTTPCookieExpires: [NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)],
    } mutableCopy]);

    // Secure and Discard are only set when true; their mere presence matters.
    if (cookie.secure)
        [properties setObject:@YES forKey:NSHTTPCookieSecure];
    if (cookie.session)
        [properties setObject:@YES forKey:NSHTTPCookieDiscard];

    return [NSHTTPCookie cookieWithProperties:properties.get()];
}
813 #endif
814
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
{
    if (m_avAsset)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(%p) - url = %s", this, url.utf8().data());

    setDelayCallbacks(true);

    // Assemble the option dictionary handed to AVURLAsset below.
    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);    

    // Forbid the asset from mixing local and remote sub-resource references.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    // Forward the document's Referer and User-Agent so media requests look like
    // ordinary subresource loads of the embedding page.
    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject: [NSNumber numberWithBool: TRUE] forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

    // The soft-linked key can be null when the linked AVFoundation is too old.
    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty() && AVURLAssetClientBundleIdentifierKey)
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];

#if ENABLE(AVF_CAPTIONS)
    // Describe each out-of-band text track source as an alternate track entry.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: reinterpret_cast<const NSString*>(label.get()),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: reinterpret_cast<const NSString*>(language.get()),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: reinterpret_cast<const NSString*>(uniqueID.get()),
                AVOutOfBandAlternateTrackSourceKey: reinterpret_cast<const NSString*>(url.get()),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS)
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS)
    // Hand the page's cookies for this URL to AVFoundation so media requests
    // carry the same credentials as the document's requests.
    Vector<Cookie> cookies;
    if (player()->getRawCookies(URL(ParsedURLString, url), cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([allocAVURLAssetInstance() initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Route resource-loading callbacks through our loader delegate.
    [[m_avAsset.get() resourceLoader] setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];
#endif

    // A new asset must have its playability re-checked.
    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
899
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItemType *item)
{
    if (!m_avPlayer)
        return;

    // Off the main thread, bounce the swap to the main queue, keeping both the
    // player and the item alive for the duration of the hop.
    if (!pthread_main_np()) {
        RetainPtr<AVPlayerType> protectedPlayer = m_avPlayer.get();
        RetainPtr<AVPlayerItemType> protectedItem = item;
        dispatch_async(dispatch_get_main_queue(), [protectedPlayer, protectedItem] {
            [protectedPlayer replaceCurrentItemWithPlayerItem:protectedItem.get()];
        });
        return;
    }

    [m_avPlayer replaceCurrentItemWithPlayerItem:item];
}
916
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayer(%p)", this);

    setDelayCallbacks(true);

    // Observe "rate" so rate changes originating inside AVFoundation are
    // reflected in our cached state.
    m_avPlayer = adoptNS([allocAVPlayerInstance() init]);
    [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"rate" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"externalPlaybackActive" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
    updateDisableExternalPlayback();
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // WebCore drives media selection (e.g. caption tracks) itself.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    // If the item was created before the player, attach it now.
    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
949
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem(%p)", this);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([allocAVPlayerItemInstance() initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // Observe every item property we cache; NSKeyValueObservingOptionPrior also
    // delivers will-change notifications to the observer.
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    // If the player already exists, attach the new item immediately.
    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

#if PLATFORM(IOS)
    AtomicString value;
    if (player()->doesHaveAttribute("data-youtube-id", &value))
        [m_avPlayerItem.get() setDataYouTubeID: value];
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Legible (caption/subtitle) output: WebCore renders cues itself, so
    // suppress AVFoundation's own rendering and have the delegate deliver
    // WebVTT cues on the main queue, slightly ahead of their display time.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([allocAVPlayerItemLegibleOutputInstance() initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio source provider pointed at the current item.
    if (m_provider)
        m_provider->setPlayerItem(m_avPlayerItem.get());
#endif

    setDelayCallbacks(false);
}
999
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    // Only ever kick off one playability check per asset.
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::checkPlayability(%p)", this);
    auto weakThis = createWeakPtr();

    // The completion handler may run on an arbitrary queue, so hop back to the
    // main thread and bail (via the weak pointer) if we were destroyed first.
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"playable"] completionHandler:^{
        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
1016
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);

    // A dispatch group tracks the asset-level key load plus one load per track;
    // each async request enters the group and its handler leaves it.
    dispatch_group_t metadataLoadingGroup = dispatch_group_create();
    dispatch_group_enter(metadataLoadingGroup);
    auto weakThis = createWeakPtr();
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            // Only fan out to per-track loads once "tracks" itself has loaded.
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup);
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup);
                    }];
                }
            }
            dispatch_group_leave(metadataLoadingGroup);
        });
    }];

    // Once everything has loaded, tell the observer on the main thread and
    // balance the dispatch_group_create above.
    dispatch_group_notify(metadataLoadingGroup, dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });

        dispatch_release(metadataLoadingGroup);
    });
}
1048
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    // Map cached AVPlayerItem state onto the cross-platform status enum:
    // terminal AVFoundation states first, then buffer state in priority order.
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    switch (m_cachedItemStatus) {
    case AVPlayerItemStatusUnknown:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    case AVPlayerItemStatusFailed:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    default:
        break;
    }

    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1067
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformMedia(%p)", this);
    // Expose the underlying AVPlayer to clients that need the native object.
    PlatformMedia media;
    media.type = PlatformMedia::AVFoundationMediaPlayerType;
    media.media.avfMediaPlayer = m_avPlayer.get();
    return media;
}
1076
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    // Until a layer has been requested there is nothing to hand out.
    if (!m_haveBeenAskedToCreateLayer)
        return nullptr;

#if PLATFORM(IOS)
    // On iOS the video layer lives inside an inline container layer.
    return m_videoInlineLayer.get();
#else
    return m_videoLayer.get();
#endif
}
1085
1086 #if PLATFORM(IOS)
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer)
{
    if (m_videoFullscreenLayer == videoFullscreenLayer)
        return;

    m_videoFullscreenLayer = videoFullscreenLayer;

    // Reparent the video layer without triggering implicit animations.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    
    // Capture the root of the incoming fullscreen layer's tree before any
    // reparenting happens below.
    CALayer *oldRootLayer = videoFullscreenLayer;
    while (oldRootLayer.superlayer)
        oldRootLayer = oldRootLayer.superlayer;

    CALayer *newRootLayer = nil;
    
    // Move the video layer into the fullscreen layer when one was given,
    // otherwise back into the inline container, otherwise just detach it.
    if (m_videoFullscreenLayer && m_videoLayer) {
        [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
        [m_videoLayer removeFromSuperlayer];
        [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
        newRootLayer = m_videoFullscreenLayer.get();
    } else if (m_videoInlineLayer && m_videoLayer) {
        [m_videoLayer setFrame:[m_videoInlineLayer bounds]];
        [m_videoLayer removeFromSuperlayer];
        [m_videoInlineLayer insertSublayer:m_videoLayer.get() atIndex:0];
        newRootLayer = m_videoInlineLayer.get();
    } else if (m_videoLayer)
        [m_videoLayer removeFromSuperlayer];

    while (newRootLayer.superlayer)
        newRootLayer = newRootLayer.superlayer;

    // When moving between two distinct layer trees, hand one shared fence port
    // to every CAContext hosting either tree so their commits stay in sync.
    if (oldRootLayer && newRootLayer && oldRootLayer != newRootLayer) {
        mach_port_t fencePort = 0;
        for (CAContext *context in [CAContext allContexts]) {
            if (context.layer == oldRootLayer || context.layer == newRootLayer) {
                if (!fencePort)
                    fencePort = [context createFencePort];
                else
                    [context setFencePort:fencePort];
            }
        }
        mach_port_deallocate(mach_task_self(), fencePort);
    }
    [CATransaction commit];

    // The text track representation follows the video into fullscreen.
    if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
    }
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
#endif
}
1141
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenFrame = frame;
    if (!m_videoFullscreenLayer)
        return;

    if (m_videoLayer) {
        // Temporarily re-enable implicit animations so the frame change
        // animates, then disable all actions again afterwards.
        [m_videoLayer setStyle:nil]; // This enables actions, i.e. implicit animations.
        [CATransaction begin];
        [m_videoLayer setFrame:CGRectMake(0, 0, frame.width(), frame.height())];
        [CATransaction commit];
        [m_videoLayer web_disableAllActions];
    }
    // Keep the caption layer aligned with the resized video.
    syncTextTrackBounds();
}
1157
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;
    if (!m_videoLayer)
        return;

    // Translate the cross-platform gravity value into the AVPlayerLayer
    // constant, defaulting to aspect-fit for unexpected values.
    NSString *videoGravity;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        videoGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        videoGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    }

    [m_videoLayer setVideoGravity:videoGravity];
}
1176
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    // RetainPtr::get() already yields nil when no metadata has been cached.
    return m_currentMetaData.get();
}
1183
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    // Serialize the player item's access log in its extended textual format;
    // messaging a nil log yields a nil string and thus a null String.
    AVPlayerItemAccessLog *accessLog = [m_avPlayerItem.get() accessLog];
    RetainPtr<NSString> serializedLog = adoptNS([[NSString alloc] initWithData:[accessLog extendedLogData] encoding:[accessLog extendedLogDataStringEncoding]]);
    return serializedLog.get();
}
1194
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    // Serialize the player item's error log in its extended textual format;
    // messaging a nil log yields a nil string and thus a null String.
    AVPlayerItemErrorLog *errorLog = [m_avPlayerItem.get() errorLog];
    RetainPtr<NSString> serializedLog = adoptNS([[NSString alloc] initWithData:[errorLog extendedLogData] encoding:[errorLog extendedLogDataStringEncoding]]);
    return serializedLog.get();
}
1205 #endif
1206
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    // Hide or show the video layer without triggering implicit animations.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    if (m_videoLayer)
        [m_videoLayer.get() setHidden:!isVisible];
    [CATransaction commit];
}
1215     
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPlay(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    // Query the requested rate once so the cached value and the rate handed to
    // AVPlayer cannot diverge (the original called requestedRate() twice).
    double rate = requestedRate();
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1227
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPause(%p)", this);
    if (!metaDataAvailable())
        return;

    // Pause by setting the rate to 0, remembering it in the cache first.
    setDelayCallbacks(true);
    m_cachedRate = 0;
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
1239
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will fetch the
    // answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();
    
    CMTime cmDuration;
    
    // Check the AVItem if we have one and it has loaded duration, some assets never report duration.
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        cmDuration = [m_avPlayerItem.get() duration];
    else
        cmDuration = [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return toMediaTime(cmDuration);

    // An indefinite duration maps to infinity (e.g. an ongoing stream).
    if (CMTIME_IS_INDEFINITE(cmDuration))
        return MediaTime::positiveInfiniteTime();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %s", this, toString(MediaTime::invalidTime()).utf8().data());
    return MediaTime::invalidTime();
}
1264
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    // Clamp non-numeric and negative item times to zero.
    CMTime playerItemTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(playerItemTime))
        return MediaTime::zeroTime();

    return std::max(toMediaTime(playerItemTime), MediaTime::zeroTime());
}
1276
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // Partially-accumulated metadata cues would straddle the seek; flush them.
    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = toCMTime(time);
    CMTime cmBefore = toCMTime(negativeTolerance);
    CMTime cmAfter = toCMTime(positiveTolerance);

    auto weakThis = createWeakPtr();

    // The completion handler may fire after this object is destroyed; report
    // completion on the main thread through the weak pointer.
    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1303
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS)
    // Volume is controlled system-wide on iOS; the player-level value is ignored.
    UNUSED_PARAM(volume);
#else
    if (metaDataAvailable())
        [m_avPlayer.get() setVolume:volume];
#endif
}
1316
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    // Intentionally does nothing beyond logging; the parameter is unused here.
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - set to %s", this, boolString(closedCaptionsVisible));
}
1326
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    // Cache the rate before handing it to AVPlayer so rate() can answer
    // without querying the player.
    setDelayCallbacks(true);
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1334
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    // Report the cached rate; 0 until metadata has become available.
    return metaDataAvailable() ? m_cachedRate : 0;
}
1342
void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
{
    if (!m_avPlayerItem)
        return;

    // Spectral preserves pitch across rate changes; Varispeed does not.
    NSString *algorithm = preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed;
    [m_avPlayerItem setAudioTimePitchAlgorithm:algorithm];
}
1348
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto ranges = std::make_unique<PlatformTimeRanges>();
    if (!m_avPlayerItem)
        return ranges;

    // Convert every valid, non-empty CMTimeRange in the cached loaded ranges.
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;
        ranges->add(toMediaTime(range.start), toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return ranges;
}
1363
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    // Track the earliest start among all valid, non-empty seekable ranges;
    // report zero when no such range exists.
    MediaTime earliestStart = MediaTime::positiveInfiniteTime();
    bool foundValidRange = false;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundValidRange = true;
        earliestStart = std::min(earliestStart, toMediaTime(range.start));
    }
    return foundValidRange ? earliestStart : MediaTime::zeroTime();
}
1383
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    // Lazily pull the seekable ranges from the item when nothing is cached yet.
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    // The answer is the largest end among all valid, non-empty ranges.
    MediaTime latestEnd;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latestEnd = std::max(latestEnd, toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return latestEnd;
}
1401
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    // The answer is the largest end among all valid, non-empty loaded ranges.
    MediaTime latestEnd;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latestEnd = std::max(latestEnd, toMediaTime(CMTimeRangeGetEnd(range)));
    }

    return latestEnd;
}
1420
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    // Lazily sum the sample data length of every cached track; the result is
    // memoized in m_cachedTotalBytes.
    if (!m_cachedTotalBytes) {
        for (AVPlayerItemTrack *itemTrack in m_cachedTracks.get())
            m_cachedTotalBytes += [[itemTrack assetTrack] totalSampleDataLength];
    }

    return m_cachedTotalBytes;
}
1434
void MediaPlayerPrivateAVFoundationObjC::setAsset(id asset)
{
    // Replace our AVAsset with one supplied by the caller.
    m_avAsset = asset;
}
1439
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    // The overall status is the "worst" status across every metadata key we need.
    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];
#if !LOG_DISABLED
        if (error)
            LOG(Media, "MediaPlayerPrivateAVFoundation::assetStatus - statusOfValueForKey failed for %s, error = %s", [keyName UTF8String], [[error localizedDescription] UTF8String]);
#endif

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading;// At least one key is not loaded yet.
        
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    // Every key loaded; distinguish a playable asset from a merely loaded one.
    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue])
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1468
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    if (!m_avAsset)
        return 0;

    // statusOfValueForKey fills in the error when the key failed to load;
    // messaging a nil error with -code returns 0.
    NSError *error = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&error];
    return [error code];
}
1478
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext* context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Prefer the video output path when it has a frame available; otherwise
    // fall back to the image-generator path.
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    m_videoFrameHasDrawn = true;
}
1499
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext* context, const FloatRect& rect)
{
    // paint() is best effort: skip when metadata is missing, painting is
    // disabled, we are already compositing through a layer, or no renderer
    // (image generator or video output) exists yet.
    bool shouldPaint = metaDataAvailable()
        && !context->paintingDisabled()
        && currentRenderingMode() != MediaRenderingToLayer
        && hasContextRenderer();

    if (shouldPaint)
        paintCurrentFrameInContext(context, rect);
}
1515
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext* context, const FloatRect& rect)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(%p)", this);

    RetainPtr<CGImageRef> image = createImageForTimeInRect(currentTime(), rect);
    if (!image)
        return;

    // Flip the coordinate system so the CGImage draws upright, then blit it at
    // the requested size using low interpolation quality for speed.
    GraphicsContextStateSaver stateSaver(*context);
    context->translate(rect.x(), rect.y() + rect.height());
    context->scale(FloatSize(1.0f, -1.0f));
    context->setImageInterpolationQuality(InterpolationLow);
    IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context->platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
}
1531
// Returns the set of MIME types AVFoundation reports as playable, lowercased.
// The list is computed once and cached; AVURLAsset's answer does not change
// during the lifetime of the process.
static HashSet<String> mimeTypeCache()
{
    DEPRECATED_DEFINE_STATIC_LOCAL(HashSet<String>, cache, ());
    static bool typeListInitialized = false;

    if (typeListInitialized)
        return cache;
    typeListInitialized = true;

    NSArray *types = [AVURLAsset audiovisualMIMETypes];
    for (NSString *mimeType in types)
        cache.add([mimeType lowercaseString]);

    return cache;
}

// Snapshots the media at |time| via the AVAssetImageGenerator, constrained to
// at most |rect|'s size, and copies the result into the device RGB color space.
// May return a null image if snapshot creation fails.
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    // 600 is the timescale used to convert the float |time| to a CMTime.
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), deviceRGBColorSpaceRef()));

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return image;
}
1569
// Reports every MIME type this engine can play (see mimeTypeCache()).
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String>& supportedTypes)
{
    supportedTypes = mimeTypeCache();
}

1575 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// Returns true for the key systems this engine handles: Apple FairPlay
// Streaming ("com.apple.fps", "com.apple.fps.1_0") and W3C Clear Key.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringCase(keySystem, "com.apple.fps")
        || equalIgnoringCase(keySystem, "com.apple.fps.1_0")
        || equalIgnoringCase(keySystem, "org.w3c.clearkey");
}
1582 #endif
1583
// Implements MediaPlayer's canPlayType() support query for this engine.
// Checks key-system restrictions (when EME is enabled), rejects media-source
// loads, then consults the MIME type cache and AVFoundation's extended-MIME
// check for codec-qualified types.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(ENCRYPTED_MEDIA)
    // From: <http://dvcs.w3.org/hg/html-media/raw-file/eme-v0.1b/encrypted-media/encrypted-media.html#dom-canplaytype>
    // In addition to the steps in the current specification, this method must run the following steps:

    // 1. Check whether the Key System is supported with the specified container and codec type(s) by following the steps for the first matching condition from the following list:
    //    If keySystem is null, continue to the next step.
    if (!parameters.keySystem.isNull() && !parameters.keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        if (equalIgnoringCase(parameters.keySystem, "org.w3c.clearkey") && !parameters.type.isEmpty() && !equalIgnoringCase(parameters.type, "application/x-mpegurl"))
            return MediaPlayer::IsNotSupported;

        // If keySystem contains an unrecognized or unsupported Key System, return the empty string
        if (!keySystemIsSupported(parameters.keySystem))
            return MediaPlayer::IsNotSupported;

        // If the Key System specified by keySystem does not support decrypting the container and/or codec specified in the rest of the type string.
        // (AVFoundation does not provide an API which would allow us to determine this, so this is a no-op)
    }

    // 2. Return "maybe" or "probably" as appropriate per the existing specification of canPlayType().
#endif

#if ENABLE(MEDIA_SOURCE)
    // Media Source loads are handled by a different engine.
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif

    if (!mimeTypeCache().contains(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    // With codecs present, ask AVFoundation about the fully-qualified type.
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type, (NSString *)parameters.codecs];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1624
// Returns whether |keySystem| (optionally restricted to |mimeType|) is
// supported by this engine. Always false when EME is compiled out or
// |keySystem| is empty.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (!keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        // (Previously returned MediaPlayer::IsNotSupported, a SupportsType
        // enumerator, from this bool function; now returns false explicitly.)
        if (equalIgnoringCase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringCase(mimeType, "application/x-mpegurl"))
            return false;

        if (!keySystemIsSupported(keySystem))
            return false;

        if (!mimeType.isEmpty() && !mimeTypeCache().contains(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1647
1648 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1649 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Answers a resource-loading |request| with the contents of |keyData|,
// honoring the request's byte range, then marks the request finished.
static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
{
    // Advertise the full key length and byte-range support up front.
    if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
        [infoRequest setContentLength:keyData->byteLength()];
        [infoRequest setByteRangeAccessSupported:YES];
    }

    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
        long long start = [dataRequest currentOffset];
        // Clamp the end of the requested range to the key's actual length.
        long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);

        // A range that starts beyond the key data (or is negative) cannot be
        // satisfied; fail the request.
        if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
            [request finishLoadingWithError:nil];
            return;
        }

        ASSERT(start <= std::numeric_limits<int>::max());
        ASSERT(end <= std::numeric_limits<int>::max());
        RefPtr<ArrayBuffer> requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
        RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
        [dataRequest respondWithData:nsData.get()];
    }

    [request finishLoading];
}
1675 #endif
1676
// AVAssetResourceLoader delegate hook: decides whether WebCore will service
// |avRequest| itself. Returns true when the request will be satisfied
// asynchronously (key delivery or WebCore's loader), false when AVFoundation
// should consider it handled/failed immediately.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    // "skd" is the FairPlay Streaming key-request scheme.
    if (scheme == "skd") {
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
        initDataView->set<uint32_t>(0, keyURISize, true);

        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, 4, keyURI.length());
        // NOTE(review): the copy length divides by sizeof(unsigned char) (== 1),
        // which looks like a leftover of a byte/UChar conversion — confirm intent.
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

#if ENABLE(ENCRYPTED_MEDIA)
        if (!player()->keyNeeded("com.apple.lskd", emptyString(), static_cast<const unsigned char*>(initDataBuffer->data()), initDataBuffer->byteLength()))
#elif ENABLE(ENCRYPTED_MEDIA_V2)
        RefPtr<Uint8Array> initData = Uint8Array::create(initDataBuffer, 0, initDataBuffer->byteLength());
        if (!player()->keyNeeded(initData.get()))
#endif
            return false;

        // Remember the request so a later key delivery can fulfill it.
        m_keyURIToRequestMap.set(keyURI, avRequest);
        return true;
#if ENABLE(ENCRYPTED_MEDIA_V2)
    } else if (scheme == "clearkey") {
        String keyID = [[[avRequest request] URL] resourceSpecifier];
        StringView keyIDView(keyID);
        CString utf8EncodedKeyId = UTF8Encoding().encode(keyIDView, URLEncodedEntitiesForUnencodables);

        RefPtr<Uint8Array> initData = Uint8Array::create(utf8EncodedKeyId.length());
        initData->setRange((JSC::Uint8Adaptor::Type*)utf8EncodedKeyId.data(), utf8EncodedKeyId.length(), 0);

        // If the key is already cached, answer the request right away;
        // returning false because nothing is left pending.
        auto keyData = player()->cachedKeyForKeyId(keyID);
        if (keyData) {
            fulfillRequestWithKeyData(avRequest, keyData.get());
            return false;
        }

        if (!player()->keyNeeded(initData.get()))
            return false;

        m_keyURIToRequestMap.set(keyID, avRequest);
        return true;
#endif
    }
#endif

    // All other resources are loaded through WebCore's loader machinery.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1733
// Forwards an NSURLAuthenticationChallenge from AVFoundation to the
// MediaPlayer client, converting it to WebCore's AuthenticationChallenge
// (via CFNetwork types when USE(CFNETWORK) is set).
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForResponseToAuthenticationChallenge(NSURLAuthenticationChallenge* nsChallenge)
{
#if USE(CFNETWORK)
    RefPtr<WebCoreNSURLAuthenticationChallengeClient> client = WebCoreNSURLAuthenticationChallengeClient::create(nsChallenge);
    RetainPtr<CFURLAuthChallengeRef> cfChallenge = adoptCF([nsChallenge _createCFAuthChallenge]);
    AuthenticationChallenge challenge(cfChallenge.get(), client.get());
#else
    AuthenticationChallenge challenge(nsChallenge);
#endif

    return player()->shouldWaitForResponseToAuthenticationChallenge(challenge);
}
1746
// Called when AVFoundation cancels |avRequest|; stops the WebCore loader
// servicing it (the map entry itself is removed in didStopLoadingRequest()).
// The previous version also computed the request URL's scheme into an unused
// local, which has been removed.
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest))
        resourceLoader->stopLoading();
}
1756
// Called when loading for |avRequest| has fully stopped; drops our loader.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
1761 #endif
1762
// This engine is usable only when both the AVFoundation and CoreMedia
// frameworks can be soft-linked at runtime.
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    return AVFoundationLibrary() && isCoreMediaFrameworkAvailable();
}
1767
// Maps a wall-clock time value to an exact media-timeline time. Currently an
// identity mapping; see the FIXME below.
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1776
// How long (in seconds) a cached currentTime value may be reused before
// re-querying AVFoundation: no caching on iOS and OS X 10.10+, 5 seconds on
// older OS X.
double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 1010
    return 0;
#else
    return 5;
#endif
}
1785
// Applies the aspect-ratio policy to the AVPlayerLayer's video gravity inside
// a CATransaction with implicit animations disabled.
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

#if PLATFORM(IOS)
    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayer)
        return;
#endif

    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    NSString* gravity = shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize;
    [m_videoLayer.get() setVideoGravity:gravity];
    [CATransaction commit];
}
1804
// Returns the first track in |tracks| whose "enabled" flag is set, or nil when
// no track is enabled (including the empty-array case).
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    NSUInteger firstEnabledIndex = [tracks indexOfObjectPassingTest:^(id obj, NSUInteger, BOOL *) {
        return [static_cast<AVAssetTrack*>(obj) isEnabled];
    }];
    return firstEnabledIndex == NSNotFound ? nil : [tracks objectAtIndex:firstEnabledIndex];
}
1814
// Invoked whenever the asset/player-item track collection changes. Recomputes
// and caches hasVideo/hasAudio/hasClosedCaptions, refreshes the VIDEO_TRACK
// bookkeeping, and fires characteristicsChanged() if the primary audio
// language changed.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Snapshot the previous primary-audio language so we can detect a change
    // at the end of this function.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    // Coalesce characteristic-change notifications until we are done updating.
    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif

        // Report the video size with the track's preferred transform applied.
        presentationSizeDidChange(firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize());
    } else {
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        // Classify each enabled player-item track by its media type.
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // A selected media-selection option also counts as having audio/video.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
#endif
#endif

        // Always say we have video if the AVPlayerLayer is ready for display to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Prefer the legible media-selection group for caption detection when the
    // platform supports it.
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

    // Without legible-output support, fall back to legacy 608/708 CC tracks.
#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    LOG(Media, "MediaPlayerPrivateAVFoundation:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s",
        this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

    setDelayCharacteristicsChangedNotification(false);
}
1915
1916 #if ENABLE(VIDEO_TRACK)
// Diffs the current AVPlayerItemTracks of |trackType| against the previously
// known |oldItems|: wraps newly appeared tracks with |itemFactory|, updates
// |oldItems| in place, and notifies |player| of each removal and addition.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Collect the current tracks of the requested media type.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    // Partition the old items into survivors and removals, then append wrappers
    // for the newly added tracks.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the client about every change.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
1960
1961 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Media-selection-group overload of the diff above: compares the group's
// current selection options (ignoring those backed by a persistent track)
// against |oldItems|, updates |oldItems| in place, and notifies |player| of
// removals and additions.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions();

    // Only add selection options which do not have an associated persistant track.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOptionType* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        if (![avOption respondsToSelector:@selector(track)] || ![avOption performSelector:@selector(track)])
            newSelectionOptions.add(option);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    // Partition old items into survivors and removals, then wrap each newly
    // added option via |itemFactory|.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (oldItem->mediaSelectionOption() && removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the client about every change.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
2021 #endif
2022
// Synchronizes m_audioTracks with the player item's current audio tracks (and,
// when available, the audible media-selection group), then refreshes each
// track's cached properties.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create the audible selection group wrapper on first use.
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group);
    }

    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
#endif

    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateAudioTracks(%p) - audio track count was %lu, is %lu", this, count, m_audioTracks.size());
#endif
}
2048
// Synchronizes m_videoTracks with the player item's current video tracks (and,
// when available, the visual media-selection group), then refreshes each
// track's cached properties. Fixes a copy-paste bug where the refresh loop
// iterated m_audioTracks, leaving video track properties stale.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create the visual selection group wrapper on first use.
    if (!m_visualGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group);
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
#endif

    // Refresh the video tracks, mirroring what updateAudioTracks() does for
    // m_audioTracks (previously this iterated m_audioTracks by mistake).
    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateVideoTracks(%p) - video track count was %lu, is %lu", this, count, m_videoTracks.size());
#endif
}
2074
// WebCore must render captions itself (into a text track representation
// layer) only while in iOS fullscreen; everywhere else the answer is no.
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
#if PLATFORM(IOS)
    return !!m_videoFullscreenLayer;
#else
    return false;
#endif
}
2083
// Keeps the caption representation layer's frame in sync with the video frame
// while in iOS fullscreen. No-op on other platforms.
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
#if PLATFORM(IOS)
    if (!m_videoFullscreenLayer || !m_textTrackRepresentationLayer)
        return;

    // Prefer the player layer's actual video rect; fall back to the full
    // fullscreen frame when there is no video layer.
    CGRect textFrame = m_videoLayer ? [m_videoLayer videoRect] : CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height());
    [m_textTrackRepresentationLayer setFrame:textFrame];
#endif
}
2094
// Installs (or removes, when |representation| is null) the platform layer that
// renders captions, parenting it under the fullscreen layer on iOS.
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
#if PLATFORM(IOS)
    PlatformLayer* representationLayer = representation ? representation->platformLayer() : nil;
    // Same layer as before: just make sure its bounds are current.
    if (representationLayer == m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        return;
    }

    if (m_textTrackRepresentationLayer)
        [m_textTrackRepresentationLayer removeFromSuperlayer];

    m_textTrackRepresentationLayer = representationLayer;

    if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
    }

#else
    UNUSED_PARAM(representation);
#endif
}
2118 #endif // ENABLE(VIDEO_TRACK)
2119
2120 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
// Lazily creates and returns the Web Audio source provider wrapping the
// current player item.
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    if (!m_provider)
        m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
    return m_provider.get();
}
2127 #endif
2128
// Pushes the cached presentation size to the base class as the natural size.
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (!m_avAsset)
        return;

    setNaturalSize(m_cachedPresentationSize);
}
2136     
// True when the URL we requested and the URL AVFoundation actually resolved
// to share scheme/host/port, i.e. loading did not cross origins.
bool MediaPlayerPrivateAVFoundationObjC::hasSingleSecurityOrigin() const
{
    // Until the "resolvedURL" key has loaded we cannot prove anything, so
    // conservatively answer false.
    if (!m_avAsset || [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] != AVKeyValueStatusLoaded)
        return false;

    Ref<SecurityOrigin> resolvedOrigin(SecurityOrigin::create(resolvedURL()));
    Ref<SecurityOrigin> requestedOrigin(SecurityOrigin::createFromString(assetURL()));
    return resolvedOrigin.get().isSameSchemeHostPort(&requestedOrigin.get());
}
2146
2147 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Creates the AVPlayerItemVideoOutput used to pull decoded frames and attaches
// it to the current player item. No-op without a player item or if the output
// already exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // With VideoToolbox we accept the decoder's native format and convert later
    // in createPixelBuffer(); otherwise request 32BGRA directly.
    NSDictionary* attributes = nil;
#else
    NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                nil];
#endif
    m_videoOutput = adoptNS([allocAVPlayerItemVideoOutputInstance() initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    // Deliver "media data will change" callbacks on the shared pull queue.
    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p) - returning %p", this, m_videoOutput.get());
}
2170
// Detaches the video output from the player item (if any) and drops our
// reference to it.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput(%p) - destroying  %p", this, m_videoOutput.get());

    m_videoOutput = 0;
}
2182
// Pulls the pixel buffer for the player item's current time from the video
// output, converting it to 32BGRA via VideoToolbox when that path is enabled.
// Returns null when no new frame is available for the current time.
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::createPixelBuffer()
{
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return 0;

    RetainPtr<CVPixelBufferRef> buffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
    if (!buffer)
        return 0;

#if USE(VIDEOTOOLBOX)
    // Create a VTPixelTransferSession, if necessary, as we cannot guarantee timely delivery of ARGB pixels.
    if (!m_pixelTransferSession) {
        VTPixelTransferSessionRef session = 0;
        VTPixelTransferSessionCreate(kCFAllocatorDefault, &session);
        m_pixelTransferSession = adoptCF(session);
    }

    // NOTE(review): the return codes of CVPixelBufferCreate and
    // VTPixelTransferSessionTransferImage are not checked; on failure
    // outputBuffer could be used uninitialized — confirm whether failure is
    // possible here.
    CVPixelBufferRef outputBuffer;
    CVPixelBufferCreate(kCFAllocatorDefault, CVPixelBufferGetWidth(buffer.get()), CVPixelBufferGetHeight(buffer.get()), kCVPixelFormatType_32BGRA, 0, &outputBuffer);
    VTPixelTransferSessionTransferImage(m_pixelTransferSession.get(), buffer.get(), outputBuffer);
    buffer = adoptCF(outputBuffer);
#endif

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createPixelBuffer(%p) - creating buffer took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return buffer;
}
2223
// Returns whether a frame can be painted via the video-output path: either a
// previously captured image exists, or the output (created on demand) has a
// new pixel buffer for the current time.
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;

    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    return [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]];
}
2237
// CGDataProvider "get byte pointer" callback: locks the pixel buffer's base
// address for read-only CPU access. Unlocked again in
// CVPixelBufferReleaseBytePointerCallback.
static const void* CVPixelBufferGetBytePointerCallback(void* info)
{
    CVPixelBufferRef buffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferLockBaseAddress(buffer, kCVPixelBufferLock_ReadOnly);
    return CVPixelBufferGetBaseAddress(buffer);
}
2244
// CGDataProvider "release byte pointer" callback: balances the read-only lock
// taken in CVPixelBufferGetBytePointerCallback.
static void CVPixelBufferReleaseBytePointerCallback(void* info, const void*)
{
    CVPixelBufferRef buffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferUnlockBaseAddress(buffer, kCVPixelBufferLock_ReadOnly);
}
2250
// CGDataProvider "release info" callback: balances the CFRetain taken on the
// pixel buffer in createImageFromPixelBuffer().
static void CVPixelBufferReleaseInfoCallback(void* info)
{
    CVPixelBufferRef buffer = static_cast<CVPixelBufferRef>(info);
    CFRelease(buffer);
}
2256
// Wraps a 32BGRA pixel buffer in a CGImage without copying the pixels: a
// direct-access CGDataProvider reads straight from the (locked) buffer, and
// the provider's release callback drops the retain taken here.
static RetainPtr<CGImageRef> createImageFromPixelBuffer(CVPixelBufferRef pixelBuffer)
{
    // pixelBuffer will be of type kCVPixelFormatType_32BGRA.
    ASSERT(CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_32BGRA);

    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
    size_t byteLength = CVPixelBufferGetDataSize(pixelBuffer);
    // 32-bit little-endian with alpha first matches the BGRA byte layout.
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaFirst;

    CFRetain(pixelBuffer); // Balanced by CVPixelBufferReleaseInfoCallback in providerCallbacks.
    CGDataProviderDirectCallbacks providerCallbacks = { 0, CVPixelBufferGetBytePointerCallback, CVPixelBufferReleaseBytePointerCallback, 0, CVPixelBufferReleaseInfoCallback };
    RetainPtr<CGDataProviderRef> provider = adoptCF(CGDataProviderCreateDirect(pixelBuffer, byteLength, &providerCallbacks));

    return adoptCF(CGImageCreate(width, height, 8, 32, bytesPerRow, deviceRGBColorSpaceRef(), bitmapInfo, provider.get(), NULL, false, kCGRenderingIntentDefault));
}
2274
// Refreshes m_lastImage from the video output. When no new pixel buffer is
// available the previous image is deliberately kept.
void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
{
    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
    // for the requested time has already been retrieved. In this case, the last valid image (if any)
    // should be displayed.
    if (RetainPtr<CVPixelBufferRef> pixelBuffer = createPixelBuffer())
        m_lastImage = createImageFromPixelBuffer(pixelBuffer.get());
}
2285
// Paints the current video frame (obtained via the AVPlayerItemVideoOutput
// path) into the given graphics context, honoring the track's preferred
// transform. Blocks for up to one second waiting for a first frame if none is
// available yet.
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext* context, const FloatRect& outputRect)
{
    // If we have no frame at all yet, wait (bounded) for the output to signal one.
    if (m_videoOutput && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();

    updateLastImage();

    if (!m_lastImage)
        return;

    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(%p)", this);

    GraphicsContextStateSaver stateSaver(*context);
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
    // Map the destination rect through the inverse transform so that applying
    // the transform to the CTM below lands the image in outputRect.
    FloatRect transformedOutputRect = videoTransform.inverse().mapRect(outputRect);

    context->concatCTM(videoTransform);
    context->drawNativeImage(m_lastImage.get(), imageRect.size(), ColorSpaceDeviceRGB, transformedOutputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();

}
2316
// Returns the most recent video frame as a native (CGImage) pointer,
// refreshing m_lastImage from the video output first. May return null if no
// frame has ever been produced.
PassNativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage.get();
}
2322
// Blocks the calling thread until the video output reports new media data
// (signaled from outputMediaDataWillChange below) or a one-second timeout
// elapses, whichever comes first.
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    // Lazily create the semaphore used to hand the signal across threads.
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for 1 second.
    long result = dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(0, 1 * NSEC_PER_SEC));

    // dispatch_semaphore_wait returns non-zero on timeout.
    if (result)
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange(%p) timed out", this);
}
2336
// Delegate callback from the video output: wakes up any thread blocked in
// waitForVideoOutputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutput*)
{
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
2341 #endif
2342
2343 #if ENABLE(ENCRYPTED_MEDIA)
// EME v1 entry point: given init data containing a key URI, key ID and an
// application certificate, asks AVFoundation for a streaming content key
// request and forwards it to the page via keyMessage(). Errors are reported
// through keyError(); the pending AVAssetResourceLoadingRequest is re-keyed
// from key URI to the newly created session ID.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::generateKeyRequest(const String& keySystem, const unsigned char* initDataPtr, unsigned initDataLength)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    RefPtr<Uint8Array> initData = Uint8Array::create(initDataPtr, initDataLength);
    String keyURI;
    String keyID;
    RefPtr<Uint8Array> certificate;
    if (!extractKeyURIKeyIDAndCertificateFromInitData(initData.get(), keyURI, keyID, certificate))
        return MediaPlayer::InvalidPlayerState;

    // A resource-loading request for this key URI must already be pending.
    if (!m_keyURIToRequestMap.contains(keyURI))
        return MediaPlayer::InvalidPlayerState;

    String sessionID = createCanonicalUUIDString();

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_keyURIToRequestMap.get(keyURI);

    RetainPtr<NSData> certificateData = adoptNS([[NSData alloc] initWithBytes:certificate->baseAddress() length:certificate->byteLength()]);
    // Implicit WTF::String -> NSString* conversion; the key ID doubles as the
    // asset identifier passed to AVFoundation (as UTF-8 bytes).
    NSString* assetStr = keyID;
    RetainPtr<NSData> assetID = [NSData dataWithBytes: [assetStr cStringUsingEncoding:NSUTF8StringEncoding] length:[assetStr lengthOfBytesUsingEncoding:NSUTF8StringEncoding]];
    NSError* error = 0;
    RetainPtr<NSData> keyRequest = [avRequest.get() streamingContentKeyRequestDataForApp:certificateData.get() contentIdentifier:assetID.get() options:nil error:&error];

    if (!keyRequest) {
        NSError* underlyingError = [[error userInfo] objectForKey:NSUnderlyingErrorKey];
        player()->keyError(keySystem, sessionID, MediaPlayerClient::DomainError, [underlyingError code]);
        return MediaPlayer::NoError;
    }

    RefPtr<ArrayBuffer> keyRequestBuffer = ArrayBuffer::create([keyRequest.get() bytes], [keyRequest.get() length]);
    RefPtr<Uint8Array> keyRequestArray = Uint8Array::create(keyRequestBuffer, 0, keyRequestBuffer->byteLength());
    player()->keyMessage(keySystem, sessionID, keyRequestArray->data(), keyRequestArray->byteLength(), URL());

    // Move ownership of the AVAssetResourceLoadingRequest from the m_keyURIToRequestMap to the m_sessionIDToRequestMap:
    m_sessionIDToRequestMap.set(sessionID, avRequest);
    m_keyURIToRequestMap.remove(keyURI);

    return MediaPlayer::NoError;
}
2385
// EME v1: delivers key data for a previously generated request. The key bytes
// are handed to the pending AVAssetResourceLoadingRequest, loading is
// finished, and the session's request entry is retired.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::addKey(const String& keySystem, const unsigned char* keyPtr, unsigned keyLength, const unsigned char* initDataPtr, unsigned initDataLength, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_sessionIDToRequestMap.get(sessionID);
    RetainPtr<NSData> keyData = adoptNS([[NSData alloc] initWithBytes:keyPtr length:keyLength]);
    [[avRequest.get() dataRequest] respondWithData:keyData.get()];
    [avRequest.get() finishLoading];
    m_sessionIDToRequestMap.remove(sessionID);

    player()->keyAdded(keySystem, sessionID);

    // Init data is not needed on this path; parameters exist for API symmetry.
    UNUSED_PARAM(initDataPtr);
    UNUSED_PARAM(initDataLength);
    return MediaPlayer::NoError;
}
2406
// EME v1: abandons a pending key request by dropping the session's
// AVAssetResourceLoadingRequest entry.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::cancelKeyRequest(const String& keySystem, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;
    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;
    m_sessionIDToRequestMap.remove(sessionID);
    return MediaPlayer::NoError;
}
2418 #endif
2419
2420 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Removes and returns the pending resource-loading request for the given key
// URI; returns a null RetainPtr if none is pending.
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
2425
// Called when the CDM has cached new keys: fulfills every pending
// AVAssetResourceLoadingRequest whose key is now available and drops those
// entries from the pending map.
void MediaPlayerPrivateAVFoundationObjC::keyAdded()
{
    Vector<String> fulfilledKeyIds;

    for (auto& entry : m_keyURIToRequestMap) {
        if (auto keyData = player()->cachedKeyForKeyId(entry.key)) {
            fulfillRequestWithKeyData(entry.value.get(), keyData.get());
            fulfilledKeyIds.append(entry.key);
        }
    }

    // Remove outside the loop so the map is not mutated while being iterated.
    for (auto& keyId : fulfilledKeyIds)
        m_keyURIToRequestMap.remove(keyId);
}
2445
// EME v2: creates a CDM session for a supported key system, or null otherwise.
std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem)
{
    return keySystemIsSupported(keySystem) ? std::make_unique<CDMSessionAVFoundationObjC>(this) : nullptr;
}
2453 #endif
2454
2455 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
// Fallback path for systems without AVPlayerItemLegibleOutput: synchronizes
// m_textTracks with the legacy closed-caption tracks in m_cachedTracks.
// Existing tracks still present are kept, new CC tracks get a private track
// object, and processNewAndRemovedTextTracks() retires the rest.
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    // Start by assuming every current track was removed; tracks found again
    // below are taken back out of this list.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {
        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            // Index into removedTextTracks, not m_textTracks: once entries have
            // been removed from removedTextTracks the two vectors no longer
            // line up, so indexing m_textTracks here could compare (and remove)
            // the wrong track.
            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(removedTextTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;

        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2490 #endif
2491
2492 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Returns true only once the asset's availableMediaCharacteristicsWithMediaSelectionOptions
// value has finished loading, making the selection-group accessors below safe to call.
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    if (!m_avAsset)
        return false;
    return [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2503
// Returns the legible (caption/subtitle) selection group, or nil if the
// asset's selection metadata has not finished loading.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible] : nil;
}
2511
// Returns the audible selection group, or nil if the asset's selection
// metadata has not finished loading.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible] : nil;
}
2519
// Returns the visual selection group, or nil if the asset's selection
// metadata has not finished loading.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual] : nil;
}
2527
// Synchronizes m_textTracks with the asset's legible media-selection options:
// keeps tracks whose option is still offered, appends new in-band (and, with
// AVF_CAPTIONS, out-of-band) tracks, and hands the leftovers to
// processNewAndRemovedTextTracks() for removal.
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Assume every current track was removed; tracks matched below are kept.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        // Iterate backwards so remove(i - 1) does not disturb unvisited indices.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy CC tracks are handled elsewhere; they have no selection option.
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2583
// Lazily creates the single in-band metadata text track used for HLS timed
// metadata and registers it with the media player.
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    if (m_metadataTrack)
        return;

    m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
    // Dispatch type identifying Apple HLS timed metadata to script.
    m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
    player()->addTextTrack(m_metadataTrack);
}
2593
// Forwards legible-output cue payloads (attributed strings and/or native
// sample buffers) to the currently selected text track, if any.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    if (!m_currentTextTrack)
        return;

    // Toll-free bridge the NSArrays to CFArrayRefs expected by the track.
    m_currentTextTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), reinterpret_cast<CFArrayRef>(nativeSamples), time);
}
2601
// Discards any partially accumulated cue state on the current text track,
// e.g. after a seek invalidates in-flight cues.
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::flushCues(%p)", this);

    if (!m_currentTextTrack)
        return;
    
    m_currentTextTrack->resetCueValues();
}
2611 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2612
// Selects the given text track in AVFoundation (or deselects all when track is
// null). Legacy CC tracks toggle closed-caption display on the player; other
// categories select the matching option in the legible selection group.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");
        
    m_currentTextTrack = track;

    if (track) {
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
        // No track selected: clear the legible selection and turn CC display off.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
    }

}
2641
// Determines (and caches in m_languageOfPrimaryAudioTrack) the language of
// the primary audio track: preferring the currently selected audible option,
// then falling back to the language of a sole audio track; empty otherwise.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    // Return the cached answer if we already computed it.
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        // Zero or multiple audio tracks: no single "primary" language exists.
        m_languageOfPrimaryAudioTrack = emptyString();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - %lu audio tracks, returning emptyString()", this, static_cast<unsigned long>(tracks ? [tracks count] : 0));
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

#if !LOG_DISABLED
    if (m_languageOfPrimaryAudioTrack == emptyString())
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
    else
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
#endif

    return m_languageOfPrimaryAudioTrack;
}
2683
2684 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// Reports whether playback is currently routed to an external (wireless)
// target, as observed on the AVPlayer.
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    if (!m_avPlayer)
        return false;

    bool isWireless = [m_avPlayer.get() isExternalPlaybackActive];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless(%p) - returning %s", this, boolString(isWireless));
    return isWireless;
}
2694
// Maps the WebKitSystemInterface external-device type for the current player
// onto MediaPlayer's wireless-target enumeration.
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

    switch (wkExernalDeviceTypeForPlayer(m_avPlayer.get())) {
    case wkExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case wkExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case wkExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    // All enum values are handled above; reaching here indicates a new,
    // unmapped device type.
    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;
}
2712
// Returns the user-visible name of the external playback device (e.g. an
// AppleTV), or the empty string when there is no player.
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();
    
    String wirelessTargetName = wkExernalDeviceDisplayNameForPlayer(m_avPlayer.get());
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName(%p) - returning %s", this, wirelessTargetName.utf8().data());

    return wirelessTargetName;
}
2723
// Returns whether wireless (external) video playback is disabled. When a
// player exists, refreshes the cached m_allowsWirelessVideoPlayback (mutable,
// hence usable from this const method) from the player's allowsExternalPlayback.
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    // No player yet: answer from the cached value set via the setter.
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;
    
    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled(%p) - returning %s", this, boolString(!m_allowsWirelessVideoPlayback));

    return !m_allowsWirelessVideoPlayback;
}
2734
// Records the wireless-playback preference and pushes it to the AVPlayer when
// one exists; otherwise the cached flag is applied when the player is created.
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(%p) - %s", this, boolString(disabled));
    m_allowsWirelessVideoPlayback = !disabled;
    if (!m_avPlayer)
        return;
    
    [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
}
2744
// Enables external-screen playback on the AVPlayer only while a fullscreen
// video layer is attached.
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
    if (!m_avPlayer)
        return;

    [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:m_videoFullscreenLayer != nil];
}
2752 #endif
2753
// KVO callback: caches the AVPlayerItem status and recomputes derived state.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;

    updateStates();
}
2760
// KVO prior-notification for "playbackLikelyToKeepUp": defer state updates
// until the matching DidChange decrements the pending count.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    m_pendingStatusChanges++;
}
2765
// KVO callback: caches the new value and updates state only once all paired
// WillChange notifications have been balanced.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2774
// KVO prior-notification for "playbackBufferEmpty": see
// playbackLikelyToKeepUpWillChange for the pending-count protocol.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    m_pendingStatusChanges++;
}
2779
// KVO callback: caches the buffer-empty flag; updates state once all pending
// WillChange notifications are balanced.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2788
// KVO prior-notification for "playbackBufferFull": see
// playbackLikelyToKeepUpWillChange for the pending-count protocol.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    m_pendingStatusChanges++;
}
2793
// KVO callback: caches the buffer-full flag; updates state once all pending
// WillChange notifications are balanced.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2802
// KVO callback: caches the item's seekable ranges and notifies observers.
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray> seekableRanges)
{
    m_cachedSeekableRanges = seekableRanges;

    seekableTimeRangesChanged();
    updateStates();
}
2810
// KVO callback: caches the item's loaded (buffered) ranges and notifies observers.
void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray> loadedRanges)
{
    m_cachedLoadedRanges = loadedRanges;

    loadedTimeRangesChanged();
    updateStates();
}
2818
// KVO callback for the layer's readyForDisplay: the first displayable frame
// may reveal a video track before the tracks array did, so re-run tracksChanged.
void MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange(bool isReady)
{
    m_cachedIsReadyForDisplay = isReady;
    if (!hasVideo() && isReady)
        tracksChanged();
    updateStates();
}
2826
// KVO callback for an AVPlayerItemTrack's "enabled": recompute track state.
void MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange(bool)
{
    tracksChanged();
    updateStates();
}
2832
// Starts or stops media buffering by attaching or detaching m_avPlayerItem
// from the AVPlayer. Detaching stops AVFoundation from loading more data; the
// item itself is retained so it can be re-attached later.
void MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(bool shouldBuffer)
{
    // Log message previously misidentified this method as "shouldBufferData".
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(%p) - %s", this, boolString(shouldBuffer));
    if (m_shouldBufferData == shouldBuffer)
        return;

    m_shouldBufferData = shouldBuffer;

    if (!m_avPlayer)
        return;

    setAVPlayerItem(shouldBuffer ? m_avPlayerItem.get() : nil);
}
2846
2847 #if ENABLE(DATACUE_VALUE)
// Maps an AVMetadataKeySpace constant to the DataCue "type" string exposed to
// script; returns the empty atom for unknown key spaces.
static const AtomicString& metadataType(NSString *avMetadataKeySpace)
{
    static NeverDestroyed<const AtomicString> quickTimeUserData("com.apple.quicktime.udta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> isoUserData("org.mp4ra", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> quickTimeMetadata("com.apple.quicktime.mdta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> iTunesMetadata("com.apple.itunes", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> id3Metadata("org.id3", AtomicString::ConstructFromLiteral);

    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeUserData])
        return quickTimeUserData;
    // AVMetadataKeySpaceISOUserData is soft-linked and may be null on older
    // systems, so test the constant itself before comparing against it.
    if (AVMetadataKeySpaceISOUserData && [avMetadataKeySpace isEqualToString:AVMetadataKeySpaceISOUserData])
        return isoUserData;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeMetadata])
        return quickTimeMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceiTunes])
        return iTunesMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceID3])
        return id3Metadata;

    return emptyAtom;
}
2869 #endif
2870
// KVO callback for timed metadata: caches the incoming AVMetadataItems and,
// with DATACUE_VALUE enabled, converts them into data cues on the metadata
// text track, closing out any still-open cues first.
void MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(RetainPtr<NSArray> metadata, const MediaTime& mediaTime)
{
    // KVO can deliver NSNull for "no metadata"; normalize that to nil.
    m_currentMetaData = metadata && ![metadata isKindOfClass:[NSNull class]] ? metadata : nil;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(%p) - adding %i cues at time %s", this, m_currentMetaData ? static_cast<int>([m_currentMetaData.get() count]) : 0, toString(mediaTime).utf8().data());

#if ENABLE(DATACUE_VALUE)
    // While seeking, stale metadata would produce cues at the wrong times.
    if (seeking())
        return;

    if (!m_metadataTrack)
        processMetadataTrack();

    if (!metadata || [metadata isKindOfClass:[NSNull class]]) {
        m_metadataTrack->updatePendingCueEndTimes(mediaTime);
        return;
    }

    // Set the duration of all incomplete cues before adding new ones.
    MediaTime earliestStartTime = MediaTime::positiveInfiniteTime();
    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = toMediaTime(item.time);
        if (start < earliestStartTime)
            earliestStartTime = start;
    }
    m_metadataTrack->updatePendingCueEndTimes(earliestStartTime);

    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = toMediaTime(item.time);
        // Items without a valid duration become open-ended cues, closed later
        // by updatePendingCueEndTimes.
        MediaTime end = MediaTime::positiveInfiniteTime();
        if (CMTIME_IS_VALID(item.duration))
            end = start + toMediaTime(item.duration);

        AtomicString type = nullAtom;
        if (item.keySpace)
            type = metadataType(item.keySpace);

        m_metadataTrack->addDataCue(start, end, SerializedPlatformRepresentationMac::create(item), type);
    }
#endif
}
2912
// KVO callback for the item's tracks array: rebuilds m_cachedTracks, filtering
// out streaming tracks that are represented by a media-selection group, and
// moves the "enabled" KVO observer from the old track set to the new one.
void MediaPlayerPrivateAVFoundationObjC::tracksDidChange(RetainPtr<NSArray> tracks)
{
    // Stop observing the tracks we are about to replace.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];

    NSArray *assetTracks = [m_avAsset tracks];

    m_cachedTracks = [tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id obj, NSUInteger, BOOL*) {
        AVAssetTrack* assetTrack = [obj assetTrack];

        // Tracks backed by the asset itself are always kept.
        if ([assetTracks containsObject:assetTrack])
            return YES;

        // Track is a streaming track. Omit if it belongs to a valid AVMediaSelectionGroup.
        if (!hasLoadedMediaSelectionGroups())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicAudible] && safeMediaSelectionGroupForAudibleMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicVisual] && safeMediaSelectionGroupForVisualMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible] && safeMediaSelectionGroupForLegibleMedia())
            return NO;

        return YES;
    }]];

    // Begin observing "enabled" on the new track set (balanced above on the
    // next change).
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track addObserver:m_objcObserver.get() forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayerItemTrack];

    // Total size depends on the track set; force it to be recomputed.
    m_cachedTotalBytes = 0;

    tracksChanged();
    updateStates();
}
2950
// KVO callback: caches whether the item has any enabled audio and recomputes
// track-derived state.
void MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange(bool hasEnabledAudio)
{
    m_cachedHasEnabledAudio = hasEnabledAudio;

    tracksChanged();
    updateStates();
}
2958
// KVO callback: caches the item's presentation size and notifies observers.
void MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange(FloatSize size)
{
    m_cachedPresentationSize = size;

    sizeChanged();
    updateStates();
}
2966
// KVO callback: caches the new duration and invalidates the base class's
// cached value so the next query re-reads it.
void MediaPlayerPrivateAVFoundationObjC::durationDidChange(const MediaTime& duration)
{
    m_cachedDuration = duration;

    invalidateCachedDuration();
}
2973
// KVO callback: caches the player's playback rate and notifies observers.
void MediaPlayerPrivateAVFoundationObjC::rateDidChange(double rate)
{
    m_cachedRate = rate;

    updateStates();
    rateChanged();
}
2981     
2982 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// KVO callback: the external-playback route changed; forward to the base class.
void MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange()
{
    playbackTargetIsWirelessChanged();
}
2987 #endif
2988
// KVO callback: caches whether the item supports fast-forward playback.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange(bool newValue)
{
    m_cachedCanPlayFastForward = newValue;
}
2993
// KVO callback: caches whether the item supports fast-reverse playback.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange(bool newValue)
{
    m_cachedCanPlayFastReverse = newValue;
}
2998
// Returns the asset's post-redirect URL once that key has loaded; otherwise
// falls back to the base class's notion of the resolved URL.
URL MediaPlayerPrivateAVFoundationObjC::resolvedURL() const
{
    if (!m_avAsset || [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] != AVKeyValueStatusLoaded)
        return MediaPlayerPrivateAVFoundation::resolvedURL();

    return URL([m_avAsset resolvedURL]);
}
3006
// AVAsset keys that must finish loading (via -loadValuesAsynchronouslyForKeys:)
// before the asset's metadata is usable.
//
// Function-local static initialization is thread-safe in C++ ("magic statics",
// and this is an Objective-C++ file), unlike the previous checked lazy
// initialization, which could race and double-allocate if first called from
// two threads. This also matches the pattern already used by
// assetTrackMetadataKeyNames(). The array is intentionally allocated once and
// never released; it lives for the lifetime of the process.
NSArray* assetMetadataKeyNames()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"duration",
        @"naturalSize",
        @"preferredTransform",
        @"preferredVolume",
        @"preferredRate",
        @"playable",
        @"resolvedURL",
        @"tracks",
        @"availableMediaCharacteristicsWithMediaSelectionOptions",
        nil];
    return keys;
}
3024
// AVPlayerItem key paths this player observes via KVO (see
// -[WebCoreAVFMovieObserver observeValueForKeyPath:...] for the handlers).
//
// Function-local static initialization is thread-safe in C++ ("magic statics"),
// unlike the previous checked lazy initialization, which could race and
// double-allocate if first called from two threads. This also matches the
// pattern already used by assetTrackMetadataKeyNames(). The array is
// intentionally allocated once and never released.
NSArray* itemKVOProperties()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"presentationSize",
        @"status",
        @"asset",
        @"tracks",
        @"seekableTimeRanges",
        @"loadedTimeRanges",
        @"playbackLikelyToKeepUp",
        @"playbackBufferFull",
        @"playbackBufferEmpty",
        @"duration",
        @"hasEnabledAudio",
        @"timedMetadata",
        @"canPlayFastForward",
        @"canPlayFastReverse",
        nil];
    return keys;
}
3047
// AVAssetTrack keys that must finish loading before per-track metadata is used.
// Allocated once via thread-safe C++ static initialization and intentionally
// never released; the array lives for the lifetime of the process.
NSArray* assetTrackMetadataKeyNames()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:
        @"totalSampleDataLength",
        @"mediaType",
        @"enabled",
        @"preferredTransform",
        @"naturalSize",
        nil];
    return keys;
}
3053
3054 } // namespace WebCore
3055
@implementation WebCoreAVFMovieObserver

// Adapter object that relays AVFoundation KVO notifications, NSNotifications,
// and legible (caption) output callbacks to the owning
// MediaPlayerPrivateAVFoundationObjC. Callbacks may arrive on arbitrary
// threads; all substantive work is forwarded to the main thread.

// Stores a raw back-pointer to the player; cleared by -disconnect before the
// player is destroyed.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

// Severs the link to the player so any late callbacks become no-ops.
- (void)disconnect
{
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    m_callback = 0;
}

// Invoked once the asset's metadata keys have finished loading; forwards an
// AssetMetadataLoaded notification to the player on the main thread.
- (void)metadataLoaded
{
    if (!m_callback)
        return;

    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
}

// Playback reached the end of the item. Presumably registered for
// AVPlayerItemDidPlayToEndTimeNotification -- the registration is not visible
// in this part of the file; confirm against the setup code.
- (void)didEnd:(NSNotification *)unusedNotification
{
    UNUSED_PARAM(unusedNotification);
    if (!m_callback)
        return;
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
}

// Central KVO entry point. `context` identifies which kind of observed object
// the change came from (player layer, player item track, player item, or
// player); the matching keyPath selects a player member function, which is
// bound into `function` and executed on the main thread at the bottom.
// NOTE(review): keyPath is declared without a type (implicitly id); KVO
// delivers an NSString * here -- consider declaring it as such.
- (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context
{
    UNUSED_PARAM(object);
    // May be nil or NSNull for some key paths; each branch below extracts the
    // representation it expects.
    id newValue = [change valueForKey:NSKeyValueChangeNewKey];

    if (!m_callback)
        return;

    // YES for the "prior" notification delivered before the value actually
    // changes (requires NSKeyValueObservingOptionPrior); selects the
    // ...WillChange handlers instead of the ...DidChange ones.
    bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];

#if !LOG_DISABLED
    if (willChange)
        LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - will change, keyPath = %s", self, [keyPath UTF8String]);
    else {
        RetainPtr<NSString> valueString = adoptNS([[NSString alloc] initWithFormat:@"%@", newValue]);
        LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - did change, keyPath = %s, value = %s", self, [keyPath UTF8String], [valueString.get() UTF8String]);
    }
#endif

    WTF::Function<void ()> function;

    // AVPlayerLayer: only "readyForDisplay" is observed.
    if (context == MediaPlayerAVFoundationObservationContextAVPlayerLayer) {
        if ([keyPath isEqualToString:@"readyForDisplay"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange, m_callback, [newValue boolValue]);
    }

    // AVPlayerItemTrack: only "enabled" is observed (registered in
    // tracksDidChange above).
    if (context == MediaPlayerAVFoundationObservationContextPlayerItemTrack) {
        if ([keyPath isEqualToString:@"enabled"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange, m_callback, [newValue boolValue]);
    }

    // Prior ("will change") notifications for the AVPlayerItem: only the
    // buffering-related keys have will-change handlers.
    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && willChange) {
        if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange, m_callback);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && !willChange) {
        // A value changed for an AVPlayerItem
        if ([keyPath isEqualToString:@"status"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange, m_callback, [newValue intValue]);
        else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"asset"])
            // NOTE(review): the new value of "asset" is presumably an AVAsset,
            // not an NSArray; RetainPtr<NSArray> only retains/releases so this
            // works, but the template argument looks mislabeled -- confirm
            // against setAsset()'s parameter type.
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::setAsset, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"loadedTimeRanges"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"seekableTimeRanges"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"tracks"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::tracksDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"hasEnabledAudio"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"presentationSize"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange, m_callback, FloatSize([newValue sizeValue]));
        else if ([keyPath isEqualToString:@"duration"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::durationDidChange, m_callback, toMediaTime([newValue CMTimeValue]));
        else if ([keyPath isEqualToString:@"timedMetadata"] && newValue) {
            // Capture the item's current time alongside the metadata, clamped
            // to zero and defaulting to an invalid MediaTime when the item
            // time is not numeric.
            MediaTime now;
            CMTime itemTime = [(AVPlayerItemType *)object currentTime];
            if (CMTIME_IS_NUMERIC(itemTime))
                now = std::max(toMediaTime(itemTime), MediaTime::zeroTime());
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::metadataDidArrive, m_callback, RetainPtr<NSArray>(newValue), now);
        } else if ([keyPath isEqualToString:@"canPlayFastReverse"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"canPlayFastForward"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange, m_callback, [newValue boolValue]);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayer && !willChange) {
        // A value changed for an AVPlayer.
        if ([keyPath isEqualToString:@"rate"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::rateDidChange, m_callback, [newValue doubleValue]);
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
        else if ([keyPath isEqualToString:@"externalPlaybackActive"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange, m_callback);
#endif
    }
    
    // No handler matched this context/keyPath combination.
    if (function.isNull())
        return;

    auto weakThis = m_callback->createWeakPtr();
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis, function]{
        // weakThis and function both refer to the same MediaPlayerPrivateAVFoundationObjC instance. If the WeakPtr has
        // been cleared, the underlying object has been destroyed, and it is unsafe to call function().
        if (!weakThis)
            return;
        function();
    }));
}

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// AVPlayerItemLegibleOutput delegate callback delivering attributed caption
// strings; retains the payload and processes it on the main thread.
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime
{
    UNUSED_PARAM(output);
    UNUSED_PARAM(nativeSamples);

    if (!m_callback)
        return;

    // Retain self and both arrays across the main-thread hop; m_callback is
    // re-checked on the main thread because -disconnect may race this hop.
    RetainPtr<WebCoreAVFMovieObserver> strongSelf = self;
    RetainPtr<NSArray> strongStrings = strings;
    RetainPtr<NSArray> strongSamples = nativeSamples;
    callOnMainThread([strongSelf, strongStrings, strongSamples, itemTime] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (!callback)
            return;
        callback->processCue(strongStrings.get(), strongSamples.get(), toMediaTime(itemTime));
    });
}

// The legible output's sequence was flushed (e.g. after a seek); tells the
// player to discard queued cues, on the main thread.
- (void)outputSequenceWasFlushed:(id)output
{
    UNUSED_PARAM(output);

    if (!m_callback)
        return;
    
    RetainPtr<WebCoreAVFMovieObserver> strongSelf = self;
    callOnMainThread([strongSelf] {
        if (MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback)
            callback->flushCues();
    });
}
#endif

@end
3224
3225 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
@implementation WebCoreAVFLoaderDelegate

// Trampoline that forwards AVAssetResourceLoader delegate callbacks -- which
// AVFoundation delivers on a background queue -- to the owning
// MediaPlayerPrivateAVFoundationObjC on the main thread.

- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (self)
        m_callback = callback;
    return self;
}

- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    // Keep the delegate and the request alive across the main-thread hop.
    RetainPtr<WebCoreAVFLoaderDelegate> protectedSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> protectedRequest = loadingRequest;
    callOnMainThread([protectedSelf, protectedRequest] {
        MediaPlayerPrivateAVFoundationObjC* player = protectedSelf->m_callback;
        // Finish the request with no data when the player is gone or declines
        // to handle it (short-circuit: the player is only consulted if still
        // alive).
        if (!player || !player->shouldWaitForLoadingOfResource(protectedRequest.get()))
            [protectedRequest finishLoadingWithError:nil];
    });

    // Always tell AVFoundation to wait; the main-thread hop decides the outcome.
    return YES;
}

- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForResponseToAuthenticationChallenge:(NSURLAuthenticationChallenge *)challenge
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    // Server-trust challenges are not handled here; returning NO lets
    // AVFoundation apply its default handling.
    if ([[[challenge protectionSpace] authenticationMethod] isEqualToString:NSURLAuthenticationMethodServerTrust])
        return NO;

    RetainPtr<WebCoreAVFLoaderDelegate> protectedSelf = self;
    RetainPtr<NSURLAuthenticationChallenge> protectedChallenge = challenge;
    callOnMainThread([protectedSelf, protectedChallenge] {
        MediaPlayerPrivateAVFoundationObjC* player = protectedSelf->m_callback;
        // Cancel the challenge when the player is gone or declines to respond.
        if (!player || !player->shouldWaitForResponseToAuthenticationChallenge(protectedChallenge.get()))
            [[protectedChallenge sender] cancelAuthenticationChallenge:protectedChallenge.get()];
    });

    return YES;
}

- (void)resourceLoader:(AVAssetResourceLoader *)resourceLoader didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFLoaderDelegate> protectedSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> protectedRequest = loadingRequest;
    callOnMainThread([protectedSelf, protectedRequest] {
        if (MediaPlayerPrivateAVFoundationObjC* player = protectedSelf->m_callback)
            player->didCancelLoadingRequest(protectedRequest.get());
    });
}

- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    m_callback = callback;
}
@end
3304 #endif
3305
3306 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
@implementation WebCoreAVFPullDelegate

// Receives AVPlayerItemVideoOutput pull-mode notifications and forwards them
// to the owning MediaPlayerPrivateAVFoundationObjC.

- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    m_callback = callback;
}

- (void)outputMediaDataWillChange:(AVPlayerItemVideoOutput *)output
{
    if (!m_callback)
        return;
    m_callback->outputMediaDataWillChange(output);
}

- (void)outputSequenceWasFlushed:(AVPlayerItemVideoOutput *)output
{
    UNUSED_PARAM(output);
    // Intentionally a no-op.
}
@end
3333 #endif
3334
3335 #endif