Remove unnecessary semicolons.
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27
28 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
29 #import "MediaPlayerPrivateAVFoundationObjC.h"
30
31 #import "AVFoundationSPI.h"
32 #import "AVTrackPrivateAVFObjCImpl.h"
33 #import "AudioSourceProviderAVFObjC.h"
34 #import "AudioTrackPrivateAVFObjC.h"
35 #import "AuthenticationChallenge.h"
36 #import "BlockExceptions.h"
37 #import "CDMSessionAVFoundationObjC.h"
38 #import "Cookie.h"
39 #import "ExceptionCodePlaceholder.h"
40 #import "FloatConversion.h"
41 #import "FloatConversion.h"
42 #import "GraphicsContext.h"
43 #import "GraphicsContextCG.h"
44 #import "InbandMetadataTextTrackPrivateAVF.h"
45 #import "InbandTextTrackPrivateAVFObjC.h"
46 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
47 #import "OutOfBandTextTrackPrivateAVF.h"
48 #import "URL.h"
49 #import "Logging.h"
50 #import "MediaPlaybackTargetMac.h"
51 #import "MediaSelectionGroupAVFObjC.h"
52 #import "MediaTimeAVFoundation.h"
53 #import "PlatformTimeRanges.h"
54 #import "QuartzCoreSPI.h"
55 #import "SecurityOrigin.h"
56 #import "SerializedPlatformRepresentationMac.h"
57 #import "TextEncoding.h"
58 #import "TextTrackRepresentation.h"
59 #import "UUID.h"
60 #import "VideoTrackPrivateAVFObjC.h"
61 #import "WebCoreAVFResourceLoader.h"
62 #import "WebCoreCALayerExtras.h"
63 #import "WebCoreSystemInterface.h"
64 #import <functional>
65 #import <objc/runtime.h>
66 #import <runtime/DataView.h>
67 #import <runtime/JSCInlines.h>
68 #import <runtime/TypedArrayInlines.h>
69 #import <runtime/Uint16Array.h>
70 #import <runtime/Uint32Array.h>
71 #import <runtime/Uint8Array.h>
72 #import <wtf/CurrentTime.h>
73 #import <wtf/ListHashSet.h>
74 #import <wtf/NeverDestroyed.h>
75 #import <wtf/text/CString.h>
76 #import <wtf/text/StringBuilder.h>
77
78 #if ENABLE(AVF_CAPTIONS)
79 #include "TextTrack.h"
80 #endif
81
82 #import <AVFoundation/AVFoundation.h>
83 #if PLATFORM(IOS)
84 #import "WAKAppKitStubs.h"
85 #import <CoreImage/CoreImage.h>
86 #import <mach/mach_port.h>
87 #else
88 #import <Foundation/NSGeometry.h>
89 #import <QuartzCore/CoreImage.h>
90 #endif
91
92 #if USE(VIDEOTOOLBOX)
93 #import <CoreVideo/CoreVideo.h>
94 #import <VideoToolbox/VideoToolbox.h>
95 #endif
96
97 #if USE(CFNETWORK)
98 #include "CFNSURLConnectionSPI.h"
99 #endif
100
101 namespace std {
102 template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
103     typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
104 };
105 }
106
// Container layer that keeps its sublayers (the AVPlayerLayer) sized and
// positioned to exactly fill it.
@interface WebVideoContainerLayer : CALayer
@end

@implementation WebVideoContainerLayer

// Propagate bounds changes to every sublayer so the hosted video layer
// always covers the container.
- (void)setBounds:(CGRect)bounds
{
    [super setBounds:bounds];
    for (CALayer* layer in self.sublayers)
        layer.frame = bounds;
}

- (void)setPosition:(CGPoint)position
{
    if (!CATransform3DIsIdentity(self.transform)) {
        // Pre-apply the transform added in the WebProcess to fix <rdar://problem/18316542> to the position.
        position = CGPointApplyAffineTransform(position, CATransform3DGetAffineTransform(self.transform));
    }
    [super setPosition:position];
}
@end
128
129 #if ENABLE(AVF_CAPTIONS)
130 // Note: This must be defined before our SOFT_LINK macros:
131 @class AVMediaSelectionOption;
132 @interface AVMediaSelectionOption (OutOfBandExtensions)
133 @property (nonatomic, readonly) NSString* outOfBandSource;
134 @property (nonatomic, readonly) NSString* outOfBandIdentifier;
135 @end
136 #endif
137
138 @interface AVURLAsset (WebKitExtensions)
139 @property (nonatomic, readonly) NSURL *resolvedURL;
140 @end
141
142 typedef AVPlayer AVPlayerType;
143 typedef AVPlayerItem AVPlayerItemType;
144 typedef AVPlayerItemLegibleOutput AVPlayerItemLegibleOutputType;
145 typedef AVPlayerItemVideoOutput AVPlayerItemVideoOutputType;
146 typedef AVMetadataItem AVMetadataItemType;
147 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
148 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
149
150 #pragma mark - Soft Linking
151
152 // Soft-linking headers must be included last since they #define functions, constants, etc.
153 #import "CoreMediaSoftLink.h"
154
155 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
156 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
157 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreVideo)
158
159 #if USE(VIDEOTOOLBOX)
160 SOFT_LINK_FRAMEWORK_OPTIONAL(VideoToolbox)
161 #endif
162
163 SOFT_LINK(CoreVideo, CVPixelBufferGetWidth, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
164 SOFT_LINK(CoreVideo, CVPixelBufferGetHeight, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
165 SOFT_LINK(CoreVideo, CVPixelBufferGetBaseAddress, void*, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
166 SOFT_LINK(CoreVideo, CVPixelBufferGetBytesPerRow, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
167 SOFT_LINK(CoreVideo, CVPixelBufferGetDataSize, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
168 SOFT_LINK(CoreVideo, CVPixelBufferGetPixelFormatType, OSType, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
169 SOFT_LINK(CoreVideo, CVPixelBufferLockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
170 SOFT_LINK(CoreVideo, CVPixelBufferUnlockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
171
172 #if USE(VIDEOTOOLBOX)
173 SOFT_LINK(VideoToolbox, VTPixelTransferSessionCreate, OSStatus, (CFAllocatorRef allocator, VTPixelTransferSessionRef *pixelTransferSessionOut), (allocator, pixelTransferSessionOut))
174 SOFT_LINK(VideoToolbox, VTPixelTransferSessionTransferImage, OSStatus, (VTPixelTransferSessionRef session, CVPixelBufferRef sourceBuffer, CVPixelBufferRef destinationBuffer), (session, sourceBuffer, destinationBuffer))
175 #endif
176
177 SOFT_LINK_CLASS(AVFoundation, AVPlayer)
178 SOFT_LINK_CLASS(AVFoundation, AVPlayerItem)
179 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemVideoOutput)
180 SOFT_LINK_CLASS(AVFoundation, AVPlayerLayer)
181 SOFT_LINK_CLASS(AVFoundation, AVURLAsset)
182 SOFT_LINK_CLASS(AVFoundation, AVAssetImageGenerator)
183 SOFT_LINK_CLASS(AVFoundation, AVMetadataItem)
184
185 SOFT_LINK_CLASS(CoreImage, CIContext)
186 SOFT_LINK_CLASS(CoreImage, CIImage)
187
188 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString*)
189 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString*)
190 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
191 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
192 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
193 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
194 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
195 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeMetadata, NSString *)
196 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
197 SOFT_LINK_POINTER(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
198 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
199 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
200 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
201 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
202 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
203 SOFT_LINK_POINTER(CoreVideo, kCVPixelBufferPixelFormatTypeKey, NSString *)
204
205 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
206
207 #define AVPlayer getAVPlayerClass()
208 #define AVPlayerItem getAVPlayerItemClass()
209 #define AVPlayerLayer getAVPlayerLayerClass()
210 #define AVURLAsset getAVURLAssetClass()
211 #define AVAssetImageGenerator getAVAssetImageGeneratorClass()
212 #define AVMetadataItem getAVMetadataItemClass()
213
214 #define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
215 #define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
216 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
217 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
218 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
219 #define AVMediaTypeVideo getAVMediaTypeVideo()
220 #define AVMediaTypeAudio getAVMediaTypeAudio()
221 #define AVMediaTypeMetadata getAVMediaTypeMetadata()
222 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
223 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
224 #define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
225 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
226 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
227 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
228 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
229 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
230 #define kCVPixelBufferPixelFormatTypeKey getkCVPixelBufferPixelFormatTypeKey()
231
232 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
233 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
234 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
235
236 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
237 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
238 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
239
240 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
241 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
242 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
243 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
244
245 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
246 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
247 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
248 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
249 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
250 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
251 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
252 #endif
253
254 #if ENABLE(AVF_CAPTIONS)
255 SOFT_LINK_POINTER(AVFoundation, AVURLAssetHTTPCookiesKey, NSString*)
256 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
257 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
258 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
259 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
260 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
261 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
262 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
263 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
264 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
265
266 #define AVURLAssetHTTPCookiesKey getAVURLAssetHTTPCookiesKey()
267 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
268 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
269 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
270 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
271 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
272 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
273 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
274 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
275 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
276 #endif
277
278 #if ENABLE(DATACUE_VALUE)
279 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString*)
280 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMetadataKeySpaceISOUserData, NSString*)
281 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString*)
282 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceiTunes, NSString*)
283 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceID3, NSString*)
284
285 #define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
286 #define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
287 #define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
288 #define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
289 #define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
290 #endif
291
292 #if PLATFORM(IOS)
293 SOFT_LINK_POINTER(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
294 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
295 #endif
296
297 using namespace WebCore;
298
// KVO context values passed to -observeValueForKeyPath:... so the observer can
// tell which kind of object (item, track, player, or layer) a change came from.
enum MediaPlayerAVFoundationObservationContext {
    MediaPlayerAVFoundationObservationContextPlayerItem,
    MediaPlayerAVFoundationObservationContextPlayerItemTrack,
    MediaPlayerAVFoundationObservationContextPlayer,
    MediaPlayerAVFoundationObservationContextAVPlayerLayer,
};
305
// Observer object that receives KVO notifications, player-item notifications,
// and (when available) legible-output cues, forwarding them to the owning
// MediaPlayerPrivateAVFoundationObjC.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
#else
@interface WebCoreAVFMovieObserver : NSObject
#endif
{
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Owning player-private; detached via -disconnect.
    int m_delayCallbacks;
}
-(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
-(void)disconnect;
-(void)metadataLoaded;
-(void)didEnd:(NSNotification *)notification;
// NOTE(review): keyPath is implicitly typed id here, and context uses the enum
// rather than NSObject's void* — confirm this deliberately shadows the KVO override.
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;
#endif
@end
325
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// AVAssetResourceLoader delegate; forwards resource-loading requests to the
// owning MediaPlayerPrivateAVFoundationObjC.
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Cleared via -setCallback: during teardown.
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
@end
#endif
335
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// AVPlayerItemVideoOutput pull delegate; notifies the owning player-private
// when new video frames become available.
@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    MediaPlayerPrivateAVFoundationObjC *m_callback; // Cleared via -setCallback: during teardown.
    dispatch_semaphore_t m_semaphore;
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
@end
#endif
347
348 namespace WebCore {
349
350 static NSArray *assetMetadataKeyNames();
351 static NSArray *itemKVOProperties();
352 static NSArray *assetTrackMetadataKeyNames();
353 static NSArray *playerKVOProperties();
354 static AVAssetTrack* firstEnabledTrack(NSArray* tracks);
355
#if !LOG_DISABLED
// Human-readable form of a bool for LOG() messages.
static const char *boolString(bool val)
{
    if (val)
        return "true";
    return "false";
}
#endif
362
#if ENABLE(ENCRYPTED_MEDIA_V2)
typedef HashMap<MediaPlayer*, MediaPlayerPrivateAVFoundationObjC*> PlayerToPrivateMapType;

// Process-wide map from a MediaPlayer to its private AVFoundation
// implementation, maintained by the constructor/destructor below.
static PlayerToPrivateMapType& playerToPrivateMap()
{
    DEPRECATED_DEFINE_STATIC_LOCAL(PlayerToPrivateMapType, map, ());
    return map;
}
#endif
371
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Lazily-created serial queue shared by all players for delivering
// AVAssetResourceLoader delegate callbacks.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t queue;
    static dispatch_once_t token;
    dispatch_once(&token, ^{
        queue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return queue;
}
#endif
383
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Lazily-created serial queue shared by all players for delivering
// AVPlayerItemVideoOutput pull-delegate callbacks.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_queue_t queue;
    static dispatch_once_t token;
    dispatch_once(&token, ^{
        queue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return queue;
}
#endif
395
#if USE(CFNETWORK)
// Adapts an NSURLAuthenticationChallenge to WebCore's AuthenticationClient
// interface so shared authentication code can drive the challenge's sender.
class WebCoreNSURLAuthenticationChallengeClient : public RefCounted<WebCoreNSURLAuthenticationChallengeClient>, public AuthenticationClient {
public:
    static RefPtr<WebCoreNSURLAuthenticationChallengeClient> create(NSURLAuthenticationChallenge *challenge)
    {
        return adoptRef(new WebCoreNSURLAuthenticationChallengeClient(challenge));
    }

    // Expose RefCounted's ref/deref alongside the AuthenticationClient interface.
    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::ref;
    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::deref;

private:
    WebCoreNSURLAuthenticationChallengeClient(NSURLAuthenticationChallenge *challenge)
        : m_challenge(challenge)
    {
        ASSERT(m_challenge);
    }

    virtual void refAuthenticationClient() override { ref(); }
    virtual void derefAuthenticationClient() override { deref(); }

    // 'override' added to the handlers below for consistency with
    // refAuthenticationClient()/derefAuthenticationClient() and so the compiler
    // verifies each signature matches the AuthenticationClient base class.
    virtual void receivedCredential(const AuthenticationChallenge&, const Credential& credential) override
    {
        [[m_challenge sender] useCredential:credential.nsCredential() forAuthenticationChallenge:m_challenge.get()];
    }

    virtual void receivedRequestToContinueWithoutCredential(const AuthenticationChallenge&) override
    {
        [[m_challenge sender] continueWithoutCredentialForAuthenticationChallenge:m_challenge.get()];
    }

    virtual void receivedCancellation(const AuthenticationChallenge&) override
    {
        [[m_challenge sender] cancelAuthenticationChallenge:m_challenge.get()];
    }

    virtual void receivedRequestToPerformDefaultHandling(const AuthenticationChallenge&) override
    {
        // Optional sender method; probe before calling.
        if ([[m_challenge sender] respondsToSelector:@selector(performDefaultHandlingForAuthenticationChallenge:)])
            [[m_challenge sender] performDefaultHandlingForAuthenticationChallenge:m_challenge.get()];
    }

    virtual void receivedChallengeRejection(const AuthenticationChallenge&) override
    {
        // Optional sender method; probe before calling.
        if ([[m_challenge sender] respondsToSelector:@selector(rejectProtectionSpaceAndContinueWithChallenge:)])
            [[m_challenge sender] rejectProtectionSpaceAndContinueWithChallenge:m_challenge.get()];
    }

    RetainPtr<NSURLAuthenticationChallenge> m_challenge;
};
#endif
447
// Registers this engine with MediaPlayer's registry, but only when the
// soft-linked AVFoundation framework is actually available at runtime.
// The literal 0s fill registrar slots this engine does not implement —
// see the MediaEngineRegistrar signature for their meaning.
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (isAvailable())
        registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player); },
            getSupportedTypes, supportsType, 0, 0, 0, supportsKeySystem);
}
454
// Constructor. Cached state starts "empty/unknown"; the actual AVFoundation
// objects (asset, player, item, layer) are created lazily once loading begins.
// The ObjC observer/delegate helpers are created eagerly so they exist before
// any AVFoundation object needs them.
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_weakPtrFactory(this)
#if PLATFORM(IOS)
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
#endif
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
    , m_videoOutputSemaphore(nullptr)
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
#endif
    , m_currentTextTrack(0)
    , m_cachedRate(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_shouldBufferData(true)
    , m_cachedIsReadyForDisplay(false)
    , m_haveBeenAskedToCreateLayer(false)
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    // Let CDM session code map the MediaPlayer back to this private player.
    playerToPrivateMap().set(player, this);
#endif
}
491
// Destructor: detach every delegate/observer that could call back into this
// object, release the video output semaphore, then tear down rendering and
// cancel any in-flight load via cancelLoad().
// (Pointer arguments previously passed literal 0; modernized to nullptr,
// matching m_videoOutputSemaphore(nullptr) in the constructor.)
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    playerToPrivateMap().remove(player());
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    [m_loaderDelegate.get() setCallback:nullptr];
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:nullptr];

    // Invalidate outstanding resource loaders so they stop referencing us.
    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    [m_videoOutputDelegate setCallback:nullptr];
    [m_videoOutput setDelegate:nil queue:nullptr];
    if (m_videoOutputSemaphore)
        dispatch_release(m_videoOutputSemaphore);
#endif

    if (m_videoLayer)
        destroyVideoLayer();

    cancelLoad();
}
516
// Stops any in-flight loading and detaches from all AVFoundation objects,
// returning this object to its pre-load state. KVO observers are removed
// before the observed objects are released; cached values are reset last.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::cancelLoad(%p)", this);
    tearDownVideoRendering();

    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Detach the legible (caption) output from the item before dropping it.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    // Remove every KVO observation registered on the item and player before
    // releasing them.
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        
        m_avPlayerItem = nil;
    }
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;

        for (NSString *keyName in playerKVOProperties())
            [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = MediaTime::zeroTime();

    // Each cached track was observed for "enabled"; unobserve before dropping.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Disconnect the Web Audio source provider from the (now gone) item/track.
    if (m_provider) {
        m_provider->setPlayerItem(nullptr);
        m_provider->setAudioTrack(nullptr);
    }
#endif

    setIgnoreLoadStateChanges(false);
}
580
// A layer renderer exists once createVideoLayer() has committed to creating one.
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_haveBeenAskedToCreateLayer;
}
585
// A context (non-layer) renderer exists when either the video output or the
// image-generator fallback has been created.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (m_videoOutput)
        return true;
#endif
    return m_imageGenerator;
}
594
// Creates the machinery for painting frames into a graphics context:
// an AVPlayerItemVideoOutput when available, otherwise an image generator.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
603
// Creates (at most once, and only after the asset exists) the
// AVAssetImageGenerator used as a fallback to produce single video frames.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p)", this);

    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    // Exact-time frames (zero tolerance), clean aperture, and the track's
    // preferred transform applied.
    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
}
620
// Tears down both context-rendering paths (video output and image generator).
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
#endif
    destroyImageGenerator();
}
628
// Releases the image generator created by createImageGenerator(), if any.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (!m_imageGenerator)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator(%p) - destroying  %p", this, m_imageGenerator.get());

    // Clear with nil rather than the literal 0, matching how m_avAsset,
    // m_avPlayerItem and m_avPlayer are cleared elsewhere in this file.
    m_imageGenerator = nil;
}
638
// Schedules creation of the AVPlayerLayer on the main thread. The weak
// pointer guards against this object being destroyed before the hop completes.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
        return;

    auto weakThis = createWeakPtr();
    callOnMainThread([this, weakThis] {
        if (!weakThis)
            return;

        // Re-check: state may have changed before this ran on the main thread.
        if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
            return;
        m_haveBeenAskedToCreateLayer = true;

        if (!m_videoLayer)
            createAVPlayerLayer();

#if USE(VIDEOTOOLBOX)
        // With VideoToolbox, also keep a video output alongside the layer.
        if (!m_videoOutput)
            createVideoOutput();
#endif

        // Let the client know painting now happens via a layer.
        player()->client().mediaPlayerRenderingModeChanged(player());
    });
}
664
// Creates the AVPlayerLayer, attaches it to the player, registers the
// readyForDisplay KVO observation, and (on iOS) parents it inside either the
// fullscreen layer or a WebVideoContainerLayer for inline playback.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
{
    if (!m_avPlayer)
        return;

    m_videoLayer = adoptNS([allocAVPlayerLayerInstance() init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];
    [m_videoLayer setBackgroundColor:cachedCGColor(Color::black, ColorSpaceDeviceRGB)];
#ifndef NDEBUG
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
    // Balanced by the removeObserver: in destroyVideoLayer().
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
    IntSize defaultSize = player()->client().mediaPlayerContentBoxRect().pixelSnappedSize();
    // NOTE(review): log message names createVideoLayer (the public entry point),
    // not this helper — confirm intentional.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoLayer(%p) - returning %p", this, m_videoLayer.get());

#if PLATFORM(IOS)
    [m_videoLayer web_disableAllActions];
    m_videoInlineLayer = adoptNS([[WebVideoContainerLayer alloc] init]);
#ifndef NDEBUG
    [m_videoInlineLayer setName:@"WebVideoContainerLayer"];
#endif
    [m_videoInlineLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
    if (m_videoFullscreenLayer) {
        // Fullscreen active: size to the fullscreen frame and put the video first.
        [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
        [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
    } else {
        [m_videoInlineLayer insertSublayer:m_videoLayer.get() atIndex:0];
        [m_videoLayer setFrame:m_videoInlineLayer.get().bounds];
    }
#else
    [m_videoLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
#endif
}
700
// Tears down the AVPlayerLayer: unregisters the readyForDisplay observation
// added in createAVPlayerLayer(), detaches the player, and (on iOS) removes
// the layer from the fullscreen hierarchy.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer(%p) - destroying %p", this, m_videoLayer.get());

    [m_videoLayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer.get() setPlayer:nil];

#if PLATFORM(IOS)
    if (m_videoFullscreenLayer)
        [m_videoLayer removeFromSuperlayer];
    m_videoInlineLayer = nil;
#endif

    m_videoLayer = nil;
}
719
// A frame is available when the layer reports readyForDisplay (layer
// rendering) or after at least one frame has been drawn (context rendering).
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    bool isRenderingToLayer = currentRenderingMode() == MediaRenderingToLayer;
    return isRenderingToLayer ? m_cachedIsReadyForDisplay : m_videoFrameHasDrawn;
}
727
728 #if ENABLE(AVF_CAPTIONS)
// Maps a WebKit out-of-band text-track kind to the AVFoundation media
// characteristics used to describe it. Caption, Subtitle, and unknown kinds
// all map to the spoken-dialog characteristic, as in the original if-chain.
// FIXME: Match these to correct types:
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    switch (kind) {
    case PlatformTextTrack::Description:
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, nil];
    case PlatformTextTrack::Forced:
        return [NSArray arrayWithObjects: AVMediaCharacteristicContainsOnlyForcedSubtitles, nil];
    case PlatformTextTrack::Caption:
    case PlatformTextTrack::Subtitle:
    default:
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];
    }
}
746     
// Called when an out-of-band track's mode changes; forwards to the base class
// so the media selection state can be re-synchronized.
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
751     
// Pushes the mode (hidden/disabled/showing) of each out-of-band track source
// onto the matching InbandTextTrackPrivate, pairing them up via the unique ID
// stored in the AVMediaSelectionOption's outOfBandIdentifier.
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    
    for (auto& textTrack : m_textTracks) {
        // Only out-of-band tracks have a media selection option to match against.
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;
        
        RetainPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> currentOption = trackPrivate->mediaSelectionOption();
        
        for (auto& track : outOfBandTrackSources) {
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();
            
            if (![[currentOption.get() outOfBandIdentifier] isEqual: reinterpret_cast<const NSString*>(uniqueID.get())])
                continue;
            
            // Translate the platform track mode into the inband-track enum;
            // unknown modes fall back to Hidden.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;
            
            textTrack->setMode(mode);
            break;
        }
    }
}
782 #endif
783
784
// Returns the canonical form of |url| as computed by NSURLProtocol, falling
// back to the plain cocoa URL whenever canonicalization is not possible.
static NSURL *canonicalURL(const String& url)
{
    NSURL *cocoaURL = URL(ParsedURLString, url);
    if (url.isEmpty())
        return cocoaURL;

    RetainPtr<NSURLRequest> request = adoptNS([[NSURLRequest alloc] initWithURL:cocoaURL]);
    if (!request)
        return cocoaURL;

    NSURLRequest *canonicalRequest = [NSURLProtocol canonicalRequestForRequest:request.get()];
    return canonicalRequest ? [canonicalRequest URL] : cocoaURL;
}
801
802 #if PLATFORM(IOS)
// Converts a WebCore Cookie into the NSHTTPCookie form AVFoundation expects
// under AVURLAssetHTTPCookiesKey.
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    // cookie.expires is divided by 1000 before being handed to NSDate, which
    // takes seconds — presumably the stored value is in milliseconds.
    NSDictionary *requiredProperties = @{
        NSHTTPCookieName: cookie.name,
        NSHTTPCookieValue: cookie.value,
        NSHTTPCookieDomain: cookie.domain,
        NSHTTPCookiePath: cookie.path,
        NSHTTPCookieExpires: [NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)],
    };
    RetainPtr<NSMutableDictionary> properties = adoptNS([requiredProperties mutableCopy]);
    if (cookie.secure)
        [properties.get() setObject:@YES forKey:NSHTTPCookieSecure];
    if (cookie.session)
        [properties.get() setObject:@YES forKey:NSHTTPCookieDiscard];

    return [NSHTTPCookie cookieWithProperties:properties.get()];
}
820 #endif
821
// Lazily creates the AVURLAsset for |url|, building the option dictionary:
// reference restrictions, Referer/User-Agent headers, out-of-band text tracks,
// and (on iOS) client bundle identifier, network interface, and cookies.
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
{
    if (m_avAsset)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(%p) - url = %s", this, url.utf8().data());

    // Suppress observer callbacks while the asset is being configured.
    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);    

    // Forbid cross-origin local<->remote references inside the asset.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    // Only attach the header dictionary when it is non-empty.
    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject: [NSNumber numberWithBool: TRUE] forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

#if PLATFORM(IOS)
    // FIXME: rdar://problem/20354688
    // AVURLAssetClientBundleIdentifierKey is SPI and may be null at runtime.
    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty() && AVURLAssetClientBundleIdentifierKey)
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];
#endif

#if ENABLE(AVF_CAPTIONS)
    // Describe each out-of-band text track source so AVFoundation exposes it
    // as an alternate track; matched back up in synchronizeTextTrackState().
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: reinterpret_cast<const NSString*>(label.get()),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: reinterpret_cast<const NSString*>(language.get()),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: reinterpret_cast<const NSString*>(uniqueID.get()),
                AVOutOfBandAlternateTrackSourceKey: reinterpret_cast<const NSString*>(url.get()),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS)
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS)
    // Forward the page's cookies so media requests are authenticated like
    // other subresource loads.
    Vector<Cookie> cookies;
    if (player()->getRawCookies(URL(ParsedURLString, url), cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([allocAVURLAssetInstance() initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    [[m_avAsset.get() resourceLoader] setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];
#endif

    // A new asset has not been checked for playability yet.
    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
909
// Installs |item| as the AVPlayer's current item. The replacement must happen
// on the main thread, so off-main callers are bounced through dispatch_async.
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItemType *item)
{
    if (!m_avPlayer)
        return;

    if (!pthread_main_np()) {
        // Keep both the player and the item alive until the async block runs.
        RetainPtr<AVPlayerType> protectedPlayer = m_avPlayer.get();
        RetainPtr<AVPlayerItemType> protectedItem = item;
        dispatch_async(dispatch_get_main_queue(), [protectedPlayer, protectedItem] {
            [protectedPlayer replaceCurrentItemWithPlayerItem:protectedItem.get()];
        });
        return;
    }

    [m_avPlayer replaceCurrentItemWithPlayerItem:item];
}
926
// Lazily creates the AVPlayer, registers KVO observers for every player-level
// property we track, and hooks up external playback and the video layer.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayer(%p)", this);

    // Suppress callbacks while configuring so observers see a consistent state.
    setDelayCallbacks(true);

    m_avPlayer = adoptNS([allocAVPlayerInstance() init]);
    for (NSString *keyName in playerKVOProperties())
        [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Media selection is driven from WebCore, not by AVFoundation's own criteria.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET) && !PLATFORM(IOS)
    // Re-apply a playback target that was selected before the player existed.
    if (m_shouldPlayToPlaybackTarget)
        setShouldPlayToPlaybackTarget(true);
#endif

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    // Attach a player item that was created before the player, if any.
    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
962
// Lazily creates the AVPlayerItem for the current asset, registers for its
// end-of-playback notification and KVO properties, and wires up the legible
// (caption) output and the Web Audio provider where supported.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem(%p)", this);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([allocAVPlayerItemInstance() initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // Prior notifications let us distinguish "about to change" from "changed".
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Deliver WebVTT cues to our delegate this far ahead of their display time.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([allocAVPlayerItemLegibleOutputInstance() initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    // WebCore renders cues itself; keep AVFoundation from drawing them too.
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep an existing audio source provider pointed at the new item/track.
    if (m_provider) {
        m_provider->setPlayerItem(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack([m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
    }
#endif

    setDelayCallbacks(false);
}
1008
// Asynchronously loads the asset's "playable" key, at most once per asset.
// Completion is marshalled back to the main thread through a weak pointer so a
// destroyed player is not notified.
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::checkPlayability(%p)", this);
    auto weakThis = createWeakPtr();

    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"playable"] completionHandler:^{
        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
1025
// Asynchronously loads the asset-level metadata keys, then the per-track keys,
// using a dispatch group to learn when everything has finished; only then is
// the observer told that metadata is loaded.
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);

    // Enter the group once for the asset-level load; each track load enters
    // again, so the notify block fires only after all loads complete.
    dispatch_group_t metadataLoadingGroup = dispatch_group_create();
    dispatch_group_enter(metadataLoadingGroup);
    auto weakThis = createWeakPtr();
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup);
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup);
                    }];
                }
            }
            // Balance the initial enter above.
            dispatch_group_leave(metadataLoadingGroup);
        });
    }];

    dispatch_group_notify(metadataLoadingGroup, dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });

        dispatch_release(metadataLoadingGroup);
    });
}
1057
// Translates the cached AVPlayerItem state into WebCore's ItemStatus.
// The checks are ordered by priority: terminal states (unknown/failed) first,
// then progressively weaker buffering states, defaulting to ready-to-play.
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    if (m_cachedItemStatus == AVPlayerItemStatusUnknown)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    if (m_cachedItemStatus == AVPlayerItemStatusFailed)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1076
// Exposes the underlying AVPlayer to callers that need the platform object.
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformMedia(%p)", this);
    PlatformMedia media;
    media.type = PlatformMedia::AVFoundationMediaPlayerType;
    media.media.avfMediaPlayer = m_avPlayer.get();
    return media;
}
1085
// Returns the layer handed to the compositor, or null until one was requested.
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    if (!m_haveBeenAskedToCreateLayer)
        return nullptr;
#if PLATFORM(IOS)
    // On iOS the video layer is nested inside an inline container layer.
    return m_videoInlineLayer.get();
#else
    return m_videoLayer.get();
#endif
}
1094
1095 #if PLATFORM(IOS)
// Moves the video layer between the inline container and the fullscreen layer.
// When the move crosses CAContexts (separate render trees), a fence port keeps
// both contexts' commits synchronized so the switch is not visible as a flash.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer)
{
    if (m_videoFullscreenLayer == videoFullscreenLayer)
        return;

    m_videoFullscreenLayer = videoFullscreenLayer;

    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    
    // Find the root of the tree the layer is moving FROM.
    CALayer *oldRootLayer = videoFullscreenLayer;
    while (oldRootLayer.superlayer)
        oldRootLayer = oldRootLayer.superlayer;

    CALayer *newRootLayer = nil;
    
    // Reparent the video layer: into the fullscreen layer if one was set,
    // otherwise back into the inline container, otherwise just detach it.
    if (m_videoFullscreenLayer && m_videoLayer) {
        [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
        [m_videoLayer removeFromSuperlayer];
        [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
        newRootLayer = m_videoFullscreenLayer.get();
    } else if (m_videoInlineLayer && m_videoLayer) {
        [m_videoLayer setFrame:[m_videoInlineLayer bounds]];
        [m_videoLayer removeFromSuperlayer];
        [m_videoInlineLayer insertSublayer:m_videoLayer.get() atIndex:0];
        newRootLayer = m_videoInlineLayer.get();
    } else if (m_videoLayer)
        [m_videoLayer removeFromSuperlayer];

    // Find the root of the tree the layer moved TO.
    while (newRootLayer.superlayer)
        newRootLayer = newRootLayer.superlayer;

    if (oldRootLayer && newRootLayer && oldRootLayer != newRootLayer) {
        // Share one fence port between every context that hosts either root so
        // their next commits happen together.
        mach_port_t fencePort = 0;
        for (CAContext *context in [CAContext allContexts]) {
            if (context.layer == oldRootLayer || context.layer == newRootLayer) {
                if (!fencePort)
                    fencePort = [context createFencePort];
                else
                    [context setFencePort:fencePort];
            }
        }
        mach_port_deallocate(mach_task_self(), fencePort);
    }
    [CATransaction commit];

    // Keep the caption representation on top of the fullscreen video.
    if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
    }

    updateDisableExternalPlayback();
}
1149
// Records the fullscreen frame and resizes the video layer to fill it.
// The resize is deliberately animated (implicit actions are re-enabled for it)
// so fullscreen transitions look smooth; actions are disabled again afterwards.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenFrame = frame;
    if (!m_videoFullscreenLayer)
        return;

    if (m_videoLayer) {
        [m_videoLayer setStyle:nil]; // This enables actions, i.e. implicit animations.
        [CATransaction begin];
        [m_videoLayer setFrame:CGRectMake(0, 0, frame.width(), frame.height())];
        [CATransaction commit];
        [m_videoLayer web_disableAllActions];
    }
    // Keep the caption layer aligned with the new video bounds.
    syncTextTrackBounds();
}
1165
// Records the requested gravity and maps it onto the corresponding
// AVLayerVideoGravity value for the video layer.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;
    if (!m_videoLayer)
        return;

    NSString *videoGravity;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        videoGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        videoGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
        // Fall back to the default aspect-preserving gravity, as before.
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    }

    [m_videoLayer setVideoGravity:videoGravity];
}
1184
// Returns the most recently cached timed-metadata array, or nil if none.
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    return m_currentMetaData ? m_currentMetaData.get() : nil;
}
1191
// Returns the player item's access log in its extended textual form, or the
// empty string when there is no player item.
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemAccessLog *accessLog = [m_avPlayerItem.get() accessLog];
    RetainPtr<NSString> extendedLog = adoptNS([[NSString alloc] initWithData:[accessLog extendedLogData] encoding:[accessLog extendedLogDataStringEncoding]]);
    return extendedLog.get();
}
1202
// Returns the player item's error log in its extended textual form, or the
// empty string when there is no player item.
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *errorLog = [m_avPlayerItem.get() errorLog];
    RetainPtr<NSString> extendedLog = adoptNS([[NSString alloc] initWithData:[errorLog extendedLogData] encoding:[errorLog extendedLogDataStringEncoding]]);
    return extendedLog.get();
}
1213 #endif
1214
// Toggles the video layer's visibility inside a transaction with actions
// disabled so the change is not implicitly animated.
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    if (m_videoLayer)
        [m_videoLayer.get() setHidden:isVisible ? NO : YES];
    [CATransaction commit];
}
1223     
// Starts playback by applying the requested rate to the AVPlayer. Callbacks
// are delayed around the rate change so observers fire after state settles.
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPlay(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    // Query requestedRate() exactly once so the cached rate and the rate handed
    // to AVFoundation cannot diverge.
    m_cachedRate = requestedRate();
    [m_avPlayer.get() setRate:m_cachedRate];
    setDelayCallbacks(false);
}
1235
// Pauses playback by setting the AVPlayer's rate to zero; the cached rate is
// updated first so rate() reflects the paused state immediately.
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPause(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = 0;
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
1247
// Returns the media duration, preferring the player item's value (available
// once it is ready to play) over the asset's; indefinite durations (e.g. live
// streams) map to positive infinity.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will fetch the
    // answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();
    
    CMTime cmDuration;
    
    // Check the AVItem if we have one and it has loaded duration, some assets never report duration.
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        cmDuration = [m_avPlayerItem.get() duration];
    else
        cmDuration = [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return toMediaTime(cmDuration);

    if (CMTIME_IS_INDEFINITE(cmDuration))
        return MediaTime::positiveInfiniteTime();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %s", this, toString(MediaTime::invalidTime()).utf8().data());
    return MediaTime::invalidTime();
}
1272
// Returns the player item's current time, clamped so a negative value is never
// reported; zero when metadata or the item is unavailable.
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime playerTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(playerTime))
        return MediaTime::zeroTime();

    return std::max(toMediaTime(playerTime), MediaTime::zeroTime());
}
1284
// Seeks the player item to |time| within the given tolerances. Completion is
// delivered on the main thread via a weak pointer, so a player destroyed
// mid-seek is not called back.
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // A seek invalidates any partially-accumulated metadata cues.
    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = toCMTime(time);
    CMTime cmBefore = toCMTime(negativeTolerance);
    CMTime cmAfter = toCMTime(positiveTolerance);

    auto weakThis = createWeakPtr();

    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            // |finished| is NO when the seek was interrupted by another seek.
            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1311
// Applies |volume| to the AVPlayer; iOS builds ignore the request entirely.
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS)
    UNUSED_PARAM(volume);
#else
    if (!metaDataAvailable())
        return;

    [m_avPlayer.get() setVolume:volume];
#endif
}
1324
// Logs the requested caption visibility. The value is not otherwise consumed
// here — caption display is handled through the text-track machinery.
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - set to %s", this, boolString(closedCaptionsVisible));
}
1334
// Caches and applies a new playback rate; callbacks are delayed so observers
// fire after the rate change has settled.
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    setDelayCallbacks(true);
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1342
// Returns the last rate handed to the AVPlayer, or 0 before metadata arrives.
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    return metaDataAvailable() ? m_cachedRate : 0;
}
1350
// Selects the time-pitch algorithm: spectral keeps pitch constant across rate
// changes; varispeed lets pitch follow the rate.
void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
{
    if (m_avPlayerItem)
        [m_avPlayerItem setAudioTimePitchAlgorithm:(preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];
}
1356
// Converts the cached loadedTimeRanges into a PlatformTimeRanges, skipping
// invalid and zero-length entries; empty when there is no player item.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto bufferedRanges = std::make_unique<PlatformTimeRanges>();
    if (!m_avPlayerItem)
        return bufferedRanges;

    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;
        bufferedRanges->add(toMediaTime(range.start), toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return bufferedRanges;
}
1371
// The minimum seekable time is the earliest start among the valid cached
// seekable ranges; zero when no valid range exists.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    bool foundValidRange = false;
    MediaTime earliestStart = MediaTime::positiveInfiniteTime();
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundValidRange = true;
        earliestStart = std::min(earliestStart, toMediaTime(range.start));
    }
    return foundValidRange ? earliestStart : MediaTime::zeroTime();
}
1391
// The maximum seekable time is the latest end among the valid seekable ranges.
// The cached ranges are refreshed from the player item if they were cleared.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    MediaTime latestEnd;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latestEnd = std::max(latestEnd, toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return latestEnd;
}
1409
// The maximum loaded time is the latest end among the valid cached loaded
// ranges; zero when no ranges have been cached yet.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    MediaTime latestEnd;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latestEnd = std::max(latestEnd, toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return latestEnd;
}
1428
// Sums the total sample data length of every cached track the first time it
// is requested, then answers from the cached value.
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    if (!m_cachedTotalBytes) {
        for (AVPlayerItemTrack *track in m_cachedTracks.get())
            m_cachedTotalBytes += [[track assetTrack] totalSampleDataLength];
    }

    return m_cachedTotalBytes;
}
1442
// Replaces the current asset (used e.g. when an asset is supplied externally).
void MediaPlayerPrivateAVFoundationObjC::setAsset(RetainPtr<id> asset)
{
    m_avAsset = asset;
}
1447
// Reduces the per-key load status of all metadata keys to a single
// AssetStatus: loading/failed/cancelled if any key is in that state, otherwise
// playable or merely loaded depending on the asset's "playable" value.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];
#if !LOG_DISABLED
        if (error)
            LOG(Media, "MediaPlayerPrivateAVFoundation::assetStatus - statusOfValueForKey failed for %s, error = %s", [keyName UTF8String], [[error localizedDescription] UTF8String]);
#endif

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading; // At least one key is not loaded yet.
        
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue])
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1476
// Returns the NSError code reported for the asset's "playable" key, or 0 when
// there is no asset or no error (messaging a nil NSError yields 0).
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    if (!m_avAsset)
        return 0;

    NSError *playableError = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&playableError];
    return [playableError code];
}
1486
// Paints the current video frame into |context|, preferring the video output
// path when a frame is available there and falling back to the image
// generator otherwise.
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext* context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        // NOTE: when video output is compiled in, this is the else branch above.
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    // Record that at least one frame has been drawn (see hasAvailableVideoFrame()).
    m_videoFrameHasDrawn = true;
}
1507
// Best-effort paint entry point: does nothing when painting is disabled, when
// the video is already composited through a layer, or when no context renderer
// (image generator or video output) exists yet.
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext* context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    if (currentRenderingMode() == MediaRenderingToLayer || !hasContextRenderer())
        return;

    paintCurrentFrameInContext(context, rect);
}
1523
// Paints the frame for the current time into |context| using an
// AVAssetImageGenerator snapshot; used when no video-output frame is ready.
// Fix: dropped the redundant trailing `image = 0;` — the RetainPtr releases
// the CGImage when it goes out of scope immediately afterwards.
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext* context, const FloatRect& rect)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(%p)", this);

    RetainPtr<CGImageRef> image = createImageForTimeInRect(currentTime(), rect);
    if (!image)
        return;

    GraphicsContextStateSaver stateSaver(*context);
    // Flip the context vertically so the generated image draws upright.
    context->translate(rect.x(), rect.y() + rect.height());
    context->scale(FloatSize(1.0f, -1.0f));
    context->setImageInterpolationQuality(InterpolationLow);
    IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context->platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
}
1539
// Returns the lazily built, cached set of lower-cased MIME types that
// AVFoundation reports as playable (+[AVURLAsset audiovisualMIMETypes]).
// Fix: restored the function's missing closing brace.
static HashSet<String> mimeTypeCache()
{
    DEPRECATED_DEFINE_STATIC_LOCAL(HashSet<String>, cache, ());
    static bool typeListInitialized = false;

    if (typeListInitialized)
        return cache;
    typeListInitialized = true;

    NSArray *types = [AVURLAsset audiovisualMIMETypes];
    for (NSString *mimeType in types)
        cache.add([mimeType lowercaseString]);

    return cache;
}

// Synchronously generates a CGImage for |time|, sized to fit |rect|, using a
// lazily created AVAssetImageGenerator. The raw image is re-wrapped in the
// device RGB color space so CoreGraphics can draw it without conversion.
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    // 600 is the conventional CMTime timescale for video (divisible by common frame rates).
    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), deviceRGBColorSpaceRef()));

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return image;
}
1577
// MediaEngine entry point: reports the set of MIME types this engine can play.
// Fix: restored the function's missing closing brace.
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String>& supportedTypes)
{
    supportedTypes = mimeTypeCache();
}

1583 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// Returns true if |keySystem| names a key system this engine implements:
// FairPlay Streaming ("com.apple.fps", "com.apple.fps.1_0") or Clear Key.
// Cleanup: return the boolean expression directly instead of branching.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringCase(keySystem, "com.apple.fps")
        || equalIgnoringCase(keySystem, "com.apple.fps.1_0")
        || equalIgnoringCase(keySystem, "org.w3c.clearkey");
}
1590 #endif
1591
// MediaEngine entry point: decides whether this engine can play content
// described by |parameters| (MIME type, codecs, and — when encrypted media is
// enabled — key system). Returns IsSupported only when AVFoundation confirms
// the full extended MIME type; otherwise MayBeSupported or IsNotSupported.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(ENCRYPTED_MEDIA)
    // From: <http://dvcs.w3.org/hg/html-media/raw-file/eme-v0.1b/encrypted-media/encrypted-media.html#dom-canplaytype>
    // In addition to the steps in the current specification, this method must run the following steps:

    // 1. Check whether the Key System is supported with the specified container and codec type(s) by following the steps for the first matching condition from the following list:
    //    If keySystem is null, continue to the next step.
    if (!parameters.keySystem.isNull() && !parameters.keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        if (equalIgnoringCase(parameters.keySystem, "org.w3c.clearkey") && !parameters.type.isEmpty() && !equalIgnoringCase(parameters.type, "application/x-mpegurl"))
            return MediaPlayer::IsNotSupported;

        // If keySystem contains an unrecognized or unsupported Key System, return the empty string
        if (!keySystemIsSupported(parameters.keySystem))
            return MediaPlayer::IsNotSupported;

        // If the Key System specified by keySystem does not support decrypting the container and/or codec specified in the rest of the type string.
        // (AVFoundation does not provide an API which would allow us to determine this, so this is a no-op)
    }

    // 2. Return "maybe" or "probably" as appropriate per the existing specification of canPlayType().
#endif

#if ENABLE(MEDIA_SOURCE)
    // Media Source playback is handled by a dedicated engine, not this one.
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif

    if (!mimeTypeCache().contains(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    // Ask AVFoundation about the full "type; codecs=..." string.
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type, (NSString *)parameters.codecs];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1632
// MediaEngine entry point: returns true when this engine supports |keySystem|
// (optionally constrained by |mimeType|) for encrypted media playback.
// Fix: the Clear-Key/non-HLS branch returned MediaPlayer::IsNotSupported —
// an enum from supportsType()'s domain — from a bool function. It happens to
// have value 0, so behavior is unchanged, but it now returns `false` properly.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (!keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        if (equalIgnoringCase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringCase(mimeType, "application/x-mpegurl"))
            return false;

        if (!keySystemIsSupported(keySystem))
            return false;

        if (!mimeType.isEmpty() && !mimeTypeCache().contains(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1655
1656 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1657 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Satisfies an AVAssetResourceLoadingRequest directly from in-memory key
// data: reports the total length, answers the request's byte range, and
// finishes the load. An out-of-range request is finished with a nil error.
static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
{
    if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
        [infoRequest setContentLength:keyData->byteLength()];
        [infoRequest setByteRangeAccessSupported:YES];
    }

    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
        long long start = [dataRequest currentOffset];
        // Clamp the end of the requested range to the key's actual length.
        long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);

        if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
            [request finishLoadingWithError:nil];
            return;
        }

        ASSERT(start <= std::numeric_limits<int>::max());
        ASSERT(end <= std::numeric_limits<int>::max());
        RefPtr<ArrayBuffer> requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
        RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
        [dataRequest respondWithData:nsData.get()];
    }

    [request finishLoading];
}
1683 #endif
1684
// AVAssetResourceLoader delegate hook. Handles key-request URL schemes
// ("skd" for FairPlay, "clearkey" for Clear Key) by surfacing a keyNeeded
// event to the page and parking the request until a key arrives; any other
// scheme is handed to a WebCoreAVFResourceLoader, which loads it through
// WebCore's network stack. Returns true when loading will continue
// asynchronously, false when AVFoundation should give up on the request.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (scheme == "skd") {
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
        // Little-endian byte-size prefix.
        initDataView->set<uint32_t>(0, keyURISize, true);

        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, 4, keyURI.length());
        // NOTE(review): the divisor is sizeof(unsigned char) == 1, so this is
        // just keyURI.length(); possibly sizeof(UChar) was intended — confirm.
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

#if ENABLE(ENCRYPTED_MEDIA)
        if (!player()->keyNeeded("com.apple.lskd", emptyString(), static_cast<const unsigned char*>(initDataBuffer->data()), initDataBuffer->byteLength()))
#elif ENABLE(ENCRYPTED_MEDIA_V2)
        RefPtr<Uint8Array> initData = Uint8Array::create(initDataBuffer, 0, initDataBuffer->byteLength());
        if (!player()->keyNeeded(initData.get()))
#endif
            return false;

        // Park the request; it is fulfilled later when the key arrives.
        m_keyURIToRequestMap.set(keyURI, avRequest);
        return true;
#if ENABLE(ENCRYPTED_MEDIA_V2)
    } else if (scheme == "clearkey") {
        String keyID = [[[avRequest request] URL] resourceSpecifier];
        StringView keyIDView(keyID);
        CString utf8EncodedKeyId = UTF8Encoding().encode(keyIDView, URLEncodedEntitiesForUnencodables);

        RefPtr<Uint8Array> initData = Uint8Array::create(utf8EncodedKeyId.length());
        initData->setRange((JSC::Uint8Adaptor::Type*)utf8EncodedKeyId.data(), utf8EncodedKeyId.length(), 0);

        // If the key is already cached, answer the request immediately.
        auto keyData = player()->cachedKeyForKeyId(keyID);
        if (keyData) {
            fulfillRequestWithKeyData(avRequest, keyData.get());
            return false;
        }

        if (!player()->keyNeeded(initData.get()))
            return false;

        m_keyURIToRequestMap.set(keyID, avRequest);
        return true;
#endif
    }
#endif

    // Non-key resources are loaded through WebCore's resource loader.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1741
// Wraps the NSURLAuthenticationChallenge in a WebCore AuthenticationChallenge
// (going through CFNetwork types when USE(CFNETWORK)) and asks the
// MediaPlayer client whether loading should wait for a response to it.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForResponseToAuthenticationChallenge(NSURLAuthenticationChallenge* nsChallenge)
{
#if USE(CFNETWORK)
    RefPtr<WebCoreNSURLAuthenticationChallengeClient> client = WebCoreNSURLAuthenticationChallengeClient::create(nsChallenge);
    RetainPtr<CFURLAuthChallengeRef> cfChallenge = adoptCF([nsChallenge _createCFAuthChallenge]);
    AuthenticationChallenge challenge(cfChallenge.get(), client.get());
#else
    AuthenticationChallenge challenge(nsChallenge);
#endif

    return player()->shouldWaitForResponseToAuthenticationChallenge(challenge);
}
1754
// AVAssetResourceLoader delegate hook: AVFoundation cancelled a loading
// request, so stop the WebCoreAVFResourceLoader serving it, if any.
// Cleanup: removed the unused local `scheme` (computed but never read).
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest))
        resourceLoader->stopLoading();
}
1764
// Called when a loading request is finished with; drops our reference to the
// WebCoreAVFResourceLoader that handled it.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
1769 #endif
1770
// Engine availability: both the soft-linked AVFoundation library and the
// CoreMedia framework must be present at runtime.
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    return AVFoundationLibrary() && isCoreMediaFrameworkAvailable();
}
1775
// Intended to snap |timeValue| to the media's timescale; currently an
// identity mapping on both paths (see the FIXME), so callers always get the
// value back unchanged.
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1784
// How long (in seconds) a cached currentTime may be served before re-querying
// AVFoundation: no caching on iOS and OS X 10.10+, five seconds elsewhere.
double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 1010
    return 0;
#else
    return 5;
#endif
}
1793
// Applies the appropriate video gravity (aspect-preserving or stretch-to-fill)
// to the AVPlayerLayer, without implicit animation.
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

#if PLATFORM(IOS)
    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayer)
        return;
#endif

    NSString* videoGravity = AVLayerVideoGravityResize;
    if (shouldMaintainAspectRatio())
        videoGravity = AVLayerVideoGravityResizeAspect;

    // Disable implicit actions so the gravity change is not animated.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    [m_videoLayer.get() setVideoGravity:videoGravity];
    [CATransaction commit];
}
1812
// Returns the first enabled AVAssetTrack in |tracks|, or nil when none is
// enabled (or the array is nil/empty).
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    NSUInteger enabledIndex = [tracks indexOfObjectPassingTest:^(id obj, NSUInteger, BOOL *) {
        return [static_cast<AVAssetTrack*>(obj) isEnabled];
    }];
    return enabledIndex == NSNotFound ? nil : [tracks objectAtIndex:enabledIndex];
}
1822
// Re-inspects the asset/player-item track collections after AVFoundation
// reports a change: caches hasVideo/hasAudio/hasClosedCaptions and the
// presentation size, creates/removes WebCore track objects, and fires
// characteristicsChanged() when the primary audio language changed.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the previous primary audio language so a change can be detected below.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    // Coalesce characteristic-change notifications until this update finishes.
    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif

        // Apply the track's preferredTransform (e.g. rotation) to its natural size.
        presentationSizeDidChange(firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize());
    } else {
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // A selected media-selection option also counts as an audible/visual track.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
#endif
#endif

        // Always says we have video if the AVPlayerLayer is ready for display to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Prefer the legible media-selection group for caption discovery when available.
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    LOG(Media, "MediaPlayerPrivateAVFoundation:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s",
        this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio provider pointed at the current audible track.
    if (m_provider)
        m_provider->setAudioTrack(firstEnabledTrack([m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#endif

    setDelayCharacteristicsChangedNotification(false);
}
1928
1929 #if ENABLE(VIDEO_TRACK)
// Diffs the AVPlayerItemTracks of media type |trackType| in |tracks| against
// the existing WebCore items in |oldItems|: creates items (via |itemFactory|)
// for newly appeared tracks, drops items whose track disappeared, updates
// |oldItems| in place, then notifies |player| of each removal and addition.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Current tracks of the requested media type, as a set for diffing.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Partition old items into those whose track went away and those to keep.
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify only after |oldItems| reflects the new state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
1973
1974 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Media-selection-group overload of the diffing helper above: reconciles
// |oldItems| against the group's current options (after refreshing them),
// considering only options without an associated persistent AVAssetTrack —
// those with a track are handled by the AVPlayerItemTrack overload.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions();

    // Only add selection options which do not have an associated persistent track.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOptionType* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        if (![avOption respondsToSelector:@selector(track)] || ![avOption performSelector:@selector(track)])
            newSelectionOptions.add(option);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Partition old items into those whose option went away and those to keep.
    for (auto& oldItem : oldItems) {
        if (oldItem->mediaSelectionOption() && removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify only after |oldItems| reflects the new state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
2034 #endif
2035
// Reconciles the WebCore audio track objects with the current AVPlayerItem
// tracks (and, where available, the audible media-selection group), then
// refreshes each surviving track's cached properties.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily wrap the item's audible AVMediaSelectionGroup, if it has one.
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group);
    }

    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
#endif

    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateAudioTracks(%p) - audio track count was %lu, is %lu", this, count, m_audioTracks.size());
#endif
}
2061
// Reconciles the WebCore video track objects with the current AVPlayerItem
// tracks (and, where available, the visual media-selection group), then
// refreshes each surviving track's cached properties.
// Fix: the reset loop iterated m_audioTracks — a copy/paste error from
// updateAudioTracks() — leaving video track properties stale; it now iterates
// m_videoTracks.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily wrap the item's visual AVMediaSelectionGroup, if it has one.
    if (!m_visualGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group);
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
#endif

    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateVideoTracks(%p) - video track count was %lu, is %lu", this, count, m_videoTracks.size());
#endif
}
2087
// A platform text track representation is only needed when captions must be
// composited into the iOS fullscreen layer; elsewhere WebCore renders cues.
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
#if PLATFORM(IOS)
    if (m_videoFullscreenLayer)
        return true;
#endif
    return false;
}
2096
// iOS fullscreen only: resizes the caption layer to match the video rect, or
// the whole fullscreen frame when no video layer exists. No-op elsewhere.
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
#if PLATFORM(IOS)
    if (!m_videoFullscreenLayer || !m_textTrackRepresentationLayer)
        return;

    CGRect textFrame = m_videoLayer ? [m_videoLayer videoRect] : CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height());
    [m_textTrackRepresentationLayer setFrame:textFrame];
#endif
}
2107
// iOS only: installs |representation|'s platform layer as the caption layer
// inside the fullscreen layer, replacing any previous one. Passing the layer
// that is already installed just re-syncs its bounds.
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
#if PLATFORM(IOS)
    PlatformLayer* representationLayer = representation ? representation->platformLayer() : nil;
    if (representationLayer == m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        return;
    }

    if (m_textTrackRepresentationLayer)
        [m_textTrackRepresentationLayer removeFromSuperlayer];

    m_textTrackRepresentationLayer = representationLayer;

    // Only attach when fullscreen is active and a layer was provided.
    if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
    }

#else
    UNUSED_PARAM(representation);
#endif
}
2131 #endif // ENABLE(VIDEO_TRACK)
2132
2133 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
// Lazily creates the Web Audio source provider for the current player item,
// pointing it at the first enabled audible asset track.
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    if (!m_provider) {
        m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack([m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
    }

    return m_provider.get();
}
2143 #endif
2144
// Publishes the cached presentation size as the natural size; does nothing
// until an asset exists.
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (m_avAsset)
        setNaturalSize(m_cachedPresentationSize);
}
2152     
// Returns true when the asset's resolved URL shares scheme/host/port with the
// URL that was originally requested, i.e. loading stayed within one origin.
bool MediaPlayerPrivateAVFoundationObjC::hasSingleSecurityOrigin() const 
{
    // The resolved URL can only be trusted once its key has finished loading.
    if (!m_avAsset || [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] != AVKeyValueStatusLoaded)
        return false;

    Ref<SecurityOrigin> resolvedOrigin(SecurityOrigin::create(resolvedURL()));
    Ref<SecurityOrigin> requestedOrigin(SecurityOrigin::createFromString(assetURL()));
    return resolvedOrigin.get().isSameSchemeHostPort(&requestedOrigin.get());
}
2162
2163 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Lazily creates the AVPlayerItemVideoOutput used to pull decoded frames for
// painting and attaches it to the current player item. No-op without a player
// item or when an output already exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // With VideoToolbox available, accept the decoder's native format; the
    // buffer is converted to 32BGRA later in createPixelBuffer().
    NSDictionary* attributes = nil;
#else
    // Request 32BGRA directly so CoreGraphics can consume the buffers.
    NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                nil];
#endif
    m_videoOutput = adoptNS([allocAVPlayerItemVideoOutputInstance() initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p) - returning %p", this, m_videoOutput.get());
}
2186
// Detaches the AVPlayerItemVideoOutput from the player item (if it still
// exists) and releases it.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];
    // Log before clearing so the pointer value is still meaningful.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput(%p) - destroying  %p", this, m_videoOutput.get());

    m_videoOutput = 0;
}
2198
// Pulls the pixel buffer for the player item's current time from the video
// output (creating the output on demand). Returns null when no new frame is
// available. When VideoToolbox is in use, the decoder-native buffer is
// transferred into a 32BGRA buffer for CoreGraphics consumption.
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::createPixelBuffer()
{
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return 0;

    RetainPtr<CVPixelBufferRef> buffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
    if (!buffer)
        return 0;

#if USE(VIDEOTOOLBOX)
    // Create a VTPixelTransferSession, if necessary, as we cannot guarantee timely delivery of ARGB pixels.
    if (!m_pixelTransferSession) {
        VTPixelTransferSessionRef session = 0;
        VTPixelTransferSessionCreate(kCFAllocatorDefault, &session);
        m_pixelTransferSession = adoptCF(session);
    }

    // Convert the native-format buffer into a freshly allocated 32BGRA buffer.
    CVPixelBufferRef outputBuffer;
    CVPixelBufferCreate(kCFAllocatorDefault, CVPixelBufferGetWidth(buffer.get()), CVPixelBufferGetHeight(buffer.get()), kCVPixelFormatType_32BGRA, 0, &outputBuffer);
    VTPixelTransferSessionTransferImage(m_pixelTransferSession.get(), buffer.get(), outputBuffer);
    buffer = adoptCF(outputBuffer);
#endif

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createPixelBuffer(%p) - creating buffer took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return buffer;
}
2239
// Returns true when a video frame can be painted: either a frame was already
// captured, or the (lazily created) video output has a new pixel buffer for
// the item's current time. Always false without a player item.
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;

    // A previously captured frame still counts as available.
    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    CMTime itemTime = [m_avPlayerItem currentTime];
    return [m_videoOutput hasNewPixelBufferForItemTime:itemTime];
}
2253
// CGDataProvider direct-access callback: locks the pixel buffer for reading
// and hands CoreGraphics its base address. Unlocked by the release callback.
static const void* CVPixelBufferGetBytePointerCallback(void* info)
{
    auto pixelBuffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
    return CVPixelBufferGetBaseAddress(pixelBuffer);
}
2260
// CGDataProvider callback: balances the read-only lock taken in
// CVPixelBufferGetBytePointerCallback.
static void CVPixelBufferReleaseBytePointerCallback(void* info, const void*)
{
    auto pixelBuffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
}
2266
// CGDataProvider "release info" callback: balances the CFRetain taken on the
// pixel buffer when the data provider was created.
static void CVPixelBufferReleaseInfoCallback(void* info)
{
    CFRelease(static_cast<CVPixelBufferRef>(info));
}
2272
// Wraps a BGRA pixel buffer in a CGImage without copying the pixel data.
// The image's direct data provider locks/unlocks the buffer on demand and
// releases the retain taken here when the provider is destroyed.
static RetainPtr<CGImageRef> createImageFromPixelBuffer(CVPixelBufferRef pixelBuffer)
{
    // pixelBuffer will be of type kCVPixelFormatType_32BGRA.
    ASSERT(CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_32BGRA);

    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
    size_t byteLength = CVPixelBufferGetDataSize(pixelBuffer);
    // BGRA bytes read as a little-endian 32-bit word are ARGB.
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaFirst;

    CFRetain(pixelBuffer); // Balanced by CVPixelBufferReleaseInfoCallback in providerCallbacks.
    CGDataProviderDirectCallbacks providerCallbacks = { 0, CVPixelBufferGetBytePointerCallback, CVPixelBufferReleaseBytePointerCallback, 0, CVPixelBufferReleaseInfoCallback };
    RetainPtr<CGDataProviderRef> provider = adoptCF(CGDataProviderCreateDirect(pixelBuffer, byteLength, &providerCallbacks));

    return adoptCF(CGImageCreate(width, height, 8, 32, bytesPerRow, deviceRGBColorSpaceRef(), bitmapInfo, provider.get(), NULL, false, kCGRenderingIntentDefault));
}
2290
// Refreshes m_lastImage from the video output's current pixel buffer.
// copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel
// buffer for the requested time has already been retrieved; in that case the
// last valid image (if any) is kept so it can still be displayed.
void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
{
    if (RetainPtr<CVPixelBufferRef> pixelBuffer = createPixelBuffer())
        m_lastImage = createImageFromPixelBuffer(pixelBuffer.get());
}
2301
// Paints the current video frame into |context| via the AVPlayerItemVideoOutput
// path: waits (up to one second) for a frame if the output has none yet,
// applies the enabled video track's preferred transform, draws the frame, and
// drops any image generator previously created as a fallback.
// Fix: removed a stray blank line before the closing brace.
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext* context, const FloatRect& outputRect)
{
    if (m_videoOutput && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();

    updateLastImage();

    // updateLastImage() keeps the previous image when no new pixel buffer is
    // available; bail out only if no frame has ever been captured.
    if (!m_lastImage)
        return;

    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(%p)", this);

    GraphicsContextStateSaver stateSaver(*context);
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
    // Map the output rect back through the inverse so that concatenating the
    // transform below lands the image in the requested screen-space rect.
    FloatRect transformedOutputRect = videoTransform.inverse().mapRect(outputRect);

    context->concatCTM(videoTransform);
    context->drawNativeImage(m_lastImage.get(), imageRect.size(), ColorSpaceDeviceRGB, transformedOutputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();
}
2332
// Returns the platform image (CGImage) for the current time, refreshing the
// cached frame first. May return null if no frame has ever been captured.
PassNativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage.get();
}
2338
// Blocks the current thread until the video output signals that new media
// data is available (via outputMediaDataWillChange()) or one second elapses.
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    // Lazily create the semaphore used to hand the signal over from the
    // video output's notification callback.
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for 1 second.
    long result = dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(0, 1 * NSEC_PER_SEC));

    // dispatch_semaphore_wait() returns non-zero on timeout.
    if (result)
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange(%p) timed out", this);
}
2352
// Video-output notification callback: wakes any thread blocked in
// waitForVideoOutputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutput*)
{
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
2357 #endif
2358
2359 #if ENABLE(ENCRYPTED_MEDIA)
// Legacy EME (v1) generateKeyRequest(): extracts the key URI/ID and the
// application certificate from |initData|, asks AVFoundation for a streaming
// content key request for the pending resource-loading request associated with
// that key URI, and forwards the request data to the page via keyMessage().
// Returns a MediaKeyException status code.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::generateKeyRequest(const String& keySystem, const unsigned char* initDataPtr, unsigned initDataLength)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    RefPtr<Uint8Array> initData = Uint8Array::create(initDataPtr, initDataLength);
    String keyURI;
    String keyID;
    RefPtr<Uint8Array> certificate;
    if (!extractKeyURIKeyIDAndCertificateFromInitData(initData.get(), keyURI, keyID, certificate))
        return MediaPlayer::InvalidPlayerState;

    // A loading request for this key URI must have been deferred earlier;
    // otherwise there is nothing to generate a key request for.
    if (!m_keyURIToRequestMap.contains(keyURI))
        return MediaPlayer::InvalidPlayerState;

    String sessionID = createCanonicalUUIDString();

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_keyURIToRequestMap.get(keyURI);

    RetainPtr<NSData> certificateData = adoptNS([[NSData alloc] initWithBytes:certificate->baseAddress() length:certificate->byteLength()]);
    NSString* assetStr = keyID;
    RetainPtr<NSData> assetID = [NSData dataWithBytes: [assetStr cStringUsingEncoding:NSUTF8StringEncoding] length:[assetStr lengthOfBytesUsingEncoding:NSUTF8StringEncoding]];
    NSError* error = 0;
    RetainPtr<NSData> keyRequest = [avRequest.get() streamingContentKeyRequestDataForApp:certificateData.get() contentIdentifier:assetID.get() options:nil error:&error];

    if (!keyRequest) {
        // Report the platform's underlying error code to the page. Note the
        // pending request stays in m_keyURIToRequestMap in this path.
        NSError* underlyingError = [[error userInfo] objectForKey:NSUnderlyingErrorKey];
        player()->keyError(keySystem, sessionID, MediaPlayerClient::DomainError, [underlyingError code]);
        return MediaPlayer::NoError;
    }

    RefPtr<ArrayBuffer> keyRequestBuffer = ArrayBuffer::create([keyRequest.get() bytes], [keyRequest.get() length]);
    RefPtr<Uint8Array> keyRequestArray = Uint8Array::create(keyRequestBuffer, 0, keyRequestBuffer->byteLength());
    player()->keyMessage(keySystem, sessionID, keyRequestArray->data(), keyRequestArray->byteLength(), URL());

    // Move ownership of the AVAssetResourceLoadingRequest from the keyURIToRequestMap to the sessionIDToRequestMap:
    m_sessionIDToRequestMap.set(sessionID, avRequest);
    m_keyURIToRequestMap.remove(keyURI);

    return MediaPlayer::NoError;
}
2401
// Legacy EME (v1) addKey(): hands the decryption key supplied by the page to
// the AVFoundation loading request associated with |sessionID|, completing
// that request, then notifies the page via keyAdded().
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::addKey(const String& keySystem, const unsigned char* keyPtr, unsigned keyLength, const unsigned char* initDataPtr, unsigned initDataLength, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_sessionIDToRequestMap.get(sessionID);
    RetainPtr<NSData> keyData = adoptNS([[NSData alloc] initWithBytes:keyPtr length:keyLength]);
    // Fulfill and finish the deferred loading request with the key bytes.
    [[avRequest.get() dataRequest] respondWithData:keyData.get()];
    [avRequest.get() finishLoading];
    m_sessionIDToRequestMap.remove(sessionID);

    player()->keyAdded(keySystem, sessionID);

    // The init data is unused in this implementation.
    UNUSED_PARAM(initDataPtr);
    UNUSED_PARAM(initDataLength);
    return MediaPlayer::NoError;
}
2422
// Legacy EME (v1) cancelKeyRequest(): drops the pending loading request for
// |sessionID|, if any.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::cancelKeyRequest(const String& keySystem, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    // HashMap::remove() returns false when the key was absent, which maps to
    // the same InvalidPlayerState result the contains()/remove() pair gave.
    return m_sessionIDToRequestMap.remove(sessionID) ? MediaPlayer::NoError : MediaPlayer::InvalidPlayerState;
}
2434 #endif
2435
2436 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Removes and returns the pending loading request for |keyURI| (null if none).
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
2441
// Called when keys become available from the page's cache: fulfills every
// pending loading request whose key is now cached and forgets those requests.
void MediaPlayerPrivateAVFoundationObjC::keyAdded()
{
    Vector<String> fulfilledKeyIds;

    for (auto& pair : m_keyURIToRequestMap) {
        const String& keyId = pair.key;
        const RetainPtr<AVAssetResourceLoadingRequest>& request = pair.value;

        auto keyData = player()->cachedKeyForKeyId(keyId);
        if (!keyData)
            continue;

        fulfillRequestWithKeyData(request.get(), keyData.get());
        fulfilledKeyIds.append(keyId);
    }

    // Remove after the loop; the map must not be mutated while iterating.
    for (auto& keyId : fulfilledKeyIds)
        m_keyURIToRequestMap.remove(keyId);
}
2461
// Creates a CDM session for |keySystem|, or null when the system is unsupported.
std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem)
{
    return keySystemIsSupported(keySystem) ? std::make_unique<CDMSessionAVFoundationObjC>(this) : nullptr;
}
2469 #endif
2470
2471 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
// Builds legacy closed-caption text-track wrappers on platforms without
// legible output support. Tracks still present keep their existing wrapper;
// wrappers left in removedTextTracks afterwards are reported as removed.
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        // Iterate in reverse so entries can be removed while scanning.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(m_textTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;

        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2506 #endif
2507
2508 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Returns true once the asset property backing the media selection groups has
// finished loading; querying the groups before that is not meaningful.
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    return m_avAsset && [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2519
// Returns the legible media selection group, or nil if the asset's selection
// groups have not finished loading yet.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible] : nil;
}
2527
// Returns the audible media selection group, or nil if the asset's selection
// groups have not finished loading yet.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible] : nil;
}
2535
// Returns the visual media selection group, or nil if the asset's selection
// groups have not finished loading yet.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual] : nil;
}
2543
// Synchronizes m_textTracks with the legible media selection group's playable
// options: existing wrappers for options that are still present are kept,
// new options get new wrappers (out-of-band or in-band), and wrappers left in
// removedTextTracks afterwards are reported as removed.
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        // Iterate in reverse so entries can be removed while scanning.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy closed-caption tracks have no media selection option.
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }

            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2599
// Lazily creates the in-band metadata text track used for timed metadata
// cues and registers it with the MediaPlayer.
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    if (m_metadataTrack)
        return;

    m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
    m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
    player()->addTextTrack(m_metadataTrack);
}
2609
// Forwards legible-output cue data to the currently selected text track.
// No-op when no text track is selected.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    if (!m_currentTextTrack)
        return;

    m_currentTextTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), reinterpret_cast<CFArrayRef>(nativeSamples), time);
}
2617
// Clears all cue state on the currently selected text track, if any.
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::flushCues(%p)", this);

    if (!m_currentTextTrack)
        return;

    m_currentTextTrack->resetCueValues();
}
2627 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2628
// Makes |track| the active text track, enabling the matching platform
// facility (legacy closed captions, out-of-band, or in-band media selection),
// or disables caption display entirely when |track| is null.
// Fix: removed a stray blank line before the closing brace and
// trailing whitespace.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");

    m_currentTextTrack = track;

    if (track) {
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
    }
}
2657
// Returns the language of the primary audio track, caching the result in the
// mutable member m_languageOfPrimaryAudioTrack (this method is const).
// Fix: use the idiomatic WTF::String::isEmpty() instead of comparing against
// emptyString() in the logging branch.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        m_languageOfPrimaryAudioTrack = emptyString();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - %lu audio tracks, returning emptyString()", this, static_cast<unsigned long>(tracks ? [tracks count] : 0));
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

#if !LOG_DISABLED
    if (m_languageOfPrimaryAudioTrack.isEmpty())
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
    else
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
#endif

    return m_languageOfPrimaryAudioTrack;
}
2699
2700 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// Returns whether the player is currently routing playback to an external
// (wireless) target, per AVPlayer's externalPlaybackActive flag.
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    if (!m_avPlayer)
        return false;

    bool isWireless = m_avPlayer.get().externalPlaybackActive;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless(%p) - returning %s", this, boolString(isWireless));

    return isWireless;
}
2711
// Maps the platform's external-device type to MediaPlayer's target-type enum.
// On non-iOS platforms the only possible wireless target is AirPlay.
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

#if PLATFORM(IOS)
    switch (wkExernalDeviceTypeForPlayer(m_avPlayer.get())) {
    case wkExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case wkExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case wkExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;

#else
    return MediaPlayer::TargetTypeAirPlay;
#endif
}
2734
// Returns the display name of the current wireless playback target: the
// output context's device name on Mac, the external device name on iOS.
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();

    String wirelessTargetName;
#if !PLATFORM(IOS)
    if (m_outputContext)
        wirelessTargetName = m_outputContext.get().deviceName;
#else
    wirelessTargetName = wkExernalDeviceDisplayNameForPlayer(m_avPlayer.get());
#endif
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName(%p) - returning %s", this, wirelessTargetName.utf8().data());

    return wirelessTargetName;
}
2751
// Returns whether wireless video playback is disabled, refreshing the cached
// (mutable) m_allowsWirelessVideoPlayback from the player when one exists.
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;

    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled(%p) - returning %s", this, boolString(!m_allowsWirelessVideoPlayback));

    return !m_allowsWirelessVideoPlayback;
}
2762
// Records the wireless-playback preference and pushes it to the AVPlayer,
// deferring observer callbacks while doing so.
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(%p) - %s", this, boolString(disabled));
    // Cache the value so it survives until an AVPlayer exists.
    m_allowsWirelessVideoPlayback = !disabled;
    if (!m_avPlayer)
        return;

    setDelayCallbacks(true);
    [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
    setDelayCallbacks(false);
}
2774
2775 #if !PLATFORM(IOS)
// Adopts the AVOutputContext from the given Mac playback target. If the
// context is missing or has no device name, stop playing to the target.
void MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
{
    MediaPlaybackTargetMac* macTarget = toMediaPlaybackTargetMac(&target.get());

    m_outputContext = macTarget->outputContext();
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(%p) - target = %p", this, m_outputContext.get());

    if (!m_outputContext || !m_outputContext.get().deviceName)
        setShouldPlayToPlaybackTarget(false);
}
2786
// Attaches or detaches the stored output context on the AVPlayer according to
// |shouldPlay|, skipping the update when the player already has that context.
void MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(bool shouldPlay)
{
    m_shouldPlayToPlaybackTarget = shouldPlay;

    if (!m_avPlayer)
        return;

    AVOutputContext *newContext = shouldPlay ? m_outputContext.get() : nil;
    RetainPtr<AVOutputContext> currentContext = m_avPlayer.get().outputContext;
    // Avoid churning the player when the effective context is unchanged.
    if ((!newContext && !currentContext.get()) || [currentContext.get() isEqual:newContext])
        return;

    setDelayCallbacks(true);
    m_avPlayer.get().outputContext = newContext;
    setDelayCallbacks(false);

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(%p) - target = %p, playing to target = %s", this, m_avPlayer.get().outputContext, boolString(shouldPlay));
}
2805
// Returns whether the AVPlayer currently has an output context with a named
// device, i.e. is actively routed to a wireless playback target.
bool MediaPlayerPrivateAVFoundationObjC::isPlayingToWirelessPlaybackTarget()
{
    if (!m_avPlayer)
        return false;

    RetainPtr<AVOutputContext> context = m_avPlayer.get().outputContext;
    return context && context.get().deviceName;
}
2814 #endif // !PLATFORM(IOS)
2815
// iOS: mirror video to an external screen only while in fullscreen
// (i.e. while a fullscreen layer is attached). No-op elsewhere.
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
    if (!m_avPlayer)
        return;

#if PLATFORM(IOS)
    [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:m_videoFullscreenLayer != nil];
#endif
}
2825 #endif
2826
// KVO forwarder: caches the AVPlayerItem status and re-evaluates player state.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;

    updateStates();
}
2833
// Marks an in-flight "likely to keep up" KVO change; the matching DidChange
// callback decrements this before deciding whether to update state.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    ++m_pendingStatusChanges;
}
2838
// Caches the "likely to keep up" flag and updates state once all pending
// status changes announced by the WillChange callbacks have completed.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2847
// Marks an in-flight "buffer empty" KVO change; the matching DidChange
// callback decrements this before deciding whether to update state.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    ++m_pendingStatusChanges;
}
2852
// Caches the "buffer empty" flag and updates state once all pending status
// changes announced by the WillChange callbacks have completed.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2861
// Marks an in-flight "buffer full" KVO change; the matching DidChange
// callback decrements this before deciding whether to update state.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    ++m_pendingStatusChanges;
}
2866
// Caches the "buffer full" flag and updates state once all pending status
// changes announced by the WillChange callbacks have completed.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2875
// KVO forwarder: caches the item's seekable ranges and notifies listeners.
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray> seekableRanges)
{
    m_cachedSeekableRanges = seekableRanges;

    seekableTimeRangesChanged();
    updateStates();
}
2883
// KVO forwarder: caches the item's loaded (buffered) ranges and notifies listeners.
void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray> loadedRanges)
{
    m_cachedLoadedRanges = loadedRanges;

    loadedTimeRangesChanged();
    updateStates();
}
2891
// KVO forwarder for layer readiness. When the first frame becomes ready and
// no video has been detected yet, re-run track detection before updating state.
void MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange(bool isReady)
{
    m_cachedIsReadyForDisplay = isReady;
    if (!hasVideo() && isReady)
        tracksChanged();
    updateStates();
}
2899
// KVO forwarder for a track's "enabled" property; the new value itself is
// unused — the full track set is re-evaluated instead.
void MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange(bool)
{
    tracksChanged();
    updateStates();
}
2905
// Controls whether the player should keep buffering: buffering is suspended
// by detaching the item from the player and resumed by reattaching it.
void MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(bool shouldBuffer)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::shouldBufferData(%p) - %s", this, boolString(shouldBuffer));
    if (m_shouldBufferData == shouldBuffer)
        return;

    m_shouldBufferData = shouldBuffer;

    if (!m_avPlayer)
        return;

    setAVPlayerItem(shouldBuffer ? m_avPlayerItem.get() : nil);
}
2919
2920 #if ENABLE(DATACUE_VALUE)
// Maps an AVFoundation metadata key space to the DataCue "type" string
// exposed to script. Returns emptyAtom for unrecognized key spaces.
static const AtomicString& metadataType(NSString *avMetadataKeySpace)
{
    static NeverDestroyed<const AtomicString> quickTimeUserData("com.apple.quicktime.udta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> isoUserData("org.mp4ra", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> quickTimeMetadata("com.apple.quicktime.mdta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> iTunesMetadata("com.apple.itunes", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> id3Metadata("org.id3", AtomicString::ConstructFromLiteral);

    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeUserData])
        return quickTimeUserData;
    // AVMetadataKeySpaceISOUserData is null-checked before use (it is a
    // soft-linked constant elsewhere in this file — TODO confirm).
    if (AVMetadataKeySpaceISOUserData && [avMetadataKeySpace isEqualToString:AVMetadataKeySpaceISOUserData])
        return isoUserData;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeMetadata])
        return quickTimeMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceiTunes])
        return iTunesMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceID3])
        return id3Metadata;

    return emptyAtom;
}
2942 #endif
2943
// Receives timed metadata from the player item. Caches it and, when
// DATACUE_VALUE is enabled, converts each AVMetadataItem into a data cue on
// the metadata text track.
void MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(RetainPtr<NSArray> metadata, const MediaTime& mediaTime)
{
    m_currentMetaData = metadata && ![metadata isKindOfClass:[NSNull class]] ? metadata : nil;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(%p) - adding %i cues at time %s", this, m_currentMetaData ? static_cast<int>([m_currentMetaData.get() count]) : 0, toString(mediaTime).utf8().data());

#if ENABLE(DATACUE_VALUE)
    if (seeking())
        return;

    if (!m_metadataTrack)
        processMetadataTrack();

    // A nil/NSNull payload closes out any cues still pending an end time.
    if (!metadata || [metadata isKindOfClass:[NSNull class]]) {
        m_metadataTrack->updatePendingCueEndTimes(mediaTime);
        return;
    }

    // Set the duration of all incomplete cues before adding new ones.
    MediaTime earliestStartTime = MediaTime::positiveInfiniteTime();
    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = toMediaTime(item.time);
        if (start < earliestStartTime)
            earliestStartTime = start;
    }
    m_metadataTrack->updatePendingCueEndTimes(earliestStartTime);

    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = toMediaTime(item.time);
        // Items without a valid duration stay open-ended until closed by a
        // later updatePendingCueEndTimes() call.
        MediaTime end = MediaTime::positiveInfiniteTime();
        if (CMTIME_IS_VALID(item.duration))
            end = start + toMediaTime(item.duration);

        AtomicString type = nullAtom;
        if (item.keySpace)
            type = metadataType(item.keySpace);

        m_metadataTrack->addDataCue(start, end, SerializedPlatformRepresentationMac::create(item), type);
    }
#endif
}
2985
// Responds to a change in the player item's track list: re-registers the
// "enabled" KVO observer on the new set of tracks, filtering out streaming
// tracks that are represented by a loaded media selection group instead.
void MediaPlayerPrivateAVFoundationObjC::tracksDidChange(RetainPtr<NSArray> tracks)
{
    // Stop observing tracks from the previous snapshot before replacing it.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];

    NSArray *assetTracks = [m_avAsset tracks];

    m_cachedTracks = [tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id obj, NSUInteger, BOOL*) {
        AVAssetTrack* assetTrack = [obj assetTrack];

        // Tracks belonging to the asset itself are always kept.
        if ([assetTracks containsObject:assetTrack])
            return YES;

        // Track is a streaming track. Omit if it belongs to a valid AVMediaSelectionGroup.
        if (!hasLoadedMediaSelectionGroups())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicAudible] && safeMediaSelectionGroupForAudibleMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicVisual] && safeMediaSelectionGroupForVisualMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible] && safeMediaSelectionGroupForLegibleMedia())
            return NO;

        return YES;
    }]];

    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track addObserver:m_objcObserver.get() forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayerItemTrack];

    // The cached total byte size depends on the track set; reset so it is recomputed.
    m_cachedTotalBytes = 0;

    tracksChanged();
    updateStates();
}
3023
// KVO notification: the AVPlayerItem's "hasEnabledAudio" property changed.
// Caches the new value, then triggers the generic track/state updates.
void MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange(bool hasEnabledAudio)
{
    m_cachedHasEnabledAudio = hasEnabledAudio;

    tracksChanged();
    updateStates();
}
3031
// KVO notification: the AVPlayerItem's "presentationSize" changed. Caches the
// new size, then triggers the generic size/state updates.
void MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange(FloatSize size)
{
    m_cachedPresentationSize = size;

    sizeChanged();
    updateStates();
}
3039
// KVO notification: the AVPlayerItem's "duration" changed. Caches the new
// value and invalidates the base class's cached duration.
void MediaPlayerPrivateAVFoundationObjC::durationDidChange(const MediaTime& duration)
{
    m_cachedDuration = duration;

    invalidateCachedDuration();
}
3046
// KVO notification: the AVPlayer's "rate" changed. Caches the new rate, then
// updates derived state before announcing the rate change.
void MediaPlayerPrivateAVFoundationObjC::rateDidChange(double rate)
{
    m_cachedRate = rate;

    updateStates();
    rateChanged();
}
3054     
3055 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// KVO notification: the AVPlayer's external-playback state changed
// ("externalPlaybackActive" / "allowsExternalPlayback"); forwards to the base class.
void MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange()
{
    playbackTargetIsWirelessChanged();
}
3060 #endif
3061
// KVO notification: caches the AVPlayerItem's "canPlayFastForward" value.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange(bool newValue)
{
    m_cachedCanPlayFastForward = newValue;
}
3066
// KVO notification: caches the AVPlayerItem's "canPlayFastReverse" value.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange(bool newValue)
{
    m_cachedCanPlayFastReverse = newValue;
}
3071
// Returns the URL the asset actually resolved to once AVFoundation has
// finished loading the "resolvedURL" key; until then (or with no asset at
// all), falls back to the base class's notion of the URL.
URL MediaPlayerPrivateAVFoundationObjC::resolvedURL() const
{
    if (m_avAsset && [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] == AVKeyValueStatusLoaded)
        return URL([m_avAsset resolvedURL]);

    return MediaPlayerPrivateAVFoundation::resolvedURL();
}
3079
// AVAsset keys loaded asynchronously before the asset is used. Built once and
// intentionally never released.
NSArray* assetMetadataKeyNames()
{
    static NSArray* keys;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        keys = [[NSArray alloc] initWithObjects:@"duration",
            @"naturalSize",
            @"preferredTransform",
            @"preferredVolume",
            @"preferredRate",
            @"playable",
            @"resolvedURL",
            @"tracks",
            @"availableMediaCharacteristicsWithMediaSelectionOptions",
            nil];
    });
    return keys;
}
3097
// KVO key paths observed on the AVPlayerItem. Built once and intentionally
// never released.
NSArray* itemKVOProperties()
{
    static NSArray* keys;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        keys = [[NSArray alloc] initWithObjects:@"presentationSize",
            @"status",
            @"asset",
            @"tracks",
            @"seekableTimeRanges",
            @"loadedTimeRanges",
            @"playbackLikelyToKeepUp",
            @"playbackBufferFull",
            @"playbackBufferEmpty",
            @"duration",
            @"hasEnabledAudio",
            @"timedMetadata",
            @"canPlayFastForward",
            @"canPlayFastReverse",
            nil];
    });
    return keys;
}
3120
// AVAssetTrack keys loaded asynchronously before a track is used. Built once
// and intentionally never released.
NSArray* assetTrackMetadataKeyNames()
{
    static NSArray* keys;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        keys = [[NSArray alloc] initWithObjects:@"totalSampleDataLength", @"mediaType", @"enabled", @"preferredTransform", @"naturalSize", nil];
    });
    return keys;
}
3126
// KVO key paths observed on the AVPlayer itself (item-level key paths are in
// itemKVOProperties()). Built once and intentionally never released.
NSArray* playerKVOProperties()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"rate",
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
                            @"externalPlaybackActive", @"allowsExternalPlayback",
#endif
                            nil];
    return keys;
}
3136 } // namespace WebCore
3137
@implementation WebCoreAVFMovieObserver

// Observer object bridging AVFoundation KVO and notification callbacks to the
// owning MediaPlayerPrivateAVFoundationObjC. Work is forwarded to the main
// thread (via scheduleMainThreadNotification or callOnMainThread) before the
// callback object is used.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

// Called when the owner goes away: cancels any pending performSelector
// requests and drops the back-pointer so in-flight callbacks become no-ops.
- (void)disconnect
{
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    m_callback = 0;
}

// Asset metadata finished loading; notify the owner on the main thread.
- (void)metadataLoaded
{
    if (!m_callback)
        return;

    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
}

// AVPlayerItemDidPlayToEndTime notification handler.
- (void)didEnd:(NSNotification *)unusedNotification
{
    UNUSED_PARAM(unusedNotification);
    if (!m_callback)
        return;
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
}

// Central KVO entry point. Maps (context, keyPath, prior/after flag) to the
// matching MediaPlayerPrivateAVFoundationObjC member function, then schedules
// that call on the main thread, guarded by a WeakPtr to the owner.
- (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context
{
    UNUSED_PARAM(object);
    id newValue = [change valueForKey:NSKeyValueChangeNewKey];

    if (!m_callback)
        return;

    // True when this is the "prior" (will-change) half of the notification.
    bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];

#if !LOG_DISABLED
    if (willChange)
        LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - will change, keyPath = %s", self, [keyPath UTF8String]);
    else {
        RetainPtr<NSString> valueString = adoptNS([[NSString alloc] initWithFormat:@"%@", newValue]);
        LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - did change, keyPath = %s, value = %s", self, [keyPath UTF8String], [valueString.get() UTF8String]);
    }
#endif

    // The handler to run on the main thread; stays null if the keyPath is not
    // one we care about for this context.
    std::function<void ()> function;

    // A value changed on the AVPlayerLayer.
    if (context == MediaPlayerAVFoundationObservationContextAVPlayerLayer) {
        if ([keyPath isEqualToString:@"readyForDisplay"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange, m_callback, [newValue boolValue]);
    }

    // A value changed on an AVPlayerItemTrack.
    if (context == MediaPlayerAVFoundationObservationContextPlayerItemTrack) {
        if ([keyPath isEqualToString:@"enabled"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange, m_callback, [newValue boolValue]);
    }

    // "Will change" notifications for the AVPlayerItem's buffering state.
    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && willChange) {
        if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange, m_callback);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && !willChange) {
        // A value changed for an AVPlayerItem
        if ([keyPath isEqualToString:@"status"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange, m_callback, [newValue intValue]);
        else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"asset"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::setAsset, m_callback, RetainPtr<id>(newValue));
        else if ([keyPath isEqualToString:@"loadedTimeRanges"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"seekableTimeRanges"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"tracks"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::tracksDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"hasEnabledAudio"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"presentationSize"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange, m_callback, FloatSize([newValue sizeValue]));
        else if ([keyPath isEqualToString:@"duration"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::durationDidChange, m_callback, toMediaTime([newValue CMTimeValue]));
        else if ([keyPath isEqualToString:@"timedMetadata"] && newValue) {
            // Capture the item's current time now (clamped to zero when
            // numeric) so the metadata cues are stamped relative to it.
            MediaTime now;
            CMTime itemTime = [(AVPlayerItemType *)object currentTime];
            if (CMTIME_IS_NUMERIC(itemTime))
                now = std::max(toMediaTime(itemTime), MediaTime::zeroTime());
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::metadataDidArrive, m_callback, RetainPtr<NSArray>(newValue), now);
        } else if ([keyPath isEqualToString:@"canPlayFastReverse"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"canPlayFastForward"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange, m_callback, [newValue boolValue]);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayer && !willChange) {
        // A value changed for an AVPlayer.
        if ([keyPath isEqualToString:@"rate"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::rateDidChange, m_callback, [newValue doubleValue]);
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
        else if ([keyPath isEqualToString:@"externalPlaybackActive"] || [keyPath isEqualToString:@"allowsExternalPlayback"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange, m_callback);
#endif
    }

    if (!function)
        return;

    auto weakThis = m_callback->createWeakPtr();
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis, function]{
        // weakThis and function both refer to the same MediaPlayerPrivateAVFoundationObjC instance. If the WeakPtr has
        // been cleared, the underlying object has been destroyed, and it is unsafe to call function().
        if (!weakThis)
            return;
        function();
    }));
}

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// AVPlayerItemLegibleOutput delegate: retains self and the payload, then hops
// to the main thread to hand the attributed strings/samples to processCue().
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime
{
    UNUSED_PARAM(output);
    UNUSED_PARAM(nativeSamples);

    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFMovieObserver> strongSelf = self;
    RetainPtr<NSArray> strongStrings = strings;
    RetainPtr<NSArray> strongSamples = nativeSamples;
    callOnMainThread([strongSelf, strongStrings, strongSamples, itemTime] {
        // Re-check the callback on the main thread; disconnect may have run.
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (!callback)
            return;
        callback->processCue(strongStrings.get(), strongSamples.get(), toMediaTime(itemTime));
    });
}

// The legible output was flushed; tell the owner to drop pending cues.
- (void)outputSequenceWasFlushed:(id)output
{
    UNUSED_PARAM(output);

    if (!m_callback)
        return;
    
    RetainPtr<WebCoreAVFMovieObserver> strongSelf = self;
    callOnMainThread([strongSelf] {
        if (MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback)
            callback->flushCues();
    });
}
#endif

@end
3306
3307 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
@implementation WebCoreAVFLoaderDelegate

// AVAssetResourceLoader delegate: forwards resource-loading and
// authentication requests from AVFoundation (which may call on its own queue)
// to the owning MediaPlayerPrivateAVFoundationObjC on the main thread.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

// Returning YES tells AVFoundation we will satisfy (or fail) the request
// asynchronously; the decision itself is made on the main thread.
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    RetainPtr<WebCoreAVFLoaderDelegate> strongSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> strongRequest = loadingRequest;
    callOnMainThread([strongSelf, strongRequest] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        // If the owner is gone, or declines the request, complete it with no
        // error so AVFoundation does not wait forever.
        if (!callback) {
            [strongRequest finishLoadingWithError:nil];
            return;
        }

        if (!callback->shouldWaitForLoadingOfResource(strongRequest.get()))
            [strongRequest finishLoadingWithError:nil];
    });

    return YES;
}

// Authentication challenges are handled the same way, except server-trust
// challenges are rejected outright (returning NO lets AVFoundation handle them).
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForResponseToAuthenticationChallenge:(NSURLAuthenticationChallenge *)challenge
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    if ([[[challenge protectionSpace] authenticationMethod] isEqualToString:NSURLAuthenticationMethodServerTrust])
        return NO;

    RetainPtr<WebCoreAVFLoaderDelegate> strongSelf = self;
    RetainPtr<NSURLAuthenticationChallenge> strongChallenge = challenge;
    callOnMainThread([strongSelf, strongChallenge] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        // Cancel the challenge if the owner is gone or declines to respond.
        if (!callback) {
            [[strongChallenge sender] cancelAuthenticationChallenge:strongChallenge.get()];
            return;
        }

        if (!callback->shouldWaitForResponseToAuthenticationChallenge(strongChallenge.get()))
            [[strongChallenge sender] cancelAuthenticationChallenge:strongChallenge.get()];
    });

    return YES;
}

// AVFoundation cancelled an in-flight loading request; notify the owner.
- (void)resourceLoader:(AVAssetResourceLoader *)resourceLoader didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFLoaderDelegate> strongSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> strongRequest = loadingRequest;
    callOnMainThread([strongSelf, strongRequest] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (callback)
            callback->didCancelLoadingRequest(strongRequest.get());
    });
}

// Rebinds (or clears, with null) the owner back-pointer.
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    m_callback = callback;
}
@end
3386 #endif
3387
3388 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
@implementation WebCoreAVFPullDelegate

// Delegate for AVPlayerItemVideoOutput: forwards "media data will change"
// notifications to the owning MediaPlayerPrivateAVFoundationObjC.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

// Rebinds (or clears, with null) the owner back-pointer.
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    m_callback = callback;
}

- (void)outputMediaDataWillChange:(AVPlayerItemVideoOutput *)output
{
    if (!m_callback)
        return;
    m_callback->outputMediaDataWillChange(output);
}

- (void)outputSequenceWasFlushed:(AVPlayerItemVideoOutput *)output
{
    UNUSED_PARAM(output);
    // Intentionally a no-op: a flush requires no work from this delegate.
}
@end
3415 #endif
3416
3417 #endif