[iOS] cleanup AirPlay code
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27
28 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
29 #import "MediaPlayerPrivateAVFoundationObjC.h"
30
31 #import "AVTrackPrivateAVFObjCImpl.h"
32 #import "AudioSourceProviderAVFObjC.h"
33 #import "AudioTrackPrivateAVFObjC.h"
34 #import "AuthenticationChallenge.h"
35 #import "BlockExceptions.h"
36 #import "CDMSessionAVFoundationObjC.h"
37 #import "Cookie.h"
38 #import "CoreMediaSoftLink.h"
39 #import "ExceptionCodePlaceholder.h"
40 #import "FloatConversion.h"
41 #import "FloatConversion.h"
42 #import "GraphicsContext.h"
43 #import "GraphicsContextCG.h"
44 #import "InbandMetadataTextTrackPrivateAVF.h"
45 #import "InbandTextTrackPrivateAVFObjC.h"
46 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
47 #import "OutOfBandTextTrackPrivateAVF.h"
48 #import "URL.h"
49 #import "Logging.h"
50 #import "MediaSelectionGroupAVFObjC.h"
51 #import "MediaTimeAVFoundation.h"
52 #import "PlatformTimeRanges.h"
53 #import "QuartzCoreSPI.h"
54 #import "SecurityOrigin.h"
55 #import "SerializedPlatformRepresentationMac.h"
56 #import "SoftLinking.h"
57 #import "TextEncoding.h"
58 #import "TextTrackRepresentation.h"
59 #import "UUID.h"
60 #import "VideoTrackPrivateAVFObjC.h"
61 #import "WebCoreAVFResourceLoader.h"
62 #import "WebCoreCALayerExtras.h"
63 #import "WebCoreSystemInterface.h"
64 #import <objc/runtime.h>
65 #import <runtime/DataView.h>
66 #import <runtime/JSCInlines.h>
67 #import <runtime/TypedArrayInlines.h>
68 #import <runtime/Uint16Array.h>
69 #import <runtime/Uint32Array.h>
70 #import <runtime/Uint8Array.h>
71 #import <wtf/CurrentTime.h>
72 #import <wtf/Functional.h>
73 #import <wtf/ListHashSet.h>
74 #import <wtf/NeverDestroyed.h>
75 #import <wtf/text/CString.h>
76 #import <wtf/text/StringBuilder.h>
77
78 #if ENABLE(AVF_CAPTIONS)
79 #include "TextTrack.h"
80 #endif
81
82 #import <AVFoundation/AVFoundation.h>
83 #if PLATFORM(IOS)
84 #import "WAKAppKitStubs.h"
85 #import <CoreImage/CoreImage.h>
86 #import <mach/mach_port.h>
87 #else
88 #import <Foundation/NSGeometry.h>
89 #import <QuartzCore/CoreImage.h>
90 #endif
91
92 #if USE(VIDEOTOOLBOX)
93 #import <CoreVideo/CoreVideo.h>
94 #import <VideoToolbox/VideoToolbox.h>
95 #endif
96
97 #if USE(CFNETWORK)
98 #include "CFNSURLConnectionSPI.h"
99 #endif
100
// Specialize std::iterator_traits for HashSet's iterator over media selection options so
// that standard algorithms (which look up value_type via iterator_traits) can be used
// with the set's iterators.
namespace std {
template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
    typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
};
}
106
// Container layer that keeps every sublayer sized to fill its own bounds. Used to host
// the inline AVPlayerLayer so the video layer always tracks the container's size.
@interface WebVideoContainerLayer : CALayer
@end

@implementation WebVideoContainerLayer

- (void)setBounds:(CGRect)bounds
{
    [super setBounds:bounds];
    // Push the new bounds down to each sublayer so hosted video layers fill the container.
    for (CALayer* sublayer in self.sublayers)
        [sublayer setFrame:bounds];
}
@end
119
#if ENABLE(AVF_CAPTIONS)
// Note: This must be defined before our SOFT_LINK macros:
// Declares the out-of-band properties WebKit reads from AVMediaSelectionOption when
// matching media selection options to out-of-band (external) text track sources.
@class AVMediaSelectionOption;
@interface AVMediaSelectionOption (OutOfBandExtensions)
@property (nonatomic, readonly) NSString* outOfBandSource;
@property (nonatomic, readonly) NSString* outOfBandIdentifier;
@end
#endif
128
#if PLATFORM(IOS)
// iOS-only: exposes the dataYouTubeID property on AVPlayerItem to WebKit.
@class AVPlayerItem;
@interface AVPlayerItem (WebKitExtensions)
@property (nonatomic, copy) NSString* dataYouTubeID;
@end
#endif
135
// Exposes AVURLAsset's resolvedURL property to WebKit.
// NOTE(review): presumably the asset URL after any server redirects — confirm against
// the AVFoundation SPI before relying on exact semantics.
@interface AVURLAsset (WebKitExtensions)
@property (nonatomic, readonly) NSURL *resolvedURL;
@end
139
140 typedef AVPlayer AVPlayerType;
141 typedef AVPlayerItem AVPlayerItemType;
142 typedef AVPlayerItemLegibleOutput AVPlayerItemLegibleOutputType;
143 typedef AVPlayerItemVideoOutput AVPlayerItemVideoOutputType;
144 typedef AVMetadataItem AVMetadataItemType;
145 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
146 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
147
148 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
149 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreMedia)
150 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
151 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreVideo)
152
153 #if USE(VIDEOTOOLBOX)
154 SOFT_LINK_FRAMEWORK_OPTIONAL(VideoToolbox)
155 #endif
156
157 SOFT_LINK(CoreVideo, CVPixelBufferGetWidth, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
158 SOFT_LINK(CoreVideo, CVPixelBufferGetHeight, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
159 SOFT_LINK(CoreVideo, CVPixelBufferGetBaseAddress, void*, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
160 SOFT_LINK(CoreVideo, CVPixelBufferGetBytesPerRow, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
161 SOFT_LINK(CoreVideo, CVPixelBufferGetDataSize, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
162 SOFT_LINK(CoreVideo, CVPixelBufferGetPixelFormatType, OSType, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
163 SOFT_LINK(CoreVideo, CVPixelBufferLockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
164 SOFT_LINK(CoreVideo, CVPixelBufferUnlockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
165
166 #if USE(VIDEOTOOLBOX)
167 SOFT_LINK(VideoToolbox, VTPixelTransferSessionCreate, OSStatus, (CFAllocatorRef allocator, VTPixelTransferSessionRef *pixelTransferSessionOut), (allocator, pixelTransferSessionOut))
168 SOFT_LINK(VideoToolbox, VTPixelTransferSessionTransferImage, OSStatus, (VTPixelTransferSessionRef session, CVPixelBufferRef sourceBuffer, CVPixelBufferRef destinationBuffer), (session, sourceBuffer, destinationBuffer))
169 #endif
170
171 SOFT_LINK_CLASS(AVFoundation, AVPlayer)
172 SOFT_LINK_CLASS(AVFoundation, AVPlayerItem)
173 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemVideoOutput)
174 SOFT_LINK_CLASS(AVFoundation, AVPlayerLayer)
175 SOFT_LINK_CLASS(AVFoundation, AVURLAsset)
176 SOFT_LINK_CLASS(AVFoundation, AVAssetImageGenerator)
177 SOFT_LINK_CLASS(AVFoundation, AVMetadataItem)
178
179 SOFT_LINK_CLASS(CoreImage, CIContext)
180 SOFT_LINK_CLASS(CoreImage, CIImage)
181
182 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
183 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
184 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
185 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
186 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
187 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeMetadata, NSString *)
188 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
189 SOFT_LINK_POINTER(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
190 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
191 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
192 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
193 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
194 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
195 SOFT_LINK_POINTER(CoreVideo, kCVPixelBufferPixelFormatTypeKey, NSString *)
196
197 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
198
199 SOFT_LINK_CONSTANT(CoreMedia, kCMTimeZero, CMTime)
200
201 #define AVPlayer getAVPlayerClass()
202 #define AVPlayerItem getAVPlayerItemClass()
203 #define AVPlayerLayer getAVPlayerLayerClass()
204 #define AVURLAsset getAVURLAssetClass()
205 #define AVAssetImageGenerator getAVAssetImageGeneratorClass()
206 #define AVMetadataItem getAVMetadataItemClass()
207
208 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
209 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
210 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
211 #define AVMediaTypeVideo getAVMediaTypeVideo()
212 #define AVMediaTypeAudio getAVMediaTypeAudio()
213 #define AVMediaTypeMetadata getAVMediaTypeMetadata()
214 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
215 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
216 #define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
217 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
218 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
219 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
220 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
221 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
222 #define kCVPixelBufferPixelFormatTypeKey getkCVPixelBufferPixelFormatTypeKey()
223
224 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
225 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
226 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
227
228 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
229 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
230 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
231
232 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
233 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
234 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
235 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
236
237 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
238 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
239 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
240 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
241 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
242 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
243 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
244 #endif
245
246 #if ENABLE(AVF_CAPTIONS)
247 SOFT_LINK_POINTER(AVFoundation, AVURLAssetHTTPCookiesKey, NSString*)
248 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
249 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
250 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
251 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
252 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
253 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
254 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
255 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
256 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
257
258 #define AVURLAssetHTTPCookiesKey getAVURLAssetHTTPCookiesKey()
259 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
260 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
261 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
262 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
263 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
264 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
265 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
266 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
267 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
268 #endif
269
270 #if ENABLE(DATACUE_VALUE)
271 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString*)
272 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMetadataKeySpaceISOUserData, NSString*)
273 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString*)
274 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceiTunes, NSString*)
275 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceID3, NSString*)
276
277 #define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
278 #define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
279 #define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
280 #define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
281 #define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
282 #endif
283
284 #if PLATFORM(IOS)
285 SOFT_LINK_POINTER(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
286
287 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
288 #endif
289
290 #define kCMTimeZero getkCMTimeZero()
291
292 using namespace WebCore;
293
// KVO context values passed when registering observers, used to route
// -observeValueForKeyPath:... notifications to the handler for the object kind that
// changed (player item, item track, player, or player layer).
enum MediaPlayerAVFoundationObservationContext {
    MediaPlayerAVFoundationObservationContextPlayerItem,
    MediaPlayerAVFoundationObservationContextPlayerItemTrack,
    MediaPlayerAVFoundationObservationContextPlayer,
    MediaPlayerAVFoundationObservationContextAVPlayerLayer,
};
300
// Observer object that receives KVO change notifications, end-of-playback
// notifications, and (when legible output is supported) caption cue output, forwarding
// them to the owning MediaPlayerPrivateAVFoundationObjC.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
#else
@interface WebCoreAVFMovieObserver : NSObject
#endif
{
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Back-pointer to the owner; cleared by -disconnect.
    int m_delayCallbacks;
}
-(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
-(void)disconnect;
-(void)metadataLoaded;
-(void)didEnd:(NSNotification *)notification;
// NOTE(review): keyPath is implicitly typed id and context is declared as the
// observation-context enum rather than void* — the implementation (not in view) must
// match this signature; confirm before changing.
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;
#endif
@end
320
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Resource loader delegate that forwards AVAssetResourceLoader requests to the owning
// MediaPlayerPrivateAVFoundationObjC so WebKit can service media loads itself.
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Back-pointer; reset via -setCallback: on teardown.
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
@end
#endif
330
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Pull delegate for AVPlayerItemVideoOutput: notifies the owning player when new video
// frames become available or the output sequence is flushed.
@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    MediaPlayerPrivateAVFoundationObjC *m_callback; // Back-pointer; reset via -setCallback: on teardown.
    dispatch_semaphore_t m_semaphore;
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
@end
#endif
342
343 namespace WebCore {
344
345 static NSArray *assetMetadataKeyNames();
346 static NSArray *itemKVOProperties();
347 static NSArray* assetTrackMetadataKeyNames();
348
349 #if !LOG_DISABLED
// Converts a boolean to a printable C string for log messages.
static const char *boolString(bool val)
{
    if (val)
        return "true";
    return "false";
}
354 #endif
355
356 #if ENABLE(ENCRYPTED_MEDIA_V2)
357 typedef HashMap<MediaPlayer*, MediaPlayerPrivateAVFoundationObjC*> PlayerToPrivateMapType;
358 static PlayerToPrivateMapType& playerToPrivateMap()
359 {
360     DEPRECATED_DEFINE_STATIC_LOCAL(PlayerToPrivateMapType, map, ());
361     return map;
362 };
363 #endif
364
365 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Returns the single serial queue on which all WebCoreAVFLoaderDelegate resource-loading
// callbacks are delivered, creating it on first use.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t queue;
    static dispatch_once_t once;
    dispatch_once(&once, ^{
        queue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return queue;
}
375 #endif
376
377 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Returns the single serial queue on which all WebCoreAVFPullDelegate video-output
// callbacks are delivered, creating it on first use.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_queue_t queue;
    static dispatch_once_t once;
    dispatch_once(&once, ^{
        queue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return queue;
}
387 #endif
388
389 #if USE(CFNETWORK)
390 class WebCoreNSURLAuthenticationChallengeClient : public RefCounted<WebCoreNSURLAuthenticationChallengeClient>, public AuthenticationClient {
391 public:
392     static RefPtr<WebCoreNSURLAuthenticationChallengeClient> create(NSURLAuthenticationChallenge *challenge)
393     {
394         return adoptRef(new WebCoreNSURLAuthenticationChallengeClient(challenge));
395     }
396
397     using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::ref;
398     using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::deref;
399
400 private:
401     WebCoreNSURLAuthenticationChallengeClient(NSURLAuthenticationChallenge *challenge)
402         : m_challenge(challenge)
403     {
404         ASSERT(m_challenge);
405     }
406
407     virtual void refAuthenticationClient() override { ref(); }
408     virtual void derefAuthenticationClient() override { deref(); }
409
410     virtual void receivedCredential(const AuthenticationChallenge&, const Credential& credential)
411     {
412         [[m_challenge sender] useCredential:credential.nsCredential() forAuthenticationChallenge:m_challenge.get()];
413     }
414
415     virtual void receivedRequestToContinueWithoutCredential(const AuthenticationChallenge&)
416     {
417         [[m_challenge sender] continueWithoutCredentialForAuthenticationChallenge:m_challenge.get()];
418     }
419
420     virtual void receivedCancellation(const AuthenticationChallenge&)
421     {
422         [[m_challenge sender] cancelAuthenticationChallenge:m_challenge.get()];
423     }
424
425     virtual void receivedRequestToPerformDefaultHandling(const AuthenticationChallenge&)
426     {
427         if ([[m_challenge sender] respondsToSelector:@selector(performDefaultHandlingForAuthenticationChallenge:)])
428             [[m_challenge sender] performDefaultHandlingForAuthenticationChallenge:m_challenge.get()];
429     }
430
431     virtual void receivedChallengeRejection(const AuthenticationChallenge&)
432     {
433         if ([[m_challenge sender] respondsToSelector:@selector(rejectProtectionSpaceAndContinueWithChallenge:)])
434             [[m_challenge sender] rejectProtectionSpaceAndContinueWithChallenge:m_challenge.get()];
435     }
436
437     RetainPtr<NSURLAuthenticationChallenge> m_challenge;
438 };
439 #endif
440
// Registers this media engine with the MediaPlayer factory, but only when the
// soft-linked AVFoundation framework is actually available at runtime.
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (isAvailable())
        registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player); },
            getSupportedTypes, supportsType, 0, 0, 0, supportsKeySystem);
}
447
// Constructs the private player: creates the KVO/notification observer and, where the
// platform supports them, the video-output pull delegate and resource-loader delegate.
// All cached playback state starts as "nothing loaded yet".
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_weakPtrFactory(this)
#if PLATFORM(IOS)
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
#endif
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
    , m_videoOutputSemaphore(nullptr)
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
#endif
    , m_currentTextTrack(0)
    , m_cachedRate(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_shouldBufferData(true)
    , m_cachedIsReadyForDisplay(false)
    , m_haveBeenAskedToCreateLayer(false)
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    // Register in the global map so CDM session code can find this private player.
    playerToPrivateMap().set(player, this);
#endif
}
484
// Destructor: detaches all delegates so no callbacks reach this object during teardown,
// then destroys the video layer and cancels any in-flight load.
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    playerToPrivateMap().remove(player());
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Clear the loader delegate's back-pointer first, then invalidate outstanding
    // resource loaders so they stop delivering data.
    [m_loaderDelegate.get() setCallback:0];
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];

    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    [m_videoOutputDelegate setCallback:0];
    [m_videoOutput setDelegate:nil queue:0];
    // Release the GCD semaphore if one was created (manual dispatch_release — GCD
    // objects are not ARC-managed in this configuration).
    if (m_videoOutputSemaphore)
        dispatch_release(m_videoOutputSemaphore);
#endif

    if (m_videoLayer)
        destroyVideoLayer();

    // Tears down the remaining AVFoundation objects (asset, player item, player, KVO).
    cancelLoad();
}
509
// Cancels any in-progress load and tears down all AVFoundation state: rendering,
// observers, asset, legible output, player item, player, and cached properties.
// The order matters: observers must be removed before the observed objects are released.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::cancelLoad(%p)", this);
    tearDownVideoRendering();

    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Detach the legible (caption) output from the player item before dropping it.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    // Unregister every KVO observation on the player item before releasing it.
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        
        m_avPlayerItem = nil;
    }
    // Likewise remove the periodic time observer and KVO registrations on the player.
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;
        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"rate"];
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"externalPlaybackActive"];
#endif
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = MediaTime::zeroTime();

    // Each cached track was individually observed for "enabled" changes.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    if (m_provider)
        m_provider->setPlayerItem(nullptr);
#endif

    setIgnoreLoadStateChanges(false);
}
572
// A layer renderer exists once createVideoLayer() has been asked to create one.
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_haveBeenAskedToCreateLayer;
}
577
578 bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
579 {
580 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
581     if (m_videoOutput)
582         return true;
583 #endif
584     return m_imageGenerator;
585 }
586
// Creates the renderer used for painting into a graphics context: a video output where
// the platform supports it, otherwise an AVAssetImageGenerator.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
595
// Creates an AVAssetImageGenerator for the current asset (no-op if there is no asset or
// a generator already exists). Configured for exact-time, transform-corrected frames.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p)", this);

    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    // Zero tolerance: request frames at exactly the requested time, not a nearby keyframe.
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
}
612
// Destroys whichever context renderers exist (video output and/or image generator).
// Both calls are safe no-ops when the corresponding renderer was never created.
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
#endif
    destroyImageGenerator();
}
620
621 void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
622 {
623     if (!m_imageGenerator)
624         return;
625
626     LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator(%p) - destroying  %p", this, m_imageGenerator.get());
627
628     m_imageGenerator = 0;
629 }
630
// Asynchronously creates the AVPlayerLayer-based renderer on the main thread.
// m_haveBeenAskedToCreateLayer is only set inside the main-thread block, so requests
// queued before the first one runs are coalesced by the re-check there.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
        return;

    auto weakThis = createWeakPtr();
    callOnMainThread([this, weakThis] {
        if (!weakThis)
            return;

        // Re-check: state may have changed between scheduling and execution.
        if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
            return;
        m_haveBeenAskedToCreateLayer = true;

        if (!m_videoLayer)
            createAVPlayerLayer();

#if USE(VIDEOTOOLBOX)
        // With VideoToolbox, also keep a video output for context painting/snapshots.
        if (!m_videoOutput)
            createVideoOutput();
#endif

        player()->client().mediaPlayerRenderingModeChanged(player());
    });
}
656
// Creates the AVPlayerLayer, attaches it to the player, registers for readyForDisplay
// KVO, and (on iOS) parents it in either the fullscreen layer or an inline container
// layer that keeps the video layer sized to the container.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
{
    if (!m_avPlayer)
        return;

    m_videoLayer = adoptNS([allocAVPlayerLayerInstance() init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];
    [m_videoLayer setBackgroundColor:cachedCGColor(Color::black, ColorSpaceDeviceRGB)];
#ifndef NDEBUG
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
    // Observe readyForDisplay so hasAvailableVideoFrame() can report layer readiness.
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
    IntSize defaultSize = player()->client().mediaPlayerContentBoxRect().pixelSnappedSize();
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoLayer(%p) - returning %p", this, m_videoLayer.get());

#if PLATFORM(IOS)
    // Disable implicit animations so resizing does not animate.
    [m_videoLayer web_disableAllActions];
    m_videoInlineLayer = adoptNS([[WebVideoContainerLayer alloc] init]);
#ifndef NDEBUG
    [m_videoInlineLayer setName:@"WebVideoContainerLayer"];
#endif
    [m_videoInlineLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
    if (m_videoFullscreenLayer) {
        // Fullscreen: place the video layer directly inside the fullscreen layer.
        [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
        [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
    } else {
        // Inline: the container layer keeps the video layer's frame in sync with its bounds.
        [m_videoInlineLayer insertSublayer:m_videoLayer.get() atIndex:0];
        [m_videoLayer setFrame:m_videoInlineLayer.get().bounds];
    }
#else
    [m_videoLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
#endif
}
692
// Destroys the AVPlayerLayer: removes KVO, detaches the player, and (on iOS) removes
// the layer from its superlayer and drops the inline container layer.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer(%p) - destroying %p", this, m_videoLayer.get());

    // Unregister the readyForDisplay observation added in createAVPlayerLayer().
    [m_videoLayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer.get() setPlayer:nil];

#if PLATFORM(IOS)
    if (m_videoFullscreenLayer)
        [m_videoLayer removeFromSuperlayer];
    m_videoInlineLayer = nil;
#endif

    m_videoLayer = nil;
}
711
712 bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
713 {
714     if (currentRenderingMode() == MediaRenderingToLayer)
715         return m_cachedIsReadyForDisplay;
716
717     return m_videoFrameHasDrawn;
718 }
719
720 #if ENABLE(AVF_CAPTIONS)
// Maps a platform text track kind to the AVFoundation media characteristics that
// describe it as an out-of-band media selection option. Unknown kinds fall back to the
// spoken-dialog characteristic. (arrayWithObjects: is kept deliberately: the soft-linked
// constants could be nil, and array literals would raise on nil.)
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    // FIXME: Match these to correct types:
    switch (kind) {
    case PlatformTextTrack::Caption:
    case PlatformTextTrack::Subtitle:
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];
    case PlatformTextTrack::Description:
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, nil];
    case PlatformTextTrack::Forced:
        return [NSArray arrayWithObjects: AVMediaCharacteristicContainsOnlyForcedSubtitles, nil];
    default:
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];
    }
}
738     
// Public entry point that forwards a text-track mode change to the base class.
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
743     
// Pushes the current mode (hidden/disabled/showing) of every out-of-band text
// track source down into the matching platform track. Tracks are matched by
// comparing the media selection option's out-of-band identifier against the
// source's unique id.
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();

    for (auto& textTrack : m_textTracks) {
        // Only out-of-band tracks are synchronized here.
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;

        RefPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> currentOption = trackPrivate->mediaSelectionOption();

        for (auto& track : outOfBandTrackSources) {
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();

            // Skip sources whose identifier does not match this track's option.
            if (![[currentOption.get() outOfBandIdentifier] isEqual: reinterpret_cast<const NSString*>(uniqueID.get())])
                continue;

            // Translate the platform mode enum; unknown values fall back to Hidden.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;

            textTrack->setMode(mode);
            break;
        }
    }
}
774 #endif
775
776
// Runs the URL through NSURLProtocol's canonicalization machinery so that the
// URL handed to AVFoundation matches the form WebKit's loader would produce.
// Falls back to the plain conversion whenever canonicalization is unavailable.
static NSURL *canonicalURL(const String& url)
{
    NSURL *nsURL = URL(ParsedURLString, url);
    if (url.isEmpty())
        return nsURL;

    RetainPtr<NSURLRequest> originalRequest = adoptNS([[NSURLRequest alloc] initWithURL:nsURL]);
    if (!originalRequest)
        return nsURL;

    if (NSURLRequest *canonicalized = [NSURLProtocol canonicalRequestForRequest:originalRequest.get()])
        return [canonicalized URL];

    return nsURL;
}
793
794 #if PLATFORM(IOS)
// Converts a WebCore Cookie into the NSHTTPCookie AVFoundation expects.
// The expiry value is converted from milliseconds to seconds since the epoch.
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    RetainPtr<NSMutableDictionary> properties = adoptNS([@{
        NSHTTPCookieName: cookie.name,
        NSHTTPCookieValue: cookie.value,
        NSHTTPCookieDomain: cookie.domain,
        NSHTTPCookiePath: cookie.path,
        NSHTTPCookieExpires: [NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)],
    } mutableCopy]);

    // Optional flags are only present when set.
    if (cookie.secure)
        properties.get()[NSHTTPCookieSecure] = @YES;
    if (cookie.session)
        properties.get()[NSHTTPCookieDiscard] = @YES;

    return [NSHTTPCookie cookieWithProperties:properties.get()];
}
812 #endif
813
// Builds the AVURLAsset for |url|, assembling the option dictionary that
// controls reference restrictions, HTTP headers, out-of-band text tracks,
// network interface binding (iOS) and cookies (iOS) before creating the asset.
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
{
    if (m_avAsset)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(%p) - url = %s", this, url.utf8().data());

    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);    

    // Forbid cross references between local and remote media within the asset.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    // Forward the page's Referer and User-Agent so AVFoundation's media loads
    // look like WebKit's own requests.
    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject: [NSNumber numberWithBool: TRUE] forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

    // The bundle identifier key is soft-linked and may be null on older systems.
    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty() && AVURLAssetClientBundleIdentifierKey)
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];

#if ENABLE(AVF_CAPTIONS)
    // Describe each out-of-band text track source so AVFoundation exposes it
    // as an alternate track; identified later by its unique id.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: reinterpret_cast<const NSString*>(label.get()),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: reinterpret_cast<const NSString*>(language.get()),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: reinterpret_cast<const NSString*>(uniqueID.get()),
                AVOutOfBandAlternateTrackSourceKey: reinterpret_cast<const NSString*>(url.get()),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS)
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS)
    // Hand the page's cookies to AVFoundation so media loads carry the same
    // credentials as the page itself.
    Vector<Cookie> cookies;
    if (player()->getRawCookies(URL(ParsedURLString, url), cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([allocAVURLAssetInstance() initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Route custom resource loading (e.g. key requests) through our delegate.
    [[m_avAsset.get() resourceLoader] setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];
#endif

    // A new asset's playability has not been queried yet.
    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
898
// Swaps the player's current item for |item|, always performing the swap on
// the main thread as required for the AVPlayer we drive.
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItemType *item)
{
    if (!m_avPlayer)
        return;

    // Already on the main thread: swap synchronously.
    if (pthread_main_np()) {
        [m_avPlayer replaceCurrentItemWithPlayerItem:item];
        return;
    }

    // Otherwise hop to the main queue, keeping both the player and the item
    // alive until the block runs.
    RetainPtr<AVPlayerType> strongPlayer = m_avPlayer.get();
    RetainPtr<AVPlayerItemType> strongItem = item;
    dispatch_async(dispatch_get_main_queue(), [strongPlayer, strongItem] {
        [strongPlayer replaceCurrentItemWithPlayerItem:strongItem.get()];
    });
}
915
// Lazily creates the AVPlayer, registers KVO observers, configures selection
// and external-playback behavior, and attaches any already-created item/layer.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayer(%p)", this);

    setDelayCallbacks(true);

    m_avPlayer = adoptNS([allocAVPlayerInstance() init]);
    // Observe rate changes so the cached playback state stays in sync.
    [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"rate" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"externalPlaybackActive" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
    updateDisableExternalPlayback();
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // WebKit applies its own media selections; disable AVFoundation's automatic criteria.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    // If the player item was created first, attach it now that the player exists.
    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
948
// Lazily creates the AVPlayerItem for the current asset, wires up the
// did-play-to-end notification, KVO on every cached item property, the
// legible (caption) output, and the Web Audio provider.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem(%p)", this);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([allocAVPlayerItemInstance() initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // "Prior" notifications let the observer capture the old value before a change.
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    // If the player was created first, attach the new item now.
    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

#if PLATFORM(IOS)
    AtomicString value;
    if (player()->doesHaveAttribute("data-youtube-id", &value))
        [m_avPlayerItem.get() setDataYouTubeID: value];
 #endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Deliver WebVTT cues to our delegate slightly ahead of time so they can
    // be displayed exactly when due.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([allocAVPlayerItemLegibleOutputInstance() initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    // WebKit renders captions itself; suppress AVFoundation's rendering.
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio source provider pointed at the live player item.
    if (m_provider)
        m_provider->setPlayerItem(m_avPlayerItem.get());
#endif

    setDelayCallbacks(false);
}
996
// Issues the asynchronous "playable" query exactly once per asset and posts
// AssetPlayabilityKnown on the main thread when the answer arrives.
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::checkPlayability(%p)", this);
    auto weakThis = createWeakPtr();

    NSArray *keys = [NSArray arrayWithObject:@"playable"];
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:keys completionHandler:^{
        // The handler runs on an arbitrary queue; bounce to the main thread
        // and bail if the player has already been destroyed.
        callOnMainThread([weakThis] {
            if (!weakThis)
                return;
            weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
1013
// Kicks off asynchronous loading of the asset's metadata keys and, once the
// track list is available, each track's metadata keys. A dispatch group joins
// all of the loads; metadataLoaded is delivered on the main thread when the
// last one completes.
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);

    dispatch_group_t metadataLoadingGroup = dispatch_group_create();
    // Balance: this enter is left at the bottom of the main-thread lambda below.
    dispatch_group_enter(metadataLoadingGroup);
    auto weakThis = createWeakPtr();
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            // Only fan out per-track loads when the player still exists and the
            // track list finished loading.
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup);
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup);
                    }];
                }
            }
            dispatch_group_leave(metadataLoadingGroup);
        });
    }];

    dispatch_group_notify(metadataLoadingGroup, dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });

        // Balances dispatch_group_create() above (dispatch objects are released
        // manually here).
        dispatch_release(metadataLoadingGroup);
    });
}
1045
// Translates the cached AVPlayerItem status and buffering flags into the
// engine-neutral ItemStatus. The checks are ordered: hard status first, then
// buffering flags from strongest (likely to keep up) to weakest (empty).
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    switch (m_cachedItemStatus) {
    case AVPlayerItemStatusUnknown:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    case AVPlayerItemStatusFailed:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    default:
        break;
    }

    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1064
// Exposes the underlying AVPlayer to callers that need the platform object.
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformMedia(%p)", this);
    PlatformMedia platformMedia;
    platformMedia.type = PlatformMedia::AVFoundationMediaPlayerType;
    platformMedia.media.avfMediaPlayer = m_avPlayer.get();
    return platformMedia;
}
1073
// Returns the layer the compositor should use, or null until a layer has been
// requested. On iOS the inline container layer hosts the video layer.
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    if (!m_haveBeenAskedToCreateLayer)
        return nullptr;
#if PLATFORM(IOS)
    return m_videoInlineLayer.get();
#else
    return m_videoLayer.get();
#endif
}
1082
1083 #if PLATFORM(IOS)
// Moves the video layer between its inline container and the fullscreen
// hosting layer, using CA fence ports so that the two layer trees commit in
// a coordinated fashion when they live in different CAContexts.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer)
{
    if (m_videoFullscreenLayer == videoFullscreenLayer)
        return;

    m_videoFullscreenLayer = videoFullscreenLayer;

    // Reparent without implicit animations.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    
    // Find the root of the layer tree the new fullscreen layer belongs to.
    CALayer *oldRootLayer = videoFullscreenLayer;
    while (oldRootLayer.superlayer)
        oldRootLayer = oldRootLayer.superlayer;

    CALayer *newRootLayer = nil;
    
    if (m_videoFullscreenLayer && m_videoLayer) {
        // Entering fullscreen: host the video layer inside the fullscreen layer.
        [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
        [m_videoLayer removeFromSuperlayer];
        [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
        newRootLayer = m_videoFullscreenLayer.get();
    } else if (m_videoInlineLayer && m_videoLayer) {
        // Leaving fullscreen: return the video layer to the inline container.
        [m_videoLayer setFrame:[m_videoInlineLayer bounds]];
        [m_videoLayer removeFromSuperlayer];
        [m_videoInlineLayer insertSublayer:m_videoLayer.get() atIndex:0];
        newRootLayer = m_videoInlineLayer.get();
    } else if (m_videoLayer)
        [m_videoLayer removeFromSuperlayer];

    while (newRootLayer.superlayer)
        newRootLayer = newRootLayer.superlayer;

    if (oldRootLayer && newRootLayer && oldRootLayer != newRootLayer) {
        // Create one fence port and hand it to every context hosting either
        // root so their commits are synchronized.
        mach_port_t fencePort = 0;
        for (CAContext *context in [CAContext allContexts]) {
            if (context.layer == oldRootLayer || context.layer == newRootLayer) {
                if (!fencePort)
                    fencePort = [context createFencePort];
                else
                    [context setFencePort:fencePort];
            }
        }
        // Drop our reference to the port; the contexts keep it alive as needed.
        mach_port_deallocate(mach_task_self(), fencePort);
    }
    [CATransaction commit];

    // The caption representation layer rides on top of the fullscreen layer.
    if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
    }
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
#endif
}
1138
// Records the fullscreen frame and resizes the video layer to fill it,
// animating the resize (actions are briefly re-enabled for the transaction).
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenFrame = frame;
    if (!m_videoFullscreenLayer)
        return;

    if (m_videoLayer) {
        [m_videoLayer setStyle:nil]; // This enables actions, i.e. implicit animations.
        [CATransaction begin];
        [m_videoLayer setFrame:CGRectMake(0, 0, frame.width(), frame.height())];
        [CATransaction commit];
        // Re-disable implicit animations once the frame change is committed.
        [m_videoLayer web_disableAllActions];
    }
    // Keep the caption layer in sync with the new video bounds.
    syncTextTrackBounds();
}
1154
// Applies the requested scaling behavior to the video layer. Unknown gravity
// values assert and fall back to aspect-fit.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;
    if (!m_videoLayer)
        return;

    NSString *videoGravity;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        videoGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        videoGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    }

    [m_videoLayer setVideoGravity:videoGravity];
}
1173
// Returns the most recently cached timed metadata, or nil when none exists.
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    return m_currentMetaData ? m_currentMetaData.get() : nil;
}
1180
// Serializes the player item's access log, using the log's own preferred
// string encoding. Empty when no item exists yet.
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemAccessLog *accessLog = [m_avPlayerItem.get() accessLog];
    RetainPtr<NSString> text = adoptNS([[NSString alloc] initWithData:[accessLog extendedLogData] encoding:[accessLog extendedLogDataStringEncoding]]);
    return text.get();
}
1191
// Serializes the player item's error log, mirroring accessLog() above.
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *errorLog = [m_avPlayerItem.get() errorLog];
    RetainPtr<NSString> text = adoptNS([[NSString alloc] initWithData:[errorLog extendedLogData] encoding:[errorLog extendedLogDataStringEncoding]]);
    return text.get();
}
1202 #endif
1203
// Shows or hides the video layer without triggering implicit animations.
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    if (m_videoLayer) {
        BOOL shouldHide = !isVisible;
        [m_videoLayer.get() setHidden:shouldHide];
    }
    [CATransaction commit];
}
1212     
// Starts playback by applying the requested rate to the AVPlayer.
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPlay(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    // Query the requested rate once so the cached value and the rate actually
    // applied to the player cannot diverge (the original called requestedRate()
    // twice).
    auto rate = requestedRate();
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1224
// Pauses playback; for AVPlayer, pausing is setting the rate to zero.
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPause(%p)", this);
    if (!metaDataAvailable())
        return;

    // Delay callbacks so observers see a consistent cached rate.
    setDelayCallbacks(true);
    m_cachedRate = 0;
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
1236
// Reports the media duration, preferring the player item's value once it is
// ready to play (some assets never report a duration themselves).
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will fetch the
    // answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();

    bool itemIsUsable = m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay;
    CMTime cmDuration = itemIsUsable ? [m_avPlayerItem.get() duration] : [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return toMediaTime(cmDuration);

    // Indefinite durations (e.g. live streams) map to +infinity.
    if (CMTIME_IS_INDEFINITE(cmDuration))
        return MediaTime::positiveInfiniteTime();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %s", this, toString(MediaTime::invalidTime()).utf8().data());
    return MediaTime::invalidTime();
}
1261
// Returns the current playback position, clamped so it is never negative.
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime playerTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(playerTime))
        return MediaTime::zeroTime();

    return std::max(toMediaTime(playerTime), MediaTime::zeroTime());
}
1273
// Seeks the player item to |time| within the given tolerances and reports
// completion back on the main thread.
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // Partially accumulated metadata cues are no longer valid after a seek.
    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = toCMTime(time);
    CMTime cmBefore = toCMTime(negativeTolerance);
    CMTime cmAfter = toCMTime(positiveTolerance);

    // The completion handler may fire on any queue and after this object is
    // destroyed; go through a weak pointer and the main thread.
    auto weakThis = createWeakPtr();

    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1300
// Applies the requested volume to the AVPlayer. On iOS this is a no-op (the
// parameter is intentionally ignored there).
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS)
    UNUSED_PARAM(volume);
    return;
#else
    if (!metaDataAvailable())
        return;

    [m_avPlayer.get() setVolume:volume];
#endif
}
1313
// Caption visibility is handled elsewhere in this implementation; this
// override only logs the request.
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    // The parameter is only read by the LOG() below, which compiles away when
    // LOG_DISABLED is set; UNUSED_PARAM silences the warning in that build.
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - set to %s", this, boolString(closedCaptionsVisible));
}
1323
// Applies a new playback rate, caching it so rate() can answer without
// querying the player. (Also removes a stray blank line that separated the
// signature from its body.)
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    setDelayCallbacks(true);
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1332
// Reports the most recently requested rate (cached by setRateDouble /
// platformPlay / platformPause) rather than querying the player.
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    return metaDataAvailable() ? m_cachedRate : 0;
}
1340
// Converts the cached loaded-time ranges into PlatformTimeRanges, skipping
// invalid or empty CMTimeRanges.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto ranges = PlatformTimeRanges::create();

    if (!m_avPlayerItem)
        return ranges;

    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;
        ranges->add(toMediaTime(range.start), toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return ranges;
}
1355
// Returns the earliest seekable time across all cached seekable ranges, or
// zero when no valid range exists.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    bool foundValidRange = false;
    MediaTime earliest = MediaTime::positiveInfiniteTime();
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundValidRange = true;
        earliest = std::min(earliest, toMediaTime(range.start));
    }
    return foundValidRange ? earliest : MediaTime::zeroTime();
}
1375
// Returns the latest seekable time across all seekable ranges, fetching the
// ranges from the player item if the cached copy is missing.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    MediaTime latest;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latest = std::max(latest, toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return latest;
}
1393
// Returns the furthest time for which media data has been loaded, based on
// the cached loaded-time ranges.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    MediaTime furthest;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        furthest = std::max(furthest, toMediaTime(CMTimeRangeGetEnd(range)));
    }

    return furthest;
}
1412
// Sums the sample data length of every cached track, memoizing the result in
// m_cachedTotalBytes so subsequent calls are free.
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    if (!m_cachedTotalBytes) {
        for (AVPlayerItemTrack *track in m_cachedTracks.get())
            m_cachedTotalBytes += [[track assetTrack] totalSampleDataLength];
    }

    return m_cachedTotalBytes;
}
1426
// Adopts |asset| as this player's current AVAsset.
void MediaPlayerPrivateAVFoundationObjC::setAsset(id asset)
{
    m_avAsset = asset;
}
1431
// Computes the aggregate asset status: the weakest status among the metadata
// keys we require, and Playable (rather than merely Loaded) only when the
// asset reports itself playable.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];
#if !LOG_DISABLED
        if (error)
            LOG(Media, "MediaPlayerPrivateAVFoundation::assetStatus - statusOfValueForKey failed for %s, error = %s", [keyName UTF8String], [[error localizedDescription] UTF8String]);
#endif

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading;// At least one key is not loaded yet.
        
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue])
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1460
// Returns the error code from the asset's "playable" key load, or 0 when
// there is no asset or no error (messaging nil yields 0).
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    if (!m_avAsset)
        return 0;

    NSError *loadError = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&loadError];
    return [loadError code];
}
1470
// Paints the current video frame into |context|, preferring the video output
// path when a frame is ready and falling back to the image generator.
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext* context, const IntRect& rect)
{
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Note: when this branch is compiled in, the dangling "else" makes the
    // image-generator call the fallback; otherwise it runs unconditionally.
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    m_videoFrameHasDrawn = true;
}
1491
// Best-effort paint entry point: draws only when we are not already rendering
// through a layer and a context renderer already exists.
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext* context, const IntRect& rect)
{
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    // When rendering through a layer the compositor displays the video; a
    // context paint would be redundant.
    if (currentRenderingMode() == MediaRenderingToLayer)
        return;

    // paint() is best effort, so only paint if an image generator or video
    // output is already available.
    if (!hasContextRenderer())
        return;

    paintCurrentFrameInContext(context, rect);
}
1507
// Paints the frame for the current time using the AVAssetImageGenerator path.
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext* context, const IntRect& rect)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(%p)", this);

    RetainPtr<CGImageRef> image = createImageForTimeInRect(currentTime(), rect);
    if (!image)
        return;

    GraphicsContextStateSaver stateSaver(*context);
    // CGImages are bottom-up relative to WebKit's coordinate system; flip the
    // context vertically before drawing.
    context->translate(rect.x(), rect.y() + rect.height());
    context->scale(FloatSize(1.0f, -1.0f));
    context->setImageInterpolationQuality(InterpolationLow);
    IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context->platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
    // The redundant manual "image = 0" was removed: RetainPtr releases the
    // image when it goes out of scope.
}
1523
// Lazily builds (once) and returns the set of MIME types AVFoundation reports
// it can play, lowercased so membership checks are effectively case-insensitive.
static HashSet<String> mimeTypeCache()
{
    DEPRECATED_DEFINE_STATIC_LOCAL(HashSet<String>, cache, ());
    static bool typeListInitialized = false;

    if (typeListInitialized)
        return cache;
    typeListInitialized = true;

    NSArray *types = [AVURLAsset audiovisualMIMETypes];
    for (NSString *mimeType in types)
        cache.add([mimeType lowercaseString]);

    return cache;
}
1539
// Returns a CGImage snapshot of the media at |time|, sized for |rect|, in the
// device RGB color space. Creates the AVAssetImageGenerator on first use.
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const IntRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    // Bound the generated image to the paint rect so the generator does not
    // decode at full native resolution unnecessarily.
    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    // 600 is a common media timescale; see CMTimeMakeWithSeconds.
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    // Re-wrap in the device RGB color space so drawing does not need a conversion.
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), deviceRGBColorSpaceRef()));

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return image;
}
1561
// Reports the set of MIME types this engine can play (from the lazily built cache).
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String>& supportedTypes)
{
    supportedTypes = mimeTypeCache();
}
1567 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// True for the key systems this engine understands: FairPlay Streaming
// (both identifier spellings) and W3C Clear Key.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringCase(keySystem, "com.apple.fps")
        || equalIgnoringCase(keySystem, "com.apple.fps.1_0")
        || equalIgnoringCase(keySystem, "org.w3c.clearkey");
}
1574 #endif
1575
// Answers canPlayType()-style queries for this engine: IsNotSupported,
// MayBeSupported, or IsSupported, per the HTML media and EME specifications.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(ENCRYPTED_MEDIA)
    // From: <http://dvcs.w3.org/hg/html-media/raw-file/eme-v0.1b/encrypted-media/encrypted-media.html#dom-canplaytype>
    // In addition to the steps in the current specification, this method must run the following steps:

    // 1. Check whether the Key System is supported with the specified container and codec type(s) by following the steps for the first matching condition from the following list:
    //    If keySystem is null, continue to the next step.
    if (!parameters.keySystem.isNull() && !parameters.keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        if (equalIgnoringCase(parameters.keySystem, "org.w3c.clearkey") && !parameters.type.isEmpty() && !equalIgnoringCase(parameters.type, "application/x-mpegurl"))
            return MediaPlayer::IsNotSupported;

        // If keySystem contains an unrecognized or unsupported Key System, return the empty string
        if (!keySystemIsSupported(parameters.keySystem))
            return MediaPlayer::IsNotSupported;

        // If the Key System specified by keySystem does not support decrypting the container and/or codec specified in the rest of the type string.
        // (AVFoundation does not provide an API which would allow us to determine this, so this is a no-op)
    }

    // 2. Return "maybe" or "probably" as appropriate per the existing specification of canPlayType().
#endif

#if ENABLE(MEDIA_SOURCE)
    // MSE loads are handled by the dedicated MediaSource engine, not this one.
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif

    if (!mimeTypeCache().contains(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    // With codecs present, ask AVFoundation about the full extended MIME type.
    // (Fixed a stray double semicolon on the return statement.)
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type, (NSString *)parameters.codecs];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1616
// Returns true if this engine can handle the given EME key system, optionally
// constrained to the given container MIME type.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (!keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        // (Was "return MediaPlayer::IsNotSupported;" — an enum constant leaking
        // into a bool-returning function; it should simply be false.)
        if (equalIgnoringCase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringCase(mimeType, "application/x-mpegurl"))
            return false;

        if (!keySystemIsSupported(keySystem))
            return false;

        if (!mimeType.isEmpty() && !mimeTypeCache().contains(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1639
1640 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1641 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Satisfies an AVAssetResourceLoadingRequest directly from in-memory key data,
// honoring the request's byte-range sub-request if present.
static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
{
    // Describe the "resource" (the key blob) so AVFoundation knows its size.
    if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
        [infoRequest setContentLength:keyData->byteLength()];
        [infoRequest setByteRangeAccessSupported:YES];
    }

    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
        // Clamp the requested range's end to the available key data.
        long long start = [dataRequest currentOffset];
        long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);

        // Reject impossible ranges; finishing with a nil error signals failure.
        if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
            [request finishLoadingWithError:nil];
            return;
        }

        // slice() takes ints; the asserts document that key data never exceeds INT_MAX.
        ASSERT(start <= std::numeric_limits<int>::max());
        ASSERT(end <= std::numeric_limits<int>::max());
        RefPtr<ArrayBuffer> requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
        RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
        [dataRequest respondWithData:nsData.get()];
    }

    [request finishLoading];
}
1667 #endif
1668
// AVAssetResourceLoader delegate entry point: returns true if WebCore will
// (asynchronously) supply the data for this request, false to let AVFoundation
// fail it. Key-delivery schemes ("skd", "clearkey") are routed to EME; all
// other URLs get a WebCoreAVFResourceLoader.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (scheme == "skd") {
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
        initDataView->set<uint32_t>(0, keyURISize, true);

        // NOTE(review): dividing the element count by sizeof(unsigned char) (== 1)
        // is a no-op; if the intent was UChar units this would under-copy — confirm.
        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, 4, keyURI.length());
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

#if ENABLE(ENCRYPTED_MEDIA)
        if (!player()->keyNeeded("com.apple.lskd", emptyString(), static_cast<const unsigned char*>(initDataBuffer->data()), initDataBuffer->byteLength()))
#elif ENABLE(ENCRYPTED_MEDIA_V2)
        RefPtr<Uint8Array> initData = Uint8Array::create(initDataBuffer, 0, initDataBuffer->byteLength());
        if (!player()->keyNeeded(initData.get()))
#endif
            return false;

        // Remember the request so the key, once provided, can fulfill it later.
        m_keyURIToRequestMap.set(keyURI, avRequest);
        return true;
#if ENABLE(ENCRYPTED_MEDIA_V2)
    } else if (scheme == "clearkey") {
        String keyID = [[[avRequest request] URL] resourceSpecifier];
        StringView keyIDView(keyID);
        CString utf8EncodedKeyId = UTF8Encoding().encode(keyIDView, URLEncodedEntitiesForUnencodables);

        RefPtr<Uint8Array> initData = Uint8Array::create(utf8EncodedKeyId.length());
        initData->setRange((JSC::Uint8Adaptor::Type*)utf8EncodedKeyId.data(), utf8EncodedKeyId.length(), 0);

        // If the key is already cached, answer the request immediately and
        // tell AVFoundation not to wait.
        auto keyData = player()->cachedKeyForKeyId(keyID);
        if (keyData) {
            fulfillRequestWithKeyData(avRequest, keyData.get());
            return false;
        }

        if (!player()->keyNeeded(initData.get()))
            return false;

        m_keyURIToRequestMap.set(keyID, avRequest);
        return true;
#endif
    }
#endif

    // Everything else is loaded through WebCore's resource loading machinery.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1725
// Wraps the NSURLAuthenticationChallenge in a WebCore AuthenticationChallenge
// (via CFNetwork types when USE(CFNETWORK)) and forwards it to the MediaPlayer
// client, which decides whether AVFoundation should wait for a response.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForResponseToAuthenticationChallenge(NSURLAuthenticationChallenge* nsChallenge)
{
#if USE(CFNETWORK)
    RefPtr<WebCoreNSURLAuthenticationChallengeClient> client = WebCoreNSURLAuthenticationChallengeClient::create(nsChallenge);
    RetainPtr<CFURLAuthChallengeRef> cfChallenge = adoptCF([nsChallenge _createCFAuthChallenge]);
    AuthenticationChallenge challenge(cfChallenge.get(), client.get());
#else
    AuthenticationChallenge challenge(nsChallenge);
#endif

    return player()->shouldWaitForResponseToAuthenticationChallenge(challenge);
}
1738
// AVFoundation cancelled this resource request; stop the in-flight loader, if any.
// (Removed the unused local "scheme" the original computed and never read.)
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest))
        resourceLoader->stopLoading();
}
1748
// Forget a request whose loading has stopped; dropping the map entry releases
// the corresponding WebCoreAVFResourceLoader reference held by this player.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
1753 #endif
1754
// This engine is usable only when both soft-linked frameworks loaded successfully.
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    if (!AVFoundationLibrary())
        return false;
    return CoreMediaLibrary();
}
1759
// Intended to snap a time value to the media's timescale; currently an
// identity mapping on every path (see the FIXME), so both branches return
// the input unchanged.
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1768
// How long (seconds) the generic layer may cache a queried media time before
// re-asking the engine. 0 disables caching on iOS and OS X 10.10+; older OS X
// caches for 5 seconds.
// NOTE(review): the rationale for the platform split is not visible here — confirm.
double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 1010
    return 0;
#else
    return 5;
#endif
}
1777
// Applies the aspect-ratio policy to the AVPlayerLayer's videoGravity.
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

#if PLATFORM(IOS)
    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayer)
        return;
#endif

    NSString *gravity = shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize;

    // Apply the change inside a transaction with implicit animations disabled.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    [m_videoLayer.get() setVideoGravity:gravity];
    [CATransaction commit];
}
1796
// Returns the first track in |tracks| whose -isEnabled is YES, or nil if none is.
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    NSUInteger enabledIndex = [tracks indexOfObjectPassingTest:^(id track, NSUInteger, BOOL *) {
        return [static_cast<AVAssetTrack*>(track) isEnabled];
    }];
    return enabledIndex == NSNotFound ? nil : [tracks objectAtIndex:enabledIndex];
}
1806
// Recomputes cached hasVideo/hasAudio/hasCaptions state whenever the asset's
// or player item's track collection changes, updates the VIDEO_TRACK lists,
// and fires characteristicsChanged() if the primary audio language changed.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Capture the previous primary-audio language so we can detect a change below;
    // clearing the member forces languageOfPrimaryAudioTrack() to recompute.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    // Batch the characteristic-change notifications issued during this update.
    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly fequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif

        // Report the display size with the track's preferred transform applied
        // (e.g. rotation metadata).
        presentationSizeDidChange(firstEnabledVideoTrack ? IntSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : IntSize());
    } else {
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        // Classify each enabled player-item track by its media type.
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // A selected selection-group option also counts as an audio/video track.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
#endif
#endif

        // Always says we have video if the AVPlayerLayer is ready for diaplay to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Prefer the legible media selection group for caption detection when available.
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

    // Without legible-output support, fall back to legacy CEA-608/708 track handling.
#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    LOG(Media, "MediaPlayerPrivateAVFoundation:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s",
        this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

    // Flush any batched characteristic-change notifications.
    setDelayCharacteristicsChangedNotification(false);
}
1907
1908 #if ENABLE(VIDEO_TRACK)
// Diffs the AVPlayerItemTracks of the given media type against the existing
// track-private objects in |oldItems|: creates wrappers for newly appeared
// tracks (via |itemFactory|), drops wrappers whose track disappeared, updates
// |oldItems| in place, and notifies the MediaPlayer of removals then additions.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Current AVPlayerItemTracks of the requested media type.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    // AVPlayerItemTracks we already have wrappers for.
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    // Partition the existing wrappers into surviving and removed, then append
    // freshly created wrappers for the added tracks.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify after the list is consistent: removals first, then additions.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
1952
1953 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Selection-group overload of the diff above: reconciles the group's
// AVMediaSelectionOptions with the existing track-private objects, creating
// and destroying wrappers as options appear/disappear, then notifying the
// MediaPlayer (removals first, then additions).
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions();

    // Only add selection options which do not have an associated persistant track.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOptionType* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        // -track is not available on all OS versions, hence the respondsToSelector check.
        if (![avOption respondsToSelector:@selector(track)] || ![avOption performSelector:@selector(track)])
            newSelectionOptions.add(option);
    }

    // Options we already have wrappers for.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    // Partition existing wrappers into surviving and removed, then append
    // wrappers for newly added options.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (oldItem->mediaSelectionOption() && removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify after the list is consistent: removals first, then additions.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
2013 #endif
2014
// Reconciles m_audioTracks with the player item's audio tracks and (when
// available) the audible media selection group, then refreshes each wrapper's
// cached properties.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create the audible selection-group wrapper on first use.
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group);
    }

    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
#endif

    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateAudioTracks(%p) - audio track count was %lu, is %lu", this, count, m_audioTracks.size());
#endif
}
2040
// Reconciles m_videoTracks with the player item's video tracks and (when
// available) the visual media selection group, then refreshes each wrapper's
// cached properties.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create the visual selection-group wrapper on first use.
    if (!m_visualGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group);
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
#endif

    // Fixed copy/paste bug: the original iterated m_audioTracks here, so video
    // track properties were never refreshed by this method.
    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateVideoTracks(%p) - video track count was %lu, is %lu", this, count, m_videoTracks.size());
#endif
}
2066
// A platform-rendered text track representation is only required while video
// is being presented in a fullscreen layer (iOS only); elsewhere the page
// renders captions itself.
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
#if PLATFORM(IOS)
    if (m_videoFullscreenLayer)
        return true;
#endif
    return false;
}
2075
// Keeps the caption layer's frame aligned with the displayed video rect while
// in fullscreen (iOS only; no-op elsewhere and when not fullscreen).
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
#if PLATFORM(IOS)
    if (!m_videoFullscreenLayer || !m_textTrackRepresentationLayer)
        return;

    // Prefer the video layer's actual video rect (letterboxing-aware); fall
    // back to the whole fullscreen frame when there is no video layer.
    CGRect textFrame = m_videoLayer ? [m_videoLayer videoRect] : CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height());
    [m_textTrackRepresentationLayer setFrame:textFrame];
#endif
}
2086
// Installs (or removes) the caption-rendering layer inside the fullscreen
// video layer (iOS only; the parameter is unused on other platforms).
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
#if PLATFORM(IOS)
    PlatformLayer* representationLayer = representation ? representation->platformLayer() : nil;
    // Same layer as before: just make sure its bounds are current.
    if (representationLayer == m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        return;
    }

    // Detach the previous caption layer before swapping in the new one.
    if (m_textTrackRepresentationLayer)
        [m_textTrackRepresentationLayer removeFromSuperlayer];

    m_textTrackRepresentationLayer = representationLayer;

    // Only attach while fullscreen; otherwise the layer is kept for later.
    if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
    }

#else
    UNUSED_PARAM(representation);
#endif
}
2110 #endif // ENABLE(VIDEO_TRACK)
2111
2112 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
// Lazily creates and returns the Web Audio source provider for this player item.
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    if (!m_provider)
        m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
    return m_provider.get();
}
2119 #endif
2120
// Pushes the cached presentation size to the generic layer as the natural size.
// Without an asset there is no meaningful size to report.
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (m_avAsset)
        setNaturalSize(roundedIntSize(m_cachedPresentationSize));
}
2128     
// True when the asset's resolved URL (after any redirects) shares
// scheme/host/port with the URL we originally requested. Conservatively false
// until AVFoundation has loaded the "resolvedURL" value.
bool MediaPlayerPrivateAVFoundationObjC::hasSingleSecurityOrigin() const 
{
    if (!m_avAsset || [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] != AVKeyValueStatusLoaded)
        return false;
    
    Ref<SecurityOrigin> resolvedOrigin(SecurityOrigin::create(resolvedURL()));
    Ref<SecurityOrigin> requestedOrigin(SecurityOrigin::createFromString(assetURL()));
    return resolvedOrigin.get().isSameSchemeHostPort(&requestedOrigin.get());
}
2138
2139 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Creates the AVPlayerItemVideoOutput used to pull frames for painting and
// attaches it to the player item. No-op without a player item or if an output
// already exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // With VideoToolbox available, accept the decoder's native format; the
    // pixel transfer session in createPixelBuffer() converts to BGRA later.
    NSDictionary* attributes = nil;
#else
    // Without VideoToolbox, request 32BGRA directly from the output.
    NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                nil];
#endif
    m_videoOutput = adoptNS([allocAVPlayerItemVideoOutputInstance() initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    // Delegate callbacks (e.g. mediaDataWillChange) arrive on the shared pull queue.
    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p) - returning %p", this, m_videoOutput.get());
}
2162
// Detaches the video output from the player item (if any) and releases it.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput(%p) - destroying  %p", this, m_videoOutput.get());
    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];

    m_videoOutput = 0;
}
2174
// Pulls the pixel buffer for the current item time from the video output,
// converting to 32BGRA via VideoToolbox when that path is enabled. Returns
// null when no new frame is available.
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::createPixelBuffer()
{
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return 0;

    RetainPtr<CVPixelBufferRef> buffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
    if (!buffer)
        return 0;

#if USE(VIDEOTOOLBOX)
    // Create a VTPixelTransferSession, if necessary, as we cannot guarantee timely delivery of ARGB pixels.
    if (!m_pixelTransferSession) {
        VTPixelTransferSessionRef session = 0;
        VTPixelTransferSessionCreate(kCFAllocatorDefault, &session);
        m_pixelTransferSession = adoptCF(session);
    }

    // Check the CVPixelBufferCreate result: the original passed a potentially
    // uninitialized buffer to the transfer session on allocation failure.
    CVPixelBufferRef outputBuffer = nullptr;
    if (CVPixelBufferCreate(kCFAllocatorDefault, CVPixelBufferGetWidth(buffer.get()), CVPixelBufferGetHeight(buffer.get()), kCVPixelFormatType_32BGRA, 0, &outputBuffer) != kCVReturnSuccess || !outputBuffer)
        return 0;
    VTPixelTransferSessionTransferImage(m_pixelTransferSession.get(), buffer.get(), outputBuffer);
    buffer = adoptCF(outputBuffer);
#endif

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createPixelBuffer(%p) - creating buffer took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return buffer;
}
2215
// Whether a frame can be painted via the video-output path right now: either
// a previously captured image exists, or the output has a new pixel buffer.
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;
    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    CMTime itemTime = [m_avPlayerItem currentTime];
    return [m_videoOutput hasNewPixelBufferForItemTime:itemTime];
}
2229
// CGDataProvider callback: lock the pixel buffer for read-only access and hand
// back its base address. Balanced by CVPixelBufferReleaseBytePointerCallback.
static const void* CVPixelBufferGetBytePointerCallback(void* info)
{
    CVPixelBufferRef buffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferLockBaseAddress(buffer, kCVPixelBufferLock_ReadOnly);
    return CVPixelBufferGetBaseAddress(buffer);
}
2236
// CGDataProvider callback: unlock the base address locked by the matching
// CVPixelBufferGetBytePointerCallback call.
static void CVPixelBufferReleaseBytePointerCallback(void* info, const void*)
{
    CVPixelBufferRef buffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferUnlockBaseAddress(buffer, kCVPixelBufferLock_ReadOnly);
}
2242
// CGDataProvider teardown callback: releases the CFRetain taken on the pixel
// buffer when the provider was created (see createImageFromPixelBuffer).
static void CVPixelBufferReleaseInfoCallback(void* info)
{
    CFRelease(static_cast<CVPixelBufferRef>(info));
}
2248
// Wraps a 32BGRA pixel buffer in a CGImage without copying the pixels: a
// direct-access CGDataProvider reads straight out of the (locked) buffer.
static RetainPtr<CGImageRef> createImageFromPixelBuffer(CVPixelBufferRef pixelBuffer)
{
    // pixelBuffer will be of type kCVPixelFormatType_32BGRA.
    ASSERT(CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_32BGRA);

    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
    size_t byteLength = CVPixelBufferGetDataSize(pixelBuffer);
    // BGRA little-endian with alpha first matches the buffer's 32BGRA layout.
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaFirst;

    CFRetain(pixelBuffer); // Balanced by CVPixelBufferReleaseInfoCallback in providerCallbacks.
    CGDataProviderDirectCallbacks providerCallbacks = { 0, CVPixelBufferGetBytePointerCallback, CVPixelBufferReleaseBytePointerCallback, 0, CVPixelBufferReleaseInfoCallback };
    RetainPtr<CGDataProviderRef> provider = adoptCF(CGDataProviderCreateDirect(pixelBuffer, byteLength, &providerCallbacks));

    return adoptCF(CGImageCreate(width, height, 8, 32, bytesPerRow, deviceRGBColorSpaceRef(), bitmapInfo, provider.get(), NULL, false, kCGRenderingIntentDefault));
}
2266
// Capture the most recent video frame from the item's video output into m_lastImage,
// keeping the previously captured image when no new pixel buffer is available.
void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
{
    // copyPixelBufferForItemTime:itemTimeForDisplay: (via createPixelBuffer()) returns
    // nil when the buffer for the requested time was already handed out; in that case
    // the last valid image (if any) should continue to be displayed.
    if (RetainPtr<CVPixelBufferRef> pixelBuffer = createPixelBuffer())
        m_lastImage = createImageFromPixelBuffer(pixelBuffer.get());
}
2277
// Paints the current frame into |context| via the AVPlayerItemVideoOutput path,
// applying the video track's preferred transform (e.g. rotation). May block up to
// one second waiting for the first frame.
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext* context, const IntRect& outputRect)
{
    // Only wait when a video output exists but has produced nothing yet; otherwise
    // painting the cached m_lastImage (or nothing) is the best we can do.
    if (m_videoOutput && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();

    updateLastImage();

    if (!m_lastImage)
        return;

    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(%p)", this);

    GraphicsContextStateSaver stateSaver(*context);
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
    // Map the destination rect through the inverse transform so that after
    // concatCTM(videoTransform) the image lands in the requested outputRect.
    FloatRect transformedOutputRect = videoTransform.inverse().mapRect(outputRect);

    context->concatCTM(videoTransform);
    context->drawNativeImage(m_lastImage.get(), imageRect.size(), ColorSpaceDeviceRGB, transformedOutputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();

}
2308
// Returns the CGImage for the current time, refreshing m_lastImage first. May return
// null if no frame has ever been captured.
PassNativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage.get();
}
2314
// Blocks the calling thread until the video output reports that new media data is
// available (outputMediaDataWillChange() signals the semaphore) or until a one
// second timeout elapses. Used so painting does not race ahead of the first frame.
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    // No-op if m_videoOutput is nil, in which case the wait below simply times out.
    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for at most 1 second.
    long result = dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(DISPATCH_TIME_NOW, 1 * NSEC_PER_SEC));

    if (result)
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange(%p) timed out", this);
}
2328
// AVPlayerItemVideoOutput delegate callback; wakes the thread blocked in
// waitForVideoOutputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutput*)
{
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
2333 #endif
2334
2335 #if ENABLE(ENCRYPTED_MEDIA)
// EME v1 entry point: turns the site-provided initData (key URI, key ID, and app
// certificate) into a streaming content key request via the pending
// AVAssetResourceLoadingRequest, and reports it to the page with keyMessage().
// Returns KeySystemNotSupported / InvalidPlayerState on precondition failure, and
// NoError otherwise (key-request failures are reported asynchronously via keyError()).
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::generateKeyRequest(const String& keySystem, const unsigned char* initDataPtr, unsigned initDataLength)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    RefPtr<Uint8Array> initData = Uint8Array::create(initDataPtr, initDataLength);
    String keyURI;
    String keyID;
    RefPtr<Uint8Array> certificate;
    if (!extractKeyURIKeyIDAndCertificateFromInitData(initData.get(), keyURI, keyID, certificate))
        return MediaPlayer::InvalidPlayerState;

    // The resource loader must have previously parked a loading request for this key URI.
    if (!m_keyURIToRequestMap.contains(keyURI))
        return MediaPlayer::InvalidPlayerState;

    String sessionID = createCanonicalUUIDString();

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_keyURIToRequestMap.get(keyURI);

    RetainPtr<NSData> certificateData = adoptNS([[NSData alloc] initWithBytes:certificate->baseAddress() length:certificate->byteLength()]);
    // The asset identifier is the UTF-8 bytes of the key ID string.
    NSString* assetStr = keyID;
    RetainPtr<NSData> assetID = [NSData dataWithBytes: [assetStr cStringUsingEncoding:NSUTF8StringEncoding] length:[assetStr lengthOfBytesUsingEncoding:NSUTF8StringEncoding]];
    NSError* error = 0;
    RetainPtr<NSData> keyRequest = [avRequest.get() streamingContentKeyRequestDataForApp:certificateData.get() contentIdentifier:assetID.get() options:nil error:&error];

    if (!keyRequest) {
        NSError* underlyingError = [[error userInfo] objectForKey:NSUnderlyingErrorKey];
        player()->keyError(keySystem, sessionID, MediaPlayerClient::DomainError, [underlyingError code]);
        return MediaPlayer::NoError;
    }

    RefPtr<ArrayBuffer> keyRequestBuffer = ArrayBuffer::create([keyRequest.get() bytes], [keyRequest.get() length]);
    RefPtr<Uint8Array> keyRequestArray = Uint8Array::create(keyRequestBuffer, 0, keyRequestBuffer->byteLength());
    player()->keyMessage(keySystem, sessionID, keyRequestArray->data(), keyRequestArray->byteLength(), URL());

    // Move ownership of the AVAssetResourceLoadingRequest from the keyIDToRequestMap to the sessionIDToRequestMap:
    m_sessionIDToRequestMap.set(sessionID, avRequest);
    m_keyURIToRequestMap.remove(keyURI);

    return MediaPlayer::NoError;
}
2377
// EME v1: delivers the license (key) bytes for |sessionID| to the parked
// AVAssetResourceLoadingRequest, completing the load, then notifies the page.
// initDataPtr/initDataLength are unused by this key system.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::addKey(const String& keySystem, const unsigned char* keyPtr, unsigned keyLength, const unsigned char* initDataPtr, unsigned initDataLength, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_sessionIDToRequestMap.get(sessionID);
    RetainPtr<NSData> keyData = adoptNS([[NSData alloc] initWithBytes:keyPtr length:keyLength]);
    [[avRequest.get() dataRequest] respondWithData:keyData.get()];
    [avRequest.get() finishLoading];
    m_sessionIDToRequestMap.remove(sessionID);

    player()->keyAdded(keySystem, sessionID);

    UNUSED_PARAM(initDataPtr);
    UNUSED_PARAM(initDataLength);
    return MediaPlayer::NoError;
}
2398
// EME v1: abandons the key session, dropping the parked loading request. Note the
// request is not explicitly finished/cancelled here; it is simply released.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::cancelKeyRequest(const String& keySystem, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    m_sessionIDToRequestMap.remove(sessionID);
    return MediaPlayer::NoError;
}
2410 #endif
2411
2412 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Removes and returns the pending loading request for |keyURI|; returns a null
// RetainPtr if none is pending.
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
2417
// Called when a new key lands in the player's key cache: satisfy every pending key
// request whose key is now cached, then drop those requests from the pending map.
// Removal is deferred to a second pass so the map is not mutated while iterating.
void MediaPlayerPrivateAVFoundationObjC::keyAdded()
{
    Vector<String> fulfilledKeyIds;

    for (auto& entry : m_keyURIToRequestMap) {
        auto keyData = player()->cachedKeyForKeyId(entry.key);
        if (!keyData)
            continue;

        fulfillRequestWithKeyData(entry.value.get(), keyData.get());
        fulfilledKeyIds.append(entry.key);
    }

    for (auto& keyId : fulfilledKeyIds)
        m_keyURIToRequestMap.remove(keyId);
}
2437
// EME v2: creates a CDM session bound to this player, or null when the key system is
// not one we support.
std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem)
{
    if (!keySystemIsSupported(keySystem))
        return nullptr;

    return std::make_unique<CDMSessionAVFoundationObjC>(this);
}
2445 #endif
2446
2447 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
// Reconciles m_textTracks with the closed-caption tracks currently exposed by the
// player item (legacy path used when legible output is unavailable). Tracks that can
// no longer be matched are reported as removed; unmatched player item tracks become
// new InbandTextTrackPrivateLegacyAVFObjC instances.
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    // Start by assuming every existing track was removed; each one we can still match
    // against a current AVPlayerItemTrack is taken back off this list.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            // Index into removedTextTracks, not m_textTracks: once entries have been
            // removed from removedTextTracks (or appended to m_textTracks below) the
            // two vectors' indices no longer line up, so indexing m_textTracks here
            // could cast the wrong track. Matches processMediaSelectionOptions().
            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(removedTextTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;

        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2482 #endif
2483
2484 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// True once the asset's "availableMediaCharacteristicsWithMediaSelectionOptions" key
// has finished loading, i.e. the media selection group accessors below are safe to use.
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    return m_avAsset && [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2495
// "Safe" media selection group accessors: each returns nil until the asset has
// finished loading its selection-option metadata, so callers never query an
// AVAsset whose groups are not yet available.

AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    if (!hasLoadedMediaSelectionGroups())
        return nil;

    return [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible];
}

AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    if (!hasLoadedMediaSelectionGroups())
        return nil;

    return [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
}

AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    if (!hasLoadedMediaSelectionGroups())
        return nil;

    return [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual];
}
2519
// Reconciles m_textTracks against the legible media selection options currently
// offered by the asset: options already represented keep their track, options that
// disappeared are reported removed, and new options get fresh track objects.
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Assume every current track was removed, then rescue each one we can match to a
    // still-present selection option.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy closed-caption tracks are not represented by selection options.
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }

            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2575
// Lazily creates the single in-band metadata text track (HLS timed metadata) and
// registers it with the player. Subsequent calls are no-ops.
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    if (m_metadataTrack)
        return;

    m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
    m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
    player()->addTextTrack(m_metadataTrack);
}
2585
// Forwards newly delivered legible-output cue data to the currently selected text
// track; dropped when no text track is selected.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    if (!m_currentTextTrack)
        return;

    m_currentTextTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), reinterpret_cast<CFArrayRef>(nativeSamples), time);
}

// Discards any partially accumulated cues on the current text track (e.g. after a seek).
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::flushCues(%p)", this);

    if (!m_currentTextTrack)
        return;

    m_currentTextTrack->resetCueValues();
}
2603 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2604
// Switches the active text track, routing the selection to the appropriate backend:
// legacy closed captions toggle the player's CC display, while media-selection-based
// tracks (in-band or out-of-band) select the matching option in the legible group.
// Passing null deselects everything.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");

    m_currentTextTrack = track;

    if (track) {
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
    }

}
2633
// Returns (and caches in m_languageOfPrimaryAudioTrack) the BCP-47-ish language of the
// primary audio track: the currently selected audible option when AVFoundation exposes
// an audible selection group, otherwise the language of the asset's single audio track.
// Returns the empty string when it cannot be determined.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        m_languageOfPrimaryAudioTrack = emptyString();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - %lu audio tracks, returning emptyString()", this, static_cast<unsigned long>(tracks ? [tracks count] : 0));
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

#if !LOG_DISABLED
    if (m_languageOfPrimaryAudioTrack == emptyString())
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
    else
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
#endif

    return m_languageOfPrimaryAudioTrack;
}
2675
2676 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// True when AVPlayer reports external (e.g. AirPlay) playback is currently active.
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    if (!m_avPlayer)
        return false;

    bool wirelessTarget = [m_avPlayer.get() isExternalPlaybackActive];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless(%p) - returning %s", this, boolString(wirelessTarget));
    return wirelessTarget;
}
2686
// Maps the WebKitSystemInterface external-device type of the current AVPlayer to the
// MediaPlayer target-type enum; TargetTypeNone when there is no player.
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

    switch (wkExernalDeviceTypeForPlayer(m_avPlayer.get())) {
    case wkExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case wkExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case wkExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    // Exhaustive switch above; reaching here means a new, unhandled device type.
    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;
}
2704
// Returns the display name of the current external playback device (e.g. an Apple TV),
// or the empty string before the player exists.
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();
    
    String wirelessTargetName = wkExernalDeviceDisplayNameForPlayer(m_avPlayer.get());
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName(%p) - returning %s", this, wirelessTargetName.utf8().data());

    return wirelessTargetName;
}

// Whether wireless video playback is disabled. The cached m_allowsWirelessVideoPlayback
// is refreshed from AVPlayer when one exists, and answers the query otherwise.
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;
    
    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled(%p) - returning %s", this, boolString(!m_allowsWirelessVideoPlayback));

    return !m_allowsWirelessVideoPlayback;
}
2726
// Records the desired wireless-video setting; the cached flag is kept even before the
// AVPlayer exists so it can be applied when the player is created.
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(%p) - %s", this, boolString(disabled));
    m_allowsWirelessVideoPlayback = !disabled;
    if (!m_avPlayer)
        return;
    
    [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
}

// Keeps external-screen playback in sync with fullscreen state: only use external
// playback while we have a fullscreen video layer.
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
    if (!m_avPlayer)
        return;

    [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:m_videoFullscreenLayer != nil];
}
2744 #endif
2745
// KVO-driven state callbacks. Each *WillChange/*DidChange pair brackets a KVO change
// of an AVPlayerItem property: WillChange bumps m_pendingStatusChanges and DidChange
// decrements it, so updateStates() runs only once when a batch of changes settles.

void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;

    updateStates();
}

void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    m_pendingStatusChanges++;
}

void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}

void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    m_pendingStatusChanges++;
}

void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}

void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    m_pendingStatusChanges++;
}

void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2794
// More KVO-driven callbacks: cache the new value, then notify the shared
// MediaPlayerPrivateAVFoundation machinery so derived state is recomputed.

void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray> seekableRanges)
{
    m_cachedSeekableRanges = seekableRanges;

    seekableTimeRangesChanged();
    updateStates();
}

void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray> loadedRanges)
{
    m_cachedLoadedRanges = loadedRanges;

    loadedTimeRangesChanged();
    updateStates();
}

void MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange(bool isReady)
{
    m_cachedIsReadyForDisplay = isReady;
    // A frame becoming displayable before any video track was seen implies the track
    // set is stale; re-derive it.
    if (!hasVideo() && isReady)
        tracksChanged();
    updateStates();
}

void MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange(bool)
{
    tracksChanged();
    updateStates();
}
2824
// Starts or stops data buffering by attaching/detaching the player item from the
// player: AVFoundation only buffers items attached to an AVPlayer. The cached flag is
// kept even before the player exists so the choice survives player creation.
void MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(bool shouldBuffer)
{
    // Log message fixed to match this method's name (was "shouldBufferData").
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(%p) - %s", this, boolString(shouldBuffer));
    if (m_shouldBufferData == shouldBuffer)
        return;

    m_shouldBufferData = shouldBuffer;

    if (!m_avPlayer)
        return;

    setAVPlayerItem(shouldBuffer ? m_avPlayerItem.get() : nil);
}
2838
2839 #if ENABLE(DATACUE_VALUE)
// Maps an AVMetadataKeySpace* constant to the DataCue type string WebCore exposes to
// script; emptyAtom for unknown key spaces. The table entries are NeverDestroyed so
// lookup never allocates after first use.
static const AtomicString& metadataType(NSString *avMetadataKeySpace)
{
    static NeverDestroyed<const AtomicString> quickTimeUserData("com.apple.quicktime.udta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> isoUserData("org.mp4ra", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> quickTimeMetadata("com.apple.quicktime.mdta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> iTunesMetadata("com.apple.itunes", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> id3Metadata("org.id3", AtomicString::ConstructFromLiteral);

    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeUserData])
        return quickTimeUserData;
    // AVMetadataKeySpaceISOUserData is soft-linked and may be null on older systems.
    if (AVMetadataKeySpaceISOUserData && [avMetadataKeySpace isEqualToString:AVMetadataKeySpaceISOUserData])
        return isoUserData;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeMetadata])
        return quickTimeMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceiTunes])
        return iTunesMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceID3])
        return id3Metadata;

    return emptyAtom;
}
2861 #endif
2862
// Handles a new batch of timed metadata from the player item: caches it, then (when
// DATACUE_VALUE is enabled) closes out any still-open cues on the metadata track and
// adds a data cue per item. |metadata| may be NSNull when the item has none.
void MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(RetainPtr<NSArray> metadata, const MediaTime& mediaTime)
{
    m_currentMetaData = metadata && ![metadata isKindOfClass:[NSNull class]] ? metadata : nil;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(%p) - adding %i cues at time %s", this, m_currentMetaData ? static_cast<int>([m_currentMetaData.get() count]) : 0, toString(mediaTime).utf8().data());

#if ENABLE(DATACUE_VALUE)
    // During a seek the incoming metadata is stale; skip it.
    if (seeking())
        return;

    if (!m_metadataTrack)
        processMetadataTrack();

    // No metadata at all: just close out the pending cues at the current time.
    if (!metadata || [metadata isKindOfClass:[NSNull class]]) {
        m_metadataTrack->updatePendingCueEndTimes(mediaTime);
        return;
    }

    // Set the duration of all incomplete cues before adding new ones.
    MediaTime earliestStartTime = MediaTime::positiveInfiniteTime();
    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = toMediaTime(item.time);
        if (start < earliestStartTime)
            earliestStartTime = start;
    }
    m_metadataTrack->updatePendingCueEndTimes(earliestStartTime);

    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = toMediaTime(item.time);
        // Items without a valid duration stay open until a later batch closes them.
        MediaTime end = MediaTime::positiveInfiniteTime();
        if (CMTIME_IS_VALID(item.duration))
            end = start + toMediaTime(item.duration);

        AtomicString type = nullAtom;
        if (item.keySpace)
            type = metadataType(item.keySpace);

        m_metadataTrack->addDataCue(start, end, SerializedPlatformRepresentationMac::create(item), type);
    }
#endif
}
2904
// KVO callback for the player item's tracks array. Rebinds the "enabled" observer
// from the old cached tracks to the new set, filtering out streaming tracks that are
// already represented by a media selection group (they would be double-counted).
void MediaPlayerPrivateAVFoundationObjC::tracksDidChange(RetainPtr<NSArray> tracks)
{
    // Stop observing the outgoing track set before replacing it.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];

    NSArray *assetTracks = [m_avAsset tracks];

    m_cachedTracks = [tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id obj, NSUInteger, BOOL*) {
        AVAssetTrack* assetTrack = [obj assetTrack];

        if ([assetTracks containsObject:assetTrack])
            return YES;

        // Track is a streaming track. Omit if it belongs to a valid AVMediaSelectionGroup.
        if (!hasLoadedMediaSelectionGroups())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicAudible] && safeMediaSelectionGroupForAudibleMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicVisual] && safeMediaSelectionGroupForVisualMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible] && safeMediaSelectionGroupForLegibleMedia())
            return NO;

        return YES;
    }]];

    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track addObserver:m_objcObserver.get() forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayerItemTrack];

    // Invalidate the byte-size cache; it is recomputed lazily from the new tracks.
    m_cachedTotalBytes = 0;

    tracksChanged();
    updateStates();
}
2942
// Remaining KVO cache-and-notify callbacks: store the new value and forward to the
// appropriate cross-platform notification hook.

void MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange(bool hasEnabledAudio)
{
    m_cachedHasEnabledAudio = hasEnabledAudio;

    tracksChanged();
    updateStates();
}

void MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange(FloatSize size)
{
    m_cachedPresentationSize = size;

    sizeChanged();
    updateStates();
}

void MediaPlayerPrivateAVFoundationObjC::durationDidChange(const MediaTime& duration)
{
    m_cachedDuration = duration;

    invalidateCachedDuration();
}

void MediaPlayerPrivateAVFoundationObjC::rateDidChange(double rate)
{
    m_cachedRate = rate;

    updateStates();
    rateChanged();
}
    
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
void MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange()
{
    playbackTargetIsWirelessChanged();
}
#endif

void MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange(bool newValue)
{
    m_cachedCanPlayFastForward = newValue;
}

void MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange(bool newValue)
{
    m_cachedCanPlayFastReverse = newValue;
}
2990
// Returns the asset's post-redirect URL once AVFoundation has resolved it; falls back
// to the base class's notion of the URL until then.
URL MediaPlayerPrivateAVFoundationObjC::resolvedURL() const
{
    if (!m_avAsset || [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] != AVKeyValueStatusLoaded)
        return MediaPlayerPrivateAVFoundation::resolvedURL();

    return URL([m_avAsset resolvedURL]);
}
2998
// Keys we ask AVAsset to load asynchronously before the asset is used. The array is
// built once on first use and intentionally never released (same pattern as
// assetTrackMetadataKeyNames()).
NSArray* assetMetadataKeyNames()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"duration", @"naturalSize", @"preferredTransform", @"preferredVolume", @"preferredRate", @"playable", @"resolvedURL", @"tracks", @"availableMediaCharacteristicsWithMediaSelectionOptions", nil];
    return keys;
}
3016
// AVPlayerItem key paths observed via KVO by WebCoreAVFMovieObserver. Built once on
// first use and intentionally never released (same pattern as
// assetTrackMetadataKeyNames()).
NSArray* itemKVOProperties()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"presentationSize", @"status", @"asset", @"tracks", @"seekableTimeRanges", @"loadedTimeRanges", @"playbackLikelyToKeepUp", @"playbackBufferFull", @"playbackBufferEmpty", @"duration", @"hasEnabledAudio", @"timedMetadata", @"canPlayFastForward", @"canPlayFastReverse", nil];
    return keys;
}
3039
// AVAssetTrack keys loaded for each track before the track is exposed to
// WebCore. The array is created once (thread-safe C++ static initialization)
// and intentionally never released.
NSArray* assetTrackMetadataKeyNames()
{
    static NSArray* trackKeyNames = [[NSArray alloc] initWithObjects:
        @"totalSampleDataLength",
        @"mediaType",
        @"enabled",
        @"preferredTransform",
        @"naturalSize",
        nil];
    return trackKeyNames;
}
3045
3046 } // namespace WebCore
3047
3048 @implementation WebCoreAVFMovieObserver
3049
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    // Store a raw back-pointer to the C++ player; the player must call
    // -disconnect before it is destroyed so queued work sees a null callback.
    if (!(self = [super init]))
        return nil;

    m_callback = callback;
    return self;
}
3058
- (void)disconnect
{
    // Cancel any pending performSelector requests targeting this observer,
    // then drop the back-pointer so in-flight blocks bail out harmlessly.
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    m_callback = nullptr;
}
3064
- (void)metadataLoaded
{
    // Forward asset-metadata-loaded to the player on the main thread, unless
    // the observer has already been disconnected.
    if (m_callback)
        m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
}
3072
- (void)didEnd:(NSNotification *)unusedNotification
{
    // AVPlayerItemDidPlayToEndTime notification handler; forwards to the
    // player on the main thread unless the observer was disconnected.
    UNUSED_PARAM(unusedNotification);
    if (m_callback)
        m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
}
3080
// Central KVO dispatcher. Observations registered elsewhere for the AVPlayer,
// AVPlayerItem, AVPlayerItemTrack and AVPlayerLayer all land here (the key
// paths are enumerated in itemKVOProperties() above); the matching player
// callback is bound into a WTF::Function and scheduled on the main thread.
// NOTE: keyPath is implicitly typed id here; the NSObject override declares
// it as NSString * — the -isEqualToString: sends below rely on that.
- (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context
{
    UNUSED_PARAM(object);
    id newValue = [change valueForKey:NSKeyValueChangeNewKey];

    if (!m_callback)
        return;

    // Observations are registered with NSKeyValueObservingOptionPrior, so each
    // change fires twice: once before (willChange == true) and once after.
    bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];

#if !LOG_DISABLED
    if (willChange)
        LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - will change, keyPath = %s", self, [keyPath UTF8String]);
    else {
        RetainPtr<NSString> valueString = adoptNS([[NSString alloc] initWithFormat:@"%@", newValue]);
        LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - did change, keyPath = %s, value = %s", self, [keyPath UTF8String], [valueString.get() UTF8String]);
    }
#endif

    WTF::Function<void ()> function;

    // AVPlayerLayer: only "readyForDisplay" is observed.
    if (context == MediaPlayerAVFoundationObservationContextAVPlayerLayer) {
        if ([keyPath isEqualToString:@"readyForDisplay"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange, m_callback, [newValue boolValue]);
    }

    // AVPlayerItemTrack: only "enabled" is observed.
    if (context == MediaPlayerAVFoundationObservationContextPlayerItemTrack) {
        if ([keyPath isEqualToString:@"enabled"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange, m_callback, [newValue boolValue]);
    }

    // AVPlayerItem, "prior" notifications: the three buffering flags get
    // dedicated will-change callbacks before the new value is applied.
    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && willChange) {
        if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange, m_callback);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && !willChange) {
        // A value changed for an AVPlayerItem
        if ([keyPath isEqualToString:@"status"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange, m_callback, [newValue intValue]);
        else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"asset"])
            // NOTE(review): the "asset" value is an AVAsset, yet it is wrapped
            // in RetainPtr<NSArray> here. Harmless at runtime (the static type
            // is never used to message the object), but RetainPtr<id> would be
            // more accurate — confirm against setAsset()'s parameter type.
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::setAsset, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"loadedTimeRanges"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"seekableTimeRanges"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"tracks"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::tracksDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"hasEnabledAudio"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"presentationSize"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange, m_callback, FloatSize([newValue sizeValue]));
        else if ([keyPath isEqualToString:@"duration"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::durationDidChange, m_callback, toMediaTime([newValue CMTimeValue]));
        else if ([keyPath isEqualToString:@"timedMetadata"] && newValue) {
            // Capture the item's current time alongside the metadata so the
            // main-thread handler can position the cues; non-numeric times
            // (e.g. indefinite) fall back to MediaTime's zero default.
            MediaTime now;
            CMTime itemTime = [(AVPlayerItemType *)object currentTime];
            if (CMTIME_IS_NUMERIC(itemTime))
                now = std::max(toMediaTime(itemTime), MediaTime::zeroTime());
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::metadataDidArrive, m_callback, RetainPtr<NSArray>(newValue), now);
        } else if ([keyPath isEqualToString:@"canPlayFastReverse"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"canPlayFastForward"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange, m_callback, [newValue boolValue]);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayer && !willChange) {
        // A value changed for an AVPlayer.
        if ([keyPath isEqualToString:@"rate"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::rateDidChange, m_callback, [newValue doubleValue]);
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
        else if ([keyPath isEqualToString:@"externalPlaybackActive"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange, m_callback);
#endif
    }
    
    if (function.isNull())
        return;

    auto weakThis = m_callback->createWeakPtr();
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis, function]{
        // weakThis and function both refer to the same MediaPlayerPrivateAVFoundationObjC instance. If the WeakPtr has
        // been cleared, the underlying object has been destroyed, and it is unsafe to call function().
        if (!weakThis)
            return;
        function();
    }));
}
3179
3180 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime
{
    // AVPlayerItemLegibleOutput delegate callback carrying in-band caption
    // cues; hop to the main thread before handing them to the player.
    UNUSED_PARAM(output);
    UNUSED_PARAM(nativeSamples);

    if (!m_callback)
        return;

    // Retain self and both arrays so they survive until the main-thread block
    // runs; re-check the callback there since -disconnect may race with us.
    RetainPtr<WebCoreAVFMovieObserver> protectedSelf = self;
    RetainPtr<NSArray> protectedStrings = strings;
    RetainPtr<NSArray> protectedSamples = nativeSamples;
    callOnMainThread([protectedSelf, protectedStrings, protectedSamples, itemTime] {
        if (MediaPlayerPrivateAVFoundationObjC* callback = protectedSelf->m_callback)
            callback->processCue(protectedStrings.get(), protectedSamples.get(), toMediaTime(itemTime));
    });
}
3199
- (void)outputSequenceWasFlushed:(id)output
{
    // The legible output was flushed (e.g. after a seek); tell the player to
    // drop any cues it is currently holding, on the main thread.
    UNUSED_PARAM(output);

    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFMovieObserver> protectedSelf = self;
    callOnMainThread([protectedSelf] {
        MediaPlayerPrivateAVFoundationObjC* callback = protectedSelf->m_callback;
        if (!callback)
            return;
        callback->flushCues();
    });
}
3213 #endif
3214
3215 @end
3216
3217 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
3218 @implementation WebCoreAVFLoaderDelegate
3219
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    // Store a raw back-pointer to the C++ player; cleared via -setCallback:
    // when the player goes away.
    if (!(self = [super init]))
        return nil;

    m_callback = callback;
    return self;
}
3228
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
{
    // AVAssetResourceLoader calls this on a background queue. Answer YES so
    // the loader waits, then decide on the main thread; if the player is gone
    // (or declines the request) by then, finish the request so AVFoundation
    // is not left waiting forever.
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    RetainPtr<WebCoreAVFLoaderDelegate> protectedSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> protectedRequest = loadingRequest;
    callOnMainThread([protectedSelf, protectedRequest] {
        MediaPlayerPrivateAVFoundationObjC* callback = protectedSelf->m_callback;
        if (!callback || !callback->shouldWaitForLoadingOfResource(protectedRequest.get()))
            [protectedRequest finishLoadingWithError:nil];
    });

    return YES;
}
3250
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForResponseToAuthenticationChallenge:(NSURLAuthenticationChallenge *)challenge
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    // Server-trust evaluation is not handled here; let AVFoundation use its
    // default behavior for that authentication method.
    if ([[[challenge protectionSpace] authenticationMethod] isEqualToString:NSURLAuthenticationMethodServerTrust])
        return NO;

    // Answer YES and resolve the challenge on the main thread; cancel it if
    // the player has been destroyed or declines to respond.
    RetainPtr<WebCoreAVFLoaderDelegate> protectedSelf = self;
    RetainPtr<NSURLAuthenticationChallenge> protectedChallenge = challenge;
    callOnMainThread([protectedSelf, protectedChallenge] {
        MediaPlayerPrivateAVFoundationObjC* callback = protectedSelf->m_callback;
        if (!callback || !callback->shouldWaitForResponseToAuthenticationChallenge(protectedChallenge.get()))
            [[protectedChallenge sender] cancelAuthenticationChallenge:protectedChallenge.get()];
    });

    return YES;
}
3275
- (void)resourceLoader:(AVAssetResourceLoader *)resourceLoader didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest
{
    // AVFoundation no longer needs this resource; relay the cancellation to
    // the player on the main thread if it is still alive.
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFLoaderDelegate> protectedSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> protectedRequest = loadingRequest;
    callOnMainThread([protectedSelf, protectedRequest] {
        if (MediaPlayerPrivateAVFoundationObjC* callback = protectedSelf->m_callback)
            callback->didCancelLoadingRequest(protectedRequest.get());
    });
}
3290
// Updates (or clears, when passed null) the raw back-pointer to the player.
// In-flight main-thread blocks re-check m_callback before using it.
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    m_callback = callback;
}
3295 @end
3296 #endif
3297
3298 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
3299 @implementation WebCoreAVFPullDelegate
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    // Store a raw back-pointer to the C++ player; cleared via -setCallback:.
    if (!(self = [super init]))
        return nil;

    m_callback = callback;
    return self;
}
3307
// Updates (or clears, when passed null) the raw back-pointer to the player.
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    m_callback = callback;
}
3312
- (void)outputMediaDataWillChange:(AVPlayerItemVideoOutput *)output
{
    // AVPlayerItemOutputPullDelegate callback: a new pixel buffer is about to
    // become available; forward to the player unless it has gone away.
    if (!m_callback)
        return;

    m_callback->outputMediaDataWillChange(output);
}
3318
// AVPlayerItemOutputPullDelegate callback; intentionally ignored — the player
// pulls frames on its own schedule, so a flush requires no action here.
- (void)outputSequenceWasFlushed:(AVPlayerItemVideoOutput *)output
{
    UNUSED_PARAM(output);
    // No-op.
}
3324 @end
3325 #endif
3326
3327 #endif