REGRESSION?(r202466): http/tests/security/canvas-remote-read-remote-video-redirect...
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27
28 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
29 #import "MediaPlayerPrivateAVFoundationObjC.h"
30
31 #import "AVFoundationMIMETypeCache.h"
32 #import "AVFoundationSPI.h"
33 #import "AVTrackPrivateAVFObjCImpl.h"
34 #import "AudioSourceProviderAVFObjC.h"
35 #import "AudioTrackPrivateAVFObjC.h"
36 #import "AuthenticationChallenge.h"
37 #import "CDMSessionAVFoundationObjC.h"
38 #import "Cookie.h"
39 #import "ExceptionCodePlaceholder.h"
40 #import "Extensions3D.h"
41 #import "FloatConversion.h"
42 #import "FloatConversion.h"
43 #import "GraphicsContext.h"
44 #import "GraphicsContext3D.h"
45 #import "GraphicsContextCG.h"
46 #import "InbandMetadataTextTrackPrivateAVF.h"
47 #import "InbandTextTrackPrivateAVFObjC.h"
48 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
49 #import "Logging.h"
50 #import "MediaPlaybackTargetMac.h"
51 #import "MediaPlaybackTargetMock.h"
52 #import "MediaSelectionGroupAVFObjC.h"
53 #import "MediaTimeAVFoundation.h"
54 #import "OutOfBandTextTrackPrivateAVF.h"
55 #import "PixelBufferConformerCV.h"
56 #import "PlatformTimeRanges.h"
57 #import "QuartzCoreSPI.h"
58 #import "SecurityOrigin.h"
59 #import "SerializedPlatformRepresentationMac.h"
60 #import "Settings.h"
61 #import "TextEncoding.h"
62 #import "TextTrackRepresentation.h"
63 #import "TextureCacheCV.h"
64 #import "URL.h"
65 #import "UUID.h"
66 #import "VideoTextureCopierCV.h"
67 #import "VideoTrackPrivateAVFObjC.h"
68 #import "WebCoreAVFResourceLoader.h"
69 #import "WebCoreCALayerExtras.h"
70 #import "WebCoreNSURLSession.h"
71 #import "WebCoreSystemInterface.h"
72 #import <functional>
73 #import <map>
74 #import <objc/runtime.h>
75 #import <runtime/DataView.h>
76 #import <runtime/JSCInlines.h>
77 #import <runtime/TypedArrayInlines.h>
78 #import <runtime/Uint16Array.h>
79 #import <runtime/Uint32Array.h>
80 #import <runtime/Uint8Array.h>
81 #import <wtf/BlockObjCExceptions.h>
82 #import <wtf/CurrentTime.h>
83 #import <wtf/ListHashSet.h>
84 #import <wtf/NeverDestroyed.h>
85 #import <wtf/OSObjectPtr.h>
86 #import <wtf/text/CString.h>
87 #import <wtf/text/StringBuilder.h>
88
89 #if ENABLE(AVF_CAPTIONS)
90 #include "TextTrack.h"
91 #endif
92
93 #import <AVFoundation/AVFoundation.h>
94
95 #if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
96 #import "VideoFullscreenLayerManager.h"
97 #endif
98
99 #if PLATFORM(IOS)
100 #import "WAKAppKitStubs.h"
101 #import <CoreImage/CoreImage.h>
102 #import <mach/mach_port.h>
103 #else
104 #import <Foundation/NSGeometry.h>
105 #import <QuartzCore/CoreImage.h>
106 #endif
107
108 #if USE(VIDEOTOOLBOX)
109 #import <CoreVideo/CoreVideo.h>
110 #import <VideoToolbox/VideoToolbox.h>
111 #endif
112
113 #if USE(CFNETWORK)
114 #include "CFNSURLConnectionSPI.h"
115 #endif
116
117 #import "CoreVideoSoftLink.h"
118
119 namespace std {
120 template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
121     typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
122 };
123 }
124
125 #if ENABLE(AVF_CAPTIONS)
126 // Note: This must be defined before our SOFT_LINK macros:
127 @class AVMediaSelectionOption;
128 @interface AVMediaSelectionOption (OutOfBandExtensions)
129 @property (nonatomic, readonly) NSString* outOfBandSource;
130 @property (nonatomic, readonly) NSString* outOfBandIdentifier;
131 @end
132 #endif
133
134 @interface AVURLAsset (WebKitExtensions)
135 @property (nonatomic, readonly) NSURL *resolvedURL;
136 @end
137
138 typedef AVPlayer AVPlayerType;
139 typedef AVPlayerItem AVPlayerItemType;
140 typedef AVPlayerItemLegibleOutput AVPlayerItemLegibleOutputType;
141 typedef AVPlayerItemVideoOutput AVPlayerItemVideoOutputType;
142 typedef AVMetadataItem AVMetadataItemType;
143 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
144 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
145
146 #pragma mark - Soft Linking
147
148 // Soft-linking headers must be included last since they #define functions, constants, etc.
149 #import "CoreMediaSoftLink.h"
150
151 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
152
153 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
154
155 SOFT_LINK_CLASS(AVFoundation, AVPlayer)
156 SOFT_LINK_CLASS(AVFoundation, AVPlayerItem)
157 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemVideoOutput)
158 SOFT_LINK_CLASS(AVFoundation, AVPlayerLayer)
159 SOFT_LINK_CLASS(AVFoundation, AVURLAsset)
160 SOFT_LINK_CLASS(AVFoundation, AVAssetImageGenerator)
161 SOFT_LINK_CLASS(AVFoundation, AVMetadataItem)
162 SOFT_LINK_CLASS(AVFoundation, AVAssetCache)
163
164 SOFT_LINK_CLASS(CoreImage, CIContext)
165 SOFT_LINK_CLASS(CoreImage, CIImage)
166
167 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString*)
168 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString*)
169 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
170 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
171 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
172 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
173 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
174 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeMetadata, NSString *)
175 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
176 SOFT_LINK_POINTER(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
177 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
178 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
179 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
180 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
181 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
182
183 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
184
185 #define AVPlayer getAVPlayerClass()
186 #define AVPlayerItem getAVPlayerItemClass()
187 #define AVPlayerLayer getAVPlayerLayerClass()
188 #define AVURLAsset getAVURLAssetClass()
189 #define AVAssetImageGenerator getAVAssetImageGeneratorClass()
190 #define AVMetadataItem getAVMetadataItemClass()
191
192 #define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
193 #define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
194 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
195 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
196 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
197 #define AVMediaTypeVideo getAVMediaTypeVideo()
198 #define AVMediaTypeAudio getAVMediaTypeAudio()
199 #define AVMediaTypeMetadata getAVMediaTypeMetadata()
200 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
201 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
202 #define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
203 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
204 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
205 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
206 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
207 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
208
209 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
210 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
211 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
212
213 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
214 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
215 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
216
217 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
218 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
219 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
220 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
221
222 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
223 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
224 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
225 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
226 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
227 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
228 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
229 #endif
230
231 #if ENABLE(AVF_CAPTIONS)
232 SOFT_LINK_POINTER(AVFoundation, AVURLAssetCacheKey, NSString*)
233 SOFT_LINK_POINTER(AVFoundation, AVURLAssetHTTPCookiesKey, NSString*)
234 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
235 SOFT_LINK_POINTER(AVFoundation, AVURLAssetUsesNoPersistentCacheKey, NSString*)
236 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
237 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
238 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
239 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
240 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
241 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
242 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
243 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
244 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicIsAuxiliaryContent, NSString*)
245
246 #define AVURLAssetHTTPCookiesKey getAVURLAssetHTTPCookiesKey()
247 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
248 #define AVURLAssetCacheKey getAVURLAssetCacheKey()
249 #define AVURLAssetUsesNoPersistentCacheKey getAVURLAssetUsesNoPersistentCacheKey()
250 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
251 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
252 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
253 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
254 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
255 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
256 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
257 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
258 #define AVMediaCharacteristicIsAuxiliaryContent getAVMediaCharacteristicIsAuxiliaryContent()
259 #endif
260
261 #if ENABLE(DATACUE_VALUE)
262 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString*)
263 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMetadataKeySpaceISOUserData, NSString*)
264 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString*)
265 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceiTunes, NSString*)
266 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceID3, NSString*)
267
268 #define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
269 #define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
270 #define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
271 #define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
272 #define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
273 #endif
274
275 #if PLATFORM(IOS)
276 SOFT_LINK_POINTER(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
277 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
278 #endif
279
280 SOFT_LINK_FRAMEWORK(MediaToolbox)
281 SOFT_LINK_OPTIONAL(MediaToolbox, MTEnableCaption2015Behavior, Boolean, (), ())
282
283 using namespace WebCore;
284
285 enum MediaPlayerAVFoundationObservationContext {
286     MediaPlayerAVFoundationObservationContextPlayerItem,
287     MediaPlayerAVFoundationObservationContextPlayerItemTrack,
288     MediaPlayerAVFoundationObservationContextPlayer,
289     MediaPlayerAVFoundationObservationContextAVPlayerLayer,
290 };
291
// Observes AVPlayer/AVPlayerItem notifications and KVO changes on behalf of
// MediaPlayerPrivateAVFoundationObjC and, when legible output is supported,
// receives caption cues as the legible-output push delegate.
// m_callback is a raw back-pointer; -disconnect must be sent before the owning
// player is destroyed (see cancelLoad()).
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
#else
@interface WebCoreAVFMovieObserver : NSObject
#endif
{
    MediaPlayerPrivateAVFoundationObjC* m_callback;
    int m_delayCallbacks; // NOTE(review): increment/decrement sites are not visible in this chunk
}
-(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
-(void)disconnect;
-(void)metadataLoaded;
-(void)didEnd:(NSNotification *)notification;
// NOTE(review): this deviates from NSObject's KVO hook signature — keyPath is
// implicitly typed id and context is declared as the observation enum rather
// than void*; Objective-C dispatch still matches it by selector.
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;
#endif
@end
311
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Resource-loader delegate that routes AVAssetResourceLoader requests back
// into WebCore via the owning player. m_callback is a raw back-pointer; the
// player clears it with setCallback:0 in its destructor so a delegate that
// outlives the player cannot dereference it.
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    MediaPlayerPrivateAVFoundationObjC* m_callback;
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
@end
#endif
321
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Pull-mode delegate for AVPlayerItemVideoOutput; forwards "media data will
// change" / "output flushed" notifications to the owning player. m_callback
// is a raw back-pointer, cleared via setCallback:0 during player teardown.
@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    MediaPlayerPrivateAVFoundationObjC *m_callback;
    dispatch_semaphore_t m_semaphore; // NOTE(review): signaling sites not visible in this chunk — see implementation
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
@end
#endif
333
334 namespace WebCore {
335
336 static NSArray *assetMetadataKeyNames();
337 static NSArray *itemKVOProperties();
338 static NSArray *assetTrackMetadataKeyNames();
339 static NSArray *playerKVOProperties();
340 static AVAssetTrack* firstEnabledTrack(NSArray* tracks);
341
#if !LOG_DISABLED
// Stringifies a bool for LOG() messages.
static const char *boolString(bool val)
{
    if (val)
        return "true";
    return "false";
}
#endif
348
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Shared serial queue on which all WebCoreAVFLoaderDelegate callbacks run.
// Created exactly once and intentionally never destroyed.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t loaderQueue;
    static dispatch_once_t createQueueOnce;
    dispatch_once(&createQueueOnce, ^{
        loaderQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return loaderQueue;
}
#endif
360
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Shared serial queue on which all WebCoreAVFPullDelegate callbacks run.
// Created exactly once and intentionally never destroyed.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_queue_t pullDelegateQueue;
    static dispatch_once_t createQueueOnce;
    dispatch_once(&createQueueOnce, ^{
        pullDelegateQueue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return pullDelegateQueue;
}
#endif
372
#if USE(CFNETWORK)
// Bridges WebCore's AuthenticationClient interface onto an
// NSURLAuthenticationChallenge's sender, so CFNetwork-backed loads can reuse
// WebCore's authentication machinery. Each received* response is forwarded
// verbatim to [m_challenge sender].
class WebCoreNSURLAuthenticationChallengeClient : public RefCounted<WebCoreNSURLAuthenticationChallengeClient>, public AuthenticationClient {
public:
    static RefPtr<WebCoreNSURLAuthenticationChallengeClient> create(NSURLAuthenticationChallenge *challenge)
    {
        return adoptRef(new WebCoreNSURLAuthenticationChallengeClient(challenge));
    }

    // Re-expose RefCounted's ref/deref so the AuthenticationClient hooks
    // below can delegate to them.
    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::ref;
    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::deref;

private:
    WebCoreNSURLAuthenticationChallengeClient(NSURLAuthenticationChallenge *challenge)
        : m_challenge(challenge)
    {
        ASSERT(m_challenge);
    }

    void refAuthenticationClient() override { ref(); }
    void derefAuthenticationClient() override { deref(); }

    void receivedCredential(const AuthenticationChallenge&, const Credential& credential) override
    {
        [[m_challenge sender] useCredential:credential.nsCredential() forAuthenticationChallenge:m_challenge.get()];
    }

    void receivedRequestToContinueWithoutCredential(const AuthenticationChallenge&) override
    {
        [[m_challenge sender] continueWithoutCredentialForAuthenticationChallenge:m_challenge.get()];
    }

    void receivedCancellation(const AuthenticationChallenge&) override
    {
        [[m_challenge sender] cancelAuthenticationChallenge:m_challenge.get()];
    }

    // The next two selectors are optional on the sender, hence the
    // respondsToSelector: probes before messaging.
    void receivedRequestToPerformDefaultHandling(const AuthenticationChallenge&) override
    {
        if ([[m_challenge sender] respondsToSelector:@selector(performDefaultHandlingForAuthenticationChallenge:)])
            [[m_challenge sender] performDefaultHandlingForAuthenticationChallenge:m_challenge.get()];
    }

    void receivedChallengeRejection(const AuthenticationChallenge&) override
    {
        if ([[m_challenge sender] respondsToSelector:@selector(rejectProtectionSpaceAndContinueWithChallenge:)])
            [[m_challenge sender] rejectProtectionSpaceAndContinueWithChallenge:m_challenge.get()];
    }

    RetainPtr<NSURLAuthenticationChallenge> m_challenge;
};
#endif
424
// Registers this engine with the MediaPlayer factory. A no-op when the
// AVFoundation framework cannot be soft-linked on this system. Also warms
// the MIME-type cache up front so later supportsType() queries are cheap.
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player); },
            getSupportedTypes, supportsType, originsInMediaCache, clearMediaCache, clearMediaCacheForOrigins, supportsKeySystem);
    AVFoundationMIMETypeCache::singleton().loadTypes();
}
434
// Returns the AVAssetCache rooted at |path|. An empty path falls back to a
// "MediaCache" directory inside the system temporary directory.
static AVAssetCache *assetCacheForPath(const String& path)
{
    NSURL *cacheURL;

    if (path.isEmpty()) {
        NSURL *temporaryDirectoryURL = [NSURL fileURLWithPath:NSTemporaryDirectory()];
        cacheURL = [temporaryDirectoryURL URLByAppendingPathComponent:@"MediaCache" isDirectory:YES];
    } else
        cacheURL = [NSURL fileURLWithPath:path isDirectory:YES];

    return [getAVAssetCacheClass() assetCacheWithURL:cacheURL];
}
446
// Collects the security origins that currently have entries in the on-disk
// media cache at |path|. Cache keys are stored as URL strings; keys that do
// not parse as valid URLs are ignored.
HashSet<RefPtr<SecurityOrigin>> MediaPlayerPrivateAVFoundationObjC::originsInMediaCache(const String& path)
{
    HashSet<RefPtr<SecurityOrigin>> result;
    NSArray *cacheKeys = [assetCacheForPath(path) allKeys];
    for (NSString *key in cacheKeys) {
        URL keyURL = URL(URL(), key);
        if (!keyURL.isValid())
            continue;
        result.add(SecurityOrigin::create(keyURL));
    }
    return result;
}
457
// Converts an NSDate to a std::chrono::system_clock::time_point, preserving
// sub-second precision through a double-seconds intermediate duration.
static std::chrono::system_clock::time_point toSystemClockTime(NSDate *date)
{
    ASSERT(date);
    using namespace std::chrono;

    duration<double> secondsSinceEpoch(date.timeIntervalSince1970);
    return system_clock::time_point(duration_cast<system_clock::duration>(secondsSinceEpoch));
}
465
// Removes cached media modified after |modifiedSince|, in two phases: first
// the AVAssetCache's own key space, then any leftover "CachedMedia-" files
// directly under the cache directory. A |modifiedSince| at or before the
// epoch means "clear everything", in which case the whole directory is
// removed wholesale.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCache(const String& path, std::chrono::system_clock::time_point modifiedSince)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::clearMediaCache()");
    
    AVAssetCache* assetCache = assetCacheForPath(path);
    
    // Phase 1: purge AVAssetCache entries newer than the cutoff.
    for (NSString *key in [assetCache allKeys]) {
        if (toSystemClockTime([assetCache lastModifiedDateOfEntryForKey:key]) > modifiedSince)
            [assetCache removeEntryForKey:key];
    }

    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSURL *baseURL = [assetCache URL];

    // "Clear all": nuke the entire cache directory and stop.
    if (modifiedSince <= std::chrono::system_clock::time_point { }) {
        [fileManager removeItemAtURL:baseURL error:nil];
        return;
    }
    
    // Phase 2: scan the top level of the cache directory (no recursion) for
    // regular "CachedMedia-" files newer than the cutoff.
    NSArray *propertyKeys = @[NSURLNameKey, NSURLContentModificationDateKey, NSURLIsRegularFileKey];
    NSDirectoryEnumerator *enumerator = [fileManager enumeratorAtURL:baseURL includingPropertiesForKeys:
        propertyKeys options:NSDirectoryEnumerationSkipsSubdirectoryDescendants
        errorHandler:nil];
    
    // Collect first, delete after: deleting while the enumerator is live
    // would mutate the directory being enumerated.
    RetainPtr<NSMutableArray> urlsToDelete = adoptNS([[NSMutableArray alloc] init]);
    for (NSURL *fileURL : enumerator) {
        NSDictionary *fileAttributes = [fileURL resourceValuesForKeys:propertyKeys error:nil];
    
        if (![fileAttributes[NSURLNameKey] hasPrefix:@"CachedMedia-"])
            continue;
        
        if (![fileAttributes[NSURLIsRegularFileKey] boolValue])
            continue;
        
        // Keep files last modified at or before the cutoff.
        if (toSystemClockTime(fileAttributes[NSURLContentModificationDateKey]) <= modifiedSince)
            continue;
        
        [urlsToDelete addObject:fileURL];
    }
    
    for (NSURL *fileURL in urlsToDelete.get())
        [fileManager removeItemAtURL:fileURL error:nil];
}
509
// Removes AVAssetCache entries whose key URL belongs to one of |origins|.
// Keys that do not parse as valid URLs are left untouched.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCacheForOrigins(const String& path, const HashSet<RefPtr<SecurityOrigin>>& origins)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::clearMediaCacheForOrigins()");
    AVAssetCache* assetCache = assetCacheForPath(path);
    for (NSString *key in [assetCache allKeys]) {
        URL keyAsURL = URL(URL(), key);
        if (!keyAsURL.isValid())
            continue;
        if (origins.contains(SecurityOrigin::create(keyAsURL)))
            [assetCache removeEntryForKey:key];
    }
}
522
// Constructs the player with all cached AVFoundation state cleared. The
// Objective-C helper objects (movie observer, video-output pull delegate,
// resource-loader delegate) are created here and given a raw back-pointer to
// |this|; those back-pointers are severed in the destructor / cancelLoad().
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_weakPtrFactory(this)
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    , m_videoFullscreenLayerManager(VideoFullscreenLayerManager::create())
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
#endif
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
    , m_videoOutputSemaphore(nullptr)
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
#endif
    , m_currentTextTrack(0)
    , m_cachedRate(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_shouldBufferData(true)
    , m_cachedIsReadyForDisplay(false)
    , m_haveBeenAskedToCreateLayer(false)
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
}
557
// Tears down the player. Delegate back-pointers are severed first so no
// in-flight callback can reach a half-destroyed object; then the video
// pipeline is released and any pending load is cancelled.
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Stop late resource-loader callbacks from dereferencing |this|.
    [m_loaderDelegate.get() setCallback:0];
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];

    // Invalidate outstanding per-request loaders.
    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    [m_videoOutputDelegate setCallback:0];
    [m_videoOutput setDelegate:nil queue:0];
    // The semaphore is a manually-managed dispatch object; balance its retain.
    if (m_videoOutputSemaphore)
        dispatch_release(m_videoOutputSemaphore);
#endif

    if (m_videoLayer)
        destroyVideoLayer();

    cancelLoad();
}
579
// Cancels any in-progress load and returns the player to a pristine state:
// rendering is torn down, observers and outputs are detached from the
// AVPlayerItem/AVPlayer before those objects are released, and all cached
// item/player properties are reset. Load-state changes are suppressed for
// the duration so the cancellation's own completion callbacks are ignored.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::cancelLoad(%p)", this);
    tearDownVideoRendering();

    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Detach the legible (caption) output from the item before dropping it.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    // Remove every KVO registration made on the item before releasing it;
    // releasing an object that still has observers is a KVO error.
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        
        m_avPlayerItem = nil;
    }
    // Likewise for the player: drop the periodic time observer, then KVO.
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;

        for (NSString *keyName in playerKVOProperties())
            [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = MediaTime::zeroTime();

    // Each cached track carries an "enabled" observation that must be removed.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Disconnect the Web Audio source provider from the now-released item.
    if (m_provider) {
        m_provider->setPlayerItem(nullptr);
        m_provider->setAudioTrack(nullptr);
    }
#endif

    setIgnoreLoadStateChanges(false);
}
643
// A layer renderer is considered present as soon as createVideoLayer() has
// been scheduled, even if the AVPlayerLayer itself has not yet been created
// on the main thread.
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_haveBeenAskedToCreateLayer;
}
648
// True when a non-layer (paint-into-context) renderer exists: the video
// output pipeline when this build has one, else the snapshot image generator.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (m_videoOutput)
        return true;
#endif
    return m_imageGenerator;
}
657
// Creates whichever context renderer this build supports: the
// AVPlayerItemVideoOutput pipeline when available, otherwise the
// AVAssetImageGenerator fallback.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
666
// Lazily creates the AVAssetImageGenerator used for software painting.
// No-op until an asset exists or once a generator has been created. Zero
// time tolerance is requested so snapshots correspond to the exact current
// time rather than the nearest keyframe.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p)", this);

    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
}
683
// Destroys every context renderer that may have been created; each helper
// is a no-op when its renderer does not exist.
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
    destroyOpenGLVideoOutput();
#endif
    destroyImageGenerator();
}
692
// Releases the snapshot image generator, if one was ever created.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (m_imageGenerator) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator(%p) - destroying  %p", this, m_imageGenerator.get());
        m_imageGenerator = nil;
    }
}
702
// Schedules AVPlayerLayer creation on the main thread. The
// m_haveBeenAskedToCreateLayer flag is checked both here and again inside
// the dispatched block, since several callers may race before the block runs.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
        return;

    callOnMainThread([this, weakThis = createWeakPtr()] {
        // The player may have been destroyed before this block ran.
        if (!weakThis)
            return;

        if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
            return;
        m_haveBeenAskedToCreateLayer = true;

        if (!m_videoLayer)
            createAVPlayerLayer();

#if USE(VIDEOTOOLBOX)
        if (!m_videoOutput)
            createVideoOutput();
#endif

        // Rendering mode changed from context to layer; let the client react.
        player()->client().mediaPlayerRenderingModeChanged(player());
    });
}
727
// Creates the AVPlayerLayer used for hardware-composited rendering and
// attaches it to the current AVPlayer; no-op when no player exists yet.
// On platforms with a fullscreen layer manager the layer is handed to that
// manager (which owns its frame); otherwise it is sized to the content box.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
{
    if (!m_avPlayer)
        return;

    m_videoLayer = adoptNS([allocAVPlayerLayerInstance() init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];
    [m_videoLayer setBackgroundColor:cachedCGColor(Color::black)];

#ifndef NDEBUG
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
    // Observe "readyForDisplay" so we learn when the first frame can be shown.
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
    IntSize defaultSize = snappedIntRect(player()->client().mediaPlayerContentBoxRect()).size();
    // Fixed: this message previously claimed to come from createVideoLayer().
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer(%p) - returning %p", this, m_videoLayer.get());

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    m_videoFullscreenLayerManager->setVideoLayer(m_videoLayer.get(), defaultSize);

#if PLATFORM(IOS)
    // Keep the layer's picture-in-picture state in sync with the player's fullscreen mode.
    if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(player()->fullscreenMode() & MediaPlayer::VideoFullscreenModePictureInPicture)];
#endif
#else
    [m_videoLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
#endif
}
757
// Tears down the AVPlayerLayer: unregisters the KVO observer added in
// createAVPlayerLayer(), detaches the layer from its player, then releases it.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer(%p) - destroying %p", this, m_videoLayer.get());

    [m_videoLayer removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer setPlayer:nil];

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    m_videoFullscreenLayerManager->didDestroyVideoLayer();
#endif

    m_videoLayer = nil;
}
774
MediaTime MediaPlayerPrivateAVFoundationObjC::getStartDate() const
{
    // The item's currentDate advances with playback, so the media's start date
    // is currentDate minus the current playback offset.
    double dateMilliseconds = [[m_avPlayerItem currentDate] timeIntervalSince1970] * 1000;

    // AVFoundation reports 0 when the media carries no start date; no live
    // stream began exactly at the epoch (1970), so treat 0 as "no date".
    if (!dateMilliseconds)
        return MediaTime::invalidTime();

    double offsetMilliseconds = CMTimeGetSeconds([m_avPlayerItem currentTime]) * 1000;

    // Round to absorb sub-second error introduced by the subtraction.
    return MediaTime::createWithDouble(round(dateMilliseconds - offsetMilliseconds));
}
789
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    // When rendering through the AVPlayerLayer, the cached readyForDisplay
    // value answers the question; otherwise report whether we painted a frame.
    bool renderingToLayer = currentRenderingMode() == MediaRenderingToLayer;
    return renderingToLayer ? m_cachedIsReadyForDisplay : m_videoFrameHasDrawn;
}
797
798 #if ENABLE(AVF_CAPTIONS)
// Maps an out-of-band text track kind to the AVFoundation media characteristics
// advertised for its selection option.
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    // Under the 2015 caption behavior (manual selection mode) every track is
    // tagged as auxiliary content.
    static bool manualSelectionMode = MTEnableCaption2015BehaviorPtr() && MTEnableCaption2015BehaviorPtr()();
    if (manualSelectionMode)
        return @[ AVMediaCharacteristicIsAuxiliaryContent ];

    // FIXME: Match these to correct types:
    switch (kind) {
    case PlatformTextTrack::Description:
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility ];
    case PlatformTextTrack::Forced:
        return @[ AVMediaCharacteristicContainsOnlyForcedSubtitles ];
    case PlatformTextTrack::Caption:
    case PlatformTextTrack::Subtitle:
    default:
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];
    }
}
820     
// Forwards out-of-band track mode changes to the shared base-class handler.
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
825     
// Pushes the mode of each out-of-band track source (from the MediaPlayer
// client) down to the matching platform text track.
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    
    for (auto& textTrack : m_textTracks) {
        // Only out-of-band tracks are synchronized here.
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;
        
        RefPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> currentOption = trackPrivate->mediaSelectionOption();
        
        for (auto& track : outOfBandTrackSources) {
            // Track sources are matched to media selection options by the
            // unique ID stored as the option's out-of-band identifier.
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();
            
            if (![[currentOption.get() outOfBandIdentifier] isEqual: reinterpret_cast<const NSString*>(uniqueID.get())])
                continue;
            
            // Translate the platform mode; unknown modes fall back to Hidden.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;
            
            textTrack->setMode(mode);
            break;
        }
    }
}
856 #endif
857
858
// Returns the canonical form of the given URL as produced by the URL loading
// system, falling back to the unmodified URL when canonicalization is not possible.
static NSURL *canonicalURL(const String& url)
{
    NSURL *cocoaURL = URL(ParsedURLString, url);
    if (url.isEmpty())
        return cocoaURL;

    RetainPtr<NSURLRequest> request = adoptNS([[NSURLRequest alloc] initWithURL:cocoaURL]);
    NSURLRequest *canonicalRequest = request ? [NSURLProtocol canonicalRequestForRequest:request.get()] : nil;
    return canonicalRequest ? [canonicalRequest URL] : cocoaURL;
}
875
876 #if PLATFORM(IOS)
// Converts a WebCore Cookie into the NSHTTPCookie form AVFoundation expects.
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    RetainPtr<NSMutableDictionary> properties = adoptNS([@{
        NSHTTPCookieName: cookie.name,
        NSHTTPCookieValue: cookie.value,
        NSHTTPCookieDomain: cookie.domain,
        NSHTTPCookiePath: cookie.path,
        NSHTTPCookieExpires: [NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)],
    } mutableCopy]);

    // Secure and session-only flags are added only when set.
    if (cookie.secure)
        [properties setObject:@YES forKey:NSHTTPCookieSecure];
    if (cookie.session)
        [properties setObject:@YES forKey:NSHTTPCookieDiscard];

    return [NSHTTPCookie cookieWithProperties:properties.get()];
}
894 #endif
895
// Creates the AVURLAsset for the given URL, assembling the full options
// dictionary (reference restrictions, HTTP headers, out-of-band tracks,
// cookies, caching) and wiring up the resource loader delegate.
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
{
    if (m_avAsset)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(%p) - url = %s", this, url.utf8().data());

    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);

    // Forbid local<->remote cross references within the asset for security.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    // Propagate the page's Referer and User-Agent to media requests.
    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject: [NSNumber numberWithBool: TRUE] forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

#if PLATFORM(IOS)
    // FIXME: rdar://problem/20354688
    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty() && AVURLAssetClientBundleIdentifierKey)
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];
#endif

#if ENABLE(AVF_CAPTIONS)
    // Describe each out-of-band text track so AVFoundation exposes it as a
    // media selection option.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: reinterpret_cast<const NSString*>(label.get()),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: reinterpret_cast<const NSString*>(language.get()),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: reinterpret_cast<const NSString*>(uniqueID.get()),
                AVOutOfBandAlternateTrackSourceKey: reinterpret_cast<const NSString*>(url.get()),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS)
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS)
    // Hand the document's cookies for this URL to AVFoundation.
    Vector<Cookie> cookies;
    if (player()->getRawCookies(URL(ParsedURLString, url), cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    bool usePersistentCache = player()->client().mediaPlayerShouldUsePersistentCache();
    [options setObject:@(!usePersistentCache) forKey:AVURLAssetUsesNoPersistentCacheKey];

    if (usePersistentCache)
        [options setObject:assetCacheForPath(player()->client().mediaPlayerMediaCacheDirectory()) forKey:AVURLAssetCacheKey];

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([allocAVURLAssetInstance() initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    [resourceLoader setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];

#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED > 101100
    // When the NSURLSession path is enabled and the SPI is available, route
    // media loading through WebCore's resource loader via WebCoreNSURLSession.
    if (Settings::isAVFoundationNSURLSessionEnabled()
        && [resourceLoader respondsToSelector:@selector(setURLSession:)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegate)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegateQueue)]) {
        RefPtr<PlatformMediaResourceLoader> mediaResourceLoader = player()->createResourceLoader();
        if (mediaResourceLoader)
            resourceLoader.URLSession = (NSURLSession *)[[[WebCoreNSURLSession alloc] initWithResourceLoader:*mediaResourceLoader delegate:resourceLoader.URLSessionDataDelegate delegateQueue:resourceLoader.URLSessionDataDelegateQueue] autorelease];
    }
#endif

#endif

    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
1002
// Installs the given item on the AVPlayer. Item replacement must happen on the
// main thread, so hop there when needed, keeping both objects alive.
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItemType *item)
{
    if (!m_avPlayer)
        return;

    if (pthread_main_np()) {
        [m_avPlayer replaceCurrentItemWithPlayerItem:item];
        return;
    }

    RetainPtr<AVPlayerType> protectedPlayer = m_avPlayer.get();
    RetainPtr<AVPlayerItemType> protectedItem = item;
    dispatch_async(dispatch_get_main_queue(), [protectedPlayer, protectedItem] {
        [protectedPlayer replaceCurrentItemWithPlayerItem:protectedItem.get()];
    });
}
1019
// Creates and configures the AVPlayer, registering KVO observers and
// re-applying any state (playback target, mute) cached before it existed.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayer(%p)", this);

    setDelayCallbacks(true);

    m_avPlayer = adoptNS([allocAVPlayerInstance() init]);
    for (NSString *keyName in playerKVOProperties())
        [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // WebCore drives media selection itself.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET) && !PLATFORM(IOS)
    if (m_shouldPlayToPlaybackTarget) {
        // Clear m_shouldPlayToPlaybackTarget so the setter doesn't return
        // without doing anything, then re-apply it to the new player.
        m_shouldPlayToPlaybackTarget = false;
        setShouldPlayToPlaybackTarget(true);
    }
#endif

    if (m_muted) {
        // Clear m_muted so setMuted() doesn't return without doing anything,
        // then re-apply the cached mute to the new player. (This previously set
        // the player's muted state to false, losing a mute requested before the
        // player was created.)
        m_muted = false;
        setMuted(true);
    }

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
1064
// Creates the AVPlayerItem for the current asset, registers notification/KVO
// observers, and attaches the legible output and Web Audio provider.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem(%p)", this);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([allocAVPlayerItemInstance() initWithAsset:m_avAsset.get()]);

    // Observe end-of-playback and the item properties mirrored into cached state.
    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Route caption cues through a legible output so WebCore renders them
    // itself; player-side rendering is suppressed.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([allocAVPlayerItemLegibleOutputInstance() initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio provider pointed at the new item and its audio track.
    if (m_provider) {
        m_provider->setPlayerItem(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }
#endif

    setDelayCallbacks(false);
}
1110
// Asynchronously loads the asset's "playable" key, then schedules the
// AssetPlayabilityKnown notification on the main thread. Runs at most once per
// asset (m_haveCheckedPlayability is reset when a new asset is created).
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::checkPlayability(%p)", this);
    auto weakThis = createWeakPtr();

    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"playable"] completionHandler:^{
        callOnMainThread([weakThis] {
            // This object may have been destroyed while the key loaded.
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
1127
// Kicks off asynchronous loading of asset-level and per-track metadata keys.
// A dispatch group is entered once for the asset keys and once per track so
// metadataLoaded fires only after every asynchronous load has completed.
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);

    OSObjectPtr<dispatch_group_t> metadataLoadingGroup = adoptOSObject(dispatch_group_create());
    dispatch_group_enter(metadataLoadingGroup.get());
    auto weakThis = createWeakPtr();
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            // Only start the per-track loads if this object is still alive and
            // the track list actually loaded.
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup.get());
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup.get());
                    }];
                }
            }
            // Balances the enter performed before the asset-level load.
            dispatch_group_leave(metadataLoadingGroup.get());
        });
    }];

    dispatch_group_notify(metadataLoadingGroup.get(), dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });
    });
}
1157
// Maps the cached AVPlayerItem state onto the cross-platform ItemStatus. The
// buffering flags are consulted in order of decreasing readiness.
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    switch (m_cachedItemStatus) {
    case AVPlayerItemStatusUnknown:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    case AVPlayerItemStatusFailed:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    default:
        break;
    }

    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1176
// Exposes the underlying AVPlayer to clients via the PlatformMedia union.
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformMedia(%p)", this);
    PlatformMedia platformMedia;
    platformMedia.type = PlatformMedia::AVFoundationMediaPlayerType;
    platformMedia.media.avfMediaPlayer = m_avPlayer.get();
    return platformMedia;
}
1185
// Returns the layer WebCore should composite, or null until createVideoLayer()
// has been asked to run.
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    // On these platforms the fullscreen layer manager owns the inline layer.
    return m_haveBeenAskedToCreateLayer ? m_videoFullscreenLayerManager->videoInlineLayer() : nullptr;
#else
    return m_haveBeenAskedToCreateLayer ? m_videoLayer.get() : nullptr;
#endif
}
1194
1195 #if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
// Installs (or removes, when null) the fullscreen host layer and re-parents the
// text track representation into it. The completion handler always runs.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer, std::function<void()> completionHandler)
{
    if (m_videoFullscreenLayerManager->videoFullscreenLayer() == videoFullscreenLayer) {
        completionHandler();
        return;
    }

    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, completionHandler);

    // Move the caption representation layer into the new fullscreen layer.
    if (m_videoFullscreenLayerManager->videoFullscreenLayer() && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayerManager->videoFullscreenLayer() addSublayer:m_textTrackRepresentationLayer.get()];
    }

    updateDisableExternalPlayback();
}
1212
// Resizes the fullscreen video layer and keeps the caption layer in sync.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
    syncTextTrackBounds();
}
1218
// Records the requested gravity and applies the matching AVLayerVideoGravity
// to the video layer, resyncing caption bounds when it actually changes.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;

    if (!m_videoLayer)
        return;

    NSString *videoGravity;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        videoGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        videoGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    }

    if ([m_videoLayer videoGravity] == videoGravity)
        return;

    [m_videoLayer setVideoGravity:videoGravity];
    syncTextTrackBounds();
}
1242
// Reflects the fullscreen mode onto the video layer (picture-in-picture SPI)
// and external playback state. A no-op outside iOS.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenMode(MediaPlayer::VideoFullscreenMode mode)
{
#if PLATFORM(IOS)
    if (m_videoLayer && [m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(mode & MediaPlayer::VideoFullscreenModePictureInPicture)];
    updateDisableExternalPlayback();
#else
    UNUSED_PARAM(mode);
#endif
}
1253
1254 #endif // PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
1255
1256 #if PLATFORM(IOS)
// Returns the most recently captured timed-metadata payload, if any.
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    return m_currentMetaData ? m_currentMetaData.get() : nil;
}
1263
// Serializes the player item's access log using the log's preferred encoding.
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemAccessLog *accessLog = [m_avPlayerItem.get() accessLog];
    RetainPtr<NSString> serializedLog = adoptNS([[NSString alloc] initWithData:[accessLog extendedLogData] encoding:[accessLog extendedLogDataStringEncoding]]);
    return serializedLog.get();
}
1274
// Serializes the player item's error log using the log's preferred encoding.
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *errorLog = [m_avPlayerItem.get() errorLog];
    RetainPtr<NSString> serializedLog = adoptNS([[NSString alloc] initWithData:[errorLog extendedLogData] encoding:[errorLog extendedLogDataStringEncoding]]);
    return serializedLog.get();
}
1285 #endif
1286
// Shows or hides the video layer without triggering implicit CA animations.
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    if (m_videoLayer) {
        BOOL hidden = !isVisible;
        [m_videoLayer.get() setHidden:hidden];
    }
    [CATransaction commit];
}
1295     
// Starts playback by applying the requested rate to the player; observer
// callbacks are delayed so state updates land after the rate change settles.
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPlay(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    double rate = requestedRate();
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1307
// Pauses playback by setting the player's rate to zero; observer callbacks are
// delayed so state updates land after the rate change settles.
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPause(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = 0;
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
1319
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Asking the asset for its duration before it finishes loading would fetch
    // the answer synchronously, so bail until it is loaded.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();

    // Prefer the item's duration once it is ready to play; some assets never
    // report a duration of their own.
    bool itemIsUsable = m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay;
    CMTime duration = itemIsUsable ? [m_avPlayerItem.get() duration] : [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(duration))
        return toMediaTime(duration);

    if (CMTIME_IS_INDEFINITE(duration))
        return MediaTime::positiveInfiniteTime();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %s", this, toString(MediaTime::invalidTime()).utf8().data());
    return MediaTime::invalidTime();
}
1344
// Returns the item's current playback position, clamped so non-numeric or
// negative times are reported as zero.
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime playerTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(playerTime))
        return MediaTime::zeroTime();

    return std::max(toMediaTime(playerTime), MediaTime::zeroTime());
}
1356
// Seeks the player item to the given time within the given tolerances,
// reporting completion through seekCompleted() on the main thread.
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // Partially accumulated metadata cues would straddle the seek; flush them.
    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = toCMTime(time);
    CMTime cmBefore = toCMTime(negativeTolerance);
    CMTime cmAfter = toCMTime(positiveTolerance);

    auto weakThis = createWeakPtr();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::seekToTime(%p) - calling seekToTime", this);

    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            // This object may have been destroyed while the seek completed.
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1385
// Applies the volume to the player. On iOS the system controls media volume,
// so this is intentionally a no-op there.
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS)
    UNUSED_PARAM(volume);
    return;
#else
    if (!m_avPlayer)
        return;

    [m_avPlayer.get() setVolume:volume];
#endif
}
1398
// Caches the requested mute state and applies it to the player when one
// exists; the cached value survives until a player is created.
void MediaPlayerPrivateAVFoundationObjC::setMuted(bool muted)
{
    if (muted == m_muted)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setMuted(%p) - set to %s", this, boolString(muted));

    m_muted = muted;

    if (!m_avPlayer)
        return;

    [m_avPlayer.get() setMuted:m_muted];
}
1413
// Caption visibility is handled elsewhere in this class; this override only
// logs. The parameter is consumed solely by LOG-enabled builds, hence the
// UNUSED_PARAM for release configurations.
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - set to %s", this, boolString(closedCaptionsVisible));
}
1423
// Caches the rate (rate() reports the cached value) and pushes it to the
// player, delaying observer callbacks while the change settles.
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    setDelayCallbacks(true);
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1431
// Reports the cached playback rate, or 0 before metadata is available.
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    return metaDataAvailable() ? m_cachedRate : 0;
}
1439
// Selects the item's time-pitch algorithm: spectral (pitch-preserving) when
// requested, varispeed otherwise.
void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
{
    if (!m_avPlayerItem)
        return;

    [m_avPlayerItem setAudioTimePitchAlgorithm:(preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];
}
1445
// Builds the buffered ranges from the cached loaded ranges, keeping only
// valid, non-empty CMTimeRanges.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto ranges = std::make_unique<PlatformTimeRanges>();

    if (!m_avPlayerItem)
        return ranges;

    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;
        ranges->add(toMediaTime(range.start), toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return ranges;
}
1460
// Returns the earliest start time among the valid seekable ranges, or zero
// when there are no (valid) ranges.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    MediaTime earliestStart = MediaTime::positiveInfiniteTime();
    bool foundValidRange = false;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundValidRange = true;
        earliestStart = std::min(earliestStart, toMediaTime(range.start));
    }
    return foundValidRange ? earliestStart : MediaTime::zeroTime();
}
1480
// Returns the latest end time among the valid seekable ranges, lazily pulling
// the ranges from the player item if KVO has not populated the cache yet.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    MediaTime latestEnd;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latestEnd = std::max(latestEnd, toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return latestEnd;
}
1498
// Returns the latest end time among the cached loaded (buffered) ranges, or
// zero when nothing has been cached.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    MediaTime latestEnd;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        MediaTime end = toMediaTime(CMTimeRangeGetEnd(range));
        if (latestEnd < end)
            latestEnd = end;
    }

    return latestEnd;
}
1517
// Returns the total sample-data size of all tracks, summing once and then
// serving the cached value on subsequent calls (the cache member is mutable).
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    if (!m_cachedTotalBytes) {
        for (AVPlayerItemTrack *track in m_cachedTracks.get())
            m_cachedTotalBytes += [[track assetTrack] totalSampleDataLength];
    }

    return m_cachedTotalBytes;
}
1531
// Stores the AVAsset (typed as id because it may be an AVURLAsset or a proxy)
// backing this player. The RetainPtr is taken by value, so move it into the
// member to avoid an unnecessary retain/release pair.
void MediaPlayerPrivateAVFoundationObjC::setAsset(RetainPtr<id> asset)
{
    m_avAsset = WTFMove(asset);
}
1536
// Maps the aggregate load state of the asset's metadata keys onto the
// engine-neutral AssetStatus enum. The first key that is still loading,
// failed, or cancelled determines the result; only when every key has loaded
// do we distinguish "playable" from merely "loaded".
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];
#if !LOG_DISABLED
        if (error)
            LOG(Media, "MediaPlayerPrivateAVFoundation::assetStatus - statusOfValueForKey failed for %s, error = %s", [keyName UTF8String], [[error localizedDescription] UTF8String]);
#endif

        // AVKeyValueStatusUnknown/Loading both compare below Loaded.
        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading;// At least one key is not loaded yet.
        
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    // All keys are loaded; report whether AVFoundation considers the asset playable.
    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue])
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1565
// Returns the error code AVFoundation recorded while loading the "playable"
// key, or 0 when there is no asset or no error (messaging a nil NSError
// returns 0 from -code).
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    if (!m_avAsset)
        return 0;

    NSError *error = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&error];
    return [error code];
}
1575
// Draws the current video frame into |context| at |rect|, preferring the
// video-output path (cheaper, pixel-buffer based) and falling back to the
// AVAssetImageGenerator path. Callbacks are delayed for the duration so
// AVFoundation notifications cannot re-enter us mid-paint.
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    // Record that at least one frame has been painted (consulted elsewhere,
    // e.g. when deciding whether a layer/context switch loses content).
    m_videoFrameHasDrawn = true;
}
1596
// Best-effort paint entry point: draws only when software painting is both
// needed (not already rendering to a layer) and possible (a context renderer
// — image generator or video output — already exists).
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    // Ignore the request when compositing already shows the video via a layer,
    // and when no software rendering path has been set up yet.
    if (currentRenderingMode() == MediaRenderingToLayer || !hasContextRenderer())
        return;

    paintCurrentFrameInContext(context, rect);
}
1612
// Fallback paint path: snapshots the frame at the current time via
// AVAssetImageGenerator and draws it into the context.
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext& context, const FloatRect& rect)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(%p)", this);

    RetainPtr<CGImageRef> image = createImageForTimeInRect(currentTime(), rect);
    if (image) {
        GraphicsContextStateSaver stateSaver(context);
        // Flip the coordinate system: translate to the rect's bottom edge and
        // negate y so the CGImage draws upright in WebCore's top-left space.
        context.translate(rect.x(), rect.y() + rect.height());
        context.scale(FloatSize(1.0f, -1.0f));
        context.setImageInterpolationQuality(InterpolationLow);
        IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
        CGContextDrawImage(context.platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
    }
}
1627
// Creates a CGImage of the frame nearest |time| (600-unit timescale), sized to
// fit |rect|, converted into the sRGB color space. Lazily creates the image
// generator on first use.
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    // copyCGImageAtTime returns +1; adoptCF takes over that reference.
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), sRGBColorSpaceRef()));

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return image;
}
1649
// Fills |supportedTypes| with the MIME types AVFoundation reports as playable.
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& supportedTypes)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::getSupportedTypes");
    supportedTypes = AVFoundationMIMETypeCache::singleton().types();
}
1655
1656 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// Returns true for the key systems this engine recognizes: FairPlay Streaming
// (both identifier spellings) and W3C Clear Key.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringASCIICase(keySystem, "com.apple.fps")
        || equalIgnoringASCIICase(keySystem, "com.apple.fps.1_0")
        || equalIgnoringASCIICase(keySystem, "org.w3c.clearkey");
}
1663 #endif
1664
// Implements canPlayType()-style support probing for this engine: rejects
// unsupported key systems, MSE/MediaStream content, and unknown MIME types,
// then asks AVFoundation about the full "type; codecs" string.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(ENCRYPTED_MEDIA)
    // From: <http://dvcs.w3.org/hg/html-media/raw-file/eme-v0.1b/encrypted-media/encrypted-media.html#dom-canplaytype>
    // In addition to the steps in the current specification, this method must run the following steps:

    // 1. Check whether the Key System is supported with the specified container and codec type(s) by following the steps for the first matching condition from the following list:
    //    If keySystem is null, continue to the next step.
    if (!parameters.keySystem.isNull() && !parameters.keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        if (equalIgnoringASCIICase(parameters.keySystem, "org.w3c.clearkey") && !parameters.type.isEmpty() && !equalIgnoringASCIICase(parameters.type, "application/x-mpegurl"))
            return MediaPlayer::IsNotSupported;

        // If keySystem contains an unrecognized or unsupported Key System, return the empty string
        if (!keySystemIsSupported(parameters.keySystem))
            return MediaPlayer::IsNotSupported;

        // If the Key System specified by keySystem does not support decrypting the container and/or codec specified in the rest of the type string.
        // (AVFoundation does not provide an API which would allow us to determine this, so this is a no-op)
    }

    // 2. Return "maybe" or "probably" as appropriate per the existing specification of canPlayType().
#endif

#if ENABLE(MEDIA_SOURCE)
    // Media Source content is handled by a dedicated engine, not this one.
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif
#if ENABLE(MEDIA_STREAM)
    if (parameters.isMediaStream)
        return MediaPlayer::IsNotSupported;
#endif
    if (isUnsupportedMIMEType(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The type must be in either WebCore's static list or AVFoundation's cache.
    if (!staticMIMETypeList().contains(parameters.type) && !AVFoundationMIMETypeCache::singleton().types().contains(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    // With codecs present, AVFoundation can give a definitive "probably".
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type, (NSString *)parameters.codecs];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1711
// Returns true when this engine supports the given EME key system, optionally
// constrained by a container MIME type. An empty keySystem is unsupported.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (!keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        // (Previously returned MediaPlayer::IsNotSupported — an enum, not a
        // bool; it only behaved correctly because that enumerator is zero.)
        if (equalIgnoringASCIICase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringASCIICase(mimeType, "application/x-mpegurl"))
            return false;

        if (!keySystemIsSupported(keySystem))
            return false;

        if (!mimeType.isEmpty() && isUnsupportedMIMEType(mimeType))
            return false;

        // A non-empty MIME type must be known to WebCore or to AVFoundation.
        if (!mimeType.isEmpty() && !staticMIMETypeList().contains(mimeType) && !AVFoundationMIMETypeCache::singleton().types().contains(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1737
1738 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1739 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Answers an AVAssetResourceLoadingRequest directly from in-memory key data:
// fills in the content-information request, then serves the byte range the
// data request asks for, clamped to the key's length.
static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
{
    if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
        [infoRequest setContentLength:keyData->byteLength()];
        [infoRequest setByteRangeAccessSupported:YES];
    }

    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
        long long start = [dataRequest currentOffset];
        long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);

        // A range that starts past the end of the key (or is negative) cannot
        // be satisfied; fail the request.
        if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
            [request finishLoadingWithError:nil];
            return;
        }

        ASSERT(start <= std::numeric_limits<int>::max());
        ASSERT(end <= std::numeric_limits<int>::max());
        RefPtr<ArrayBuffer> requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
        RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
        [dataRequest respondWithData:nsData.get()];
    }

    [request finishLoading];
}
1765 #endif
1766
// AVAssetResourceLoaderDelegate entry point. Returns true when WebCore takes
// responsibility for the request (key delivery or media loading), false when
// AVFoundation should consider it failed/unhandled.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    // "skd" is the FairPlay Streaming key-delivery scheme.
    if (scheme == "skd") {
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
        // Little-endian 32-bit size prefix.
        initDataView->set<uint32_t>(0, keyURISize, true);

        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, 4, keyURI.length());
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

        // Hand the initData to the page; if no key listener consumes it, let
        // the load fail so AVFoundation surfaces an error.
#if ENABLE(ENCRYPTED_MEDIA)
        if (!player()->keyNeeded("com.apple.lskd", emptyString(), static_cast<const unsigned char*>(initDataBuffer->data()), initDataBuffer->byteLength()))
#elif ENABLE(ENCRYPTED_MEDIA_V2)
        RefPtr<Uint8Array> initData = Uint8Array::create(initDataBuffer, 0, initDataBuffer->byteLength());
        if (!player()->keyNeeded(initData.get()))
#endif
            return false;

        // Remember the pending request so the key, once provided, can fulfill it.
        m_keyURIToRequestMap.set(keyURI, avRequest);
        return true;
#if ENABLE(ENCRYPTED_MEDIA_V2)
    } else if (scheme == "clearkey") {
        String keyID = [[[avRequest request] URL] resourceSpecifier];
        StringView keyIDView(keyID);
        CString utf8EncodedKeyId = UTF8Encoding().encode(keyIDView, URLEncodedEntitiesForUnencodables);

        RefPtr<Uint8Array> initData = Uint8Array::create(utf8EncodedKeyId.length());
        initData->setRange((JSC::Uint8Adaptor::Type*)utf8EncodedKeyId.data(), utf8EncodedKeyId.length(), 0);

        // Serve directly from the key cache when possible; no waiting needed.
        auto keyData = player()->cachedKeyForKeyId(keyID);
        if (keyData) {
            fulfillRequestWithKeyData(avRequest, keyData.get());
            return false;
        }

        if (!player()->keyNeeded(initData.get()))
            return false;

        m_keyURIToRequestMap.set(keyID, avRequest);
        return true;
#endif
    }
#endif

    // Ordinary media data: route the request through WebCore's loader.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1823
// Wraps the platform NSURLAuthenticationChallenge in WebCore's
// AuthenticationChallenge type (via CFNetwork when that backend is in use)
// and forwards the decision to the MediaPlayer client.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForResponseToAuthenticationChallenge(NSURLAuthenticationChallenge* nsChallenge)
{
#if USE(CFNETWORK)
    RefPtr<WebCoreNSURLAuthenticationChallengeClient> client = WebCoreNSURLAuthenticationChallengeClient::create(nsChallenge);
    RetainPtr<CFURLAuthChallengeRef> cfChallenge = adoptCF([nsChallenge _createCFAuthChallenge]);
    AuthenticationChallenge challenge(cfChallenge.get(), client.get());
#else
    AuthenticationChallenge challenge(nsChallenge);
#endif

    return player()->shouldWaitForResponseToAuthenticationChallenge(challenge);
}
1836
// Called when AVFoundation cancels an in-flight resource loading request.
// Stops the matching WebCore loader, if one is still registered. (The URL
// scheme local previously computed here was never used and has been removed.)
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest))
        resourceLoader->stopLoading();
}
1846
// Drops our bookkeeping entry once loading for a request has fully stopped;
// removing the map entry releases the associated WebCoreAVFResourceLoader.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
1851 #endif
1852
// Engine availability check: both AVFoundation and Core Media must be
// soft-linkable at runtime for this media engine to be registered.
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    return AVFoundationLibrary() && isCoreMediaFrameworkAvailable();
}
1857
// Intended to snap a requested time to the media's timescale; currently an
// identity mapping on every path (see FIXME below).
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1866
// How long (seconds) the base class may serve a cached currentTime before
// re-querying AVFoundation. Modern OS versions are cheap to query (0);
// older macOS keeps a 5-second cache.
double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 1010
    return 0;
#else
    return 5;
#endif
}
1875
// Applies the aspect-ratio policy to the AVPlayerLayer, inside a CATransaction
// with implicit animations disabled so the gravity change is not animated.
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayerManager->videoFullscreenLayer())
        return;
#endif

    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    NSString* gravity = shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize;
    [m_videoLayer.get() setVideoGravity:gravity];
    [CATransaction commit];
}
1894
// Returns the first track in |tracks| whose isEnabled flag is set, or nil if
// every track is disabled (or the array is empty/nil).
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    for (AVAssetTrack *track in tracks) {
        if ([track isEnabled])
            return track;
    }
    return nil;
}
1904
// Recomputes the cached hasVideo/hasAudio/hasCaptions state and the WebCore
// track lists whenever AVFoundation's track collection changes, then fires
// size/characteristics notifications as needed.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the previous primary-audio language so we can detect a change
    // after the track lists are rebuilt.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    // Batch characteristic notifications until the end of this function.
    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif

        // Report the video's display size (natural size with the preferred
        // transform applied), or an empty size when there is no video track.
        presentationSizeDidChange(firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize());
    } else {
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        // Classify each enabled player-item track by media type.
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // A selected option in a media-selection group also counts as a track.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
#endif
#endif

        // Always says we have video if the AVPlayerLayer is ready for display to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

    // Without legible-output support, fall back to legacy CEA-608/708 tracks.
#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    LOG(Media, "MediaPlayerPrivateAVFoundation:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s",
        this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    if (m_provider)
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
#endif

    // Flush any characteristic change accumulated above.
    setDelayCharacteristicsChangedNotification(false);
}
2010
2011 #if ENABLE(VIDEO_TRACK)
// Diffs the current AVPlayerItemTracks of |trackType| against the previously
// known WebCore track items: removed tracks trigger |removedFunction|, new
// ones are wrapped via |itemFactory| and trigger |addedFunction|, and
// |oldItems| is rewritten to match the new state.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Current AVPlayerItemTracks of the requested media type.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    // AVPlayerItemTracks backing the WebCore items we created previously.
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    // Partition old items into survivors and removals, then append wrappers
    // for the newly added tracks.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the player after oldItems reflects the final state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
2055
2056 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Media-selection-group variant of the diffing helper above: refreshes the
// group's options for the given characteristics, diffs them against the
// previously known WebCore items, fires removed/added callbacks, and rewrites
// |oldItems| to the new state.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, const Vector<String>& characteristics, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions(characteristics);

    // Options currently offered by the group (skipping null/backed-by-nothing entries).
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOptionType* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        newSelectionOptions.add(option);
    }

    // Options backing the WebCore items created previously.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    // Partition old items into survivors and removals; an item with no backing
    // option is treated as removed.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (!oldItem->mediaSelectionOption())
            removedItems.append(oldItem);
        else if (removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);
    
    // Notify the player after oldItems reflects the final state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);
    
    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
2116 #endif
2117
// Rebuilds m_audioTracks to match the current AVFoundation state, preferring
// the media-selection-group path (which honors the user's preferred audio
// characteristics) and falling back to the raw player-item track list.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    Vector<String> characteristics = player()->preferredAudioCharacteristics();
    // Lazily create the audible selection group on first use.
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, characteristics);
    }

    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, characteristics, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
    else
#endif
        determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

    // Refresh cached per-track properties (label, language, enabled state, …).
    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateAudioTracks(%p) - audio track count was %lu, is %lu", this, count, m_audioTracks.size());
#endif
}
2144
// Rebuilds m_videoTracks to match the current AVFoundation state: first from
// the raw player-item track list, then (when available) from the visual
// media-selection group.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create the visual selection group on first use.
    if (!m_visualGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, Vector<String>());
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, Vector<String>(), &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
#endif

    // Refresh cached per-track properties of the *video* tracks. (This loop
    // previously iterated m_audioTracks — a copy/paste slip from
    // updateAudioTracks() that left video track properties stale.)
    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateVideoTracks(%p) - video track count was %lu, is %lu", this, count, m_videoTracks.size());
#endif
}
2170
// Captions need a separate rendered representation layer only while a
// fullscreen video layer is active (on platforms with fullscreen support).
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    if (m_videoFullscreenLayerManager->videoFullscreenLayer())
        return true;
#endif
    return false;
}
2179
// Resizes the caption representation layer to track the visible video area
// in fullscreen: the AVPlayerLayer's videoRect when we have one, otherwise
// the whole fullscreen frame.
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    if (!m_videoFullscreenLayerManager->videoFullscreenLayer() || !m_textTrackRepresentationLayer)
        return;

    FloatRect videoFullscreenFrame = m_videoFullscreenLayerManager->videoFullscreenFrame();
    CGRect textFrame = m_videoLayer ? [m_videoLayer videoRect] : CGRectMake(0, 0, videoFullscreenFrame.width(), videoFullscreenFrame.height());
    [m_textTrackRepresentationLayer setFrame:textFrame];
#endif
}
2191
// Installs (or removes, when |representation| is null) the platform layer that
// renders captions while in fullscreen, re-parenting it under the fullscreen
// video layer and syncing its bounds.
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    PlatformLayer* representationLayer = representation ? representation->platformLayer() : nil;
    // Same layer as before: just make sure its frame is current.
    if (representationLayer == m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        return;
    }

    if (m_textTrackRepresentationLayer)
        [m_textTrackRepresentationLayer removeFromSuperlayer];

    m_textTrackRepresentationLayer = representationLayer;

    // Only attach when fullscreen is active and we actually have a new layer.
    if (m_videoFullscreenLayerManager->videoFullscreenLayer() && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayerManager->videoFullscreenLayer() addSublayer:m_textTrackRepresentationLayer.get()];
    }

#else
    UNUSED_PARAM(representation);
#endif
}
2215 #endif // ENABLE(VIDEO_TRACK)
2216
2217 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
// Lazily creates the Web Audio source provider for this player item and
// points it at the first enabled audible track.
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    if (!m_provider) {
        m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }

    return m_provider.get();
}
2227 #endif
2228
// Refreshes the reported natural size from the most recently cached presentation size.
// A missing asset means there is nothing meaningful to report yet.
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (m_avAsset)
        setNaturalSize(m_cachedPresentationSize);
}
2236     
// Returns true only when we can prove all media data came from a single origin; callers
// use this to decide whether painting the video taints a canvas.
bool MediaPlayerPrivateAVFoundationObjC::hasSingleSecurityOrigin() const 
{
    // Until the asset's resolvedURL has loaded we cannot know where the media actually
    // came from, so conservatively report multiple origins.
    if (!m_avAsset || [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] != AVKeyValueStatusLoaded)
        return false;
    
    // The origin the load resolved to (after any redirects) must match the origin of the
    // originally requested URL.
    Ref<SecurityOrigin> resolvedOrigin(SecurityOrigin::create(resolvedURL()));
    Ref<SecurityOrigin> requestedOrigin(SecurityOrigin::createFromString(assetURL()));
    if (!resolvedOrigin.get().isSameSchemeHostPort(&requestedOrigin.get()))
        return false;

#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED > 101100
    // When media loads go through our own NSURLSession, defer to its per-request
    // bookkeeping, which also sees redirects of individual media requests.
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    if (Settings::isAVFoundationNSURLSessionEnabled() && [resourceLoader respondsToSelector:@selector(URLSession)]) {
        WebCoreNSURLSession *session = (WebCoreNSURLSession *)resourceLoader.URLSession;
        if ([session respondsToSelector:@selector(hasSingleSecurityOrigin)])
            return session.hasSingleSecurityOrigin;
    }
#endif
    return true;
}
2257
// Returns true only when every media load demonstrably passed CORS checks. CORS state is
// only tracked when loads are routed through our NSURLSession; otherwise assume failure.
bool MediaPlayerPrivateAVFoundationObjC::didPassCORSAccessCheck() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED > 101100
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    if (!Settings::isAVFoundationNSURLSessionEnabled()
        || ![resourceLoader respondsToSelector:@selector(URLSession)])
        return false;

    WebCoreNSURLSession *session = (WebCoreNSURLSession *)resourceLoader.URLSession;
    if ([session respondsToSelector:@selector(didPassCORSAccessChecks)])
        return session.didPassCORSAccessChecks;
#endif
    return false;
}
2272
2273 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Creates the AVPlayerItemVideoOutput used to pull pixel buffers for painting.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p)", this);

    // Need a player item to attach to, and only ever create one output.
    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // With VideoToolbox the conversion to 32BGRA happens later (see updateLastImage), so
    // do not constrain the output's pixel buffer attributes here.
    NSDictionary* attributes = nil;
#else
    NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                nil];
#endif
    m_videoOutput = adoptNS([allocAVPlayerItemVideoOutputInstance() initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    // Deliver "media data will change" callbacks on the shared pull-delegate queue.
    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p) - returning %p", this, m_videoOutput.get());
}
2296
// Detaches and releases the AVPlayerItemVideoOutput created by createVideoOutput().
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    // The player item may already have been torn down; only detach when it still exists.
    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput(%p) - destroying  %p", this, m_videoOutput.get());

    // Use nullptr (not the integer literal 0) when clearing a RetainPtr, matching the
    // file's convention elsewhere (e.g. m_session = nullptr).
    m_videoOutput = nullptr;
}
2308
// Pulls the pixel buffer for the item's current time from the video output, creating the
// output on demand. Returns null when no new buffer is available for that time.
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::createPixelBuffer()
{
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    // copyPixelBufferForItemTime: may return the same buffer repeatedly; only copy when the
    // output reports a new buffer for this timestamp.
    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return nullptr; // nullptr (not 0) for a null RetainPtr, per file convention.

    return adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
}
2322
// Returns true when a frame can be painted right now, creating the video output on demand.
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;

    // A previously captured image still counts as an available frame.
    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    return [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]];
}
2336
// Converts the current video frame's pixel buffer into m_lastImage (a CGImage) for painting.
void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
{
    RetainPtr<CVPixelBufferRef> pixelBuffer = createPixelBuffer();

    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
    // for the requested time has already been retrieved. In this case, the last valid image (if any)
    // should be displayed.
    if (!pixelBuffer)
        return;

    // Lazily create the conformer used to turn pixel buffers into CGImages.
    if (!m_pixelBufferConformer) {
#if USE(VIDEOTOOLBOX)
        // The VideoToolbox output was created without a format constraint (see
        // createVideoOutput), so conversion to 32BGRA is requested here instead.
        NSDictionary *attributes = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#else
        NSDictionary *attributes = nil;
#endif
        m_pixelBufferConformer = std::make_unique<PixelBufferConformerCV>((CFDictionaryRef)attributes);
    }

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    m_lastImage = m_pixelBufferConformer->createImageFromPixelBuffer(pixelBuffer.get());

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateLastImage(%p) - creating buffer took %.4f", this, narrowPrecisionToFloat(duration));
#endif
}
2367
// Paints the current video frame into `context` at `outputRect`, honoring the track's
// preferred transform (e.g. rotation metadata).
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext& context, const FloatRect& outputRect)
{
    // Block (for up to one second) until the output produces a frame if none is available yet.
    if (m_videoOutput && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();

    updateLastImage();

    if (!m_lastImage)
        return;

    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(%p)", this);

    GraphicsContextStateSaver stateSaver(context);
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    // Concatenate the preferred transform onto the context, and map the destination rect
    // through its inverse so the image still lands in outputRect on screen.
    AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
    FloatRect transformedOutputRect = videoTransform.inverse().valueOr(AffineTransform()).mapRect(outputRect);

    context.concatCTM(videoTransform);
    context.drawNativeImage(m_lastImage.get(), imageRect.size(), transformedOutputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();

}
2398
// Creates a second AVPlayerItemVideoOutput producing IOSurface-backed buffers that are
// compatible with OpenGL (ES) framebuffer objects, for texture upload.
void MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput(%p)", this);

    // Need a player item to attach to, and only ever create one GL-compatible output.
    if (!m_avPlayerItem || m_openGLVideoOutput)
        return;

#if PLATFORM(IOS)
    NSDictionary* attributes = @{(NSString *)kCVPixelBufferIOSurfaceOpenGLESFBOCompatibilityKey: @YES};
#else
    NSDictionary* attributes = @{(NSString *)kCVPixelBufferIOSurfaceOpenGLFBOCompatibilityKey: @YES};
#endif
    m_openGLVideoOutput = adoptNS([allocAVPlayerItemVideoOutputInstance() initWithPixelBufferAttributes:attributes]);
    ASSERT(m_openGLVideoOutput);

    [m_avPlayerItem.get() addOutput:m_openGLVideoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput(%p) - returning %p", this, m_openGLVideoOutput.get());
}
2418
// Detaches and releases the AVPlayerItemVideoOutput created by createOpenGLVideoOutput().
void MediaPlayerPrivateAVFoundationObjC::destroyOpenGLVideoOutput()
{
    if (!m_openGLVideoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_openGLVideoOutput.get()];
    // Fix copy-paste bug: log the OpenGL output being destroyed, not m_videoOutput.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyOpenGLVideoOutput(%p) - destroying  %p", this, m_openGLVideoOutput.get());

    // nullptr (not 0) when clearing a RetainPtr, per file convention.
    m_openGLVideoOutput = nullptr;
}
2430
// Captures the newest GL-compatible pixel buffer into m_lastOpenGLImage.
void MediaPlayerPrivateAVFoundationObjC::updateLastOpenGLImage()
{
    if (!m_openGLVideoOutput)
        return;

    // Sample at the item time corresponding to "now" on the host clock; keep the previous
    // image when no new buffer has been produced for that time.
    CMTime currentTime = [m_openGLVideoOutput itemTimeForHostTime:CACurrentMediaTime()];
    if (![m_openGLVideoOutput hasNewPixelBufferForItemTime:currentTime])
        return;

    m_lastOpenGLImage = adoptCF([m_openGLVideoOutput copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
}
2442
// Copies the current video frame into the caller's GL texture. Returns false when the
// requested conversion is unsupported or no frame is available.
bool MediaPlayerPrivateAVFoundationObjC::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    // The copier path below does not implement Y-flip or alpha premultiplication.
    if (flipY || premultiplyAlpha)
        return false;

    ASSERT(context);

    if (!m_openGLVideoOutput)
        createOpenGLVideoOutput();

    updateLastOpenGLImage();

    if (!m_lastOpenGLImage)
        return false;

    size_t width = CVPixelBufferGetWidth(m_lastOpenGLImage.get());
    size_t height = CVPixelBufferGetHeight(m_lastOpenGLImage.get());

    // Lazily create the CoreVideo texture cache tied to this GL context.
    if (!m_textureCache) {
        m_textureCache = TextureCacheCV::create(*context);
        if (!m_textureCache)
            return false;
    }

    RetainPtr<CVOpenGLTextureRef> videoTexture = m_textureCache->textureFromImage(m_lastOpenGLImage.get(), outputTarget, level, internalFormat, format, type);

    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierCV>(*context);

    return m_videoTextureCopier->copyVideoTextureToPlatformTexture(videoTexture.get(), width, height, outputTexture, outputTarget, level, internalFormat, format, type, premultiplyAlpha, flipY);
}
2474
// Returns the most recent video frame as a native image, refreshing it first.
// May return null when no frame has been produced yet.
NativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage;
}
2480
// Blocks the calling thread (up to one second) until the video output signals that new
// media data is available; see outputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    // Ask the output to notify its delegate as soon as new media data arrives.
    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for 1 second.
    long result = dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(0, 1 * NSEC_PER_SEC));

    // dispatch_semaphore_wait returns non-zero on timeout.
    if (result)
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange(%p) timed out", this);
}
2494
// Delegate callback (delivered on the pull-delegate queue); unblocks
// waitForVideoOutputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutput*)
{
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
2499 #endif
2500
2501 #if ENABLE(ENCRYPTED_MEDIA)
// EME v1: parses the init data, asks AVFoundation for a streaming content key request and
// forwards it to the page via keyMessage(); errors are reported via keyError().
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::generateKeyRequest(const String& keySystem, const unsigned char* initDataPtr, unsigned initDataLength)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    // The init data carries the key URI, key ID and application certificate.
    RefPtr<Uint8Array> initData = Uint8Array::create(initDataPtr, initDataLength);
    String keyURI;
    String keyID;
    RefPtr<Uint8Array> certificate;
    if (!extractKeyURIKeyIDAndCertificateFromInitData(initData.get(), keyURI, keyID, certificate))
        return MediaPlayer::InvalidPlayerState;

    // A deferred loading request for this key URI must already be pending.
    if (!m_keyURIToRequestMap.contains(keyURI))
        return MediaPlayer::InvalidPlayerState;

    String sessionID = createCanonicalUUIDString();

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_keyURIToRequestMap.get(keyURI);

    RetainPtr<NSData> certificateData = adoptNS([[NSData alloc] initWithBytes:certificate->baseAddress() length:certificate->byteLength()]);
    NSString* assetStr = keyID;
    RetainPtr<NSData> assetID = [NSData dataWithBytes: [assetStr cStringUsingEncoding:NSUTF8StringEncoding] length:[assetStr lengthOfBytesUsingEncoding:NSUTF8StringEncoding]];
    NSError* error = 0;
    RetainPtr<NSData> keyRequest = [avRequest.get() streamingContentKeyRequestDataForApp:certificateData.get() contentIdentifier:assetID.get() options:nil error:&error];

    // On failure, surface the underlying error's code to the page.
    if (!keyRequest) {
        NSError* underlyingError = [[error userInfo] objectForKey:NSUnderlyingErrorKey];
        player()->keyError(keySystem, sessionID, MediaPlayerClient::DomainError, [underlyingError code]);
        return MediaPlayer::NoError;
    }

    RefPtr<ArrayBuffer> keyRequestBuffer = ArrayBuffer::create([keyRequest.get() bytes], [keyRequest.get() length]);
    RefPtr<Uint8Array> keyRequestArray = Uint8Array::create(keyRequestBuffer, 0, keyRequestBuffer->byteLength());
    player()->keyMessage(keySystem, sessionID, keyRequestArray->data(), keyRequestArray->byteLength(), URL());

    // Move ownership of the AVAssetResourceLoadingRequest from the keyURIToRequestMap to the sessionIDToRequestMap:
    m_sessionIDToRequestMap.set(sessionID, avRequest);
    m_keyURIToRequestMap.remove(keyURI);

    return MediaPlayer::NoError;
}
2543
// EME v1: supplies key data for the session by completing the deferred loading request
// that generateKeyRequest() parked in m_sessionIDToRequestMap.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::addKey(const String& keySystem, const unsigned char* keyPtr, unsigned keyLength, const unsigned char* initDataPtr, unsigned initDataLength, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    // Hand the key data to AVFoundation and finish the loading request.
    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_sessionIDToRequestMap.get(sessionID);
    RetainPtr<NSData> keyData = adoptNS([[NSData alloc] initWithBytes:keyPtr length:keyLength]);
    [[avRequest.get() dataRequest] respondWithData:keyData.get()];
    [avRequest.get() finishLoading];
    m_sessionIDToRequestMap.remove(sessionID);

    player()->keyAdded(keySystem, sessionID);

    // Init data is not needed on this path.
    UNUSED_PARAM(initDataPtr);
    UNUSED_PARAM(initDataLength);
    return MediaPlayer::NoError;
}
2564
// EME v1: abandons the pending loading request for this session without completing it.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::cancelKeyRequest(const String& keySystem, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    m_sessionIDToRequestMap.remove(sessionID);
    return MediaPlayer::NoError;
}
2576 #endif
2577
2578 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Removes and returns the pending loading request for keyURI; null RetainPtr if none.
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
2583
// Called when new keys land in the player's key cache; fulfills any pending loading
// requests whose key data is now available.
void MediaPlayerPrivateAVFoundationObjC::keyAdded()
{
    Vector<String> fulfilledKeyIds;

    for (auto& pair : m_keyURIToRequestMap) {
        const String& keyId = pair.key;
        const RetainPtr<AVAssetResourceLoadingRequest>& request = pair.value;

        auto keyData = player()->cachedKeyForKeyId(keyId);
        if (!keyData)
            continue;

        fulfillRequestWithKeyData(request.get(), keyData.get());
        fulfilledKeyIds.append(keyId);
    }

    // Remove fulfilled entries after iterating; the map cannot be mutated mid-enumeration.
    for (auto& keyId : fulfilledKeyIds)
        m_keyURIToRequestMap.remove(keyId);
}
2603
// CDM teardown callback: forget the (single) active session.
void MediaPlayerPrivateAVFoundationObjC::removeSession(CDMSession& session)
{
    ASSERT_UNUSED(session, &session == m_session);
    m_session = nullptr;
}
2609
// Creates a CDM session for a supported key system. The caller owns the session; we keep
// only a weak pointer, cleared again in removeSession().
std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem, CDMSessionClient* client)
{
    if (!keySystemIsSupported(keySystem))
        return nullptr;
    auto session = std::make_unique<CDMSessionAVFoundationObjC>(this, client);
    m_session = session->createWeakPtr();
    return WTFMove(session);
}
2618
// Output-protection state changed; when content becomes obscured (insufficient external
// protection, e.g. HDCP), report an error to the active CDM session.
void MediaPlayerPrivateAVFoundationObjC::outputObscuredDueToInsufficientExternalProtectionChanged(bool newValue)
{
    if (m_session && newValue)
        m_session->playerDidReceiveError([NSError errorWithDomain:@"com.apple.WebKit" code:'HDCP' userInfo:nil]);
}
2624 #endif
2625
2626 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
// Reconciles our text track list against the legacy (pre-legible-output) closed-caption
// tracks present in the player item's cached tracks.
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    // Assume every existing track was removed; tracks still present are rescued below.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            // Index into removedTextTracks, not m_textTracks: the two vectors diverge once
            // a track has been rescued (remove below) or a new one appended to m_textTracks.
            // (Matches the pattern used by processMediaSelectionOptions.)
            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(removedTextTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;
        
        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2661 #endif
2662
// Returns the asset's audible tracks, or nil until the asset and its "tracks" key have
// finished loading (querying tracks earlier could block on synchronous loading).
NSArray* MediaPlayerPrivateAVFoundationObjC::safeAVAssetTracksForAudibleMedia()
{
    if (!m_avAsset || [m_avAsset.get() statusOfValueForKey:@"tracks" error:NULL] != AVKeyValueStatusLoaded)
        return nil;

    return [m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible];
}
2673
2674 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// True once the asset's media-selection characteristics have finished asynchronous loading;
// the safeMediaSelectionGroupFor* accessors below rely on this guard.
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    return m_avAsset && [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2685
// The asset's legible (caption/subtitle) selection group, or nil before selection data loads.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible] : nil;
}
2693
// The asset's audible selection group, or nil before selection data loads.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible] : nil;
}
2701
// The asset's visual selection group, or nil before selection data loads.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual] : nil;
}
2709
// Reconciles our text track list against the asset's legible media-selection options,
// creating tracks for new options and reporting tracks whose option disappeared.
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Assume every existing track was removed; tracks whose option still exists are rescued below.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy closed-caption tracks are reconciled elsewhere (processLegacyClosedCaptionsTracks).
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            // Fetch the selection option backing this existing track, by category.
            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2765
// Lazily creates the in-band metadata text track used to deliver timed metadata cues,
// and registers it with the player. Subsequent calls are no-ops.
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    if (!m_metadataTrack) {
        m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
        m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
        player()->addTextTrack(m_metadataTrack);
    }
}
2775
// Forwards a batch of cue data (attributed strings and/or native samples) arriving at
// `time` to the currently selected text track; dropped when no track is selected.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    ASSERT(time >= MediaTime::zeroTime());

    if (!m_currentTextTrack)
        return;

    m_currentTextTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), reinterpret_cast<CFArrayRef>(nativeSamples), time);
}
2785
// Drops all accumulated cue state on the currently selected text track.
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::flushCues(%p)", this);

    if (m_currentTextTrack)
        m_currentTextTrack->resetCueValues();
}
2795 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2796
// Makes `track` (possibly null) the active caption/subtitle track, routing the selection
// through whichever AVFoundation mechanism matches the track's category.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");
        
    m_currentTextTrack = track;

    if (track) {
        // Legacy CC tracks use the player-level toggle; everything else uses media selection.
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
        // No track selected: clear both the legible selection and legacy caption display.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
    }

}
2825
// Determines (and caches in the mutable member m_languageOfPrimaryAudioTrack) the language
// of the primary audio track, preferring the currently selected audible option.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    // Return the cached answer when we have one.
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        // Zero or multiple audio tracks: no single "primary" language can be reported.
        m_languageOfPrimaryAudioTrack = emptyString();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - %lu audio tracks, returning emptyString()", this, static_cast<unsigned long>(tracks ? [tracks count] : 0));
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

#if !LOG_DISABLED
    if (m_languageOfPrimaryAudioTrack == emptyString())
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
    else
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
#endif

    return m_languageOfPrimaryAudioTrack;
}
2867
2868 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// Whether playback is currently being routed to a wireless target (e.g. AirPlay).
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    bool wirelessTarget = false;

#if !PLATFORM(IOS)
    // On Mac the answer depends on the kind of playback target that was set.
    if (m_playbackTarget) {
        if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation)
            wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
        else
            wirelessTarget = m_shouldPlayToPlaybackTarget && m_playbackTarget->hasActiveRoute();
    }
#else
    wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
#endif

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless(%p) - returning %s", this, boolString(wirelessTarget));

    return wirelessTarget;
}
2888
// Maps the platform's external playback type onto the MediaPlayer target-type enum.
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

#if PLATFORM(IOS)
    switch (wkExernalDeviceTypeForPlayer(m_avPlayer.get())) {
    case wkExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case wkExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case wkExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;

#else
    // On Mac, external playback targets are always AirPlay.
    return MediaPlayer::TargetTypeAirPlay;
#endif
}
2911
// Human-readable name of the current wireless target, or the empty string when unknown.
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();

    String wirelessTargetName;
#if !PLATFORM(IOS)
    // On Mac, the target abstraction knows the device name.
    if (m_playbackTarget)
        wirelessTargetName = m_playbackTarget->deviceName();
#else
    wirelessTargetName = wkExernalDeviceDisplayNameForPlayer(m_avPlayer.get());
#endif
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName(%p) - returning %s", this, wirelessTargetName.utf8().data());

    return wirelessTargetName;
}
2928
// Whether wireless (external) video playback is disabled. Refreshes the cached mutable
// member from the player when one exists; otherwise reports the last known value.
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;

    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled(%p) - returning %s", this, boolString(!m_allowsWirelessVideoPlayback));

    return !m_allowsWirelessVideoPlayback;
}
2939
// Enables/disables external (wireless) playback, caching the value so it can be applied
// later when the player does not exist yet.
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(%p) - %s", this, boolString(disabled));
    m_allowsWirelessVideoPlayback = !disabled;
    if (!m_avPlayer)
        return;

    // Suppress re-entrant KVO callbacks while poking the player.
    setDelayCallbacks(true);
    [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
    setDelayCallbacks(false);
}
2951
2952 #if !PLATFORM(IOS)
// Adopts a new wireless playback target (Mac only); captures its AVOutputContext when the
// target is AVFoundation-backed so setShouldPlayToPlaybackTarget() can install it.
void MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
{
    m_playbackTarget = WTFMove(target);

    m_outputContext = m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation ? toMediaPlaybackTargetMac(m_playbackTarget.get())->outputContext() : nullptr;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(%p) - target = %p, device name = %s", this, m_outputContext.get(), m_playbackTarget->deviceName().utf8().data());

    // A target without an active route cannot be played to.
    if (!m_playbackTarget->hasActiveRoute())
        setShouldPlayToPlaybackTarget(false);
}
2964
// Starts or stops routing playback to the current wireless playback target.
// For AVFoundation targets this switches the AVPlayer's outputContext; for
// mock targets it only schedules a wireless-state-changed notification.
void MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(bool shouldPlay)
{
    if (m_shouldPlayToPlaybackTarget == shouldPlay)
        return;

    m_shouldPlayToPlaybackTarget = shouldPlay;

    // Remember the request even without a target; nothing more can be done
    // until setWirelessPlaybackTarget() provides one.
    if (!m_playbackTarget)
        return;

    if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation) {
        // A nil output context restores local playback.
        AVOutputContext *newContext = shouldPlay ? m_outputContext.get() : nil;

        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(%p) - target = %p, shouldPlay = %s", this, newContext, boolString(shouldPlay));

        if (!m_avPlayer)
            return;

        // Avoid touching the player when the context would not actually change.
        RetainPtr<AVOutputContext> currentContext = m_avPlayer.get().outputContext;
        if ((!newContext && !currentContext.get()) || [currentContext.get() isEqual:newContext])
            return;

        // Delay notification callbacks while reconfiguring the player.
        setDelayCallbacks(true);
        m_avPlayer.get().outputContext = newContext;
        setDelayCallbacks(false);

        return;
    }

    ASSERT(m_playbackTarget->targetType() == MediaPlaybackTarget::Mock);

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(%p) - target = {Mock}, shouldPlay = %s", this, boolString(shouldPlay));

    // Mock targets have no output context; just notify on the main thread that
    // the wireless state changed. The weak pointer guards against the player
    // being destroyed before the notification fires.
    setDelayCallbacks(true);
    auto weakThis = createWeakPtr();
    scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis] {
        if (!weakThis)
            return;
        weakThis->playbackTargetIsWirelessDidChange();
    }));
    setDelayCallbacks(false);
}
3007 #endif // !PLATFORM(IOS)
3008
// Keeps the AVPlayer's external-playback behavior in sync with the element's
// fullscreen mode (iOS only; no-op elsewhere and without a player).
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
    if (!m_avPlayer)
        return;

#if PLATFORM(IOS)
    // External playback on an external screen is only used while in standard
    // fullscreen mode.
    [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:player()->fullscreenMode() & MediaPlayer::VideoFullscreenModeStandard];
#endif
}
3018 #endif
3019
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    // Cache the AVPlayerItem status, then re-derive the player state from it.
    m_cachedItemStatus = status;
    updateStates();
}
3026
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    // A "will change" notification is in flight; the matching "did change"
    // decrements this counter before state is recomputed.
    ++m_pendingStatusChanges;
}
3031
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    ASSERT(m_pendingStatusChanges);
    // Recompute state only once every pending "will change" has been balanced
    // by its "did change" counterpart.
    m_pendingStatusChanges--;
    if (!m_pendingStatusChanges)
        updateStates();
}
3040
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    // A "will change" notification is in flight; the matching "did change"
    // decrements this counter before state is recomputed.
    ++m_pendingStatusChanges;
}
3045
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    ASSERT(m_pendingStatusChanges);
    // Recompute state only once every pending "will change" has been balanced
    // by its "did change" counterpart.
    m_pendingStatusChanges--;
    if (!m_pendingStatusChanges)
        updateStates();
}
3054
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    // A "will change" notification is in flight; the matching "did change"
    // decrements this counter before state is recomputed.
    ++m_pendingStatusChanges;
}
3059
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    ASSERT(m_pendingStatusChanges);
    // Recompute state only once every pending "will change" has been balanced
    // by its "did change" counterpart.
    m_pendingStatusChanges--;
    if (!m_pendingStatusChanges)
        updateStates();
}
3068
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray> seekableRanges)
{
    // Cache the new seekable ranges, then propagate the change and re-derive state.
    m_cachedSeekableRanges = seekableRanges;
    seekableTimeRangesChanged();
    updateStates();
}
3076
void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray> loadedRanges)
{
    // Cache the new loaded ranges, then propagate the change and re-derive state.
    m_cachedLoadedRanges = loadedRanges;
    loadedTimeRangesChanged();
    updateStates();
}
3084
void MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange(bool isReady)
{
    m_cachedIsReadyForDisplay = isReady;

    // Becoming ready for display without any known video track means the track
    // information needs to be re-examined.
    if (!hasVideo() && isReady)
        tracksChanged();

    updateStates();
}
3092
void MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange(bool)
{
    // A track's "enabled" property changed; re-derive track and player state.
    // The new value itself is not needed here.
    tracksChanged();
    updateStates();
}
3098
void MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(bool shouldBuffer)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::shouldBufferData(%p) - %s", this, boolString(shouldBuffer));
    if (m_shouldBufferData == shouldBuffer)
        return;

    m_shouldBufferData = shouldBuffer;

    if (!m_avPlayer)
        return;

    // Detaching the item from the player stops buffering; reattaching the
    // cached item resumes it.
    if (shouldBuffer)
        setAVPlayerItem(m_avPlayerItem.get());
    else
        setAVPlayerItem(nil);
}
3112
3113 #if ENABLE(DATACUE_VALUE)
// Maps an AVFoundation metadata key space to the corresponding WebCore
// metadata type identifier, or the empty atom for unknown key spaces.
static const AtomicString& metadataType(NSString *avMetadataKeySpace)
{
    static NeverDestroyed<const AtomicString> quickTimeUserDataType("com.apple.quicktime.udta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> isoUserDataType("org.mp4ra", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> quickTimeMetadataType("com.apple.quicktime.mdta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> iTunesMetadataType("com.apple.itunes", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> id3MetadataType("org.id3", AtomicString::ConstructFromLiteral);

    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeUserData])
        return quickTimeUserDataType;
    // AVMetadataKeySpaceISOUserData may be unavailable (null) at runtime —
    // presumably a soft-linked constant — so guard before comparing.
    if (AVMetadataKeySpaceISOUserData && [avMetadataKeySpace isEqualToString:AVMetadataKeySpaceISOUserData])
        return isoUserDataType;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeMetadata])
        return quickTimeMetadataType;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceiTunes])
        return iTunesMetadataType;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceID3])
        return id3MetadataType;

    return emptyAtom;
}
3135 #endif
3136
// Handles a batch of timed metadata from the AVPlayerItem, converting each
// AVMetadataItem into a data cue on the metadata text track. A null/NSNull
// batch only closes out the currently-pending cues at |mediaTime|.
void MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(RetainPtr<NSArray> metadata, const MediaTime& mediaTime)
{
    // KVO can deliver NSNull when the property clears; normalize that to nil.
    m_currentMetaData = metadata && ![metadata isKindOfClass:[NSNull class]] ? metadata : nil;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(%p) - adding %i cues at time %s", this, m_currentMetaData ? static_cast<int>([m_currentMetaData.get() count]) : 0, toString(mediaTime).utf8().data());

#if ENABLE(DATACUE_VALUE)
    // During a seek the item time is in flux; skip cue bookkeeping entirely.
    if (seeking())
        return;

    if (!m_metadataTrack)
        processMetadataTrack();

    // An empty batch means existing cues end now.
    if (!metadata || [metadata isKindOfClass:[NSNull class]]) {
        m_metadataTrack->updatePendingCueEndTimes(mediaTime);
        return;
    }

    // Set the duration of all incomplete cues before adding new ones.
    MediaTime earliestStartTime = MediaTime::positiveInfiniteTime();
    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        // Clamp negative item times to zero.
        MediaTime start = std::max(toMediaTime(item.time), MediaTime::zeroTime());
        if (start < earliestStartTime)
            earliestStartTime = start;
    }
    m_metadataTrack->updatePendingCueEndTimes(earliestStartTime);

    // Add one data cue per metadata item; items without a valid duration get
    // an open-ended cue (closed by a later batch).
    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = std::max(toMediaTime(item.time), MediaTime::zeroTime());
        MediaTime end = MediaTime::positiveInfiniteTime();
        if (CMTIME_IS_VALID(item.duration))
            end = start + toMediaTime(item.duration);

        AtomicString type = nullAtom;
        if (item.keySpace)
            type = metadataType(item.keySpace);

        m_metadataTrack->addDataCue(start, end, SerializedPlatformRepresentationMac::create(item), type);
    }
#endif
}
3178
// Handles a change of the AVPlayerItem's tracks array: refreshes the cached,
// filtered track list and moves the "enabled" KVO observation from the old
// tracks to the new ones.
void MediaPlayerPrivateAVFoundationObjC::tracksDidChange(RetainPtr<NSArray> tracks)
{
    // Stop observing the tracks we are about to drop.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];

    NSArray *assetTracks = [m_avAsset tracks];

    // Keep only tracks that either belong to the asset directly or are
    // streaming tracks not already represented by a media selection group.
    m_cachedTracks = [tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id obj, NSUInteger, BOOL*) {
        AVAssetTrack* assetTrack = [obj assetTrack];

        if ([assetTracks containsObject:assetTrack])
            return YES;

        // Track is a streaming track. Omit if it belongs to a valid AVMediaSelectionGroup.
        if (!hasLoadedMediaSelectionGroups())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicAudible] && safeMediaSelectionGroupForAudibleMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicVisual] && safeMediaSelectionGroupForVisualMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible] && safeMediaSelectionGroupForLegibleMedia())
            return NO;

        return YES;
    }]];

    // Begin observing "enabled" on the new track set.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track addObserver:m_objcObserver.get() forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayerItemTrack];

    // The cached byte total is per-track-set; force it to be recomputed.
    m_cachedTotalBytes = 0;

    tracksChanged();
    updateStates();
}
3216
void MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange(bool hasEnabledAudio)
{
    // Cache whether the item currently has enabled audio, then re-derive
    // track-related and player state.
    m_cachedHasEnabledAudio = hasEnabledAudio;
    tracksChanged();
    updateStates();
}
3224
void MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange(FloatSize size)
{
    // Cache the new presentation size, then propagate the size change and
    // re-derive player state.
    m_cachedPresentationSize = size;
    sizeChanged();
    updateStates();
}
3232
void MediaPlayerPrivateAVFoundationObjC::durationDidChange(const MediaTime& duration)
{
    // Remember the new duration and invalidate any previously-cached value.
    m_cachedDuration = duration;
    invalidateCachedDuration();
}
3239
void MediaPlayerPrivateAVFoundationObjC::rateDidChange(double rate)
{
    // Cache the player's rate, refresh derived state, then notify of the change.
    m_cachedRate = rate;
    updateStates();
    rateChanged();
}
3247     
3248 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// Forwards a wireless-playback-state KVO change to the shared notification path.
void MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange()
{
    playbackTargetIsWirelessChanged();
}
3253 #endif
3254
// Caches the item's canPlayFastForward value for later queries.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange(bool newValue)
{
    m_cachedCanPlayFastForward = newValue;
}
3259
// Caches the item's canPlayFastReverse value for later queries.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange(bool newValue)
{
    m_cachedCanPlayFastReverse = newValue;
}
3264
URL MediaPlayerPrivateAVFoundationObjC::resolvedURL() const
{
    // Prefer the URL AVFoundation resolved, but only once the asset's
    // "resolvedURL" property has finished loading; otherwise fall back to the
    // base class's notion of the URL.
    if (m_avAsset && [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] == AVKeyValueStatusLoaded)
        return URL([m_avAsset resolvedURL]);

    return MediaPlayerPrivateAVFoundation::resolvedURL();
}
3272
// AVAsset properties that are loaded asynchronously before playback can begin.
NSArray* assetMetadataKeyNames()
{
    // Built once on first use and intentionally never released.
    static NSArray* keys = [[NSArray alloc] initWithObjects:
        @"duration",
        @"naturalSize",
        @"preferredTransform",
        @"preferredVolume",
        @"preferredRate",
        @"playable",
        @"resolvedURL",
        @"tracks",
        @"availableMediaCharacteristicsWithMediaSelectionOptions",
        nil];
    return keys;
}
3290
// AVPlayerItem key paths observed via KVO for this player.
NSArray* itemKVOProperties()
{
    // Built once on first use and intentionally never released.
    static NSArray* keys = [[NSArray alloc] initWithObjects:
        @"presentationSize",
        @"status",
        @"asset",
        @"tracks",
        @"seekableTimeRanges",
        @"loadedTimeRanges",
        @"playbackLikelyToKeepUp",
        @"playbackBufferFull",
        @"playbackBufferEmpty",
        @"duration",
        @"hasEnabledAudio",
        @"timedMetadata",
        @"canPlayFastForward",
        @"canPlayFastReverse",
        nil];
    return keys;
}
3313
// AVAssetTrack properties loaded asynchronously for each track.
NSArray* assetTrackMetadataKeyNames()
{
    // Built once on first use and intentionally never released.
    static NSArray* keys = [[NSArray alloc] initWithObjects:
        @"totalSampleDataLength",
        @"mediaType",
        @"enabled",
        @"preferredTransform",
        @"naturalSize",
        nil];
    return keys;
}
3319
// AVPlayer key paths observed via KVO; the list varies with the features the
// build enables. Handled in -observeValueForKeyPath:... below.
NSArray* playerKVOProperties()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"rate",
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
                            @"externalPlaybackActive", @"allowsExternalPlayback",
#endif
#if ENABLE(ENCRYPTED_MEDIA_V2)
                            @"outputObscuredDueToInsufficientExternalProtection",
#endif
                            nil];
    return keys;
}
3332 } // namespace WebCore
3333
3334 @implementation WebCoreAVFMovieObserver
3335
// Designated initializer; |callback| is the player this observer reports to.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    if (!(self = [super init]))
        return nil;

    m_callback = callback;
    return self;
}
3344
// Severs the link back to the player so notifications arriving after its
// destruction become no-ops.
- (void)disconnect
{
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    m_callback = nullptr;
}
3350
- (void)metadataLoaded
{
    // Forward to the player on the main thread, unless we were disconnected.
    if (m_callback)
        m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
}
3358
- (void)didEnd:(NSNotification *)unusedNotification
{
    UNUSED_PARAM(unusedNotification);
    // Forward to the player on the main thread, unless we were disconnected.
    if (m_callback)
        m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
}
3366
3367 - (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context
3368 {
3369     UNUSED_PARAM(object);
3370     id newValue = [change valueForKey:NSKeyValueChangeNewKey];
3371
3372     if (!m_callback)
3373         return;
3374
3375     bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];
3376
3377 #if !LOG_DISABLED
3378     if (willChange)
3379         LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - will change, keyPath = %s", self, [keyPath UTF8String]);
3380     else {
3381         RetainPtr<NSString> valueString = adoptNS([[NSString alloc] initWithFormat:@"%@", newValue]);
3382         LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - did change, keyPath = %s, value = %s", self, [keyPath UTF8String], [valueString.get() UTF8String]);
3383     }
3384 #endif
3385
3386     std::function<void ()> function;
3387
3388     if (context == MediaPlayerAVFoundationObservationContextAVPlayerLayer) {
3389         if ([keyPath isEqualToString:@"readyForDisplay"])
3390             function = std::bind(&MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange, m_callback, [newValue boolValue]);
3391     }
3392
3393     if (context == MediaPlayerAVFoundationObservationContextPlayerItemTrack) {
3394         if ([keyPath isEqualToString:@"enabled"])
3395             function = std::bind(&MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange, m_callback, [newValue boolValue]);
3396     }
3397
3398     if (context == MediaPlayerAVFoundationObservationContextPlayerItem && willChange) {
3399         if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
3400             function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange, m_callback);
3401         else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
3402             function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange, m_callback);
3403         else if ([keyPath isEqualToString:@"playbackBufferFull"])
3404             function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange, m_callback);
3405     }
3406
3407     if (context == MediaPlayerAVFoundationObservationContextPlayerItem && !willChange) {
3408         // A value changed for an AVPlayerItem
3409         if ([keyPath isEqualToString:@"status"])
3410             function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange, m_callback, [newValue intValue]);
3411         else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
3412             function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange, m_callback, [newValue boolValue]);
3413         else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
3414             function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange, m_callback, [newValue boolValue]);
3415         else if ([keyPath isEqualToString:@"playbackBufferFull"])
3416             function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange, m_callback, [newValue boolValue]);
3417         else if ([keyPath isEqualToString:@"asset"])
3418             function = std::bind(&MediaPlayerPrivateAVFoundationObjC::setAsset, m_callback, RetainPtr<id>(newValue));
3419         else if ([keyPath isEqualToString:@"loadedTimeRanges"])
3420             function = std::bind(&MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
3421         else if ([keyPath isEqualToString:@"seekableTimeRanges"])
3422             function = std::bind(&MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
3423         else if ([keyPath isEqualToString:@"tracks"])
3424             function = std::bind(&MediaPlayerPrivateAVFoundationObjC::tracksDidChange, m_callback, RetainPtr<NSArray>(newValue));
3425         else if ([keyPath isEqualToString:@"hasEnabledAudio"])
3426             function = std::bind(&MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange, m_callback, [newValue boolValue]);
3427         else if ([keyPath isEqualToString:@"presentationSize"])
3428             function = std::bind(&MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange, m_callback, FloatSize([newValue sizeValue]));
3429         else if ([keyPath isEqualToString:@"duration"])
3430             function = std::bind(&MediaPlayerPrivateAVFoundationObjC::durationDidChange, m_callback, toMediaTime([newValue CMTimeValue]));
3431         else if ([keyPath isEqualToString:@"timedMetadata"] && newValue) {
3432             MediaTime now;
3433             CMTime itemTime = [(AVPlayerItemType *)object currentTime];
3434             if (CMTIME_IS_NUMERIC(itemTime))
3435                 now = std::max(toMediaTime(itemTime), MediaTime::zeroTime());
3436             function = std::bind(&MediaPlayerPrivateAVFoundationObjC::metadataDidArrive, m_callback, RetainPtr<NSArray>(newValue), now);
3437         } else if ([keyPath isEqualToString:@"canPlayFastReverse"])
3438             function = std::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange, m_callback, [newValue boolValue]);
3439         else if ([keyPath isEqualToString:@"canPlayFastForward"])
3440             function = std::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange, m_callback, [newValue boolValue]);
3441     }
3442
3443     if (context == MediaPlayerAVFoundationObservationContextPlayer && !willChange) {
3444         // A value changed for an AVPlayer.
3445         if ([keyPath isEqualToString:@"rate"])
3446             function = std::bind(&MediaPlayerPrivateAVFoundationObjC::rateDidChange, m_callback, [newValue doubleValue]);
3447 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
3448         else if ([keyPath isEqualToString:@"externalPlaybackActive"] || [keyPath isEqualToString:@"allowsExternalPlayback"])
3449             function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange, m_callback);
3450 #endif
3451 #if ENABLE(ENCRYPTED_MEDIA_V2)
3452         else if ([keyPath isEqualToString:@"outputObscuredDueToInsufficientExternalProtection"])
3453             function = std::bind(&MediaPlayerPrivateAVFoundationObjC::outputObscuredDueToInsufficientExternalProtectionChanged, m_callback, [newValue boolValue]);
3454 #endif
3455     }
3456     
3457     if (!function)
3458         return;
3459
3460     auto weakThis = m_callback->createWeakPtr();
3461     m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis, function]{
3462         // weakThis and function both refer to the same MediaPlayerPrivateAVFoundationObjC instance. If the WeakPtr has
3463         // been cleared, the underlying object has been destroyed, and it is unsafe to call function().