Revert r202466 and r202546: they cause regressions in media loading with temporary redirects.
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27
28 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
29 #import "MediaPlayerPrivateAVFoundationObjC.h"
30
31 #import "AVFoundationMIMETypeCache.h"
32 #import "AVFoundationSPI.h"
33 #import "AVTrackPrivateAVFObjCImpl.h"
34 #import "AudioSourceProviderAVFObjC.h"
35 #import "AudioTrackPrivateAVFObjC.h"
36 #import "AuthenticationChallenge.h"
37 #import "CDMSessionAVFoundationObjC.h"
38 #import "Cookie.h"
39 #import "ExceptionCodePlaceholder.h"
40 #import "Extensions3D.h"
41 #import "FloatConversion.h"
42 #import "FloatConversion.h"
43 #import "GraphicsContext.h"
44 #import "GraphicsContext3D.h"
45 #import "GraphicsContextCG.h"
46 #import "InbandMetadataTextTrackPrivateAVF.h"
47 #import "InbandTextTrackPrivateAVFObjC.h"
48 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
49 #import "Logging.h"
50 #import "MediaPlaybackTargetMac.h"
51 #import "MediaPlaybackTargetMock.h"
52 #import "MediaSelectionGroupAVFObjC.h"
53 #import "MediaTimeAVFoundation.h"
54 #import "OutOfBandTextTrackPrivateAVF.h"
55 #import "PixelBufferConformerCV.h"
56 #import "PlatformTimeRanges.h"
57 #import "QuartzCoreSPI.h"
58 #import "SecurityOrigin.h"
59 #import "SerializedPlatformRepresentationMac.h"
60 #import "Settings.h"
61 #import "TextEncoding.h"
62 #import "TextTrackRepresentation.h"
63 #import "TextureCacheCV.h"
64 #import "URL.h"
65 #import "UUID.h"
66 #import "VideoTextureCopierCV.h"
67 #import "VideoTrackPrivateAVFObjC.h"
68 #import "WebCoreAVFResourceLoader.h"
69 #import "WebCoreCALayerExtras.h"
70 #import "WebCoreNSURLSession.h"
71 #import "WebCoreSystemInterface.h"
72 #import <functional>
73 #import <map>
74 #import <objc/runtime.h>
75 #import <runtime/DataView.h>
76 #import <runtime/JSCInlines.h>
77 #import <runtime/TypedArrayInlines.h>
78 #import <runtime/Uint16Array.h>
79 #import <runtime/Uint32Array.h>
80 #import <runtime/Uint8Array.h>
81 #import <wtf/BlockObjCExceptions.h>
82 #import <wtf/CurrentTime.h>
83 #import <wtf/ListHashSet.h>
84 #import <wtf/NeverDestroyed.h>
85 #import <wtf/OSObjectPtr.h>
86 #import <wtf/text/CString.h>
87 #import <wtf/text/StringBuilder.h>
88
89 #if ENABLE(AVF_CAPTIONS)
90 #include "TextTrack.h"
91 #endif
92
93 #import <AVFoundation/AVFoundation.h>
94
95 #if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
96 #import "VideoFullscreenLayerManager.h"
97 #endif
98
99 #if PLATFORM(IOS)
100 #import "WAKAppKitStubs.h"
101 #import <CoreImage/CoreImage.h>
102 #import <mach/mach_port.h>
103 #else
104 #import <Foundation/NSGeometry.h>
105 #import <QuartzCore/CoreImage.h>
106 #endif
107
108 #if USE(VIDEOTOOLBOX)
109 #import <CoreVideo/CoreVideo.h>
110 #import <VideoToolbox/VideoToolbox.h>
111 #endif
112
113 #if USE(CFNETWORK)
114 #include "CFNSURLConnectionSPI.h"
115 #endif
116
117 #import "CoreVideoSoftLink.h"
118
// WTF::HashSet's iterator presumably lacks the nested typedefs that
// std::iterator_traits expects, so a minimal specialization (value_type only)
// is provided for the MediaSelectionOptionAVFObjC set — TODO confirm which
// <algorithm> use in this file requires it.
namespace std {
template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
    typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
};
}
124
125 #if ENABLE(AVF_CAPTIONS)
126 // Note: This must be defined before our SOFT_LINK macros:
127 @class AVMediaSelectionOption;
128 @interface AVMediaSelectionOption (OutOfBandExtensions)
129 @property (nonatomic, readonly) NSString* outOfBandSource;
130 @property (nonatomic, readonly) NSString* outOfBandIdentifier;
131 @end
132 #endif
133
// Declares AVURLAsset's resolvedURL property so it can be used without a
// private header. NOTE(review): looks like SPI — callers should check
// availability (e.g. respondsToSelector:) at runtime; confirm at use sites.
@interface AVURLAsset (WebKitExtensions)
@property (nonatomic, readonly) NSURL *resolvedURL;
@end
137
138 typedef AVPlayer AVPlayerType;
139 typedef AVPlayerItem AVPlayerItemType;
140 typedef AVPlayerItemLegibleOutput AVPlayerItemLegibleOutputType;
141 typedef AVPlayerItemVideoOutput AVPlayerItemVideoOutputType;
142 typedef AVMetadataItem AVMetadataItemType;
143 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
144 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
145 typedef AVAssetCache AVAssetCacheType;
146
147 #pragma mark - Soft Linking
148
149 // Soft-linking headers must be included last since they #define functions, constants, etc.
150 #import "CoreMediaSoftLink.h"
151
152 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
153
154 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
155
156 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayer)
157 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerItem)
158 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerItemVideoOutput)
159 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerLayer)
160 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVURLAsset)
161 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVAssetImageGenerator)
162 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVMetadataItem)
163 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVAssetCache)
164
165 SOFT_LINK_CLASS(CoreImage, CIContext)
166 SOFT_LINK_CLASS(CoreImage, CIImage)
167
168 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString*)
169 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString*)
170 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
171 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
172 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
173 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
174 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
175 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeMetadata, NSString *)
176 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
177 SOFT_LINK_POINTER(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
178 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
179 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
180 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
181 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
182 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
183
184 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
185
186 #define AVPlayer initAVPlayer()
187 #define AVPlayerItem initAVPlayerItem()
188 #define AVPlayerLayer initAVPlayerLayer()
189 #define AVURLAsset initAVURLAsset()
190 #define AVAssetImageGenerator initAVAssetImageGenerator()
191 #define AVPlayerItemVideoOutput initAVPlayerItemVideoOutput()
192 #define AVMetadataItem initAVMetadataItem()
193 #define AVAssetCache initAVAssetCache()
194
195 #define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
196 #define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
197 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
198 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
199 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
200 #define AVMediaTypeVideo getAVMediaTypeVideo()
201 #define AVMediaTypeAudio getAVMediaTypeAudio()
202 #define AVMediaTypeMetadata getAVMediaTypeMetadata()
203 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
204 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
205 #define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
206 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
207 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
208 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
209 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
210 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
211
212 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
213 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
214 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
215
216 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
217 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
218 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
219
220 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
221 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
222 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
223 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
224
225 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
226 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
227 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
228 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
229 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
230 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
231 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
232 #endif
233
234 #if ENABLE(AVF_CAPTIONS)
235 SOFT_LINK_POINTER(AVFoundation, AVURLAssetCacheKey, NSString*)
236 SOFT_LINK_POINTER(AVFoundation, AVURLAssetHTTPCookiesKey, NSString*)
237 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
238 SOFT_LINK_POINTER(AVFoundation, AVURLAssetUsesNoPersistentCacheKey, NSString*)
239 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
240 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
241 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
242 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
243 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
244 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
245 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
246 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
247 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicIsAuxiliaryContent, NSString*)
248
249 #define AVURLAssetHTTPCookiesKey getAVURLAssetHTTPCookiesKey()
250 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
251 #define AVURLAssetCacheKey getAVURLAssetCacheKey()
252 #define AVURLAssetUsesNoPersistentCacheKey getAVURLAssetUsesNoPersistentCacheKey()
253 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
254 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
255 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
256 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
257 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
258 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
259 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
260 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
261 #define AVMediaCharacteristicIsAuxiliaryContent getAVMediaCharacteristicIsAuxiliaryContent()
262 #endif
263
264 #if ENABLE(DATACUE_VALUE)
265 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString*)
266 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMetadataKeySpaceISOUserData, NSString*)
267 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString*)
268 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceiTunes, NSString*)
269 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceID3, NSString*)
270
271 #define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
272 #define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
273 #define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
274 #define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
275 #define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
276 #endif
277
278 #if PLATFORM(IOS)
279 SOFT_LINK_POINTER(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
280 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
281 #endif
282
283 SOFT_LINK_FRAMEWORK(MediaToolbox)
284 SOFT_LINK_OPTIONAL(MediaToolbox, MTEnableCaption2015Behavior, Boolean, (), ())
285
286 using namespace WebCore;
287
// Context values passed to -addObserver:forKeyPath:options:context: so that
// the observer's -observeValueForKeyPath: can tell which kind of object
// (player item, item track, player, or player layer) a KVO change came from.
enum MediaPlayerAVFoundationObservationContext {
    MediaPlayerAVFoundationObservationContextPlayerItem,
    MediaPlayerAVFoundationObservationContextPlayerItemTrack,
    MediaPlayerAVFoundationObservationContextPlayer,
    MediaPlayerAVFoundationObservationContextAVPlayerLayer,
};
294
// Objective-C observer that receives KVO change notifications and
// NSNotifications (see -didEnd:), and — when legible output is supported —
// AVPlayerItemLegibleOutput caption callbacks, on behalf of the owning
// MediaPlayerPrivateAVFoundationObjC. -disconnect detaches it from the owner
// so notifications arriving after teardown can be ignored.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
#else
@interface WebCoreAVFMovieObserver : NSObject
#endif
{
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Raw back-pointer to the owner; not retained.
    int m_delayCallbacks; // NOTE(review): presumably gates re-entrant callback delivery — confirm in the @implementation.
}
-(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
-(void)disconnect;
-(void)metadataLoaded;
-(void)didEnd:(NSNotification *)notification;
// keyPath is an implicit id parameter, matching the loose KVO signature.
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;
#endif
@end
314
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// AVAssetResourceLoaderDelegate that forwards resource loading requests to the
// owning MediaPlayerPrivateAVFoundationObjC. The owner clears the raw
// m_callback pointer via -setCallback:0 in its destructor before going away.
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    MediaPlayerPrivateAVFoundationObjC* m_callback;
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
@end
#endif
324
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// AVPlayerItemOutputPullDelegate notifying the owning player when new video
// frames become available from the AVPlayerItemVideoOutput. m_callback is a
// raw back-pointer, cleared by the owner (-setCallback:0) before destruction.
@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    MediaPlayerPrivateAVFoundationObjC *m_callback;
    dispatch_semaphore_t m_semaphore; // Manually managed; the owner dispatch_release()s its copy in its destructor.
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
@end
#endif
336
337 namespace WebCore {
338
339 static NSArray *assetMetadataKeyNames();
340 static NSArray *itemKVOProperties();
341 static NSArray *assetTrackMetadataKeyNames();
342 static NSArray *playerKVOProperties();
343 static AVAssetTrack* firstEnabledTrack(NSArray* tracks);
344
#if !LOG_DISABLED
// Converts a boolean to a C string for use in LOG() format arguments.
static const char *boolString(bool val)
{
    if (val)
        return "true";
    return "false";
}
#endif
351
352 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Returns the lazily-created, process-wide serial queue on which
// AVAssetResourceLoader delegate callbacks are handled. dispatch_once makes
// creation thread-safe; every call returns the same queue.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t loaderQueue;
    static dispatch_once_t creationGuard;
    dispatch_once(&creationGuard, ^{
        loaderQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return loaderQueue;
}
362 #endif
363
364 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Returns the lazily-created, process-wide serial queue used for
// AVPlayerItemOutput pull-delegate callbacks. Creation is guarded by
// dispatch_once, so all callers share one queue.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_queue_t pullDelegateQueue;
    static dispatch_once_t creationGuard;
    dispatch_once(&creationGuard, ^{
        pullDelegateQueue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return pullDelegateQueue;
}
374 #endif
375
376 #if USE(CFNETWORK)
// Adapter that lets WebCore's AuthenticationClient machinery answer an
// NSURLAuthenticationChallenge: each AuthenticationClient callback is
// forwarded to the challenge's -sender. CFNetwork configuration only.
class WebCoreNSURLAuthenticationChallengeClient : public RefCounted<WebCoreNSURLAuthenticationChallengeClient>, public AuthenticationClient {
public:
    static RefPtr<WebCoreNSURLAuthenticationChallengeClient> create(NSURLAuthenticationChallenge *challenge)
    {
        return adoptRef(new WebCoreNSURLAuthenticationChallengeClient(challenge));
    }

    // Re-export RefCounted's ref()/deref(); the AuthenticationClient
    // overrides below forward to them.
    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::ref;
    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::deref;

private:
    WebCoreNSURLAuthenticationChallengeClient(NSURLAuthenticationChallenge *challenge)
        : m_challenge(challenge)
    {
        ASSERT(m_challenge);
    }

    void refAuthenticationClient() override { ref(); }
    void derefAuthenticationClient() override { deref(); }

    void receivedCredential(const AuthenticationChallenge&, const Credential& credential) override
    {
        [[m_challenge sender] useCredential:credential.nsCredential() forAuthenticationChallenge:m_challenge.get()];
    }

    void receivedRequestToContinueWithoutCredential(const AuthenticationChallenge&) override
    {
        [[m_challenge sender] continueWithoutCredentialForAuthenticationChallenge:m_challenge.get()];
    }

    void receivedCancellation(const AuthenticationChallenge&) override
    {
        [[m_challenge sender] cancelAuthenticationChallenge:m_challenge.get()];
    }

    // The next two sender methods are optional, so probe with
    // respondsToSelector: before messaging.
    void receivedRequestToPerformDefaultHandling(const AuthenticationChallenge&) override
    {
        if ([[m_challenge sender] respondsToSelector:@selector(performDefaultHandlingForAuthenticationChallenge:)])
            [[m_challenge sender] performDefaultHandlingForAuthenticationChallenge:m_challenge.get()];
    }

    void receivedChallengeRejection(const AuthenticationChallenge&) override
    {
        if ([[m_challenge sender] respondsToSelector:@selector(rejectProtectionSpaceAndContinueWithChallenge:)])
            [[m_challenge sender] rejectProtectionSpaceAndContinueWithChallenge:m_challenge.get()];
    }

    RetainPtr<NSURLAuthenticationChallenge> m_challenge; // Retained; asserted non-null in the constructor.
};
426 #endif
427
// Registers this engine with the MediaPlayer factory, supplying the creation
// lambda plus the static capability and cache-management hooks. No-op when
// the AVFoundation framework is unavailable (failed to soft-link).
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player); },
            getSupportedTypes, supportsType, originsInMediaCache, clearMediaCache, clearMediaCacheForOrigins, supportsKeySystem);
    // Kick off loading of the MIME types AVFoundation supports.
    AVFoundationMIMETypeCache::singleton().loadTypes();
}
437
// Returns the AVAssetCache backing the media cache at the given path. An
// empty path falls back to a "MediaCache" directory inside
// NSTemporaryDirectory().
static AVAssetCacheType *assetCacheForPath(const String& path)
{
    NSURL *cacheDirectoryURL = path.isEmpty()
        ? [[NSURL fileURLWithPath:NSTemporaryDirectory()] URLByAppendingPathComponent:@"MediaCache" isDirectory:YES]
        : [NSURL fileURLWithPath:path isDirectory:YES];

    return [initAVAssetCache() assetCacheWithURL:cacheDirectoryURL];
}
449
// Collects the security origins that have entries in the media cache at
// |path|. Cache keys are URL strings; keys that do not parse as valid URLs
// are skipped.
HashSet<RefPtr<SecurityOrigin>> MediaPlayerPrivateAVFoundationObjC::originsInMediaCache(const String& path)
{
    HashSet<RefPtr<SecurityOrigin>> cachedOrigins;
    for (NSString *cacheKey in [assetCacheForPath(path) allKeys]) {
        URL cacheKeyAsURL = URL(URL(), cacheKey);
        if (!cacheKeyAsURL.isValid())
            continue;
        cachedOrigins.add(SecurityOrigin::create(cacheKeyAsURL));
    }
    return cachedOrigins;
}
460
// Converts an NSDate to a std::chrono::system_clock::time_point, preserving
// the sub-second precision of -timeIntervalSince1970.
static std::chrono::system_clock::time_point toSystemClockTime(NSDate *date)
{
    ASSERT(date);
    using namespace std::chrono;

    duration<double> secondsSinceEpoch(date.timeIntervalSince1970);
    return system_clock::time_point(duration_cast<system_clock::duration>(secondsSinceEpoch));
}
468
// Removes media cache entries under |path| that were modified after
// |modifiedSince|, along with their backing "CachedMedia-" files. A cutoff at
// or before the epoch clears the entire cache directory.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCache(const String& path, std::chrono::system_clock::time_point modifiedSince)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::clearMediaCache()");
    
    AVAssetCacheType* assetCache = assetCacheForPath(path);
    
    // First drop the entries AVAssetCache itself knows about.
    for (NSString *key in [assetCache allKeys]) {
        if (toSystemClockTime([assetCache lastModifiedDateOfEntryForKey:key]) > modifiedSince)
            [assetCache removeEntryForKey:key];
    }

    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSURL *baseURL = [assetCache URL];

    // A default-constructed (epoch) or earlier cutoff means "clear
    // everything": remove the whole cache directory and stop.
    if (modifiedSince <= std::chrono::system_clock::time_point { }) {
        [fileManager removeItemAtURL:baseURL error:nil];
        return;
    }
    
    // Otherwise scan the cache directory (top level only) for regular
    // "CachedMedia-" files newer than the cutoff. Collect first, delete after,
    // so the directory is not mutated while the enumerator walks it.
    NSArray *propertyKeys = @[NSURLNameKey, NSURLContentModificationDateKey, NSURLIsRegularFileKey];
    NSDirectoryEnumerator *enumerator = [fileManager enumeratorAtURL:baseURL includingPropertiesForKeys:
        propertyKeys options:NSDirectoryEnumerationSkipsSubdirectoryDescendants
        errorHandler:nil];
    
    RetainPtr<NSMutableArray> urlsToDelete = adoptNS([[NSMutableArray alloc] init]);
    for (NSURL *fileURL : enumerator) {
        NSDictionary *fileAttributes = [fileURL resourceValuesForKeys:propertyKeys error:nil];
    
        if (![fileAttributes[NSURLNameKey] hasPrefix:@"CachedMedia-"])
            continue;
        
        if (![fileAttributes[NSURLIsRegularFileKey] boolValue])
            continue;
        
        if (toSystemClockTime(fileAttributes[NSURLContentModificationDateKey]) <= modifiedSince)
            continue;
        
        [urlsToDelete addObject:fileURL];
    }
    
    for (NSURL *fileURL in urlsToDelete.get())
        [fileManager removeItemAtURL:fileURL error:nil];
}
512
// Removes every media cache entry at |path| whose key (a URL string) belongs
// to one of the given security origins. Keys that are not valid URLs are
// left untouched.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCacheForOrigins(const String& path, const HashSet<RefPtr<SecurityOrigin>>& origins)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::clearMediaCacheForOrigins()");
    AVAssetCacheType* assetCache = assetCacheForPath(path);
    for (NSString *cacheKey in [assetCache allKeys]) {
        URL cacheKeyAsURL = URL(URL(), cacheKey);
        if (!cacheKeyAsURL.isValid())
            continue;
        if (origins.contains(SecurityOrigin::create(cacheKeyAsURL)))
            [assetCache removeEntryForKey:cacheKey];
    }
}
525
// Constructs the private player. AVFoundation objects (player, item, layer)
// are created lazily later; this only sets up the helper delegate objects
// and default cached values.
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_weakPtrFactory(this)
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    , m_videoFullscreenLayerManager(VideoFullscreenLayerManager::create())
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
#endif
    // The Objective-C helper objects hold a raw back-pointer to |this|; they
    // are detached in the destructor before |this| goes away.
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
    , m_videoOutputSemaphore(nullptr)
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
#endif
    // Cached KVO-mirrored state starts in the "no item" configuration.
    , m_currentTextTrack(0)
    , m_cachedRate(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_shouldBufferData(true)
    , m_cachedIsReadyForDisplay(false)
    , m_haveBeenAskedToCreateLayer(false)
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
}
560
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Detach the loader delegate first so in-flight resource-loading callbacks
    // see a null callback rather than a dangling |this|.
    [m_loaderDelegate.get() setCallback:0];
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];

    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    [m_videoOutputDelegate setCallback:0];
    [m_videoOutput setDelegate:nil queue:0];
    // m_videoOutputSemaphore is a manually-managed dispatch object (presumably
    // from dispatch_semaphore_create — confirm at creation site), so release
    // it explicitly.
    if (m_videoOutputSemaphore)
        dispatch_release(m_videoOutputSemaphore);
#endif

    if (m_videoLayer)
        destroyVideoLayer();

    // cancelLoad() performs the remaining teardown (KVO removal, asset
    // cancellation, cached-state reset).
    cancelLoad();
}
582
// Tears down all AVFoundation state: stops rendering, unhooks the observer
// and outputs, cancels asset loading, removes every KVO registration, and
// resets cached property values. Each step is guarded, so this is safe to
// call when partially initialized (e.g. from the destructor).
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::cancelLoad(%p)", this);
    tearDownVideoRendering();

    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Detach the legible (caption) output from the item before dropping it.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    // Remove the KVO registrations on the item and the player; the key lists
    // must match those used when the observers were added
    // (itemKVOProperties() / playerKVOProperties()).
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        
        m_avPlayerItem = nil;
    }
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;

        for (NSString *keyName in playerKVOProperties())
            [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = MediaTime::zeroTime();

    // Each cached track carries an "enabled" KVO registration of its own.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    if (m_provider) {
        m_provider->setPlayerItem(nullptr);
        m_provider->setAudioTrack(nullptr);
    }
#endif

    // Teardown complete; resume normal handling of load state changes.
    setIgnoreLoadStateChanges(false);
}
646
// Returns whether layer-based rendering has been requested; the flag is set
// by createVideoLayer()'s main-thread work, not by layer existence itself.
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_haveBeenAskedToCreateLayer;
}
651
// Returns true if a non-layer (context/painting) renderer exists: an
// AVPlayerItemVideoOutput when that path is compiled in, or an
// AVAssetImageGenerator otherwise.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (m_videoOutput)
        return true;
#endif
    return m_imageGenerator;
}
660
// Creates whichever context renderer this build supports: the video output
// when available, otherwise the image-generator fallback.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
669
// Lazily creates an AVAssetImageGenerator for painting video frames. No-op
// when there is no asset yet or a generator already exists.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p)", this);

    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    // Zero tolerance on both sides: generated images must correspond to the
    // exact requested time, not a nearby convenient frame.
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
}
686
// Tears down the context renderers: the video output paths (when compiled in)
// and the image generator, whichever exist.
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
    destroyOpenGLVideoOutput();
#endif
    destroyImageGenerator();
}
695
// Releases the AVAssetImageGenerator, if one was ever created.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (!m_imageGenerator)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator(%p) - destroying  %p", this, m_imageGenerator.get());

    m_imageGenerator = nil;
}
705
// Requests creation of the AVPlayerLayer. The actual work is deferred to the
// main thread via a weak pointer; the guards are re-checked there because the
// player may be torn down, or a second request may land, before the deferred
// block runs.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
        return;

    callOnMainThread([this, weakThis = createWeakPtr()] {
        if (!weakThis)
            return;

        if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
            return;
        m_haveBeenAskedToCreateLayer = true;

        if (!m_videoLayer)
            createAVPlayerLayer();

#if USE(VIDEOTOOLBOX)
        if (!m_videoOutput)
            createVideoOutput();
#endif

        // Notify the client that the rendering mode has changed.
        player()->client().mediaPlayerRenderingModeChanged(player());
    });
}
730
// Creates the AVPlayerLayer that displays video, attaches it to the player,
// registers for readyForDisplay KVO, and either hands it to the fullscreen
// layer manager (iOS / Mac presentation mode) or sizes it directly.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
{
    if (!m_avPlayer)
        return;

    m_videoLayer = adoptNS([[AVPlayerLayer alloc] init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];
    [m_videoLayer setBackgroundColor:cachedCGColor(Color::black)];

#ifndef NDEBUG
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
    // Balanced by the removeObserver: call in destroyVideoLayer().
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
    IntSize defaultSize = snappedIntRect(player()->client().mediaPlayerContentBoxRect()).size();
    // NOTE(review): this log message says "createVideoLayer" although the
    // function is createAVPlayerLayer; likely stale after a refactor.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoLayer(%p) - returning %p", this, m_videoLayer.get());

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    m_videoFullscreenLayerManager->setVideoLayer(m_videoLayer.get(), defaultSize);

#if PLATFORM(IOS)
    // Keep the layer's picture-in-picture flag in sync with the player's
    // fullscreen mode; setPIPModeEnabled: may not exist on all OS versions.
    if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(player()->fullscreenMode() & MediaPlayer::VideoFullscreenModePictureInPicture)];
#endif
#else
    [m_videoLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
#endif
}
760
// Detaches and releases the AVPlayerLayer, removing the KVO observer first so
// no readyForDisplay notifications arrive for a dead layer.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer(%p) - destroying %p", this, m_videoLayer.get());

    [m_videoLayer removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer setPlayer:nil];

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    m_videoFullscreenLayerManager->didDestroyVideoLayer();
#endif

    m_videoLayer = nil;
}
777
// Returns the stream's start date in milliseconds since the epoch, computed by
// subtracting the current playback offset from AVPlayerItem's currentDate.
MediaTime MediaPlayerPrivateAVFoundationObjC::getStartDate() const
{
    // Date changes as the track's playback position changes. Must subtract currentTime (offset in seconds) from date offset to get date beginning
    double date = [[m_avPlayerItem currentDate] timeIntervalSince1970] * 1000;

    // No live streams were made during the epoch (1970). AVFoundation returns 0 if the media file doesn't have a start date
    if (!date)
        return MediaTime::invalidTime();

    double currentTime = CMTimeGetSeconds([m_avPlayerItem currentTime]) * 1000;

    // Rounding due to second offset error when subtracting.
    return MediaTime::createWithDouble(round(date - currentTime));
}
792
// Whether at least one video frame can be shown right now. In layer mode this
// is the KVO-cached readyForDisplay state; in context mode it is whether a
// frame has been painted.
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    bool renderingToLayer = currentRenderingMode() == MediaRenderingToLayer;
    return renderingToLayer ? m_cachedIsReadyForDisplay : m_videoFrameHasDrawn;
}
800
801 #if ENABLE(AVF_CAPTIONS)
// Maps a WebCore text-track kind onto the AVFoundation media-characteristic
// array used to describe an out-of-band track.
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    // With 2015 caption behavior enabled, every track is plain auxiliary content.
    static bool manualSelectionMode = MTEnableCaption2015BehaviorPtr() && MTEnableCaption2015BehaviorPtr()();
    if (manualSelectionMode)
        return @[ AVMediaCharacteristicIsAuxiliaryContent ];

    // FIXME: Match these to correct types:
    if (kind == PlatformTextTrack::Caption || kind == PlatformTextTrack::Subtitle)
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];

    if (kind == PlatformTextTrack::Description)
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility ];

    if (kind == PlatformTextTrack::Forced)
        return @[ AVMediaCharacteristicContainsOnlyForcedSubtitles ];

    return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];
}
823     
// Entry point for the page to report an out-of-band track mode change;
// forwards to the base class.
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
828     
// Pushes the modes chosen by the page's out-of-band track sources onto the
// matching platform text tracks, pairing them up by unique identifier.
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();

    for (auto& textTrack : m_textTracks) {
        // Only out-of-band tracks are driven by the page's track sources.
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;

        RefPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> currentOption = trackPrivate->mediaSelectionOption();

        for (auto& track : outOfBandTrackSources) {
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();

            // Match the AVFoundation selection option to its source by ID.
            if (![[currentOption.get() outOfBandIdentifier] isEqual: reinterpret_cast<const NSString*>(uniqueID.get())])
                continue;

            // Translate the page-level mode into the in-band track mode.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;

            textTrack->setMode(mode);
            break;
        }
    }
}
859 #endif
860
861
// Runs |url| through the registered NSURLProtocols' request canonicalization
// so AVFoundation loads the same canonical form WebKit's loader would use.
// Falls back to the plain URL whenever canonicalization is not possible.
static NSURL *canonicalURL(const String& url)
{
    NSURL *result = URL(ParsedURLString, url);
    if (url.isEmpty())
        return result;

    RetainPtr<NSURLRequest> request = adoptNS([[NSURLRequest alloc] initWithURL:result]);
    if (!request)
        return result;

    NSURLRequest *canonicalRequest = [NSURLProtocol canonicalRequestForRequest:request.get()];
    return canonicalRequest ? [canonicalRequest URL] : result;
}
878
879 #if PLATFORM(IOS)
// Converts a WebCore Cookie into an NSHTTPCookie. The expiry converts
// WebCore's millisecond timestamp to NSDate seconds; the secure and discard
// flags are only present in the property dictionary when set.
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    RetainPtr<NSMutableDictionary> properties = adoptNS([@{
        NSHTTPCookieName: cookie.name,
        NSHTTPCookieValue: cookie.value,
        NSHTTPCookieDomain: cookie.domain,
        NSHTTPCookiePath: cookie.path,
        NSHTTPCookieExpires: [NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)],
    } mutableCopy]);

    if (cookie.secure)
        [properties setObject:@YES forKey:NSHTTPCookieSecure];
    if (cookie.session)
        [properties setObject:@YES forKey:NSHTTPCookieDiscard];

    return [NSHTTPCookie cookieWithProperties:properties.get()];
}
897 #endif
898
// Builds the AVURLAsset for |url|, assembling the option dictionary that
// carries referrer/user-agent headers, out-of-band text tracks, cookies,
// caching policy, and (when available) the custom resource loader hookup.
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
{
    if (m_avAsset)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(%p) - url = %s", this, url.utf8().data());

    // Suppress re-entrant notifications while the asset is being configured.
    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);

    // Disallow mixed local/remote references inside the media resource.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    // Forward the page's Referer and User-Agent headers to AVFoundation.
    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject: [NSNumber numberWithBool: TRUE] forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

#if PLATFORM(IOS)
    // FIXME: rdar://problem/20354688
    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty() && AVURLAssetClientBundleIdentifierKey)
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];
#endif

#if ENABLE(AVF_CAPTIONS)
    // Describe each out-of-band text track so AVFoundation can offer it in the
    // media selection group alongside in-band tracks.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: reinterpret_cast<const NSString*>(label.get()),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: reinterpret_cast<const NSString*>(language.get()),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: reinterpret_cast<const NSString*>(uniqueID.get()),
                AVOutOfBandAlternateTrackSourceKey: reinterpret_cast<const NSString*>(url.get()),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS)
    // Pin media loads to a specific network interface when requested.
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS)
    // Hand WebCore's cookies for this URL to AVFoundation's loader.
    Vector<Cookie> cookies;
    if (player()->getRawCookies(URL(ParsedURLString, url), cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    bool usePersistentCache = player()->client().mediaPlayerShouldUsePersistentCache();
    [options setObject:@(!usePersistentCache) forKey:AVURLAssetUsesNoPersistentCacheKey];

    if (usePersistentCache)
        [options setObject:assetCacheForPath(player()->client().mediaPlayerMediaCacheDirectory()) forKey:AVURLAssetCacheKey];

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([[AVURLAsset alloc] initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    [resourceLoader setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];

#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED > 101100
    // Route AVFoundation's media loads through WebCore's loader via a custom
    // NSURLSession when the SPI is present and the setting is enabled.
    if (Settings::isAVFoundationNSURLSessionEnabled()
        && [resourceLoader respondsToSelector:@selector(setURLSession:)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegate)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegateQueue)]) {
        RefPtr<PlatformMediaResourceLoader> mediaResourceLoader = player()->createResourceLoader();
        if (mediaResourceLoader)
            resourceLoader.URLSession = (NSURLSession *)[[[WebCoreNSURLSession alloc] initWithResourceLoader:*mediaResourceLoader delegate:resourceLoader.URLSessionDataDelegate delegateQueue:resourceLoader.URLSessionDataDelegateQueue] autorelease];
    }
#endif

#endif

    // A new asset needs a fresh playability check.
    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
1005
// Installs |item| on the AVPlayer. Item replacement is performed on the main
// thread; when called elsewhere, both objects are retained and the call is
// dispatched so they stay alive until the block runs.
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItemType *item)
{
    if (!m_avPlayer)
        return;

    if (pthread_main_np()) {
        [m_avPlayer replaceCurrentItemWithPlayerItem:item];
        return;
    }

    // Keep the player and item alive until the main-thread block executes.
    RetainPtr<AVPlayerType> strongPlayer = m_avPlayer.get();
    RetainPtr<AVPlayerItemType> strongItem = item;
    dispatch_async(dispatch_get_main_queue(), [strongPlayer, strongItem] {
        [strongPlayer replaceCurrentItemWithPlayerItem:strongItem.get()];
    });
}
1022
// Creates the AVPlayer, wires up KVO, and replays any state (playback target,
// muted, layer, item) that was requested before the player existed.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayer(%p)", this);

    setDelayCallbacks(true);

    m_avPlayer = adoptNS([[AVPlayer alloc] init]);
    for (NSString *keyName in playerKVOProperties())
        [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // WebCore drives media selection itself.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET) && !PLATFORM(IOS)
    if (m_shouldPlayToPlaybackTarget) {
        // Clear m_shouldPlayToPlaybackTarget so setShouldPlayToPlaybackTarget
        // doesn't return without doing anything.
        m_shouldPlayToPlaybackTarget = false;
        setShouldPlayToPlaybackTarget(true);
    }
#endif

    if (m_muted) {
        // Clear m_muted so setMuted doesn't return without doing anything, then
        // re-apply the muted state to the new player. (The previous code passed
        // the just-cleared flag to -setMuted:, leaving the player unmuted.)
        m_muted = false;
        setMuted(true);
    }

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
1067
// Creates the AVPlayerItem for the loaded asset, registers end-of-playback and
// KVO notifications, configures legible (caption) output, and attaches the
// item to the player if one already exists.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem(%p)", this);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([[AVPlayerItem alloc] initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // Observe item properties, including will-change (prior) notifications.
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Deliver WebVTT cues to the observer; player-side caption rendering is
    // suppressed because WebCore renders cues itself.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([[AVPlayerItemLegibleOutput alloc] initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio provider pointed at the new item and audio track.
    if (m_provider) {
        m_provider->setPlayerItem(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }
#endif

    setDelayCallbacks(false);
}
1113
// Kicks off an asynchronous "playable" check on the asset exactly once; the
// result is delivered on the main thread as an AssetPlayabilityKnown
// notification.
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::checkPlayability(%p)", this);
    auto weakThis = createWeakPtr();

    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"playable"] completionHandler:^{
        callOnMainThread([weakThis] {
            // The player may have been destroyed while the load was in flight.
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
1130
// Starts asynchronous loading of the asset's metadata keys and then of each
// track's metadata. A dispatch group joins all of the loads; when the last one
// finishes, metadataLoaded is delivered to the observer on the main thread.
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);

    OSObjectPtr<dispatch_group_t> metadataLoadingGroup = adoptOSObject(dispatch_group_create());
    dispatch_group_enter(metadataLoadingGroup.get());
    auto weakThis = createWeakPtr();
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            // Only fan out to per-track loads once the asset's tracks are known.
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup.get());
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup.get());
                    }];
                }
            }
            // Balance the initial enter above.
            dispatch_group_leave(metadataLoadingGroup.get());
        });
    }];

    dispatch_group_notify(metadataLoadingGroup.get(), dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });
    });
}
1160
// Maps the KVO-cached AVPlayerItem state onto the engine-neutral ItemStatus
// enum. Order matters: terminal states first, then buffering detail, and
// finally plain ready-to-play.
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    switch (m_cachedItemStatus) {
    case AVPlayerItemStatusUnknown:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    case AVPlayerItemStatusFailed:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    default:
        break;
    }

    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1179
// Wraps the AVPlayer in a PlatformMedia struct for clients that need direct
// access to the underlying engine object.
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformMedia(%p)", this);
    PlatformMedia media;
    media.type = PlatformMedia::AVFoundationMediaPlayerType;
    media.media.avfMediaPlayer = m_avPlayer.get();
    return media;
}
1188
// Returns the layer WebCore should composite for inline video, or null until
// createVideoLayer() has been asked to run.
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    // On these platforms the fullscreen manager owns the inline container layer.
    return m_haveBeenAskedToCreateLayer ? m_videoFullscreenLayerManager->videoInlineLayer() : nullptr;
#else
    return m_haveBeenAskedToCreateLayer ? m_videoLayer.get() : nullptr;
#endif
}
1197
1198 #if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
// Moves the video layer into (or out of) |videoFullscreenLayer|, keeps the
// text-track representation layer attached, and re-evaluates external
// playback. |completionHandler| always runs, even on the no-change early out.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer, std::function<void()> completionHandler)
{
    if (m_videoFullscreenLayerManager->videoFullscreenLayer() == videoFullscreenLayer) {
        completionHandler();
        return;
    }

    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, completionHandler);

    // Captions ride along with the fullscreen layer when both exist.
    if (m_videoFullscreenLayerManager->videoFullscreenLayer() && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayerManager->videoFullscreenLayer() addSublayer:m_textTrackRepresentationLayer.get()];
    }

    updateDisableExternalPlayback();
}
1215
// Updates the fullscreen video frame and keeps the caption layer sized to it.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
    syncTextTrackBounds();
}
1221
// Records the requested gravity and maps it to the corresponding
// AVLayerVideoGravity constant on the video layer, resizing captions to match.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;

    if (!m_videoLayer)
        return;

    NSString *layerGravity = AVLayerVideoGravityResizeAspect;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        layerGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        layerGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        layerGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
    }

    // The gravity strings are constants, so pointer comparison suffices here.
    if ([m_videoLayer videoGravity] == layerGravity)
        return;

    [m_videoLayer setVideoGravity:layerGravity];
    syncTextTrackBounds();
}
1245
// On iOS, toggles the layer's picture-in-picture SPI flag to match |mode| and
// re-evaluates external playback; other platforms ignore the mode here.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenMode(MediaPlayer::VideoFullscreenMode mode)
{
#if PLATFORM(IOS)
    if (m_videoLayer && [m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(mode & MediaPlayer::VideoFullscreenModePictureInPicture)];
    updateDisableExternalPlayback();
#else
    UNUSED_PARAM(mode);
#endif
}
1256
1257 #endif // PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
1258
1259 #if PLATFORM(IOS)
// Hands back the most recently cached timed metadata, or nil if none arrived.
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    return m_currentMetaData ? m_currentMetaData.get() : nil;
}
1266
// Returns the player item's access log as a string in its extended log
// encoding, or the empty string when no item exists yet.
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemAccessLog *accessLog = [m_avPlayerItem.get() accessLog];
    RetainPtr<NSString> text = adoptNS([[NSString alloc] initWithData:[accessLog extendedLogData] encoding:[accessLog extendedLogDataStringEncoding]]);
    return text.get();
}
1277
// Returns the player item's error log as a string in its extended log
// encoding, or the empty string when no item exists yet.
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *errorLog = [m_avPlayerItem.get() errorLog];
    RetainPtr<NSString> text = adoptNS([[NSString alloc] initWithData:[errorLog extendedLogData] encoding:[errorLog extendedLogDataStringEncoding]]);
    return text.get();
}
1288 #endif
1289
// Shows or hides the video layer without implicit Core Animation transitions.
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    if (m_videoLayer)
        [m_videoLayer.get() setHidden:!isVisible];
    [CATransaction commit];
}
1298     
// Starts playback by applying the requested rate. Callbacks are delayed so the
// resulting KVO notifications arrive after state is consistent.
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPlay(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    // Cache the rate we asked for so rate() doesn't have to query AVPlayer.
    m_cachedRate = requestedRate();
    [m_avPlayer.get() setRate:requestedRate()];
    setDelayCallbacks(false);
}
1310
// Pauses playback by driving the rate to zero, mirroring platformPlay().
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPause(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = 0;
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
1322
// Reports the media duration. Prefers the player item (some assets only report
// duration there); indefinite CMTime values (e.g. live streams) map to
// positive infinity, and anything else is invalid.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will fetch the
    // answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();

    CMTime cmDuration;

    // Check the AVItem if we have one and it has loaded duration, some assets never report duration.
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        cmDuration = [m_avPlayerItem.get() duration];
    else
        cmDuration = [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return toMediaTime(cmDuration);

    if (CMTIME_IS_INDEFINITE(cmDuration))
        return MediaTime::positiveInfiniteTime();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %s", this, toString(MediaTime::invalidTime()).utf8().data());
    return MediaTime::invalidTime();
}
1347
// Current playback position, clamped to be non-negative; zero when metadata is
// missing, the item doesn't exist, or the item time is non-numeric.
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime itemTime = [m_avPlayerItem.get() currentTime];
    if (CMTIME_IS_NUMERIC(itemTime))
        return std::max(toMediaTime(itemTime), MediaTime::zeroTime());

    return MediaTime::zeroTime();
}
1359
// Seeks the player item to |time| within the given tolerances; completion is
// reported back on the main thread via seekCompleted().
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // A seek invalidates any partially accumulated metadata cues.
    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = toCMTime(time);
    CMTime cmBefore = toCMTime(negativeTolerance);
    CMTime cmAfter = toCMTime(positiveTolerance);

    auto weakThis = createWeakPtr();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::seekToTime(%p) - calling seekToTime", this);

    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            // The player may have been destroyed while the seek was in flight.
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1388
// Applies |volume| to the AVPlayer. On iOS the request is ignored entirely.
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS)
    UNUSED_PARAM(volume);
    return;
#else
    if (!m_avPlayer)
        return;

    [m_avPlayer.get() setVolume:volume];
#endif
}
1401
// Records the requested muted state and applies it to the AVPlayer when one
// exists; otherwise the cached flag is replayed later in createAVPlayer().
void MediaPlayerPrivateAVFoundationObjC::setMuted(bool muted)
{
    if (m_muted == muted)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setMuted(%p) - set to %s", this, boolString(muted));

    m_muted = muted;

    if (!m_avPlayer)
        return;

    [m_avPlayer.get() setMuted:m_muted];
}
1416
// Intentionally a no-op beyond logging: the parameter is explicitly marked
// unused, so caption visibility is not applied here.
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - set to %s", this, boolString(closedCaptionsVisible));
}
1426
// Applies |rate| to the AVPlayer and caches it so rate() can answer without
// querying AVFoundation; callbacks are delayed around the KVO burst.
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    setDelayCallbacks(true);
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1434
// Reports the cached playback rate; before metadata is available it is zero.
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    return metaDataAvailable() ? m_cachedRate : 0;
}
1442
// Chooses the item's pitch algorithm: Spectral keeps pitch constant across
// rate changes, Varispeed lets it shift with the rate.
void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
{
    if (!m_avPlayerItem)
        return;

    [m_avPlayerItem setAudioTimePitchAlgorithm:(preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];
}
1448
// Converts the KVO-cached loaded ranges into PlatformTimeRanges, skipping
// invalid or empty CMTimeRanges. Returns an empty set when no item exists.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto timeRanges = std::make_unique<PlatformTimeRanges>();

    if (!m_avPlayerItem)
        return timeRanges;

    for (NSValue *thisRangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (CMTIMERANGE_IS_VALID(timeRange) && !CMTIMERANGE_IS_EMPTY(timeRange))
            timeRanges->add(toMediaTime(timeRange.start), toMediaTime(CMTimeRangeGetEnd(timeRange)));
    }
    return timeRanges;
}
1463
// Earliest seekable position: the minimum start across the cached seekable
// ranges. Returns zero when no valid range exists.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    // Start from +infinity so any valid range's start becomes the minimum.
    MediaTime minTimeSeekable = MediaTime::positiveInfiniteTime();
    bool hasValidRange = false;
    for (NSValue *thisRangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(timeRange) || CMTIMERANGE_IS_EMPTY(timeRange))
            continue;

        hasValidRange = true;
        MediaTime startOfRange = toMediaTime(timeRange.start);
        if (minTimeSeekable > startOfRange)
            minTimeSeekable = startOfRange;
    }
    return hasValidRange ? minTimeSeekable : MediaTime::zeroTime();
}
1483
// Latest seekable position: the maximum end across the seekable ranges.
// Lazily fetches the ranges from the item when the KVO cache is empty
// (messaging a nil item simply leaves the cache null).
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    MediaTime maxTimeSeekable;
    for (NSValue *thisRangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(timeRange) || CMTIMERANGE_IS_EMPTY(timeRange))
            continue;

        MediaTime endOfRange = toMediaTime(CMTimeRangeGetEnd(timeRange));
        if (maxTimeSeekable < endOfRange)
            maxTimeSeekable = endOfRange;
    }
    return maxTimeSeekable;
}
1501
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    // Nothing buffered yet if we have no cached loaded ranges.
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    // The maximum loaded time is the latest end among the valid ranges.
    MediaTime latestEnd;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        MediaTime rangeEnd = toMediaTime(CMTimeRangeGetEnd(range));
        if (latestEnd < rangeEnd)
            latestEnd = rangeEnd;
    }

    return latestEnd;
}
1520
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    // Without metadata there is nothing to report.
    if (!metaDataAvailable())
        return 0;

    // Compute lazily and memoize: sum the total sample data length of every
    // cached track the first time we are asked.
    if (!m_cachedTotalBytes) {
        for (AVPlayerItemTrack *track in m_cachedTracks.get())
            m_cachedTotalBytes += [[track assetTrack] totalSampleDataLength];
    }

    return m_cachedTotalBytes;
}
1534
// Stores the platform asset object backing this player.
void MediaPlayerPrivateAVFoundationObjC::setAsset(RetainPtr<id> asset)
{
    m_avAsset = asset;
}
1539
MediaPlayerPrivateAVFoundationObjC::assetStatus() const is reimplemented below.
1568
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    // Without an asset there can be no error.
    if (!m_avAsset)
        return 0;

    // Re-query the "playable" key; AVFoundation hands back the load error, if any.
    NSError *loadError = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&loadError];
    // Messaging nil returns 0, so a missing error yields no error code.
    return [loadError code];
}
1578
// Draws the current video frame into |context| at |rect|, preferring the
// video-output path when a decoded frame is available and falling back to the
// AVAssetImageGenerator path otherwise.
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    // Delay AVFoundation callbacks so notifications cannot re-enter us mid-paint.
    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    m_videoFrameHasDrawn = true;
}
1599
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    // paint() is best effort. Skip when metadata is missing, painting is
    // disabled, we are already rendering through a platform layer, or no
    // image generator / video output exists yet to render with.
    if (!metaDataAvailable()
        || context.paintingDisabled()
        || currentRenderingMode() == MediaRenderingToLayer
        || !hasContextRenderer())
        return;

    paintCurrentFrameInContext(context, rect);
}
1615
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext& context, const FloatRect& rect)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(%p)", this);

    RetainPtr<CGImageRef> frameImage = createImageForTimeInRect(currentTime(), rect);
    if (!frameImage)
        return;

    // CoreGraphics draws with a flipped y-axis relative to WebCore, so flip
    // the context around the destination rect before drawing.
    GraphicsContextStateSaver stateSaver(context);
    context.translate(rect.x(), rect.y() + rect.height());
    context.scale(FloatSize(1.0f, -1.0f));
    context.setImageInterpolationQuality(InterpolationLow);
    IntRect destinationRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context.platformContext(), CGRectMake(0, 0, destinationRect.width(), destinationRect.height()), frameImage.get());
}
1630
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
{
    // Lazily create the image generator on first use.
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !LOG_DISABLED
    double startTime = monotonicallyIncreasingTime();
#endif

    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    // Copy into sRGB so downstream drawing works with a known color space.
    RetainPtr<CGImageRef> convertedImage = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), sRGBColorSpaceRef()));

#if !LOG_DISABLED
    double elapsed = monotonicallyIncreasingTime() - startTime;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(elapsed));
#endif

    return convertedImage;
}
1652
// Populates |supportedTypes| with every MIME type AVFoundation reports as
// playable. Fix: the function body was missing its closing brace, leaving the
// following #if block inside the function and the file unparseable.
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& supportedTypes)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::getSupportedTypes");
    supportedTypes = AVFoundationMIMETypeCache::singleton().types();
}

1659 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// Returns true for the key systems this engine can handle: Apple FairPlay
// Streaming ("com.apple.fps", "com.apple.fps.1_0") and W3C Clear Key.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringASCIICase(keySystem, "com.apple.fps")
        || equalIgnoringASCIICase(keySystem, "com.apple.fps.1_0")
        || equalIgnoringASCIICase(keySystem, "org.w3c.clearkey");
}
1666 #endif
1667
// Engine capability check backing MediaPlayer::canPlayType(). Returns
// IsSupported / MayBeSupported / IsNotSupported, deferring to AVFoundation
// for codec-qualified types.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(ENCRYPTED_MEDIA)
    // From: <http://dvcs.w3.org/hg/html-media/raw-file/eme-v0.1b/encrypted-media/encrypted-media.html#dom-canplaytype>
    // In addition to the steps in the current specification, this method must run the following steps:

    // 1. Check whether the Key System is supported with the specified container and codec type(s) by following the steps for the first matching condition from the following list:
    //    If keySystem is null, continue to the next step.
    if (!parameters.keySystem.isNull() && !parameters.keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        if (equalIgnoringASCIICase(parameters.keySystem, "org.w3c.clearkey") && !parameters.type.isEmpty() && !equalIgnoringASCIICase(parameters.type, "application/x-mpegurl"))
            return MediaPlayer::IsNotSupported;

        // If keySystem contains an unrecognized or unsupported Key System, return the empty string
        if (!keySystemIsSupported(parameters.keySystem))
            return MediaPlayer::IsNotSupported;

        // If the Key System specified by keySystem does not support decrypting the container and/or codec specified in the rest of the type string.
        // (AVFoundation does not provide an API which would allow us to determine this, so this is a no-op)
    }

    // 2. Return "maybe" or "probably" as appropriate per the existing specification of canPlayType().
#endif

    // Media Source and Media Stream content is handled by other engines.
#if ENABLE(MEDIA_SOURCE)
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif
#if ENABLE(MEDIA_STREAM)
    if (parameters.isMediaStream)
        return MediaPlayer::IsNotSupported;
#endif
    if (isUnsupportedMIMEType(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The container type must be on the static list or known to AVFoundation.
    if (!staticMIMETypeList().contains(parameters.type) && !AVFoundationMIMETypeCache::singleton().types().contains(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    // With codecs present, ask AVFoundation about the fully-qualified type.
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type, (NSString *)parameters.codecs];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1714
// Returns whether this engine supports |keySystem|, optionally constrained by
// |mimeType|. An empty key system is never supported here.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (!keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        // Fix: this previously returned MediaPlayer::IsNotSupported (an enum)
        // from a bool function; IsNotSupported == 0, so behavior is unchanged,
        // but the return is now correctly typed.
        if (equalIgnoringASCIICase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringASCIICase(mimeType, "application/x-mpegurl"))
            return false;

        if (!keySystemIsSupported(keySystem))
            return false;

        if (!mimeType.isEmpty() && isUnsupportedMIMEType(mimeType))
            return false;

        // The type must be on the static list or known to AVFoundation.
        if (!mimeType.isEmpty() && !staticMIMETypeList().contains(mimeType) && !AVFoundationMIMETypeCache::singleton().types().contains(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1740
1741 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1742 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Satisfies an AVAssetResourceLoadingRequest directly from in-memory key
// data: fills in the content-information request (if present), then responds
// to the data request with the requested byte range of |keyData|.
static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
{
    if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
        [infoRequest setContentLength:keyData->byteLength()];
        [infoRequest setByteRangeAccessSupported:YES];
    }

    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
        long long start = [dataRequest currentOffset];
        // Clamp the end of the requested range to the size of the key data.
        long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);

        // A range that does not intersect the key data is an error.
        if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
            [request finishLoadingWithError:nil];
            return;
        }

        ASSERT(start <= std::numeric_limits<int>::max());
        ASSERT(end <= std::numeric_limits<int>::max());
        RefPtr<ArrayBuffer> requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
        RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
        [dataRequest respondWithData:nsData.get()];
    }

    [request finishLoading];
}
1768 #endif
1769
// AVAssetResourceLoader delegate hook. Returns true when WebCore will service
// |avRequest| asynchronously (encrypted-media key requests and custom-scheme
// media loads); false tells AVFoundation to handle/fail the request itself.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    // "skd" is a FairPlay Streaming key request.
    if (scheme == "skd") {
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
        initDataView->set<uint32_t>(0, keyURISize, true); // little-endian size prefix

        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, 4, keyURI.length());
        // NOTE(review): the length divides by sizeof(unsigned char) (== 1), so it
        // equals keyURI.length() UTF-16 code units — presumably intentional, but
        // the divisor looks like a leftover; confirm.
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

#if ENABLE(ENCRYPTED_MEDIA)
        if (!player()->keyNeeded("com.apple.lskd", emptyString(), static_cast<const unsigned char*>(initDataBuffer->data()), initDataBuffer->byteLength()))
#elif ENABLE(ENCRYPTED_MEDIA_V2)
        RefPtr<Uint8Array> initData = Uint8Array::create(initDataBuffer, 0, initDataBuffer->byteLength());
        if (!player()->keyNeeded(initData.get()))
#endif
            return false;

        // Park the request until the key arrives (see didFinishLoading path).
        m_keyURIToRequestMap.set(keyURI, avRequest);
        return true;
#if ENABLE(ENCRYPTED_MEDIA_V2)
    } else if (scheme == "clearkey") {
        String keyID = [[[avRequest request] URL] resourceSpecifier];
        StringView keyIDView(keyID);
        CString utf8EncodedKeyId = UTF8Encoding().encode(keyIDView, URLEncodedEntitiesForUnencodables);

        RefPtr<Uint8Array> initData = Uint8Array::create(utf8EncodedKeyId.length());
        initData->setRange((JSC::Uint8Adaptor::Type*)utf8EncodedKeyId.data(), utf8EncodedKeyId.length(), 0);

        // If we already have the key cached, answer synchronously.
        auto keyData = player()->cachedKeyForKeyId(keyID);
        if (keyData) {
            fulfillRequestWithKeyData(avRequest, keyData.get());
            return false;
        }

        if (!player()->keyNeeded(initData.get()))
            return false;

        m_keyURIToRequestMap.set(keyID, avRequest);
        return true;
#endif
    }
#endif

    // Everything else is loaded through WebCore's resource loader.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1826
// Wraps the platform authentication challenge in a WebCore
// AuthenticationChallenge and asks the MediaPlayer client whether it will
// respond to it.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForResponseToAuthenticationChallenge(NSURLAuthenticationChallenge* nsChallenge)
{
#if USE(CFNETWORK)
    // CFNetwork builds must bridge through the CF challenge representation.
    RefPtr<WebCoreNSURLAuthenticationChallengeClient> client = WebCoreNSURLAuthenticationChallengeClient::create(nsChallenge);
    RetainPtr<CFURLAuthChallengeRef> cfChallenge = adoptCF([nsChallenge _createCFAuthChallenge]);
    AuthenticationChallenge challenge(cfChallenge.get(), client.get());
#else
    AuthenticationChallenge challenge(nsChallenge);
#endif

    return player()->shouldWaitForResponseToAuthenticationChallenge(challenge);
}
1839
// Called when AVFoundation cancels an in-flight resource loading request;
// stops the WebCore loader servicing it, if any. (Removed the unused local
// `scheme`, which was computed but never read.)
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest))
        resourceLoader->stopLoading();
}
1849
// Called when loading of |avRequest| has stopped; drops our bookkeeping
// entry for it.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
1854 #endif
1855
// This media engine is usable only when both the AVFoundation and CoreMedia
// frameworks are available (they are soft-linked at runtime).
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    return AVFoundationLibrary() && isCoreMediaFrameworkAvailable();
}
1860
// Would snap |timeValue| to the media's timescale, but currently returns it
// unchanged in all cases (see FIXME below).
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1869
// How long (seconds) a cached media time may be reused before re-querying
// AVFoundation: no caching on iOS and on macOS 10.10+, 5 seconds otherwise.
double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 1010
    return 0;
#else
    return 5;
#endif
}
1878
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayerManager->videoFullscreenLayer())
        return;
#endif

    // Apply the gravity inside a transaction with implicit actions disabled
    // so the layer does not animate to the new gravity.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    NSString *videoGravity = shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize;
    [m_videoLayer.get() setVideoGravity:videoGravity];
    [CATransaction commit];
}
1897
// Returns the first track in |tracks| whose -isEnabled is YES, or nil when
// no track is enabled.
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    NSUInteger enabledIndex = [tracks indexOfObjectPassingTest:^(id track, NSUInteger, BOOL *) {
        return [static_cast<AVAssetTrack*>(track) isEnabled];
    }];
    return enabledIndex == NSNotFound ? nil : [tracks objectAtIndex:enabledIndex];
}
1907
// Recomputes hasVideo/hasAudio/hasClosedCaptions and the track lists whenever
// the platform tracks collection changes, then fires size/characteristics
// notifications as needed.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the previous primary audio language so we can fire a
    // characteristics-changed notification below if it changed.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    // Batch characteristic-change notifications until we are done.
    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif

        // Report the natural size with the track's preferred transform applied.
        presentationSizeDidChange(firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize());
    } else {
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        // Classify each enabled player-item track by its media type.
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // A selected media-selection option also counts as having that medium.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
#endif
#endif

        // Always say we have video if the AVPlayerLayer is ready for display to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Prefer the legible media-selection group for caption detection.
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

    // Without legible-output support, fall back to legacy CC track handling.
#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    LOG(Media, "MediaPlayerPrivateAVFoundation:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s",
        this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio tap pointed at the (possibly new) audible track.
    if (m_provider)
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
#endif

    setDelayCharacteristicsChangedNotification(false);
}
2013
2014 #if ENABLE(VIDEO_TRACK)
// Diffs the AVPlayerItemTracks of |trackType| within |tracks| against the
// existing wrapper items in |oldItems|: creates wrappers for newly-appearing
// platform tracks via |itemFactory|, drops wrappers whose track disappeared,
// updates |oldItems| in place to the surviving + added set, and notifies the
// MediaPlayer through |removedFunction| / |addedFunction|.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // New set: the subset of |tracks| whose asset track matches |trackType|.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    // Old set: the platform tracks backing the existing wrapper items.
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    // Partition the old wrapper items into removed vs. surviving.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    // Wrap each newly-added platform track.
    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the MediaPlayer only after |oldItems| reflects the new state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
2058
2059 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Media-selection-group flavor of the track-diffing helper: refreshes
// |group|'s options for |characteristics|, diffs the resulting
// AVMediaSelectionOptions against the options backing |oldItems|, wraps new
// options via |itemFactory|, updates |oldItems| in place, and notifies the
// MediaPlayer of removals and additions.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, const Vector<String>& characteristics, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions(characteristics);

    // Collect the group's current options that have a live platform option.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOptionType* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        newSelectionOptions.add(option);
    }

    // Collect the options backing the existing wrapper items.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    // Partition the old wrapper items into removed vs. surviving.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (!oldItem->mediaSelectionOption())
            removedItems.append(oldItem);
        else if (removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    // Wrap each newly-added option.
    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);
    
    // Notify the MediaPlayer only after |oldItems| reflects the new state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);
    
    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
2119 #endif
2120
// Rebuilds m_audioTracks from the current platform state, preferring the
// media-selection-group path when available and otherwise diffing the cached
// AVPlayerItemTracks.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    Vector<String> characteristics = player()->preferredAudioCharacteristics();
    // Lazily create the audible selection group the first time through.
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, characteristics);
    }

    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, characteristics, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
    else
#endif
        determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

    // Refresh cached properties on every surviving track wrapper.
    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateAudioTracks(%p) - audio track count was %lu, is %lu", this, count, m_audioTracks.size());
#endif
}
2147
// Rebuilds m_videoTracks from the current platform state (AVPlayerItemTracks,
// plus the visual media-selection group when available).
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create the visual selection group the first time through.
    if (!m_visualGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, Vector<String>());
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, Vector<String>(), &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
#endif

    // Refresh cached properties on every surviving video track wrapper.
    // Fix: this loop previously iterated m_audioTracks — a copy/paste slip
    // from updateAudioTracks() — leaving video track properties stale.
    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateVideoTracks(%p) - video track count was %lu, is %lu", this, count, m_videoTracks.size());
#endif
}
2173
// A platform-rendered text track representation is required only while a
// video fullscreen layer is installed.
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    if (m_videoFullscreenLayerManager->videoFullscreenLayer())
        return true;
#endif
    return false;
}
2182
// Keeps the text-track (captions) layer's frame in sync with the video rect
// while a fullscreen layer is active.
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    if (!m_videoFullscreenLayerManager->videoFullscreenLayer() || !m_textTrackRepresentationLayer)
        return;

    FloatRect videoFullscreenFrame = m_videoFullscreenLayerManager->videoFullscreenFrame();
    // Prefer the video layer's actual video rect; otherwise cover the whole fullscreen frame.
    CGRect textFrame = m_videoLayer ? [m_videoLayer videoRect] : CGRectMake(0, 0, videoFullscreenFrame.width(), videoFullscreenFrame.height());
    [m_textTrackRepresentationLayer setFrame:textFrame];
#endif
}
2194
// Installs |representation|'s platform layer as the captions layer,
// reparenting it into the fullscreen layer when one is active.
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    PlatformLayer* representationLayer = representation ? representation->platformLayer() : nil;
    // Same layer as before: only its geometry may need updating.
    if (representationLayer == m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        return;
    }

    if (m_textTrackRepresentationLayer)
        [m_textTrackRepresentationLayer removeFromSuperlayer];

    m_textTrackRepresentationLayer = representationLayer;

    if (m_videoFullscreenLayerManager->videoFullscreenLayer() && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayerManager->videoFullscreenLayer() addSublayer:m_textTrackRepresentationLayer.get()];
    }

#else
    UNUSED_PARAM(representation);
#endif
}
2218 #endif // ENABLE(VIDEO_TRACK)
2219
2220 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
// Returns (creating on demand) the Web Audio source provider that taps this
// player item's first enabled audible track.
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    if (!m_provider) {
        m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }

    return m_provider.get();
}
2230 #endif
2231
// Propagates the cached presentation size to the cross-platform layer once an
// asset exists; without an asset there is nothing meaningful to report.
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (m_avAsset)
        setNaturalSize(m_cachedPresentationSize);
}
2239     
// Returns true only when the asset's resolved URL (after any redirects) has
// the same scheme/host/port as the URL originally requested.
bool MediaPlayerPrivateAVFoundationObjC::hasSingleSecurityOrigin() const 
{
    // Cannot answer until AVFoundation has finished loading the "resolvedURL" key.
    if (!m_avAsset || [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] != AVKeyValueStatusLoaded)
        return false;
    
    Ref<SecurityOrigin> resolvedOrigin(SecurityOrigin::create(resolvedURL()));
    Ref<SecurityOrigin> requestedOrigin(SecurityOrigin::createFromString(assetURL()));
    return resolvedOrigin.get().isSameSchemeHostPort(&requestedOrigin.get());
}
2249
// Reports whether the media data was loaded with successful CORS checks. Only
// answerable when the WebCoreNSURLSession-backed resource loader is in use;
// otherwise conservatively returns false.
bool MediaPlayerPrivateAVFoundationObjC::didPassCORSAccessCheck() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED > 101100
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    if (!Settings::isAVFoundationNSURLSessionEnabled()
        || ![resourceLoader respondsToSelector:@selector(URLSession)])
        return false;

    WebCoreNSURLSession *session = (WebCoreNSURLSession *)resourceLoader.URLSession;
    if ([session respondsToSelector:@selector(didPassCORSAccessChecks)])
        return session.didPassCORSAccessChecks;
#endif
    return false;
}
2264
2265 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Lazily creates the pull-mode AVPlayerItemVideoOutput used for painting and
// attaches it to the current player item. No-op without an item or when an
// output already exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // With VideoToolbox the conformer converts on demand; don't pin a format here.
    NSDictionary* attributes = nil;
#else
    // Modern dictionary literal; matches the @{} style used elsewhere in this file.
    NSDictionary* attributes = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#endif
    m_videoOutput = adoptNS([[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    // The delegate signals the semaphore used by waitForVideoOutputMediaDataWillChange().
    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p) - returning %p", this, m_videoOutput.get());
}
2288
// Detaches the pull-mode video output from the player item (if any) and
// releases it.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput(%p) - destroying  %p", this, m_videoOutput.get());

    // Use nil rather than 0 when clearing an Objective-C object reference.
    m_videoOutput = nil;
}
2300
// Pulls the pixel buffer for the player item's current time from the video
// output. Returns null when no new buffer is available for that time (the
// caller then keeps displaying the last captured image).
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::createPixelBuffer()
{
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return nullptr; // nullptr, not 0, for an empty RetainPtr — matches the file's idiom.

    return adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
}
2314
// Returns whether a frame can be painted right now, creating the video output
// on demand so a subsequent paint can succeed.
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;

    // A previously captured image can always be repainted.
    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    return [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]];
}
2328
// Captures the current video frame into m_lastImage, converting the pixel
// buffer to an image via the lazily created pixel buffer conformer.
void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
{
    RetainPtr<CVPixelBufferRef> pixelBuffer = createPixelBuffer();

    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
    // for the requested time has already been retrieved. In this case, the last valid image (if any)
    // should be displayed.
    if (!pixelBuffer)
        return;

    if (!m_pixelBufferConformer) {
#if USE(VIDEOTOOLBOX)
        // With VideoToolbox, conform incoming buffers to 32BGRA for drawing.
        NSDictionary *attributes = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#else
        NSDictionary *attributes = nil;
#endif
        m_pixelBufferConformer = std::make_unique<PixelBufferConformerCV>((CFDictionaryRef)attributes);
    }

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    m_lastImage = m_pixelBufferConformer->createImageFromPixelBuffer(pixelBuffer.get());

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateLastImage(%p) - creating buffer took %.4f", this, narrowPrecisionToFloat(duration));
#endif
}
2359
// Paints the current video frame into the graphics context, applying the video
// track's preferred transform. May briefly block waiting for a frame when the
// output exists but has produced none yet.
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext& context, const FloatRect& outputRect)
{
    if (m_videoOutput && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();

    updateLastImage();

    if (!m_lastImage)
        return;

    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(%p)", this);

    GraphicsContextStateSaver stateSaver(context);
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
    // Map the destination rect through the inverse transform so that, after
    // concatenating videoTransform below, the image lands exactly in outputRect.
    FloatRect transformedOutputRect = videoTransform.inverse().valueOr(AffineTransform()).mapRect(outputRect);

    context.concatCTM(videoTransform);
    context.drawNativeImage(m_lastImage.get(), imageRect.size(), transformedOutputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();

}
2390
// Lazily creates a second video output whose pixel buffers are IOSurface
// OpenGL(ES)-FBO compatible, used for copying frames into GL textures.
void MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_openGLVideoOutput)
        return;

#if PLATFORM(IOS)
    NSDictionary* attributes = @{(NSString *)kCVPixelBufferIOSurfaceOpenGLESFBOCompatibilityKey: @YES};
#else
    NSDictionary* attributes = @{(NSString *)kCVPixelBufferIOSurfaceOpenGLFBOCompatibilityKey: @YES};
#endif
    m_openGLVideoOutput = adoptNS([[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:attributes]);
    ASSERT(m_openGLVideoOutput);

    [m_avPlayerItem.get() addOutput:m_openGLVideoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput(%p) - returning %p", this, m_openGLVideoOutput.get());
}
2410
// Detaches the GL-compatible video output from the player item (if any) and
// releases it.
void MediaPlayerPrivateAVFoundationObjC::destroyOpenGLVideoOutput()
{
    if (!m_openGLVideoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_openGLVideoOutput.get()];
    // Fix copy-paste bug: log the OpenGL output being destroyed, not m_videoOutput.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyOpenGLVideoOutput(%p) - destroying  %p", this, m_openGLVideoOutput.get());

    m_openGLVideoOutput = 0;
}
2422
// Captures the newest GL-compatible pixel buffer for the current host time
// into m_lastOpenGLImage, if the output has produced one since the last pull.
void MediaPlayerPrivateAVFoundationObjC::updateLastOpenGLImage()
{
    if (!m_openGLVideoOutput)
        return;

    CMTime currentTime = [m_openGLVideoOutput itemTimeForHostTime:CACurrentMediaTime()];
    if (![m_openGLVideoOutput hasNewPixelBufferForItemTime:currentTime])
        return;

    m_lastOpenGLImage = adoptCF([m_openGLVideoOutput copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
}
2434
// Copies the current video frame into the caller-provided GL texture.
// Returns false when the request cannot be satisfied: flipped or
// premultiplied output is unsupported, or no frame/texture cache is available.
bool MediaPlayerPrivateAVFoundationObjC::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    if (flipY || premultiplyAlpha)
        return false;

    ASSERT(context);

    if (!m_openGLVideoOutput)
        createOpenGLVideoOutput();

    updateLastOpenGLImage();

    if (!m_lastOpenGLImage)
        return false;

    size_t width = CVPixelBufferGetWidth(m_lastOpenGLImage.get());
    size_t height = CVPixelBufferGetHeight(m_lastOpenGLImage.get());

    // The texture cache and copier are created lazily and reused across frames.
    if (!m_textureCache) {
        m_textureCache = TextureCacheCV::create(*context);
        if (!m_textureCache)
            return false;
    }

    RetainPtr<CVOpenGLTextureRef> videoTexture = m_textureCache->textureFromImage(m_lastOpenGLImage.get(), outputTarget, level, internalFormat, format, type);

    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierCV>(*context);

    return m_videoTextureCopier->copyVideoTextureToPlatformTexture(videoTexture.get(), width, height, outputTexture, outputTarget, level, internalFormat, format, type, premultiplyAlpha, flipY);
}
2466
// Returns the most recent video frame as a native image, refreshing it from
// the video output first; may be null if no frame has ever been captured.
NativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage;
}
2472
// Blocks the calling thread for at most one second until the video output
// reports that new media data is available.
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    // The output's delegate signals the semaphore via outputMediaDataWillChange().
    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for 1 second.
    long result = dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(0, 1 * NSEC_PER_SEC));

    // A non-zero result means the wait timed out rather than being signaled.
    if (result)
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange(%p) timed out", this);
}
2486
// Delegate callback invoked on the pull delegate queue when the video output
// has new media data; wakes any thread blocked in
// waitForVideoOutputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutputType *)
{
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
2491 #endif
2492
2493 #if ENABLE(ENCRYPTED_MEDIA)
// Generates a streaming content key request for the given key system, using
// the pending AVAssetResourceLoadingRequest recorded for the key URI found in
// the init data. On success, re-keys the request under a freshly generated
// session ID and delivers the key message to the media player client.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::generateKeyRequest(const String& keySystem, const unsigned char* initDataPtr, unsigned initDataLength)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    RefPtr<Uint8Array> initData = Uint8Array::create(initDataPtr, initDataLength);
    String keyURI;
    String keyID;
    RefPtr<Uint8Array> certificate;
    if (!extractKeyURIKeyIDAndCertificateFromInitData(initData.get(), keyURI, keyID, certificate))
        return MediaPlayer::InvalidPlayerState;

    // A loading request for this key URI must already be pending.
    if (!m_keyURIToRequestMap.contains(keyURI))
        return MediaPlayer::InvalidPlayerState;

    String sessionID = createCanonicalUUIDString();

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_keyURIToRequestMap.get(keyURI);

    RetainPtr<NSData> certificateData = adoptNS([[NSData alloc] initWithBytes:certificate->baseAddress() length:certificate->byteLength()]);
    // The key ID (a WTF::String) converts implicitly to NSString* here; it is
    // then encoded as UTF-8 bytes for the content identifier.
    NSString* assetStr = keyID;
    RetainPtr<NSData> assetID = [NSData dataWithBytes: [assetStr cStringUsingEncoding:NSUTF8StringEncoding] length:[assetStr lengthOfBytesUsingEncoding:NSUTF8StringEncoding]];
    NSError* error = 0;
    RetainPtr<NSData> keyRequest = [avRequest.get() streamingContentKeyRequestDataForApp:certificateData.get() contentIdentifier:assetID.get() options:nil error:&error];

    if (!keyRequest) {
        // Surface the underlying platform error code to the client.
        NSError* underlyingError = [[error userInfo] objectForKey:NSUnderlyingErrorKey];
        player()->keyError(keySystem, sessionID, MediaPlayerClient::DomainError, [underlyingError code]);
        return MediaPlayer::NoError;
    }

    RefPtr<ArrayBuffer> keyRequestBuffer = ArrayBuffer::create([keyRequest.get() bytes], [keyRequest.get() length]);
    RefPtr<Uint8Array> keyRequestArray = Uint8Array::create(keyRequestBuffer, 0, keyRequestBuffer->byteLength());
    player()->keyMessage(keySystem, sessionID, keyRequestArray->data(), keyRequestArray->byteLength(), URL());

    // Move ownership of the AVAssetResourceLoadingRequest from the keyURIToRequestMap to the sessionIDToRequestMap:
    m_sessionIDToRequestMap.set(sessionID, avRequest);
    m_keyURIToRequestMap.remove(keyURI);

    return MediaPlayer::NoError;
}
2535
// Delivers key data to the loading request associated with sessionID and
// completes that request. The init data parameters are unused here.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::addKey(const String& keySystem, const unsigned char* keyPtr, unsigned keyLength, const unsigned char* initDataPtr, unsigned initDataLength, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    // The session must have been created by a prior generateKeyRequest().
    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_sessionIDToRequestMap.get(sessionID);
    RetainPtr<NSData> keyData = adoptNS([[NSData alloc] initWithBytes:keyPtr length:keyLength]);
    [[avRequest.get() dataRequest] respondWithData:keyData.get()];
    [avRequest.get() finishLoading];
    m_sessionIDToRequestMap.remove(sessionID);

    player()->keyAdded(keySystem, sessionID);

    UNUSED_PARAM(initDataPtr);
    UNUSED_PARAM(initDataLength);
    return MediaPlayer::NoError;
}
2556
// Cancels a pending key request by dropping the loading request stored for
// the given session ID.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::cancelKeyRequest(const String& keySystem, const String& sessionID)
{
    // Unknown key systems are rejected outright.
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    // Cancelling a session we never created (or already tore down) is invalid.
    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    m_sessionIDToRequestMap.remove(sessionID);
    return MediaPlayer::NoError;
}
2568 #endif
2569
2570 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Removes and returns the pending loading request for the given key URI; the
// result is empty when no request is pending for that URI.
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
2575
// Tries to satisfy every pending loading request for which the player now has
// a cached key, then forgets the requests that were fulfilled. Removal is
// deferred so the map is not mutated while it is being iterated.
void MediaPlayerPrivateAVFoundationObjC::keyAdded()
{
    Vector<String> satisfiedKeyIds;

    for (auto& entry : m_keyURIToRequestMap) {
        auto keyData = player()->cachedKeyForKeyId(entry.key);
        if (!keyData)
            continue;

        fulfillRequestWithKeyData(entry.value.get(), keyData.get());
        satisfiedKeyIds.append(entry.key);
    }

    for (auto& keyId : satisfiedKeyIds)
        m_keyURIToRequestMap.remove(keyId);
}
2595
// Called when the CDM session is being torn down; drops our weak reference.
// Asserts that the session being removed is the one we are tracking.
void MediaPlayerPrivateAVFoundationObjC::removeSession(CDMSession& session)
{
    ASSERT_UNUSED(session, &session == m_session);
    m_session = nullptr;
}
2601
// Creates a CDM session for the given key system, or returns null when the
// key system is unsupported. The player keeps only a weak pointer; ownership
// transfers to the caller.
std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem, CDMSessionClient* client)
{
    if (!keySystemIsSupported(keySystem))
        return nullptr;
    auto session = std::make_unique<CDMSessionAVFoundationObjC>(this, client);
    m_session = session->createWeakPtr();
    return WTFMove(session);
}
2610
// When output becomes obscured because external protection (e.g. HDCP) is
// insufficient, notify the active CDM session so it can raise an error.
void MediaPlayerPrivateAVFoundationObjC::outputObscuredDueToInsufficientExternalProtectionChanged(bool newValue)
{
    if (m_session && newValue)
        m_session->playerDidReceiveError([NSError errorWithDomain:@"com.apple.WebKit" code:'HDCP' userInfo:nil]);
}
2616 #endif
2617
2618 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
// Builds text track objects for legacy closed caption tracks when legible
// output support is unavailable, reusing existing track objects where possible
// and reporting added/removed tracks to the client.
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Deselect any legible option so AVFoundation does not render captions itself.
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        // If we already have a track object for this player item track, keep it
        // (remove it from the "removed" list) instead of creating a new one.
        bool newCCTrack = true;
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(m_textTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;
        
        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2653 #endif
2654
// Returns the asset's audible tracks, but only once AVFoundation reports the
// "tracks" key as loaded — querying earlier could block.
NSArray* MediaPlayerPrivateAVFoundationObjC::safeAVAssetTracksForAudibleMedia()
{
    if (!m_avAsset || [m_avAsset.get() statusOfValueForKey:@"tracks" error:NULL] != AVKeyValueStatusLoaded)
        return nil;

    return [m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible];
}
2665
2666 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// The media selection groups are only safe to query once the asset's
// "availableMediaCharacteristicsWithMediaSelectionOptions" key has loaded.
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    if (!m_avAsset)
        return false;

    return [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2677
// Returns the legible media selection group, or nil until the asset's media
// selection data has loaded.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible] : nil;
}
2685
// Returns the audible media selection group, or nil until the asset's media
// selection data has loaded.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible] : nil;
}
2693
// Returns the visual media selection group, or nil until the asset's media
// selection data has loaded.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual] : nil;
}
2701
// Rebuilds the text track list from the asset's legible media selection
// options, reusing existing track objects where possible and reporting
// added/removed tracks to the client.
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        // Look for an existing (non-legacy) track object already wrapping this option.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2757
// Lazily creates the single metadata text track used for timed metadata and
// registers it with the client. Subsequent calls are no-ops.
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    if (m_metadataTrack)
        return;

    m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
    m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
    player()->addTextTrack(m_metadataTrack);
}
2767
// Forwards cue data from AVFoundation's legible output to the currently
// selected text track; dropped when no track is selected.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    ASSERT(time >= MediaTime::zeroTime());

    if (!m_currentTextTrack)
        return;

    m_currentTextTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), reinterpret_cast<CFArrayRef>(nativeSamples), time);
}
2777
// Discards any accumulated cue state on the currently selected text track
// (e.g. after a seek); a no-op when no track is selected.
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::flushCues(%p)", this);

    if (m_currentTextTrack)
        m_currentTextTrack->resetCueValues();
}
2787 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2788
// Selects the given in-band text track with AVFoundation (or deselects all
// tracks when track is null), routing through the mechanism appropriate for
// the track's category.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");
        
    m_currentTextTrack = track;

    if (track) {
        // Legacy CC tracks use the player-level closed caption switch; other
        // categories go through media selection on the player item.
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
        // No track selected: clear both the media selection and legacy captions.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
    }

}
2817
// Returns (and caches in m_languageOfPrimaryAudioTrack) the language of the
// primary audio track, or the empty string when it cannot be determined.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    // A non-null cached value (possibly empty) means we already computed this.
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        m_languageOfPrimaryAudioTrack = emptyString();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - %lu audio tracks, returning emptyString()", this, static_cast<unsigned long>(tracks ? [tracks count] : 0));
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

#if !LOG_DISABLED
    if (m_languageOfPrimaryAudioTrack == emptyString())
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
    else
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
#endif

    return m_languageOfPrimaryAudioTrack;
}
2859
2860 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// Returns whether playback is currently routed to a wireless target. On iOS
// this is the player's external playback state; on macOS it depends on the
// configured playback target's type and route state.
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    bool wirelessTarget = false;

#if !PLATFORM(IOS)
    if (m_playbackTarget) {
        if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation)
            wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
        else
            wirelessTarget = m_shouldPlayToPlaybackTarget && m_playbackTarget->hasActiveRoute();
    }
#else
    wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
#endif

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless(%p) - returning %s", this, boolString(wirelessTarget));

    return wirelessTarget;
}
2880
// Maps the platform's external device type to the cross-platform target type.
// Without a player there is no target; off iOS only AirPlay is reported.
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

#if PLATFORM(IOS)
    switch (wkExernalDeviceTypeForPlayer(m_avPlayer.get())) {
    case wkExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case wkExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case wkExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;

#else
    return MediaPlayer::TargetTypeAirPlay;
#endif
}
2903
// Returns the display name of the current wireless playback target, or the
// empty string when there is no player or no target.
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();

    String wirelessTargetName;
#if !PLATFORM(IOS)
    if (m_playbackTarget)
        wirelessTargetName = m_playbackTarget->deviceName();
#else
    wirelessTargetName = wkExernalDeviceDisplayNameForPlayer(m_avPlayer.get());
#endif
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName(%p) - returning %s", this, wirelessTargetName.utf8().data());

    return wirelessTargetName;
}
2920
// Returns whether wireless (external) video playback is disabled, refreshing
// the cached flag from the player when one exists.
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    // Without a player, answer from the cached value.
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;

    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled(%p) - returning %s", this, boolString(!m_allowsWirelessVideoPlayback));

    return !m_allowsWirelessVideoPlayback;
}
2931
// Enables/disables wireless (external) video playback, caching the value so it
// can be applied when a player is created later.
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(%p) - %s", this, boolString(disabled));
    m_allowsWirelessVideoPlayback = !disabled;
    if (!m_avPlayer)
        return;

    // Delay callbacks so KVO notifications triggered by this change are deferred.
    setDelayCallbacks(true);
    [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
    setDelayCallbacks(false);
}
2943
2944 #if !PLATFORM(IOS)
// Stores the new playback target and, for AVFoundation targets, its output
// context. Stops routing to the target if it has no active route.
void MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
{
    m_playbackTarget = WTFMove(target);

    m_outputContext = m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation ? toMediaPlaybackTargetMac(m_playbackTarget.get())->outputContext() : nullptr;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(%p) - target = %p, device name = %s", this, m_outputContext.get(), m_playbackTarget->deviceName().utf8().data());

    if (!m_playbackTarget->hasActiveRoute())
        setShouldPlayToPlaybackTarget(false);
}
2956
// Starts or stops routing playback to the configured target. For AVFoundation
// targets this sets/clears the player's output context; for mock targets it
// just schedules the wireless-state-changed notification.
void MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(bool shouldPlay)
{
    if (m_shouldPlayToPlaybackTarget == shouldPlay)
        return;

    m_shouldPlayToPlaybackTarget = shouldPlay;

    if (!m_playbackTarget)
        return;

    if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation) {
        AVOutputContext *newContext = shouldPlay ? m_outputContext.get() : nil;

        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(%p) - target = %p, shouldPlay = %s", this, newContext, boolString(shouldPlay));

        if (!m_avPlayer)
            return;

        // Avoid redundant assignments: both nil, or already the same context.
        RetainPtr<AVOutputContext> currentContext = m_avPlayer.get().outputContext;
        if ((!newContext && !currentContext.get()) || [currentContext.get() isEqual:newContext])
            return;

        setDelayCallbacks(true);
        m_avPlayer.get().outputContext = newContext;
        setDelayCallbacks(false);

        return;
    }

    ASSERT(m_playbackTarget->targetType() == MediaPlaybackTarget::Mock);

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(%p) - target = {Mock}, shouldPlay = %s", this, boolString(shouldPlay));

    // Notify asynchronously on the main thread; guard against this object
    // being destroyed before the notification runs.
    setDelayCallbacks(true);
    auto weakThis = createWeakPtr();
    scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis] {
        if (!weakThis)
            return;
        weakThis->playbackTargetIsWirelessDidChange();
    }));
    setDelayCallbacks(false);
}
2999 #endif // !PLATFORM(IOS)
3000
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
    if (!m_avPlayer)
        return;

#if PLATFORM(IOS)
    // On iOS, mirror to the external screen only while in standard fullscreen.
    [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:player()->fullscreenMode() & MediaPlayer::VideoFullscreenModeStandard];
#endif
}
3010 #endif
3011
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    // KVO handler for AVPlayerItem.status: cache it, then recompute the
    // player's ready/network states.
    m_cachedItemStatus = status;

    updateStates();
}
3018
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    // KVO "prior" notification; the matching DidChange balances this counter.
    ++m_pendingStatusChanges;
}
3023
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    // Balance the matching WillChange; only recompute state once no status
    // notifications remain pending.
    ASSERT(m_pendingStatusChanges);
    --m_pendingStatusChanges;
    if (!m_pendingStatusChanges)
        updateStates();
}
3032
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    // KVO "prior" notification; the matching DidChange balances this counter.
    ++m_pendingStatusChanges;
}
3037
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    // Balance the matching WillChange; only recompute state once no status
    // notifications remain pending.
    ASSERT(m_pendingStatusChanges);
    --m_pendingStatusChanges;
    if (!m_pendingStatusChanges)
        updateStates();
}
3046
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    // KVO "prior" notification; the matching DidChange balances this counter.
    ++m_pendingStatusChanges;
}
3051
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    // Balance the matching WillChange; only recompute state once no status
    // notifications remain pending.
    ASSERT(m_pendingStatusChanges);
    --m_pendingStatusChanges;
    if (!m_pendingStatusChanges)
        updateStates();
}
3060
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray> seekableRanges)
{
    // Cache first: the change notification and state recomputation read the
    // cached ranges.
    m_cachedSeekableRanges = seekableRanges;

    seekableTimeRangesChanged();
    updateStates();
}
3068
void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray> loadedRanges)
{
    // Cache first: the change notification and state recomputation read the
    // cached ranges.
    m_cachedLoadedRanges = loadedRanges;

    loadedTimeRangesChanged();
    updateStates();
}
3076
void MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange(bool isReady)
{
    m_cachedIsReadyForDisplay = isReady;
    // A layer becoming ready for display implies a video track exists even if
    // the track list has not reported one yet, so force a tracks re-scan.
    if (!hasVideo() && isReady)
        tracksChanged();
    updateStates();
}
3084
void MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange(bool)
{
    // KVO handler for an AVPlayerItemTrack's "enabled" key; the new value is
    // unused because the full track list is re-examined.
    tracksChanged();
    updateStates();
}
3090
void MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(bool shouldBuffer)
{
    // Fix: log the method's actual name (was "shouldBufferData"), matching the
    // naming convention of every other LOG line in this file.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(%p) - %s", this, boolString(shouldBuffer));
    if (m_shouldBufferData == shouldBuffer)
        return;

    m_shouldBufferData = shouldBuffer;

    if (!m_avPlayer)
        return;

    // Detaching the item from the player halts buffering; reattaching resumes it.
    setAVPlayerItem(shouldBuffer ? m_avPlayerItem.get() : nil);
}
3104
3105 #if ENABLE(DATACUE_VALUE)
// Maps an AVFoundation metadata key space to the WebVTT-style metadata cue
// type string exposed to the page; returns the empty atom for unknown spaces.
static const AtomicString& metadataType(NSString *avMetadataKeySpace)
{
    static NeverDestroyed<const AtomicString> quickTimeUserData("com.apple.quicktime.udta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> isoUserData("org.mp4ra", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> quickTimeMetadata("com.apple.quicktime.mdta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> iTunesMetadata("com.apple.itunes", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> id3Metadata("org.id3", AtomicString::ConstructFromLiteral);

    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeUserData])
        return quickTimeUserData;
    // NOTE(review): the extra null check guards against this constant being
    // unavailable (presumably weak-linked on older OS versions) — confirm.
    if (AVMetadataKeySpaceISOUserData && [avMetadataKeySpace isEqualToString:AVMetadataKeySpaceISOUserData])
        return isoUserData;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeMetadata])
        return quickTimeMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceiTunes])
        return iTunesMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceID3])
        return id3Metadata;

    return emptyAtom;
}
3127 #endif
3128
void MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(RetainPtr<NSArray> metadata, const MediaTime& mediaTime)
{
    // KVO handler for AVPlayerItem.timedMetadata. NSNull means "no metadata at
    // this time"; normalize that to nil.
    m_currentMetaData = metadata && ![metadata isKindOfClass:[NSNull class]] ? metadata : nil;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(%p) - adding %i cues at time %s", this, m_currentMetaData ? static_cast<int>([m_currentMetaData.get() count]) : 0, toString(mediaTime).utf8().data());

#if ENABLE(DATACUE_VALUE)
    // While seeking, the reported item time is unreliable; skip cue updates.
    if (seeking())
        return;

    if (!m_metadataTrack)
        processMetadataTrack();

    // No new metadata: close out any open-ended cues at the current time.
    if (!metadata || [metadata isKindOfClass:[NSNull class]]) {
        m_metadataTrack->updatePendingCueEndTimes(mediaTime);
        return;
    }

    // Set the duration of all incomplete cues before adding new ones.
    MediaTime earliestStartTime = MediaTime::positiveInfiniteTime();
    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = std::max(toMediaTime(item.time), MediaTime::zeroTime());
        if (start < earliestStartTime)
            earliestStartTime = start;
    }
    m_metadataTrack->updatePendingCueEndTimes(earliestStartTime);

    // Add a data cue per metadata item; items without a duration stay
    // open-ended until a later batch closes them.
    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = std::max(toMediaTime(item.time), MediaTime::zeroTime());
        MediaTime end = MediaTime::positiveInfiniteTime();
        if (CMTIME_IS_VALID(item.duration))
            end = start + toMediaTime(item.duration);

        AtomicString type = nullAtom;
        if (item.keySpace)
            type = metadataType(item.keySpace);

        m_metadataTrack->addDataCue(start, end, SerializedPlatformRepresentationMac::create(item), type);
    }
#endif
}
3170
void MediaPlayerPrivateAVFoundationObjC::tracksDidChange(RetainPtr<NSArray> tracks)
{
    // KVO handler for AVPlayerItem.tracks. Stop observing the old tracks
    // before replacing the cached list.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];

    NSArray *assetTracks = [m_avAsset tracks];

    // Keep asset-backed tracks; filter out streaming-only tracks that are
    // already represented by a media selection group.
    m_cachedTracks = [tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id obj, NSUInteger, BOOL*) {
        AVAssetTrack* assetTrack = [obj assetTrack];

        if ([assetTracks containsObject:assetTrack])
            return YES;

        // Track is a streaming track. Omit if it belongs to a valid AVMediaSelectionGroup.
        if (!hasLoadedMediaSelectionGroups())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicAudible] && safeMediaSelectionGroupForAudibleMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicVisual] && safeMediaSelectionGroupForVisualMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible] && safeMediaSelectionGroupForLegibleMedia())
            return NO;

        return YES;
    }]];

    // Observe "enabled" on the surviving tracks so trackEnabledDidChange fires.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track addObserver:m_objcObserver.get() forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayerItemTrack];

    // The track set changed, so any cached size estimate is stale.
    m_cachedTotalBytes = 0;

    tracksChanged();
    updateStates();
}
3208
void MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange(bool hasEnabledAudio)
{
    // KVO handler for AVPlayerItem.hasEnabledAudio.
    m_cachedHasEnabledAudio = hasEnabledAudio;

    tracksChanged();
    updateStates();
}
3216
void MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange(FloatSize size)
{
    // KVO handler for AVPlayerItem.presentationSize.
    m_cachedPresentationSize = size;

    sizeChanged();
    updateStates();
}
3224
void MediaPlayerPrivateAVFoundationObjC::durationDidChange(const MediaTime& duration)
{
    // KVO handler for AVPlayerItem.duration; invalidate so the next query
    // recomputes from the new cached value.
    m_cachedDuration = duration;

    invalidateCachedDuration();
}
3231
void MediaPlayerPrivateAVFoundationObjC::rateDidChange(double rate)
{
    // KVO handler for AVPlayer.rate.
    m_cachedRate = rate;

    updateStates();
    rateChanged();
}
3239     
3240 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// KVO handler for the player's external-playback keys; forwards to the
// cross-platform notification.
void MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange()
{
    playbackTargetIsWirelessChanged();
}
3245 #endif
3246
// KVO handler for AVPlayerItem.canPlayFastForward; cache only, no notification.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange(bool newValue)
{
    m_cachedCanPlayFastForward = newValue;
}
3251
// KVO handler for AVPlayerItem.canPlayFastReverse; cache only, no notification.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange(bool newValue)
{
    m_cachedCanPlayFastReverse = newValue;
}
3256
URL MediaPlayerPrivateAVFoundationObjC::resolvedURL() const
{
    // Use the asset's resolved URL only once AVFoundation has finished
    // loading that key; otherwise defer to the base class.
    if (m_avAsset && [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] == AVKeyValueStatusLoaded)
        return URL([m_avAsset resolvedURL]);

    return MediaPlayerPrivateAVFoundation::resolvedURL();
}
3264
// Keys loaded asynchronously on an AVAsset before it is usable.
// Uses a function-local static initializer (thread-safe "magic static" in
// C++/ObjC++), consistent with assetTrackMetadataKeyNames() and
// playerKVOProperties(); the previous null-checked lazy pattern could race
// if first invoked from two threads.
NSArray* assetMetadataKeyNames()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"duration",
                                @"naturalSize",
                                @"preferredTransform",
                                @"preferredVolume",
                                @"preferredRate",
                                @"playable",
                                @"resolvedURL",
                                @"tracks",
                                @"availableMediaCharacteristicsWithMediaSelectionOptions",
                                nil];
    return keys;
}
3282
// Key paths observed on every AVPlayerItem via KVO.
// Uses a function-local static initializer (thread-safe "magic static" in
// C++/ObjC++), consistent with assetTrackMetadataKeyNames() and
// playerKVOProperties(); the previous null-checked lazy pattern could race
// if first invoked from two threads.
NSArray* itemKVOProperties()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"presentationSize",
                                @"status",
                                @"asset",
                                @"tracks",
                                @"seekableTimeRanges",
                                @"loadedTimeRanges",
                                @"playbackLikelyToKeepUp",
                                @"playbackBufferFull",
                                @"playbackBufferEmpty",
                                @"duration",
                                @"hasEnabledAudio",
                                @"timedMetadata",
                                @"canPlayFastForward",
                                @"canPlayFastReverse",
                                nil];
    return keys;
}
3305
// Keys loaded asynchronously on each AVAssetTrack before it is examined.
NSArray* assetTrackMetadataKeyNames()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"totalSampleDataLength", @"mediaType", @"enabled", @"preferredTransform", @"naturalSize", nil];
    return keys;
}
3311
// Key paths observed on the AVPlayer via KVO; the set varies with the
// wireless-playback and encrypted-media feature flags.
NSArray* playerKVOProperties()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"rate",
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
                            @"externalPlaybackActive", @"allowsExternalPlayback",
#endif
#if ENABLE(ENCRYPTED_MEDIA_V2)
                            @"outputObscuredDueToInsufficientExternalProtection",
#endif
                            nil];
    return keys;
}
3324 } // namespace WebCore
3325
3326 @implementation WebCoreAVFMovieObserver
3327
// Designated initializer; the observer holds a raw back-pointer to its owner,
// which is cleared via -disconnect before the owner is destroyed.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    if (!(self = [super init]))
        return nil;

    m_callback = callback;
    return self;
}
3336
// Severs the link to the owning player; cancels any queued perform requests
// first so no callback can fire with a dangling pointer.
- (void)disconnect
{
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    m_callback = 0;
}
3342
// Asset metadata finished loading; forward to the owner on the main thread
// (no-op after -disconnect).
- (void)metadataLoaded
{
    if (m_callback)
        m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
}
3350
// AVPlayerItemDidPlayToEndTime notification handler; forward to the owner on
// the main thread (no-op after -disconnect).
- (void)didEnd:(NSNotification *)unusedNotification
{
    UNUSED_PARAM(unusedNotification);
    if (m_callback)
        m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
}
3358
// Central KVO dispatcher: maps (context, keyPath, prior/after) to the matching
// MediaPlayerPrivateAVFoundationObjC handler, bound into a std::function and
// scheduled on the main thread.
- (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context
{
    UNUSED_PARAM(object);
    id newValue = [change valueForKey:NSKeyValueChangeNewKey];

    if (!m_callback)
        return;

    // "Prior" notifications (sent before the value changes) drive the
    // WillChange handlers; normal notifications drive the DidChange handlers.
    bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];

#if !LOG_DISABLED
    if (willChange)
        LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - will change, keyPath = %s", self, [keyPath UTF8String]);
    else {
        RetainPtr<NSString> valueString = adoptNS([[NSString alloc] initWithFormat:@"%@", newValue]);
        LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - did change, keyPath = %s, value = %s", self, [keyPath UTF8String], [valueString.get() UTF8String]);
    }
#endif

    std::function<void ()> function;

    if (context == MediaPlayerAVFoundationObservationContextAVPlayerLayer) {
        if ([keyPath isEqualToString:@"readyForDisplay"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange, m_callback, [newValue boolValue]);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItemTrack) {
        if ([keyPath isEqualToString:@"enabled"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange, m_callback, [newValue boolValue]);
    }

    // Prior notifications for the buffering keys bump the pending-status
    // counter so state is recomputed only once per will/did pair.
    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && willChange) {
        if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange, m_callback);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && !willChange) {
        // A value changed for an AVPlayerItem
        if ([keyPath isEqualToString:@"status"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange, m_callback, [newValue intValue]);
        else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"asset"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::setAsset, m_callback, RetainPtr<id>(newValue));
        else if ([keyPath isEqualToString:@"loadedTimeRanges"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"seekableTimeRanges"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"tracks"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::tracksDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"hasEnabledAudio"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"presentationSize"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange, m_callback, FloatSize([newValue sizeValue]));
        else if ([keyPath isEqualToString:@"duration"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::durationDidChange, m_callback, toMediaTime([newValue CMTimeValue]));
        else if ([keyPath isEqualToString:@"timedMetadata"] && newValue) {
            // Capture the item's current time so the metadata cues can be
            // stamped; falls back to zero when the time is not numeric.
            MediaTime now;
            CMTime itemTime = [(AVPlayerItemType *)object currentTime];
            if (CMTIME_IS_NUMERIC(itemTime))
                now = std::max(toMediaTime(itemTime), MediaTime::zeroTime());
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::metadataDidArrive, m_callback, RetainPtr<NSArray>(newValue), now);
        } else if ([keyPath isEqualToString:@"canPlayFastReverse"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"canPlayFastForward"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange, m_callback, [newValue boolValue]);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayer && !willChange) {
        // A value changed for an AVPlayer.
        if ([keyPath isEqualToString:@"rate"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::rateDidChange, m_callback, [newValue doubleValue]);
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
        else if ([keyPath isEqualToString:@"externalPlaybackActive"] || [keyPath isEqualToString:@"allowsExternalPlayback"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange, m_callback);
#endif
#if ENABLE(ENCRYPTED_MEDIA_V2)
        else if ([keyPath isEqualToString:@"outputObscuredDueToInsufficientExternalProtection"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::outputObscuredDueToInsufficientExternalProtectionChanged, m_callback, [newValue boolValue]);
#endif
    }
    
    if (!function)
        return;

    auto weakThis = m_callback->createWeakPtr();
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis, function]{
        // weakThis and function both refer to the same MediaPlayerPrivateAVFoundationObjC instance. If the WeakPtr has
        // been cleared, the underlying object has been destroyed, and it is unsafe to call function().
        if (!weakThis)
            return;
        function();
    }));
}
3461
3462 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
3463 - (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime