Allow clients to override their own hardware media requirements where no fallback...
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27 #import "MediaPlayerPrivateAVFoundationObjC.h"
28
29 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
30
31 #import "AVAssetTrackUtilities.h"
32 #import "AVFoundationMIMETypeCache.h"
33 #import "AVFoundationSPI.h"
34 #import "AVTrackPrivateAVFObjCImpl.h"
35 #import "AudioSourceProviderAVFObjC.h"
36 #import "AudioTrackPrivateAVFObjC.h"
37 #import "AuthenticationChallenge.h"
38 #import "CDMSessionAVFoundationObjC.h"
39 #import "Cookie.h"
40 #import "Extensions3D.h"
41 #import "FloatConversion.h"
42 #import "FloatConversion.h"
43 #import "GraphicsContext.h"
44 #import "GraphicsContext3D.h"
45 #import "GraphicsContextCG.h"
46 #import "InbandMetadataTextTrackPrivateAVF.h"
47 #import "InbandTextTrackPrivateAVFObjC.h"
48 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
49 #import "Logging.h"
50 #import "MediaPlaybackTargetMac.h"
51 #import "MediaPlaybackTargetMock.h"
52 #import "MediaSelectionGroupAVFObjC.h"
53 #import "MediaTimeAVFoundation.h"
54 #import "OutOfBandTextTrackPrivateAVF.h"
55 #import "PixelBufferConformerCV.h"
56 #import "PlatformTimeRanges.h"
57 #import "QuartzCoreSPI.h"
58 #import "SecurityOrigin.h"
59 #import "SerializedPlatformRepresentationMac.h"
60 #import "Settings.h"
61 #import "TextEncoding.h"
62 #import "TextTrackRepresentation.h"
63 #import "TextureCacheCV.h"
64 #import "URL.h"
65 #import "VideoTextureCopierCV.h"
66 #import "VideoTrackPrivateAVFObjC.h"
67 #import "WebCoreAVFResourceLoader.h"
68 #import "WebCoreCALayerExtras.h"
69 #import "WebCoreNSURLSession.h"
70 #import "WebCoreSystemInterface.h"
71 #import <functional>
72 #import <map>
73 #import <objc/runtime.h>
74 #import <runtime/DataView.h>
75 #import <runtime/JSCInlines.h>
76 #import <runtime/TypedArrayInlines.h>
77 #import <runtime/Uint16Array.h>
78 #import <runtime/Uint32Array.h>
79 #import <runtime/Uint8Array.h>
80 #import <wtf/BlockObjCExceptions.h>
81 #import <wtf/CurrentTime.h>
82 #import <wtf/ListHashSet.h>
83 #import <wtf/NeverDestroyed.h>
84 #import <wtf/OSObjectPtr.h>
85 #import <wtf/text/CString.h>
86
87 #if ENABLE(AVF_CAPTIONS)
88 #include "TextTrack.h"
89 #endif
90
91 #import <AVFoundation/AVAssetImageGenerator.h>
92 #import <AVFoundation/AVAssetTrack.h>
93 #import <AVFoundation/AVMediaSelectionGroup.h>
94 #import <AVFoundation/AVMetadataItem.h>
95 #import <AVFoundation/AVPlayer.h>
96 #import <AVFoundation/AVPlayerItem.h>
97 #import <AVFoundation/AVPlayerItemOutput.h>
98 #import <AVFoundation/AVPlayerItemTrack.h>
99 #import <AVFoundation/AVPlayerLayer.h>
100 #import <AVFoundation/AVTime.h>
101
102 #if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
103 #import "VideoFullscreenLayerManager.h"
104 #endif
105
106 #if PLATFORM(IOS)
107 #import "WAKAppKitStubs.h"
108 #import <CoreImage/CoreImage.h>
109 #import <mach/mach_port.h>
110 #else
111 #import <Foundation/NSGeometry.h>
112 #import <QuartzCore/CoreImage.h>
113 #endif
114
115 #if USE(VIDEOTOOLBOX)
116 #import <CoreVideo/CoreVideo.h>
117 #import <VideoToolbox/VideoToolbox.h>
118 #endif
119
120 #if USE(CFURLCONNECTION)
121 #include "CFNSURLConnectionSPI.h"
122 #endif
123
124 #import "CoreVideoSoftLink.h"
125
// HashSet's iterator does not expose the nested typedefs that
// std::iterator_traits expects, so provide a manual specialization for the
// media-selection-option set used by this file.
// NOTE(review): presumably needed by <map>/<algorithm> code operating on
// MediaSelectionOptionAVFObjC sets later in this file — confirm.
namespace std {
template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
    typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
};
}
131
#if ENABLE(AVF_CAPTIONS)
// Note: This must be defined before our SOFT_LINK macros:
// Declares the private out-of-band track properties this file reads from
// AVMediaSelectionOption when AVF captions are enabled. The properties are
// assumed to be implemented by AVFoundation at runtime.
@class AVMediaSelectionOption;
@interface AVMediaSelectionOption (OutOfBandExtensions)
@property (nonatomic, readonly) NSString* outOfBandSource;
@property (nonatomic, readonly) NSString* outOfBandIdentifier;
@end
#endif
140
// Declares AVURLAsset's resolvedURL property so it can be used below.
// NOTE(review): declared, not implemented, here — assumed to be provided by
// AVFoundation at runtime; confirm availability on supported OS versions.
@interface AVURLAsset (WebKitExtensions)
@property (nonatomic, readonly) NSURL *resolvedURL;
@end
144
// The soft-link #defines below remap these class names to getter calls
// (e.g. "#define AVPlayer initAVPlayer()"), which makes the bare names
// unusable as types afterwards. Capture usable type aliases first.
typedef AVPlayer AVPlayerType;
typedef AVPlayerItem AVPlayerItemType;
typedef AVPlayerItemLegibleOutput AVPlayerItemLegibleOutputType;
typedef AVPlayerItemVideoOutput AVPlayerItemVideoOutputType;
typedef AVMetadataItem AVMetadataItemType;
typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
typedef AVMediaSelectionOption AVMediaSelectionOptionType;
typedef AVAssetCache AVAssetCacheType;
153
154 #pragma mark - Soft Linking
155
156 // Soft-linking headers must be included last since they #define functions, constants, etc.
157 #import "CoreMediaSoftLink.h"
158
159 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
160
161 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
162
163 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayer)
164 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerItem)
165 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerItemVideoOutput)
166 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerLayer)
167 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVURLAsset)
168 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVAssetImageGenerator)
169 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVMetadataItem)
170 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVAssetCache)
171
172 SOFT_LINK_CLASS(CoreImage, CIContext)
173 SOFT_LINK_CLASS(CoreImage, CIImage)
174
175 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString*)
176 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString*)
177 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
178 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
179 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
180 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
181 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
182 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeMetadata, NSString *)
183 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
184 SOFT_LINK_POINTER(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
185 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
186 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
187 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
188 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
189 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
190
191 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
192 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetRequiresCustomURLLoadingKey, NSString *)
193 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetOutOfBandMIMETypeKey, NSString *)
194
195 #define AVPlayer initAVPlayer()
196 #define AVPlayerItem initAVPlayerItem()
197 #define AVPlayerLayer initAVPlayerLayer()
198 #define AVURLAsset initAVURLAsset()
199 #define AVAssetImageGenerator initAVAssetImageGenerator()
200 #define AVPlayerItemVideoOutput initAVPlayerItemVideoOutput()
201 #define AVMetadataItem initAVMetadataItem()
202 #define AVAssetCache initAVAssetCache()
203
204 #define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
205 #define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
206 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
207 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
208 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
209 #define AVMediaTypeVideo getAVMediaTypeVideo()
210 #define AVMediaTypeAudio getAVMediaTypeAudio()
211 #define AVMediaTypeMetadata getAVMediaTypeMetadata()
212 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
213 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
214 #define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
215 #define AVURLAssetRequiresCustomURLLoadingKey getAVURLAssetRequiresCustomURLLoadingKey()
216 #define AVURLAssetOutOfBandMIMETypeKey getAVURLAssetOutOfBandMIMETypeKey()
217 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
218 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
219 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
220 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
221 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
222
223 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
224 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
225 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
226
227 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
228 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
229 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
230
231 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
232 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
233 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
234 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
235
236 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
237 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
238 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
239 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
240 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
241 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
242 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
243 #endif
244
245 #if ENABLE(AVF_CAPTIONS)
246 SOFT_LINK_POINTER(AVFoundation, AVURLAssetCacheKey, NSString*)
247 SOFT_LINK_POINTER(AVFoundation, AVURLAssetHTTPCookiesKey, NSString*)
248 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
249 SOFT_LINK_POINTER(AVFoundation, AVURLAssetUsesNoPersistentCacheKey, NSString*)
250 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
251 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
252 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
253 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
254 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
255 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
256 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
257 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
258 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicIsAuxiliaryContent, NSString*)
259
260 #define AVURLAssetHTTPCookiesKey getAVURLAssetHTTPCookiesKey()
261 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
262 #define AVURLAssetCacheKey getAVURLAssetCacheKey()
263 #define AVURLAssetUsesNoPersistentCacheKey getAVURLAssetUsesNoPersistentCacheKey()
264 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
265 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
266 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
267 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
268 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
269 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
270 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
271 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
272 #define AVMediaCharacteristicIsAuxiliaryContent getAVMediaCharacteristicIsAuxiliaryContent()
273 #endif
274
275 #if ENABLE(DATACUE_VALUE)
276 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString*)
277 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMetadataKeySpaceISOUserData, NSString*)
278 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString*)
279 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceiTunes, NSString*)
280 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceID3, NSString*)
281
282 #define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
283 #define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
284 #define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
285 #define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
286 #define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
287 #endif
288
289 #if PLATFORM(IOS)
290 SOFT_LINK_POINTER(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
291 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
292 #endif
293
294 SOFT_LINK_FRAMEWORK(MediaToolbox)
295 SOFT_LINK_OPTIONAL(MediaToolbox, MTEnableCaption2015Behavior, Boolean, (), ())
296
297 using namespace WebCore;
298
// KVO context values used by WebCoreAVFMovieObserver to route
// observeValueForKeyPath: notifications to the kind of object being observed
// (player item, item track, player, or player layer).
enum MediaPlayerAVFoundationObservationContext {
    MediaPlayerAVFoundationObservationContextPlayerItem,
    MediaPlayerAVFoundationObservationContextPlayerItemTrack,
    MediaPlayerAVFoundationObservationContextPlayer,
    MediaPlayerAVFoundationObservationContextAVPlayerLayer,
};
305
// Objective-C observer that forwards KVO notifications, end-of-playback
// notifications, and (when supported) legible-output captions callbacks to a
// MediaPlayerPrivateAVFoundationObjC instance via its m_callback pointer.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
#else
@interface WebCoreAVFMovieObserver : NSObject
#endif
{
    MediaPlayerPrivateAVFoundationObjC* m_callback;
    // Re-entrancy guard depth; nonzero while callbacks should be delayed.
    // NOTE(review): semantics inferred from the name — confirm in the @implementation.
    int m_delayCallbacks;
}
-(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
// Clears m_callback so in-flight notifications become no-ops during teardown.
-(void)disconnect;
-(void)metadataLoaded;
-(void)didEnd:(NSNotification *)notification;
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;
#endif
@end
325
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// AVAssetResourceLoaderDelegate that hands custom URL loading requests back
// to the owning MediaPlayerPrivateAVFoundationObjC.
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    MediaPlayerPrivateAVFoundationObjC* m_callback;
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
// Used with a nil/0 argument at teardown to detach the player object.
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
@end
#endif
335
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// AVPlayerItemOutputPullDelegate that notifies the owning player when new
// video output data is available or the output sequence is flushed.
@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    MediaPlayerPrivateAVFoundationObjC *m_callback;
    // Semaphore used to coordinate with the player's video output.
    // NOTE(review): signaling protocol not visible in this chunk — see the
    // @implementation and MediaPlayerPrivateAVFoundationObjC for details.
    dispatch_semaphore_t m_semaphore;
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
// Used with a nil/0 argument at teardown to detach the player object.
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
@end
#endif
347
348 namespace WebCore {
349
// Forward declarations for file-local helpers defined later in this file
// (KVO key lists, metadata key lists, and track selection).
static NSArray *assetMetadataKeyNames();
static NSArray *itemKVOProperties();
static NSArray *assetTrackMetadataKeyNames();
static NSArray *playerKVOProperties();
static AVAssetTrack* firstEnabledTrack(NSArray* tracks);
355
#if !LOG_DISABLED
// Human-readable form of a bool for LOG() output.
static const char *boolString(bool val)
{
    if (val)
        return "true";
    return "false";
}
#endif
362
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Lazily-created serial queue shared by all resource loader delegate work.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t loaderQueue;
    static dispatch_once_t initOnce;
    dispatch_once(&initOnce, ^{
        loaderQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return loaderQueue;
}
#endif
374
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Lazily-created serial queue shared by all video output pull delegate work.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_queue_t pullQueue;
    static dispatch_once_t initOnce;
    dispatch_once(&initOnce, ^{
        pullQueue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return pullQueue;
}
#endif
386
#if USE(CFURLCONNECTION)
// Bridges WebCore's AuthenticationClient interface onto an
// NSURLAuthenticationChallenge: each AuthenticationClient callback is
// forwarded to the challenge's sender. Ref-counted so the bridge can outlive
// the call site that created it.
class WebCoreNSURLAuthenticationChallengeClient : public RefCounted<WebCoreNSURLAuthenticationChallengeClient>, public AuthenticationClient {
public:
    static RefPtr<WebCoreNSURLAuthenticationChallengeClient> create(NSURLAuthenticationChallenge *challenge)
    {
        return adoptRef(new WebCoreNSURLAuthenticationChallengeClient(challenge));
    }

    // Re-expose RefCounted's ref/deref publicly for AuthenticationClient users.
    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::ref;
    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::deref;

private:
    WebCoreNSURLAuthenticationChallengeClient(NSURLAuthenticationChallenge *challenge)
        : m_challenge(challenge)
    {
        ASSERT(m_challenge);
    }

    void refAuthenticationClient() override { ref(); }
    void derefAuthenticationClient() override { deref(); }

    void receivedCredential(const AuthenticationChallenge&, const Credential& credential) override
    {
        [[m_challenge sender] useCredential:credential.nsCredential() forAuthenticationChallenge:m_challenge.get()];
    }

    void receivedRequestToContinueWithoutCredential(const AuthenticationChallenge&) override
    {
        [[m_challenge sender] continueWithoutCredentialForAuthenticationChallenge:m_challenge.get()];
    }

    void receivedCancellation(const AuthenticationChallenge&) override
    {
        [[m_challenge sender] cancelAuthenticationChallenge:m_challenge.get()];
    }

    // The next two sender methods are optional protocol members; probe with
    // respondsToSelector: before messaging.
    void receivedRequestToPerformDefaultHandling(const AuthenticationChallenge&) override
    {
        if ([[m_challenge sender] respondsToSelector:@selector(performDefaultHandlingForAuthenticationChallenge:)])
            [[m_challenge sender] performDefaultHandlingForAuthenticationChallenge:m_challenge.get()];
    }

    void receivedChallengeRejection(const AuthenticationChallenge&) override
    {
        if ([[m_challenge sender] respondsToSelector:@selector(rejectProtectionSpaceAndContinueWithChallenge:)])
            [[m_challenge sender] rejectProtectionSpaceAndContinueWithChallenge:m_challenge.get()];
    }

    RetainPtr<NSURLAuthenticationChallenge> m_challenge;
};
#endif
438
// Registers this engine with WebCore's MediaPlayer factory: the creation
// lambda plus the static type/cache query hooks. Finally primes the
// AVFoundation MIME type cache so supportsType() has data to work with.
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    // Nothing to register when AVFoundation is unavailable on this system.
    if (!isAvailable())
        return;

    registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player); },
            getSupportedTypes, supportsType, originsInMediaCache, clearMediaCache, clearMediaCacheForOrigins, supportsKeySystem);
    AVFoundationMIMETypeCache::singleton().loadTypes();
}
448
// Returns the AVAssetCache backing media caching for |path|. An empty path
// falls back to a "MediaCache" directory inside NSTemporaryDirectory().
static AVAssetCacheType *assetCacheForPath(const String& path)
{
    NSURL *assetCacheURL;

    if (path.isEmpty())
        assetCacheURL = [[NSURL fileURLWithPath:NSTemporaryDirectory()] URLByAppendingPathComponent:@"MediaCache" isDirectory:YES];
    else
        assetCacheURL = [NSURL fileURLWithPath:path isDirectory:YES];

    // Consistency: use the soft-linked class accessor macro (AVAssetCache
    // expands to initAVAssetCache()) like every other soft-linked class in
    // this file, rather than calling the init function directly.
    return [AVAssetCache assetCacheWithURL:assetCacheURL];
}
460
// Enumerates the asset cache's keys (origin URL strings) under |path| and
// builds the set of SecurityOrigins that have cached media there.
HashSet<RefPtr<SecurityOrigin>> MediaPlayerPrivateAVFoundationObjC::originsInMediaCache(const String& path)
{
    HashSet<RefPtr<SecurityOrigin>> result;
    for (NSString *cacheKey in [assetCacheForPath(path) allKeys]) {
        URL cachedURL = URL(URL(), cacheKey);
        if (!cachedURL.isValid())
            continue;
        result.add(SecurityOrigin::create(cachedURL));
    }
    return result;
}
471
// Converts an NSDate to a std::chrono::system_clock::time_point, preserving
// sub-second precision by going through a double-based duration.
static std::chrono::system_clock::time_point toSystemClockTime(NSDate *date)
{
    ASSERT(date);
    using namespace std::chrono;

    auto secondsSinceEpoch = duration<double>(date.timeIntervalSince1970);
    return system_clock::time_point(duration_cast<system_clock::duration>(secondsSinceEpoch));
}
479
// Removes cached media under |path| that was modified after |modifiedSince|.
// A zero (or earlier) cutoff wipes the whole cache directory outright.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCache(const String& path, std::chrono::system_clock::time_point modifiedSince)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::clearMediaCache()");

    AVAssetCacheType* assetCache = assetCacheForPath(path);

    // First purge matching entries that AVAssetCache itself knows about.
    for (NSString *key in [assetCache allKeys]) {
        if (toSystemClockTime([assetCache lastModifiedDateOfEntryForKey:key]) > modifiedSince)
            [assetCache removeEntryForKey:key];
    }

    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSURL *baseURL = [assetCache URL];

    // A default-constructed cutoff means "clear everything": remove the
    // backing directory wholesale instead of enumerating.
    if (modifiedSince <= std::chrono::system_clock::time_point { }) {
        [fileManager removeItemAtURL:baseURL error:nil];
        return;
    }

    // Then sweep the cache directory itself for "CachedMedia-" files newer
    // than the cutoff.
    NSArray *propertyKeys = @[NSURLNameKey, NSURLContentModificationDateKey, NSURLIsRegularFileKey];
    NSDirectoryEnumerator *enumerator = [fileManager enumeratorAtURL:baseURL includingPropertiesForKeys:
        propertyKeys options:NSDirectoryEnumerationSkipsSubdirectoryDescendants
        errorHandler:nil];

    // Collect first, delete after: deleting while the directory enumerator is
    // live would mutate the collection being enumerated.
    RetainPtr<NSMutableArray> urlsToDelete = adoptNS([[NSMutableArray alloc] init]);
    // BUG FIX: Objective-C fast enumeration uses "in"; the C++ range-for ":"
    // does not apply to NSDirectoryEnumerator (no begin()/end()).
    for (NSURL *fileURL in enumerator) {
        NSDictionary *fileAttributes = [fileURL resourceValuesForKeys:propertyKeys error:nil];

        if (![fileAttributes[NSURLNameKey] hasPrefix:@"CachedMedia-"])
            continue;

        if (![fileAttributes[NSURLIsRegularFileKey] boolValue])
            continue;

        if (toSystemClockTime(fileAttributes[NSURLContentModificationDateKey]) <= modifiedSince)
            continue;

        [urlsToDelete addObject:fileURL];
    }

    for (NSURL *fileURL in urlsToDelete.get())
        [fileManager removeItemAtURL:fileURL error:nil];
}
523
// Removes cached media under |path| whose cache key maps to one of the given
// security origins.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCacheForOrigins(const String& path, const HashSet<RefPtr<SecurityOrigin>>& origins)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::clearMediaCacheForOrigins()");
    AVAssetCacheType* cache = assetCacheForPath(path);
    for (NSString *cacheKey in [cache allKeys]) {
        URL cachedURL = URL(URL(), cacheKey);
        if (!cachedURL.isValid())
            continue;
        if (origins.contains(SecurityOrigin::create(cachedURL)))
            [cache removeEntryForKey:cacheKey];
    }
}
536
// Constructs the AVFoundation-backed private player. Members are set to
// inert defaults here; the AVPlayer/AVPlayerItem and rendering objects are
// created later (see createVideoLayer()/createContextVideoRenderer()).
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_weakPtrFactory(this)
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    , m_videoFullscreenLayerManager(VideoFullscreenLayerManager::create())
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
#endif
    // Observer and delegates hold a raw back-pointer to |this|; they are
    // detached explicitly in the destructor / cancelLoad().
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
    , m_videoOutputSemaphore(nullptr)
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
#endif
    , m_currentTextTrack(0)
    , m_cachedRate(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_shouldBufferData(true)
    , m_cachedIsReadyForDisplay(false)
    , m_haveBeenAskedToCreateLayer(false)
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
}
571
// Tears down delegates and rendering before cancelling any in-flight load,
// so late callbacks from AVFoundation find no live back-pointer to |this|.
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Detach the loader delegate and invalidate outstanding resource loaders.
    [m_loaderDelegate.get() setCallback:0];
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];

    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Detach the video output delegate and release its semaphore, if created.
    [m_videoOutputDelegate setCallback:0];
    [m_videoOutput setDelegate:nil queue:0];
    if (m_videoOutputSemaphore)
        dispatch_release(m_videoOutputSemaphore);
#endif

    if (m_videoLayer)
        destroyVideoLayer();

    // cancelLoad() removes KVO observers and drops the player/item/asset.
    cancelLoad();
}
593
// Cancels any in-progress load and tears down all AVFoundation state:
// notifications, KVO observations, outputs, the item, the player, and the
// asset. Also resets the cached property mirrors to their defaults. Safe to
// call repeatedly; each step is guarded by a null check.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::cancelLoad(%p)", this);
    tearDownVideoRendering();

    // Stop all notification delivery and neuter the observer's back-pointer.
    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Detach the legible (captions) output from the item before dropping it.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    // Remove every KVO observation registered on the item before releasing it.
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        
        m_avPlayerItem = nil;
    }
    // Likewise for the player: time observer first, then KVO, then the item.
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;

        for (NSString *keyName in playerKVOProperties())
            [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];

        [m_avPlayer replaceCurrentItemWithPlayerItem:nil];
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = MediaTime::zeroTime();

    // Each cached track carries an "enabled" observation that must be removed.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Disconnect the Web Audio source provider from the (now gone) item/track.
    if (m_provider) {
        m_provider->setPlayerItem(nullptr);
        m_provider->setAudioTrack(nullptr);
    }
#endif

    setIgnoreLoadStateChanges(false);
}
659
// A layer renderer counts as present as soon as layer creation has been
// requested (createVideoLayer() sets this flag), even though the layer
// itself may be created asynchronously on the main thread afterwards.
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_haveBeenAskedToCreateLayer;
}
664
// A context renderer exists when either a video output (where supported) or
// an image generator has been created.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
    bool hasRenderer = !!m_imageGenerator;
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    hasRenderer = hasRenderer || !!m_videoOutput;
#endif
    return hasRenderer;
}
673
// Creates whatever context renderer this build supports: a video output when
// AVPlayerItemVideoOutput is available, otherwise an image generator.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
682
// Lazily creates an AVAssetImageGenerator for the current asset, used as the
// fallback context renderer. No-op without an asset or if one already exists.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p)", this);

    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    // Request exact frames (zero tolerance) with the clean aperture and the
    // track's preferred transform applied.
    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
}
699
// Destroys every context renderer variant; each destroy call is a no-op if
// the corresponding object was never created.
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
    destroyOpenGLVideoOutput();
#endif
    destroyImageGenerator();
}
708
// Releases the image generator created by createImageGenerator(), if any.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (!m_imageGenerator)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator(%p) - destroying %p", this, m_imageGenerator.get());

    // Consistency: clear the RetainPtr with nullptr, matching how the other
    // RetainPtr members are cleared in this file (e.g. cancelLoad()).
    m_imageGenerator = nullptr;
}
718
// Requests creation of the AVPlayerLayer. The actual creation is deferred to
// the main thread; m_haveBeenAskedToCreateLayer is set there so a second call
// arriving before the task runs simply queues another (no-op) task.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
        return;

    // The weak pointer guards against |this| being destroyed before the
    // main-thread task runs.
    callOnMainThread([this, weakThis = createWeakPtr()] {
        if (!weakThis)
            return;

        // Re-check: state may have changed between the request and this task.
        if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
            return;
        m_haveBeenAskedToCreateLayer = true;

        if (!m_videoLayer)
            createAVPlayerLayer();

#if USE(VIDEOTOOLBOX)
        if (!m_videoOutput)
            createVideoOutput();
#endif

        // Let the client know rendering switched from context to layer mode.
        player()->client().mediaPlayerRenderingModeChanged(player());
    });
}
743
// Creates and configures the AVPlayerLayer: attaches it to the player,
// registers a KVO observer for readyForDisplay (consumed by
// hasAvailableVideoFrame()), and hands it to the fullscreen layer manager or
// sizes it directly depending on platform.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
{
    if (!m_avPlayer)
        return;

    m_videoLayer = adoptNS([[AVPlayerLayer alloc] init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];

#ifndef NDEBUG
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
    IntSize defaultSize = snappedIntRect(player()->client().mediaPlayerContentBoxRect()).size();
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoLayer(%p) - returning %p", this, m_videoLayer.get());

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    // The fullscreen layer manager decides whether the layer appears inline or
    // inside a fullscreen container.
    m_videoFullscreenLayerManager->setVideoLayer(m_videoLayer.get(), defaultSize);

#if PLATFORM(IOS)
    [m_videoLayer setPIPModeEnabled:(player()->fullscreenMode() & MediaPlayer::VideoFullscreenModePictureInPicture)];
#endif
#else
    [m_videoLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
#endif
}
771
// Tears down the AVPlayerLayer: unregisters the KVO observer added in
// createAVPlayerLayer(), detaches the layer from the player, then releases it.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer(%p) - destroying %p", this, m_videoLayer.get());

    [m_videoLayer removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer setPlayer:nil];

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    m_videoFullscreenLayerManager->didDestroyVideoLayer();
#endif

    m_videoLayer = nil;
}
788
// Computes the media's start date (in milliseconds) by subtracting the current
// playback offset from the item's current date.
MediaTime MediaPlayerPrivateAVFoundationObjC::getStartDate() const
{
    // currentDate advances with the playback position, so removing the current
    // playback offset recovers the date at which the media began.
    double dateMilliseconds = [[m_avPlayerItem currentDate] timeIntervalSince1970] * 1000;

    // AVFoundation reports 0 when the file carries no start date; no live
    // stream originated at the 1970 epoch, so treat 0 as "no date".
    if (!dateMilliseconds)
        return MediaTime::invalidTime();

    double positionMilliseconds = CMTimeGetSeconds([m_avPlayerItem currentTime]) * 1000;

    // Round to absorb sub-second error introduced by the subtraction.
    return MediaTime::createWithDouble(round(dateMilliseconds - positionMilliseconds));
}
803
// Reports whether a video frame can be displayed right now. When rendering
// through the AVPlayerLayer the cached readyForDisplay KVO value answers this;
// otherwise a frame is available once at least one has been painted.
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    bool renderingToLayer = currentRenderingMode() == MediaRenderingToLayer;
    return renderingToLayer ? m_cachedIsReadyForDisplay : m_videoFrameHasDrawn;
}
811
812 #if ENABLE(AVF_CAPTIONS)
// Maps a WebCore text track kind to the AVFoundation media characteristics
// used to describe an out-of-band track. Uses modern container literals and a
// switch instead of the legacy arrayWithObjects:/if-chain form.
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    // In manual caption-selection mode every out-of-band track is exposed as
    // auxiliary content regardless of kind.
    static bool manualSelectionMode = MTEnableCaption2015BehaviorPtr() && MTEnableCaption2015BehaviorPtr()();
    if (manualSelectionMode)
        return @[ AVMediaCharacteristicIsAuxiliaryContent ];

    // FIXME: Match these to correct types:
    switch (kind) {
    case PlatformTextTrack::Description:
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility ];
    case PlatformTextTrack::Forced:
        return @[ AVMediaCharacteristicContainsOnlyForcedSubtitles ];
    case PlatformTextTrack::Caption:
    case PlatformTextTrack::Subtitle:
    default:
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];
    }
}
834     
// Entry point for clients to report that an out-of-band text track's mode
// changed; forwards to the shared trackModeChanged() handling.
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
839     
// Pushes the modes of the client's out-of-band track sources onto the matching
// platform text tracks, pairing a track with its source via the unique
// identifier stored in the media selection option.
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();

    for (auto& textTrack : m_textTracks) {
        // Only out-of-band tracks are driven by the client's sources.
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;

        RefPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> currentOption = trackPrivate->mediaSelectionOption();

        for (auto& track : outOfBandTrackSources) {
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();

            // Match the platform track to its source by unique identifier.
            if (![[currentOption.get() outOfBandIdentifier] isEqual: reinterpret_cast<const NSString*>(uniqueID.get())])
                continue;

            // Translate the source's mode; unknown modes fall back to Hidden.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;

            textTrack->setMode(mode);
            break;
        }
    }
}
870 #endif
871
872
// Runs the URL through NSURLProtocol canonicalization so it matches what the
// URL loading system would ultimately request. Falls back to the plain
// conversion whenever any canonicalization step fails or the URL is empty.
static NSURL *canonicalURL(const String& url)
{
    NSURL *cocoaURL = URL(ParsedURLString, url);
    if (url.isEmpty())
        return cocoaURL;

    RetainPtr<NSURLRequest> request = adoptNS([[NSURLRequest alloc] initWithURL:cocoaURL]);
    if (!request)
        return cocoaURL;

    NSURLRequest *canonicalRequest = [NSURLProtocol canonicalRequestForRequest:request.get()];
    return canonicalRequest ? [canonicalRequest URL] : cocoaURL;
}
889
890 #if PLATFORM(IOS)
// Converts a WebCore Cookie into an NSHTTPCookie. The expiry arrives in
// milliseconds since the epoch while NSDate takes seconds, hence the division.
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    NSMutableDictionary *properties = [NSMutableDictionary dictionaryWithDictionary:@{
        NSHTTPCookieName: cookie.name,
        NSHTTPCookieValue: cookie.value,
        NSHTTPCookieDomain: cookie.domain,
        NSHTTPCookiePath: cookie.path,
        NSHTTPCookieExpires: [NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)],
    }];
    if (cookie.secure)
        properties[NSHTTPCookieSecure] = @YES;
    if (cookie.session)
        properties[NSHTTPCookieDiscard] = @YES;

    return [NSHTTPCookie cookieWithProperties:properties];
}
908 #endif
909
// Builds the AVURLAsset for |url|, translating the client's loading state
// (referrer, user agent, MIME type, out-of-band text tracks, cookies, caching
// policy) into AVURLAsset creation options, and wires up the resource loader
// delegate so WebKit can service the asset's media loads.
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
{
    if (m_avAsset)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(%p) - url = %s", this, url.utf8().data());

    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);

    // Forbid references that cross the local/remote boundary in either direction.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    // Forward the document's Referer and User-Agent so AVFoundation's own
    // loads look like the embedding page's.
    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject:@YES forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

#if PLATFORM(IOS)
    // FIXME: rdar://problem/20354688
    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty() && AVURLAssetClientBundleIdentifierKey)
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];

    if (AVURLAssetRequiresCustomURLLoadingKey)
        [options setObject:@YES forKey:AVURLAssetRequiresCustomURLLoadingKey];
#endif

    // Pass the declared MIME type (plus codecs when known) out of band, but
    // only when it was not merely inferred from the file extension.
    auto type = player()->contentMIMEType();
    if (AVURLAssetOutOfBandMIMETypeKey && !type.isEmpty() && !player()->contentMIMETypeWasInferredFromExtension()) {
        auto codecs = player()->contentTypeCodecs();
        if (!codecs.isEmpty()) {
            NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)type, (NSString *)codecs];
            [options setObject:typeString forKey:AVURLAssetOutOfBandMIMETypeKey];
        } else
            [options setObject:(NSString *)type forKey:AVURLAssetOutOfBandMIMETypeKey];
    }

#if ENABLE(AVF_CAPTIONS)
    // Describe each out-of-band text track so AVFoundation surfaces it as a
    // media selection option.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: reinterpret_cast<const NSString*>(label.get()),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: reinterpret_cast<const NSString*>(language.get()),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: reinterpret_cast<const NSString*>(uniqueID.get()),
                AVOutOfBandAlternateTrackSourceKey: reinterpret_cast<const NSString*>(url.get()),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS)
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS)
    // Hand the document's cookies for this URL to AVFoundation, which performs
    // its own network loads.
    Vector<Cookie> cookies;
    if (player()->getRawCookies(URL(ParsedURLString, url), cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    bool usePersistentCache = player()->client().mediaPlayerShouldUsePersistentCache();
    [options setObject:@(!usePersistentCache) forKey:AVURLAssetUsesNoPersistentCacheKey];

    if (usePersistentCache)
        [options setObject:assetCacheForPath(player()->client().mediaPlayerMediaCacheDirectory()) forKey:AVURLAssetCacheKey];

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([[AVURLAsset alloc] initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    [resourceLoader setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];

#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED > 101100
    // When enabled, route AVFoundation's media loads through WebKit's own
    // loader by handing the resource loader a WebCoreNSURLSession.
    if (Settings::isAVFoundationNSURLSessionEnabled()
        && [resourceLoader respondsToSelector:@selector(setURLSession:)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegate)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegateQueue)]) {
        RefPtr<PlatformMediaResourceLoader> mediaResourceLoader = player()->createResourceLoader();
        if (mediaResourceLoader)
            resourceLoader.URLSession = (NSURLSession *)[[[WebCoreNSURLSession alloc] initWithResourceLoader:*mediaResourceLoader delegate:resourceLoader.URLSessionDataDelegate delegateQueue:resourceLoader.URLSessionDataDelegateQueue] autorelease];
    }
#endif

#endif

    // The new asset's playability has not been checked yet.
    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
1029
// Replaces the AVPlayer's current item. The replacement must happen on the
// main thread, so when called elsewhere the work is dispatched there, with
// both player and item retained for the duration of the hop.
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItemType *item)
{
    if (!m_avPlayer)
        return;

    if (pthread_main_np()) {
        [m_avPlayer replaceCurrentItemWithPlayerItem:item];
        return;
    }

    RetainPtr<AVPlayerType> strongPlayer = m_avPlayer.get();
    RetainPtr<AVPlayerItemType> strongItem = item;
    dispatch_async(dispatch_get_main_queue(), [strongPlayer, strongItem] {
        [strongPlayer replaceCurrentItemWithPlayerItem:strongItem.get()];
    });
}
1046
// Creates and configures the AVPlayer: registers KVO observers, applies
// external-playback, sleep, playback-target, and mute state that was recorded
// before the player existed, and attaches the player item when one has
// already been created.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayer(%p)", this);

    setDelayCallbacks(true);

    m_avPlayer = adoptNS([[AVPlayer alloc] init]);
    for (NSString *keyName in playerKVOProperties())
        [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // WebKit drives media selection itself; see the track synchronization code.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET) && !PLATFORM(IOS)
    if (m_shouldPlayToPlaybackTarget) {
        // Clear m_shouldPlayToPlaybackTarget so doesn't return without doing anything.
        m_shouldPlayToPlaybackTarget = false;
        setShouldPlayToPlaybackTarget(true);
    }
#endif

#if PLATFORM(IOS) && !PLATFORM(IOS_SIMULATOR)
    setShouldDisableSleep(player()->shouldDisableSleep());
#endif

    if (m_muted) {
        // Clear m_muted so setMuted doesn't return without doing anything, then
        // re-apply the mute state recorded before the player existed. (The
        // previous code passed the just-cleared m_muted to -setMuted:, which
        // dropped the requested mute.)
        m_muted = false;
        setMuted(true);
    }

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
1095
// Creates the AVPlayerItem for the current asset: registers the end-of-playback
// notification and KVO observers, configures pitch correction, attaches a
// legible (caption) output where supported, and connects the item to the
// player and the Web Audio source provider.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem(%p)", this);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([[AVPlayerItem alloc] initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // The Prior option delivers will-change notifications in addition to the
    // did-change ones for every observed item property.
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    // Request WebVTT native samples; WebKit renders cues itself, so the
    // player's own caption rendering is suppressed.
    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([[AVPlayerItemLegibleOutput alloc] initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    if (m_provider) {
        m_provider->setPlayerItem(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }
#endif

    setDelayCallbacks(false);
}
1141
// Kicks off an asynchronous load of the asset's "playable" and "tracks" keys
// and schedules the AssetPlayabilityKnown notification on the main thread when
// the load completes. Runs at most once per asset.
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::checkPlayability(%p)", this);
    auto weakThis = createWeakPtr();

    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObjects:@"playable", @"tracks", nil] completionHandler:^{
        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
1158
// Asynchronously loads the asset-level metadata keys and then, once the track
// list is available, each track's metadata keys. A dispatch group tracks the
// outstanding loads; when it drains, the observer is told metadata is loaded.
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);

    OSObjectPtr<dispatch_group_t> metadataLoadingGroup = adoptOSObject(dispatch_group_create());
    dispatch_group_enter(metadataLoadingGroup.get());
    auto weakThis = createWeakPtr();
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                // One group entry per track keeps the group open until every
                // track's metadata load finishes.
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup.get());
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup.get());
                    }];
                }
            }
            // Balances the enter before the asset-level load above.
            dispatch_group_leave(metadataLoadingGroup.get());
        });
    }];

    dispatch_group_notify(metadataLoadingGroup.get(), dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });
    });
}
1188
// Maps the cached AVPlayerItem status plus cached buffering flags onto the
// cross-platform ItemStatus enumeration. Unknown/failed take precedence;
// otherwise "ready" is refined from best (likely to keep up) to worst (empty).
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    switch (m_cachedItemStatus) {
    case AVPlayerItemStatusUnknown:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    case AVPlayerItemStatusFailed:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    default:
        break;
    }

    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1207
// Packages the underlying AVPlayer for clients that need the platform object.
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformMedia(%p)", this);
    PlatformMedia result;
    result.type = PlatformMedia::AVFoundationMediaPlayerType;
    result.media.avfMediaPlayer = m_avPlayer.get();
    return result;
}
1216
// Returns the layer to composite for this player, or null until layer
// creation has been requested via createVideoLayer().
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    if (!m_haveBeenAskedToCreateLayer)
        return nullptr;

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    // The fullscreen layer manager owns the inline container on these platforms.
    return m_videoFullscreenLayerManager->videoInlineLayer();
#else
    return m_videoLayer.get();
#endif
}
1225
1226 #if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
// Moves the video layer into (or out of) the given fullscreen container and
// re-parents the text track representation layer so captions follow the video.
// The completion handler fires immediately if the layer is unchanged.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer, WTF::Function<void()>&& completionHandler)
{
    if (m_videoFullscreenLayerManager->videoFullscreenLayer() == videoFullscreenLayer) {
        completionHandler();
        return;
    }

    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler));

    if (m_videoFullscreenLayerManager->videoFullscreenLayer() && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayerManager->videoFullscreenLayer() addSublayer:m_textTrackRepresentationLayer.get()];
    }

    // External-playback eligibility may depend on fullscreen state; recompute.
    updateDisableExternalPlayback();
}
1243
// Propagates the new fullscreen frame to the layer manager and keeps the
// text track representation layer sized to match the video bounds.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
    syncTextTrackBounds();
}
1249
// Records the requested gravity and, when a video layer exists, maps WebCore's
// gravity enumeration onto the equivalent AVPlayerLayer gravity constant.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;

    if (!m_videoLayer)
        return;

    NSString *videoGravity;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        videoGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        videoGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    }

    if ([m_videoLayer videoGravity] == videoGravity)
        return;

    [m_videoLayer setVideoGravity:videoGravity];
    syncTextTrackBounds();
}
1273
// Keeps the layer's picture-in-picture flag (iOS only) in sync with the
// new fullscreen mode and refreshes external-playback state.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenMode(MediaPlayer::VideoFullscreenMode mode)
{
#if PLATFORM(IOS)
    [m_videoLayer setPIPModeEnabled:(mode & MediaPlayer::VideoFullscreenModePictureInPicture)];
    updateDisableExternalPlayback();
#else
    UNUSED_PARAM(mode);
#endif
}
1283
1284 #endif // PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
1285
1286 #if PLATFORM(IOS)
// Returns the most recently cached timed metadata, or nil when none has been
// received (RetainPtr::get() already yields nil for an empty pointer).
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    return m_currentMetaData.get();
}
1293
// Serializes the player item's access log using the log's own preferred
// string encoding; empty when no item exists yet.
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemAccessLog *accessLog = [m_avPlayerItem.get() accessLog];
    auto logString = adoptNS([[NSString alloc] initWithData:[accessLog extendedLogData] encoding:[accessLog extendedLogDataStringEncoding]]);

    return logString.get();
}
1304
// Serializes the player item's error log using the log's own preferred
// string encoding; empty when no item exists yet.
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *errorLog = [m_avPlayerItem.get() errorLog];
    auto logString = adoptNS([[NSString alloc] initWithData:[errorLog extendedLogData] encoding:[errorLog extendedLogDataStringEncoding]]);

    return logString.get();
}
1315 #endif
1316
// Shows or hides the video layer, inside a transaction with implicit Core
// Animation actions disabled so the toggle does not animate.
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    if (m_videoLayer)
        [m_videoLayer setHidden:!isVisible];
    [CATransaction commit];
}
1325     
// Starts playback by applying the requested rate; callbacks are delayed so
// the cached rate and the player's rate update atomically from the client's
// point of view.
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPlay(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    double rate = requestedRate();
    m_cachedRate = rate;
    [m_avPlayer setRate:rate];
    setDelayCallbacks(false);
}
1337
// Pauses playback; with AVPlayer, pausing is simply a rate of zero.
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPause(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = 0;
    [m_avPlayer setRate:0];
    setDelayCallbacks(false);
}
1349
// Returns the media duration, preferring the player item (when ready) over the
// asset. An indefinite CMTime maps to positive infinity, which is how live
// streams are reported.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will fetch the
    // answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();

    CMTime cmDuration;

    // Check the AVItem if we have one and it has loaded duration, some assets never report duration.
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        cmDuration = [m_avPlayerItem.get() duration];
    else
        cmDuration = [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return toMediaTime(cmDuration);

    if (CMTIME_IS_INDEFINITE(cmDuration))
        return MediaTime::positiveInfiniteTime();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %s", this, toString(MediaTime::invalidTime()).utf8().data());
    return MediaTime::invalidTime();
}
1374
// Returns the current playback position, clamped to zero. AVFoundation can
// report non-numeric or slightly negative times near the start of playback.
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime itemTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(itemTime))
        return MediaTime::zeroTime();

    return std::max(toMediaTime(itemTime), MediaTime::zeroTime());
}
1386
// Performs the actual seek on the AVPlayerItem with the caller-supplied
// tolerances. Completion is reported back to seekCompleted() on the main
// thread, guarded by weakThis in case the player has been destroyed.
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // In-progress metadata cues belong to the old position; drop them.
    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = toCMTime(time);
    CMTime cmBefore = toCMTime(negativeTolerance);
    CMTime cmAfter = toCMTime(positiveTolerance);

    // [AVPlayerItem seekToTime] will throw an exception if toleranceBefore is negative.
    if (CMTimeCompare(cmBefore, kCMTimeZero) < 0)
        cmBefore = kCMTimeZero;

    auto weakThis = createWeakPtr();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::seekToTime(%p) - calling seekToTime", this);

    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1419
// Applies the requested volume to the AVPlayer. On iOS per-player volume is
// not supported (the system volume applies), so this is a no-op there.
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS)
    UNUSED_PARAM(volume);
    return;
#else
    if (!m_avPlayer)
        return;

    [m_avPlayer.get() setVolume:volume];
#endif
}
1432
// Records the requested mute state and applies it to the AVPlayer when one
// exists; createAVPlayer() re-applies the recorded state otherwise.
void MediaPlayerPrivateAVFoundationObjC::setMuted(bool muted)
{
    if (muted == m_muted)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setMuted(%p) - set to %s", this, boolString(muted));

    m_muted = muted;

    if (!m_avPlayer)
        return;

    [m_avPlayer setMuted:m_muted];
}
1447
// Intentionally does nothing beyond logging. NOTE(review): caption visibility
// appears to be handled through the legible-output/caption rendering path
// elsewhere in this class rather than via AVFoundation here — confirm.
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - set to %s", this, boolString(closedCaptionsVisible));
}
1457
// Applies a new playback rate, caching it first so delayed KVO callbacks
// observe a consistent value.
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    setDelayCallbacks(true);
    m_cachedRate = rate;
    [m_avPlayer setRate:rate];
    setDelayCallbacks(false);
}
1465
// Reports the cached playback rate (updated via KVO and the setters above);
// zero until metadata is available.
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    return metaDataAvailable() ? m_cachedRate : 0;
}
1473
// Exposes AVPlayerItem's seekableTimeRangesLastModifiedTime where the SDK
// provides it (macOS 10.13+ / iOS 11+); reports 0 on older targets.
double MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesLastModifiedTime() const
{
#if (PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101300) || (PLATFORM(IOS) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000)
    return [m_avPlayerItem seekableTimeRangesLastModifiedTime];
#else
    return 0;
#endif
}
1482
// Exposes AVPlayerItem's liveUpdateInterval where the SDK provides it
// (macOS 10.13+ / iOS 11+); reports 0 on older targets.
double MediaPlayerPrivateAVFoundationObjC::liveUpdateInterval() const
{
#if (PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101300) || (PLATFORM(IOS) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000)
    return [m_avPlayerItem liveUpdateInterval];
#else
    return 0;
#endif
}
1491
void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
{
    if (!m_avPlayerItem)
        return;

    // Spectral preserves the audio pitch across rate changes; Varispeed lets
    // the pitch shift with the playback rate.
    NSString *algorithm = preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed;
    [m_avPlayerItem setAudioTimePitchAlgorithm:algorithm];
}
1497
// Converts the cached AVPlayerItem loaded ranges into PlatformTimeRanges.
// Returns an empty range set when there is no player item yet.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto bufferedRanges = std::make_unique<PlatformTimeRanges>();

    if (!m_avPlayerItem)
        return bufferedRanges;

    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;
        bufferedRanges->add(toMediaTime(range.start), toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return bufferedRanges;
}
1512
// Returns the earliest start time among the cached seekable ranges, or zero
// when there are no (valid) ranges.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    MediaTime earliest = MediaTime::positiveInfiniteTime();
    bool foundValidRange = false;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundValidRange = true;
        MediaTime rangeStart = toMediaTime(range.start);
        if (rangeStart < earliest)
            earliest = rangeStart;
    }
    // All ranges invalid/empty behaves the same as no ranges at all.
    return foundValidRange ? earliest : MediaTime::zeroTime();
}
1532
// Returns the latest end time among the seekable ranges. Lazily populates the
// cache from the AVPlayerItem when it has not been filled by observers yet.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    // Default-constructed MediaTime compares less than any valid end time.
    MediaTime latest;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        MediaTime rangeEnd = toMediaTime(CMTimeRangeGetEnd(range));
        if (latest < rangeEnd)
            latest = rangeEnd;
    }
    return latest;
}
1550
// Returns the latest end time among the cached loaded (buffered) ranges, or
// zero when nothing has been loaded.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    MediaTime latestLoaded;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        MediaTime rangeEnd = toMediaTime(CMTimeRangeGetEnd(range));
        if (latestLoaded < rangeEnd)
            latestLoaded = rangeEnd;
    }

    return latestLoaded;
}
1569
// Returns the summed sample-data length of all cached tracks. The sum is
// computed lazily and memoized in m_cachedTotalBytes (a zero sum is
// recomputed on each call, which is harmless).
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    if (!m_cachedTotalBytes) {
        for (AVPlayerItemTrack *track in m_cachedTracks.get())
            m_cachedTotalBytes += [[track assetTrack] totalSampleDataLength];
    }

    return m_cachedTotalBytes;
}
1583
// Takes ownership of the asset. The parameter is passed by value, so moving
// it into the member avoids a redundant retain/release pair that plain
// copy-assignment would incur.
void MediaPlayerPrivateAVFoundationObjC::setAsset(RetainPtr<id> asset)
{
    m_avAsset = WTFMove(asset);
}
1588
// Maps the asynchronous AVAsset key-loading state onto the engine's
// AssetStatus. Also gates "Playable" on every track meeting the hardware
// decode requirements, unless the client opted out of that check.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    // Inspect each metadata key we asked AVFoundation to load; the most
    // severe per-key state determines the aggregate status.
    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];
#if !LOG_DISABLED
        if (error)
            LOG(Media, "MediaPlayerPrivateAVFoundation::assetStatus - statusOfValueForKey failed for %s, error = %s", [keyName UTF8String], [[error localizedDescription] UTF8String]);
#endif

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading; // At least one key is not loaded yet.
        
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    // Client opted out of hardware-support checking: treat tracks as playable
    // without inspecting them.
    if (!player()->shouldCheckHardwareSupport())
        m_tracksArePlayable = true;

    // m_tracksArePlayable is an optional; an unset value means the check has
    // not run yet. Run it once and cache the result.
    if (!m_tracksArePlayable) {
        m_tracksArePlayable = true;
        for (AVAssetTrack *track in [m_avAsset tracks]) {
            if (!assetTrackMeetsHardwareDecodeRequirements(track, player()->mediaContentTypesRequiringHardwareSupport())) {
                m_tracksArePlayable = false;
                break;
            }
        }
    }

    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue] && m_tracksArePlayable.value())
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1630
// Returns the NSError code reported for the asset's "playable" key, or 0
// when there is no asset or no error (messaging a nil NSError yields 0).
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    if (!m_avAsset)
        return 0;

    NSError *playableError = nil;
    // Only the error out-parameter is of interest; the status is ignored.
    [m_avAsset statusOfValueForKey:@"playable" error:&playableError];
    return [playableError code];
}
1640
// Draws the current video frame into the given context, preferring the
// AVPlayerItemVideoOutput path when it already has a frame and falling back
// to the (slower) image-generator path otherwise.
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    // Delay observer callbacks and shield WebCore from any ObjC exceptions
    // thrown by AVFoundation while painting.
    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    // Record that at least one frame has been painted.
    m_videoFrameHasDrawn = true;
}
1661
// Best-effort paint entry point: draws only when painting is possible right
// now, never forcing a renderer to be created.
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    // Ordered, short-circuiting checks:
    //  - metadata must be available and painting enabled;
    //  - rendering to a layer means the request can be ignored;
    //  - an image generator or video output must already exist.
    bool canPaintNow = metaDataAvailable()
        && !context.paintingDisabled()
        && currentRenderingMode() != MediaRenderingToLayer
        && hasContextRenderer();

    if (canPaintNow)
        paintCurrentFrameInContext(context, rect);
}
1677
// Paints the frame for the current time using AVAssetImageGenerator.
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext& context, const FloatRect& rect)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(%p)", this);

    RetainPtr<CGImageRef> frameImage = createImageForTimeInRect(currentTime(), rect);
    if (!frameImage)
        return;

    // Flip the context vertically so the CG image draws upright, then draw
    // with low interpolation quality for speed.
    GraphicsContextStateSaver stateSaver(context);
    context.translate(rect.x(), rect.y() + rect.height());
    context.scale(FloatSize(1.0f, -1.0f));
    context.setImageInterpolationQuality(InterpolationLow);
    // Destination size is truncated to integers, matching the historical
    // IntRect-based drawing.
    IntSize paintSize(rect.width(), rect.height());
    CGContextDrawImage(context.platformContext(), CGRectMake(0, 0, paintSize.width(), paintSize.height()), frameImage.get());
}
1692
// Synchronously generates a CGImage for the given media time, sized no larger
// than |rect|, converted to the sRGB color space. May return null if the
// generator fails (errors are ignored).
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    // Cap the generated image to the paint rect's size.
    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    // 600 is a common timescale evenly divisible by typical frame rates.
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), sRGBColorSpaceRef()));

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return image;
}
1714
// Populates |supportedTypes| with the MIME types AVFoundation reports as
// playable. NOTE(review): the closing brace of this function was missing in
// the source as given (the next construct is at file scope) — restored here.
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& supportedTypes)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::getSupportedTypes");
    supportedTypes = AVFoundationMIMETypeCache::singleton().types();
}
1720
1721 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// True for the key systems this engine recognizes: FairPlay Streaming (both
// identifier spellings) and Clear Key.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringASCIICase(keySystem, "com.apple.fps")
        || equalIgnoringASCIICase(keySystem, "com.apple.fps.1_0")
        || equalIgnoringASCIICase(keySystem, "org.w3c.clearkey");
}
1728 #endif
1729
// Reports whether this engine can play content described by |parameters|.
// MSE and MediaStream are handled by dedicated engines, so both are rejected
// here.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(MEDIA_SOURCE)
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif
#if ENABLE(MEDIA_STREAM)
    if (parameters.isMediaStream)
        return MediaPlayer::IsNotSupported;
#endif

    auto containerType = parameters.type.containerType();
    if (isUnsupportedMIMEType(containerType))
        return MediaPlayer::IsNotSupported;

    // The container must appear in either the static list or AVFoundation's
    // own cache of playable types.
    if (!staticMIMETypeList().contains(containerType) && !AVFoundationMIMETypeCache::singleton().types().contains(containerType))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.type.codecs().isEmpty())
        return MediaPlayer::MayBeSupported;

    // Reject codecs the client requires hardware decode for when no hardware
    // decoder is available.
    if (!contentTypeMeetsHardwareDecodeRequirements(parameters.type, parameters.contentTypesRequiringHardwareSupport))
        return MediaPlayer::IsNotSupported;

    // Let AVFoundation judge the full "type; codecs=..." string.
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)containerType, (NSString *)parameters.type.parameter(ContentType::codecsParameter())];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1759
// Reports whether this engine supports the given legacy-EME key system,
// optionally constrained to |mimeType|. Always false when legacy EME is
// compiled out or |keySystem| is empty.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (!keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS.
        // Fixed: this path previously returned MediaPlayer::IsNotSupported —
        // an enum constant — from a bool function (correct only because that
        // enumerator happens to be 0).
        if (equalIgnoringASCIICase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringASCIICase(mimeType, "application/x-mpegurl"))
            return false;

        if (!keySystemIsSupported(keySystem))
            return false;

        if (!mimeType.isEmpty() && isUnsupportedMIMEType(mimeType))
            return false;

        // A non-empty MIME type must also be one this engine can play.
        if (!mimeType.isEmpty() && !staticMIMETypeList().contains(mimeType) && !AVFoundationMIMETypeCache::singleton().types().contains(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1785
1786 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1787 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// Satisfies an AVAssetResourceLoadingRequest from an in-memory key buffer,
// honoring the request's byte range, then marks the request finished.
static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
{
    // Describe the "resource": its total length, with byte-range access.
    if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
        [infoRequest setContentLength:keyData->byteLength()];
        [infoRequest setByteRangeAccessSupported:YES];
    }

    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
        long long start = [dataRequest currentOffset];
        // Clamp the requested range's end to the available data.
        long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);

        // A range starting at or beyond the end of the data cannot be served.
        if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
            [request finishLoadingWithError:nil];
            return;
        }

        ASSERT(start <= std::numeric_limits<int>::max());
        ASSERT(end <= std::numeric_limits<int>::max());
        RefPtr<ArrayBuffer> requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
        RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
        [dataRequest respondWithData:nsData.get()];
    }

    [request finishLoading];
}
1813 #endif
1814
// AVAssetResourceLoader callback: decides whether WebCore will service the
// given loading request. Returns true when the request is kept pending (a key
// request forwarded to the page, or a resource load started), false when
// AVFoundation should fail it or it was fulfilled synchronously.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    // FairPlay key requests arrive with the "skd" scheme.
    if (scheme == "skd") {
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        unsigned byteLength = initDataBuffer->byteLength();
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer.copyRef(), 0, byteLength);
        // Little-endian 32-bit length prefix.
        initDataView->set<uint32_t>(0, keyURISize, true);

        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer.copyRef(), 4, keyURI.length());
        // NOTE(review): the element count here is keyURI.length() / sizeof(unsigned char),
        // i.e. divided by 1 — if a UTF-16 unit count was intended this looks
        // suspicious; confirm against the initData layout consumers expect.
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

        RefPtr<Uint8Array> initData = Uint8Array::create(WTFMove(initDataBuffer), 0, byteLength);
        // Hand the initData to the page; if no key is needed, fail the request.
        if (!player()->keyNeeded(initData.get()))
            return false;

        // Keep the request pending until the page supplies the key.
        m_keyURIToRequestMap.set(keyURI, avRequest);
        return true;
    }

    if (scheme == "clearkey") {
        String keyID = [[[avRequest request] URL] resourceSpecifier];
        StringView keyIDView(keyID);
        CString utf8EncodedKeyId = UTF8Encoding().encode(keyIDView, URLEncodedEntitiesForUnencodables);

        RefPtr<Uint8Array> initData = Uint8Array::create(utf8EncodedKeyId.length());
        initData->setRange((JSC::Uint8Adaptor::Type*)utf8EncodedKeyId.data(), utf8EncodedKeyId.length(), 0);

        // If the key is already cached, answer the request synchronously.
        auto keyData = player()->cachedKeyForKeyId(keyID);
        if (keyData) {
            fulfillRequestWithKeyData(avRequest, keyData.get());
            return false;
        }

        if (!player()->keyNeeded(initData.get()))
            return false;

        m_keyURIToRequestMap.set(keyID, avRequest);
        return true;
    }
#endif

    // Everything else is serviced by a WebCore resource loader; the map entry
    // is removed in didStopLoadingRequest().
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1868
// Wraps the platform authentication challenge in a WebCore
// AuthenticationChallenge and forwards it to the MediaPlayer client.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForResponseToAuthenticationChallenge(NSURLAuthenticationChallenge* nsChallenge)
{
#if USE(CFURLCONNECTION)
    // CFNetwork path: bridge the NSURLAuthenticationChallenge to its CF form
    // with a client that routes responses back through the NS challenge.
    RefPtr<WebCoreNSURLAuthenticationChallengeClient> client = WebCoreNSURLAuthenticationChallengeClient::create(nsChallenge);
    RetainPtr<CFURLAuthChallengeRef> cfChallenge = adoptCF([nsChallenge _createCFAuthChallenge]);
    AuthenticationChallenge challenge(cfChallenge.get(), client.get());
#else
    AuthenticationChallenge challenge(nsChallenge);
#endif

    return player()->shouldWaitForResponseToAuthenticationChallenge(challenge);
}
1881
// AVAssetResourceLoader callback: the given request was cancelled, so stop
// the WebCore-side loader servicing it, if any. (The map entry itself is
// removed by didStopLoadingRequest().) The previously computed-but-unused
// URL scheme local has been removed.
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest))
        resourceLoader->stopLoading();
}
1891
// Forgets the loader associated with a finished/cancelled request; dropping
// the map's reference may destroy the loader.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
1896 #endif
1897
// This engine is only usable when both soft-linked frameworks
// (AVFoundation and CoreMedia) can be loaded at runtime.
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    return AVFoundationLibrary() && isCoreMediaFrameworkAvailable();
}
1902
// Intended to snap |timeValue| to an exact media sample time; currently an
// identity mapping (see FIXME below).
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1911
// How long (seconds) a queried media time may be served from cache. Modern
// OS versions get 0 (always ask AVFoundation); older macOS caches for 5s.
double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 1010
    return 0;
#else
    return 5;
#endif
}
1920
// Applies the aspect-ratio policy to the AVPlayerLayer's video gravity.
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayerManager->videoFullscreenLayer())
        return;
#endif

    // Disable implicit animations so the gravity change applies instantly.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    [m_videoLayer setVideoGravity:(shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize)];
    [CATransaction commit];
}
1939
// Returns the first track in |tracks| whose isEnabled flag is set, or nil.
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    NSUInteger firstEnabledIndex = [tracks indexOfObjectPassingTest:^(id track, NSUInteger, BOOL *) {
        return [static_cast<AVAssetTrack*>(track) isEnabled];
    }];
    return firstEnabledIndex == NSNotFound ? nil : [tracks objectAtIndex:firstEnabledIndex];
}
1949
// Responds to a change in the asset's/item's track collection: recomputes and
// caches hasVideo/hasAudio/hasClosedCaptions, the presentation size, the
// audio/video track lists, and fires characteristicsChanged() if the primary
// audio language changed.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the previous primary audio language so a change can be
    // detected at the end; languageOfPrimaryAudioTrack() recomputes lazily.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    // Coalesce characteristics-changed notifications until the end of this
    // function.
    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif
        auto size = firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize();
        // For videos with rotation tag set, the transformation above might return a CGSize instance with negative width or height.
        // See https://bugs.webkit.org/show_bug.cgi?id=172648.
        if (size.width() < 0)
            size.setWidth(-size.width());
        if (size.height() < 0)
            size.setHeight(-size.height());
        presentationSizeDidChange(size);
    } else {
        // A player item exists: derive the flags from the enabled cached
        // tracks' media types.
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // A selected media-selection option also counts toward the flags.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
#endif
#endif

        // Always says we have video if the AVPlayerLayer is ready for display to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Prefer media-selection-based captions when a legible group exists.
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    LOG(Media, "MediaPlayerPrivateAVFoundation:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s",
        this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio source provider pointed at the current audio track.
    if (m_provider)
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
#endif

    setDelayCharacteristicsChangedNotification(false);
}
2061
2062 #if ENABLE(VIDEO_TRACK)
2063
// Diffs the current AVPlayerItemTracks of |trackType| against the existing
// WebCore track wrappers in |oldItems|, updating |oldItems| in place and
// notifying the MediaPlayer of removed/added tracks via the given member
// function pointers. |itemFactory| wraps a new AVPlayerItemTrack.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Current platform tracks of the requested media type.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    // Platform tracks currently wrapped by WebCore items.
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    // Partition the old items into those to keep and those to remove, then
    // wrap each newly appeared platform track.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the player after oldItems reflects the new state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(*removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(*addedItem);
}
2107
2108 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2109
// Media-selection-group variant of the diff above: refreshes |group|'s
// options against |characteristics|, diffs them against the existing WebCore
// items in |oldItems|, updates |oldItems| in place, and notifies the
// MediaPlayer of removed/added tracks.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, const Vector<String>& characteristics, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions(characteristics);

    // Current options that map to a live AVMediaSelectionOption.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOptionType* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        newSelectionOptions.add(option);
    }

    // Options currently wrapped by WebCore items.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    // Partition old items into kept vs removed; items whose option vanished
    // entirely are removed too.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (!oldItem->mediaSelectionOption())
            removedItems.append(oldItem);
        else if (removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the player after oldItems reflects the new state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(*removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(*addedItem);
}
2169
2170 #endif
2171
// Refreshes the WebCore audio track list, preferring the media-selection
// group path where available and falling back to diffing the cached
// AVPlayerItemTracks. Note the `else` below deliberately spans the #endif:
// the fallback runs unconditionally when selection groups are compiled out.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    Vector<String> characteristics = player()->preferredAudioCharacteristics();
    // Lazily create the audible selection-group wrapper.
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, characteristics);
    }

    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, characteristics, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
    else
#endif
        determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

    // Re-sync each wrapper's properties with its underlying platform track.
    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateAudioTracks(%p) - audio track count was %lu, is %lu", this, count, m_audioTracks.size());
#endif
}
2198
// Refreshes the WebCore video track list from the cached AVPlayerItemTracks
// and, where available, the visual media-selection group.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create the visual selection-group wrapper.
    if (!m_visualGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, Vector<String>());
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, Vector<String>(), &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
#endif

    // Re-sync each wrapper's properties with its underlying platform track.
    // Fixed: this loop previously iterated m_audioTracks — a copy/paste from
    // updateAudioTracks() — leaving the just-updated video tracks stale.
    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateVideoTracks(%p) - video track count was %lu, is %lu", this, count, m_videoTracks.size());
#endif
}
2224
// Captions need a WebCore-rendered representation only while a native
// fullscreen layer is active; otherwise (or on other platforms) they do not.
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    return m_videoFullscreenLayerManager->videoFullscreenLayer() != nullptr;
#else
    return false;
#endif
}
2233
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    // Nothing to sync unless captions are being rendered into a fullscreen layer.
    if (!m_videoFullscreenLayerManager->videoFullscreenLayer())
        return;
    if (!m_textTrackRepresentationLayer)
        return;

    // Match the caption layer to the video content rect when a video layer
    // exists; otherwise cover the entire fullscreen frame.
    FloatRect fullscreenFrame = m_videoFullscreenLayerManager->videoFullscreenFrame();
    CGRect captionFrame;
    if (m_videoLayer)
        captionFrame = [m_videoLayer videoRect];
    else
        captionFrame = CGRectMake(0, 0, fullscreenFrame.width(), fullscreenFrame.height());
    [m_textTrackRepresentationLayer setFrame:captionFrame];
#endif
}
2245
// Installs (or removes) the platform layer that renders text track cues when
// the engine, rather than the page, must draw captions (e.g. fullscreen video).
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    PlatformLayer* representationLayer = representation ? representation->platformLayer() : nil;
    // Same layer as before: just refresh its frame and bail.
    if (representationLayer == m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        return;
    }

    // Detach the old layer before adopting the new one.
    if (m_textTrackRepresentationLayer)
        [m_textTrackRepresentationLayer removeFromSuperlayer];

    m_textTrackRepresentationLayer = representationLayer;

    // Only parent the caption layer when a fullscreen layer exists to host it.
    if (m_videoFullscreenLayerManager->videoFullscreenLayer() && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayerManager->videoFullscreenLayer() addSublayer:m_textTrackRepresentationLayer.get()];
    }

#else
    UNUSED_PARAM(representation);
#endif
}
2269
2270 #endif // ENABLE(VIDEO_TRACK)
2271
2272 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
2273
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    // Lazily create the Web Audio source provider and point it at the first
    // enabled audio track of the current item.
    if (m_provider)
        return m_provider.get();

    m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
    m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    return m_provider.get();
}
2282
2283 #endif
2284
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    // Ignore size notifications that arrive before the asset exists; otherwise
    // publish the cached presentation size as the natural size.
    if (m_avAsset)
        setNaturalSize(m_cachedPresentationSize);
}
2292     
bool MediaPlayerPrivateAVFoundationObjC::hasSingleSecurityOrigin() const
{
    // The resolved URL is only trustworthy once AVFoundation has finished
    // loading it; until then, conservatively report multiple origins.
    if (!m_avAsset)
        return false;
    if ([m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] != AVKeyValueStatusLoaded)
        return false;

    // Compare the origin we requested against the origin we actually resolved to.
    auto resolvedOrigin = SecurityOrigin::create(resolvedURL());
    auto requestedOrigin = SecurityOrigin::createFromString(assetURL());
    return resolvedOrigin->isSameSchemeHostPort(requestedOrigin.get());
}
2302
// Returns whether the media resource passed CORS checks. Only answerable when
// loading goes through WebKit's NSURLSession-backed resource loader.
bool MediaPlayerPrivateAVFoundationObjC::didPassCORSAccessCheck() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED > 101100
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    // Requires both the setting and the (SPI) URLSession accessor on the loader.
    if (!Settings::isAVFoundationNSURLSessionEnabled()
        || ![resourceLoader respondsToSelector:@selector(URLSession)])
        return false;

    WebCoreNSURLSession *session = (WebCoreNSURLSession *)resourceLoader.URLSession;
    // Only trust the answer when the session really is our WebCoreNSURLSession.
    if ([session isKindOfClass:[WebCoreNSURLSession class]])
        return session.didPassCORSAccessChecks;
#endif
    return false;
}
2317
2318 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
2319
// Creates the AVPlayerItemVideoOutput used to pull decoded frames for painting,
// and attaches it to the current player item. No-op without an item or when an
// output already exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // With VideoToolbox conversion available, accept any pixel format.
    NSDictionary* attributes = nil;
#else
    // Modernized to the literal-dictionary style already used by updateLastImage().
    NSDictionary* attributes = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#endif
    m_videoOutput = adoptNS([[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    // Media-data-ready callbacks are delivered on the shared pull-delegate queue.
    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p) - returning %p", this, m_videoOutput.get());
}
2341
// Detaches the video output from the player item (if any) and releases it.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    // Remove from the item before dropping our reference.
    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput(%p) - destroying  %p", this, m_videoOutput.get());

    m_videoOutput = 0;
}
2353
// Pulls the pixel buffer for the item's current time, or null when no newer
// frame is available.
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::createPixelBuffer()
{
    // Make sure an output exists before attempting to pull frames.
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

    CMTime itemTime = [m_avPlayerItem.get() currentTime];
    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:itemTime])
        return nullptr;

    return adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:itemTime itemTimeForDisplay:nil]);
}
2367
// Returns whether a frame is available for painting, either already captured
// (m_lastImage) or pending in the video output.
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;

    // A previously captured image counts as an available frame.
    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    CMTime itemTime = [m_avPlayerItem currentTime];
    return [m_videoOutput hasNewPixelBufferForItemTime:itemTime];
}
2381
// Refreshes m_lastImage from the video output, converting the current pixel
// buffer to a CGImage via the (lazily created) pixel buffer conformer.
void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
{
    RetainPtr<CVPixelBufferRef> pixelBuffer = createPixelBuffer();

    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
    // for the requested time has already been retrieved. In this case, the last valid image (if any)
    // should be displayed.
    if (!pixelBuffer)
        return;

    if (!m_pixelBufferConformer) {
#if USE(VIDEOTOOLBOX)
        // With VideoToolbox the conformer converts to 32BGRA itself.
        NSDictionary *attributes = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#else
        // Otherwise the video output was already configured for 32BGRA.
        NSDictionary *attributes = nil;
#endif
        m_pixelBufferConformer = std::make_unique<PixelBufferConformerCV>((CFDictionaryRef)attributes);
    }

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    m_lastImage = m_pixelBufferConformer->createImageFromPixelBuffer(pixelBuffer.get());

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateLastImage(%p) - creating buffer took %.4f", this, narrowPrecisionToFloat(duration));
#endif
}
2412
// Paints the current video frame into the given graphics context, honoring the
// track's preferred transform (e.g. rotation from recorded video).
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext& context, const FloatRect& outputRect)
{
    // Block (up to 1s) for the first frame so we don't paint nothing.
    if (m_videoOutput && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();

    updateLastImage();

    if (!m_lastImage)
        return;

    // The preferred transform comes from the first enabled visual track.
    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(%p)", this);

    GraphicsContextStateSaver stateSaver(context);
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
    // Pre-apply the inverse so that concatenating the transform below maps the
    // image into the requested output rect.
    FloatRect transformedOutputRect = videoTransform.inverse().value_or(AffineTransform()).mapRect(outputRect);

    context.concatCTM(videoTransform);
    context.drawNativeImage(m_lastImage.get(), imageRect.size(), transformedOutputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();

}
2443
// Creates the AVPlayerItemVideoOutput used for GL texture upload and attaches
// it to the current player item. No-op without an item or if already created.
void MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_openGLVideoOutput)
        return;

    // Request IOSurface-backed buffers compatible with GL FBO attachment.
#if PLATFORM(IOS)
    NSDictionary* pixelBufferAttributes = @{(NSString *)kCVPixelBufferIOSurfaceOpenGLESFBOCompatibilityKey: @YES};
#else
    NSDictionary* pixelBufferAttributes = @{(NSString *)kCVPixelBufferIOSurfaceOpenGLFBOCompatibilityKey: @YES};
#endif
    m_openGLVideoOutput = adoptNS([[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:pixelBufferAttributes]);
    ASSERT(m_openGLVideoOutput);

    [m_avPlayerItem.get() addOutput:m_openGLVideoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput(%p) - returning %p", this, m_openGLVideoOutput.get());
}
2463
// Detaches the OpenGL video output from the player item (if any) and releases it.
void MediaPlayerPrivateAVFoundationObjC::destroyOpenGLVideoOutput()
{
    if (!m_openGLVideoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_openGLVideoOutput.get()];
    // Fix: previously logged m_videoOutput (copy-paste from destroyVideoOutput());
    // log the OpenGL output actually being destroyed.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyOpenGLVideoOutput(%p) - destroying  %p", this, m_openGLVideoOutput.get());

    m_openGLVideoOutput = 0;
}
2475
// Refreshes m_lastOpenGLImage with the newest pixel buffer, if one is available
// for the item time corresponding to "now" on the host clock.
void MediaPlayerPrivateAVFoundationObjC::updateLastOpenGLImage()
{
    if (!m_openGLVideoOutput)
        return;

    CMTime itemTime = [m_openGLVideoOutput itemTimeForHostTime:CACurrentMediaTime()];
    // Keep the previous image when no newer frame has been decoded.
    if ([m_openGLVideoOutput hasNewPixelBufferForItemTime:itemTime])
        m_lastOpenGLImage = adoptCF([m_openGLVideoOutput copyPixelBufferForItemTime:itemTime itemTimeForDisplay:nil]);
}
2487
// Copies the current video frame into the caller's GL texture. Returns false
// on any unsupported configuration or failure; true on success.
bool MediaPlayerPrivateAVFoundationObjC::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    // Flipping and premultiplication are not supported by this path.
    if (flipY || premultiplyAlpha)
        return false;

    ASSERT(context);

    // Lazily set up the GL-compatible output, then grab the newest frame.
    if (!m_openGLVideoOutput)
        createOpenGLVideoOutput();

    updateLastOpenGLImage();

    if (!m_lastOpenGLImage)
        return false;

    size_t width = CVPixelBufferGetWidth(m_lastOpenGLImage.get());
    size_t height = CVPixelBufferGetHeight(m_lastOpenGLImage.get());

    // Texture cache creation can fail; treat that as an unsupported copy.
    if (!m_textureCache) {
        m_textureCache = TextureCacheCV::create(*context);
        if (!m_textureCache)
            return false;
    }

    RetainPtr<CVOpenGLTextureRef> videoTexture = m_textureCache->textureFromImage(m_lastOpenGLImage.get(), outputTarget, level, internalFormat, format, type);

    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierCV>(*context);

    return m_videoTextureCopier->copyVideoTextureToPlatformTexture(videoTexture.get(), width, height, outputTexture, outputTarget, level, internalFormat, format, type, premultiplyAlpha, flipY);
}
2519
// Returns the most recent frame as a native image, refreshing it first.
// May return null if no frame has ever been produced.
NativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage;
}
2525
// Blocks the current thread (up to 1 second) until the video output reports
// new media data; outputMediaDataWillChange() signals the semaphore.
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    // Ask for notification as soon as any media data is available.
    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for 1 second.
    long result = dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(0, 1 * NSEC_PER_SEC));

    // Nonzero result means the wait timed out rather than being signaled.
    if (result)
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange(%p) timed out", this);
}
2539
// Video-output delegate callback (on the pull-delegate queue): wakes any thread
// blocked in waitForVideoOutputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutputType *)
{
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
2544
2545 #endif
2546
2547 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
2548
// Removes and returns the pending resource-loading request for the given key
// URI, or null if none is outstanding.
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
2553
// Called when a key becomes available in the player's key cache: fulfills every
// pending loading request whose key is now cached, then drops those entries.
void MediaPlayerPrivateAVFoundationObjC::keyAdded()
{
    Vector<String> satisfiedKeyIds;

    for (auto& entry : m_keyURIToRequestMap) {
        auto keyData = player()->cachedKeyForKeyId(entry.key);
        if (!keyData)
            continue;

        fulfillRequestWithKeyData(entry.value.get(), keyData.get());
        satisfiedKeyIds.append(entry.key);
    }

    // Remove fulfilled entries after iteration so the map is not mutated mid-loop.
    for (auto& keyId : satisfiedKeyIds)
        m_keyURIToRequestMap.remove(keyId);
}
2573
// Clears the weak reference to the CDM session; the caller must pass the
// session we currently hold (asserted in debug builds).
void MediaPlayerPrivateAVFoundationObjC::removeSession(CDMSession& session)
{
    ASSERT_UNUSED(session, &session == m_session);
    m_session = nullptr;
}
2579
// Creates a CDM session for the given key system, or null if unsupported.
// Keeps a weak pointer to the session; ownership passes to the caller.
std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem, CDMSessionClient* client)
{
    if (!keySystemIsSupported(keySystem))
        return nullptr;
    auto session = std::make_unique<CDMSessionAVFoundationObjC>(this, client);
    m_session = session->createWeakPtr();
    return WTFMove(session);
}
2588
// Surfaces an HDCP (output protection) failure to the CDM session when the
// output becomes obscured due to insufficient external protection.
void MediaPlayerPrivateAVFoundationObjC::outputObscuredDueToInsufficientExternalProtectionChanged(bool newValue)
{
    if (m_session && newValue)
        m_session->playerDidReceiveError([NSError errorWithDomain:@"com.apple.WebKit" code:'HDCP' userInfo:nil]);
}
2594
2595 #endif
2596
2597 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
2598
// Diffs the cached AVPlayerItem tracks against our known legacy closed-caption
// text tracks, creating privates for new CC tracks and removing stale ones.
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Deselect any legible media option; legacy CC is handled separately.
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    // Start by assuming every current track was removed; matches below are
    // pruned from this list, and whatever remains really was removed.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        // Iterate backwards so remove() does not disturb unvisited indices.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(m_textTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;
        
        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2633
2634 #endif
2635
// Returns the asset's audible tracks, or nil until the "tracks" key has
// finished asynchronous loading (querying earlier can block).
NSArray* MediaPlayerPrivateAVFoundationObjC::safeAVAssetTracksForAudibleMedia()
{
    if (!m_avAsset || [m_avAsset.get() statusOfValueForKey:@"tracks" error:NULL] != AVKeyValueStatusLoaded)
        return nil;

    return [m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible];
}
2646
2647 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2648
// Media selection groups may only be queried once the relevant asset key has
// completed its asynchronous load.
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    if (!m_avAsset)
        return false;

    return [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2659
// Legible (caption/subtitle) selection group, or nil until selection data loads.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible] : nil;
}
2667
// Audible selection group, or nil until selection data loads.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible] : nil;
}
2675
// Visual selection group, or nil until selection data loads.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual] : nil;
}
2683
// Diffs the legible media selection options against our known text tracks,
// creating privates for new options and removing tracks whose option vanished.
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Assume every current track was removed; matches found below are pruned
    // from this list, so whatever remains afterwards really was removed.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        // Iterate backwards so remove() does not disturb unvisited indices.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy CC tracks are not selection-option based; skip them here.
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2739
// Lazily creates the single in-band metadata text track (HLS timed metadata)
// and registers it with the media player.
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    if (m_metadataTrack)
        return;

    m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
    m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
    player()->addTextTrack(*m_metadataTrack);
}
2749
// Forwards newly decoded cue payloads (attributed strings and/or raw samples)
// to the currently selected text track; drops them when no track is selected.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    ASSERT(time >= MediaTime::zeroTime());

    if (!m_currentTextTrack)
        return;

    // NSArray is toll-free bridged to CFArrayRef.
    m_currentTextTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), reinterpret_cast<CFArrayRef>(nativeSamples), time);
}
2759
// Discards any partially accumulated cue state on the current text track
// (e.g. after a seek or track flush).
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::flushCues(%p)", this);

    if (!m_currentTextTrack)
        return;
    
    m_currentTextTrack->resetCueValues();
}
2769
2770 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2771
// Selects the given in-band text track (or deselects, when null), routing the
// selection through the mechanism appropriate to the track category:
// legacy closed captions, out-of-band options, or in-band selection options.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");
        
    m_currentTextTrack = track;

    if (track) {
        // NOTE: setClosedCaptionDisplayEnabled: is deprecated; the pragmas
        // suppress the warning until a replacement path exists.
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#pragma clang diagnostic pop
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
        // Deselecting: clear both the media selection option and legacy CC display.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
#pragma clang diagnostic pop
    }

}
2806
// Returns the BCP-47-ish language of the primary audio track, caching the
// result in m_languageOfPrimaryAudioTrack. Empty string when indeterminate.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    // Cached answer from a previous call.
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
#pragma clang diagnostic pop
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        // Zero or multiple audio tracks: no single "primary" language exists.
        m_languageOfPrimaryAudioTrack = emptyString();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - %lu audio tracks, returning emptyString()", this, static_cast<unsigned long>(tracks ? [tracks count] : 0));
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

#if !LOG_DISABLED
    if (m_languageOfPrimaryAudioTrack == emptyString())
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
    else
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
#endif

    return m_languageOfPrimaryAudioTrack;
}
2851
2852 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// Reports whether playback is currently routed to a wireless target (AirPlay
// or a mock target on Mac; external playback state on iOS).
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    bool wirelessTarget = false;

#if !PLATFORM(IOS)
    if (m_playbackTarget) {
        // AVFoundation targets are live when external playback is active;
        // mock targets are live when set to play and their route is active.
        if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation)
            wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
        else
            wirelessTarget = m_shouldPlayToPlaybackTarget && m_playbackTarget->hasActiveRoute();
    }
#else
    wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
#endif

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless(%p) - returning %s", this, boolString(wirelessTarget));

    return wirelessTarget;
}
2872
// Maps the platform's external-device type to MediaPlayer's target type.
// On Mac, only AirPlay targets are possible.
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

#if PLATFORM(IOS)
    switch (wkExernalDeviceTypeForPlayer(m_avPlayer.get())) {
    case wkExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case wkExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case wkExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    // All enum values are handled above; reaching here means a new type was added.
    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;

#else
    return MediaPlayer::TargetTypeAirPlay;
#endif
}
2895
// Returns the user-visible name of the wireless playback target, or the empty
// string when there is no player or no known target.
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();

    String wirelessTargetName;
#if !PLATFORM(IOS)
    // On Mac the name comes from the explicit playback target, if one is set.
    if (m_playbackTarget)
        wirelessTargetName = m_playbackTarget->deviceName();
#else
    wirelessTargetName = wkExernalDeviceDisplayNameForPlayer(m_avPlayer.get());
#endif
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName(%p) - returning %s", this, wirelessTargetName.utf8().data());

    return wirelessTargetName;
}
2912
// Reports whether wireless (external) video playback is disabled. Refreshes the
// cached value from the AVPlayer when one exists; otherwise answers from cache.
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    if (m_avPlayer) {
        m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled(%p) - returning %s", this, boolString(!m_allowsWirelessVideoPlayback));
    }

    return !m_allowsWirelessVideoPlayback;
}
2923
// Enables/disables wireless (external) video playback, caching the policy so
// it can be applied later if the AVPlayer does not exist yet.
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(%p) - %s", this, boolString(disabled));
    m_allowsWirelessVideoPlayback = !disabled;
    if (!m_avPlayer)
        return;

    // Suppress re-entrant notifications while poking the AVPlayer.
    setDelayCallbacks(true);
    [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
    setDelayCallbacks(false);
}
2935
2936 #if !PLATFORM(IOS)
2937
// Adopts a new wireless playback target (Mac only). AVFoundation targets carry
// an AVOutputContext; mock targets do not.
void MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
{
    m_playbackTarget = WTFMove(target);

    m_outputContext = m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation ? toMediaPlaybackTargetMac(m_playbackTarget.get())->outputContext() : nullptr;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(%p) - target = %p, device name = %s", this, m_outputContext.get(), m_playbackTarget->deviceName().utf8().data());

    // A target without an active route cannot be played to.
    if (!m_playbackTarget->hasActiveRoute())
        setShouldPlayToPlaybackTarget(false);
}
2949
// Starts/stops routing playback to the current wireless target (Mac only).
// AVFoundation targets are applied via the AVPlayer's outputContext; mock
// targets just notify the player that wireless state changed.
void MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(bool shouldPlay)
{
    if (m_shouldPlayToPlaybackTarget == shouldPlay)
        return;

    m_shouldPlayToPlaybackTarget = shouldPlay;

    if (!m_playbackTarget)
        return;

    if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation) {
        AVOutputContext *newContext = shouldPlay ? m_outputContext.get() : nil;

        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(%p) - target = %p, shouldPlay = %s", this, newContext, boolString(shouldPlay));

        if (!m_avPlayer)
            return;

        // Avoid redundant context churn when the player already has this context.
        RetainPtr<AVOutputContext> currentContext = m_avPlayer.get().outputContext;
        if ((!newContext && !currentContext.get()) || [currentContext.get() isEqual:newContext])
            return;

        // Suppress re-entrant notifications while changing the output context.
        setDelayCallbacks(true);
        m_avPlayer.get().outputContext = newContext;
        setDelayCallbacks(false);

        return;
    }

    ASSERT(m_playbackTarget->targetType() == MediaPlaybackTarget::Mock);

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(%p) - target = {Mock}, shouldPlay = %s", this, boolString(shouldPlay));

    // Mock targets: asynchronously notify on the main thread, guarding against
    // this object being destroyed before the notification runs.
    setDelayCallbacks(true);
    auto weakThis = createWeakPtr();
    scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis] {
        if (!weakThis)
            return;
        weakThis->playbackTargetIsWirelessDidChange();
    }));
    setDelayCallbacks(false);
}
2992
2993 #endif // !PLATFORM(IOS)
2994
// iOS only: external playback while an external screen is active is allowed
// only when the element is in standard fullscreen mode.
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
#if PLATFORM(IOS)
    if (!m_avPlayer)
        return;

    [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:player()->fullscreenMode() & MediaPlayer::VideoFullscreenModeStandard];
#endif
}
3004
3005 #endif
3006
// KVO-driven: caches the AVPlayerItem status and re-derives ready/network state.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;

    updateStates();
}
3013
// The three will/did-change pairs below implement a balanced counter protocol:
// each "willChange" increments m_pendingStatusChanges, each "didChange"
// decrements it, and states are only recomputed once ALL in-flight KVO status
// changes have landed, so updateStates() never sees a half-updated snapshot.

void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    m_pendingStatusChanges++;
}

void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}

void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    m_pendingStatusChanges++;
}

void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}

void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    m_pendingStatusChanges++;
}
3046
3047 void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
3048 {
3049     m_cachedBufferFull = bufferFull;