[WTF] Clean up StringStatics.cpp by using LazyNeverDestroyed<> for Atoms
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27 #import "MediaPlayerPrivateAVFoundationObjC.h"
28
29 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
30
31 #import "AVAssetTrackUtilities.h"
32 #import "AVFoundationMIMETypeCache.h"
33 #import "AVFoundationSPI.h"
34 #import "AVTrackPrivateAVFObjCImpl.h"
35 #import "AudioSourceProviderAVFObjC.h"
36 #import "AudioTrackPrivateAVFObjC.h"
37 #import "AuthenticationChallenge.h"
38 #import "CDMSessionAVFoundationObjC.h"
39 #import "Cookie.h"
40 #import "Extensions3D.h"
41 #import "FloatConversion.h"
42 #import "FloatConversion.h"
43 #import "GraphicsContext.h"
44 #import "GraphicsContext3D.h"
45 #import "GraphicsContextCG.h"
46 #import "InbandMetadataTextTrackPrivateAVF.h"
47 #import "InbandTextTrackPrivateAVFObjC.h"
48 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
49 #import "Logging.h"
50 #import "MediaPlaybackTargetMac.h"
51 #import "MediaPlaybackTargetMock.h"
52 #import "MediaSelectionGroupAVFObjC.h"
53 #import "MediaTimeAVFoundation.h"
54 #import "OutOfBandTextTrackPrivateAVF.h"
55 #import "PixelBufferConformerCV.h"
56 #import "PlatformTimeRanges.h"
57 #import "QuartzCoreSPI.h"
58 #import "SecurityOrigin.h"
59 #import "SerializedPlatformRepresentationMac.h"
60 #import "Settings.h"
61 #import "TextEncoding.h"
62 #import "TextTrackRepresentation.h"
63 #import "TextureCacheCV.h"
64 #import "URL.h"
65 #import "VideoTextureCopierCV.h"
66 #import "VideoTrackPrivateAVFObjC.h"
67 #import "WebCoreAVFResourceLoader.h"
68 #import "WebCoreCALayerExtras.h"
69 #import "WebCoreNSURLSession.h"
70 #import "WebCoreSystemInterface.h"
71 #import <functional>
72 #import <map>
73 #import <objc/runtime.h>
74 #import <runtime/DataView.h>
75 #import <runtime/JSCInlines.h>
76 #import <runtime/TypedArrayInlines.h>
77 #import <runtime/Uint16Array.h>
78 #import <runtime/Uint32Array.h>
79 #import <runtime/Uint8Array.h>
80 #import <wtf/BlockObjCExceptions.h>
81 #import <wtf/CurrentTime.h>
82 #import <wtf/ListHashSet.h>
83 #import <wtf/NeverDestroyed.h>
84 #import <wtf/OSObjectPtr.h>
85 #import <wtf/text/CString.h>
86
87 #if ENABLE(AVF_CAPTIONS)
88 #include "TextTrack.h"
89 #endif
90
91 #import <AVFoundation/AVAssetImageGenerator.h>
92 #import <AVFoundation/AVAssetTrack.h>
93 #import <AVFoundation/AVMediaSelectionGroup.h>
94 #import <AVFoundation/AVMetadataItem.h>
95 #import <AVFoundation/AVPlayer.h>
96 #import <AVFoundation/AVPlayerItem.h>
97 #import <AVFoundation/AVPlayerItemOutput.h>
98 #import <AVFoundation/AVPlayerItemTrack.h>
99 #import <AVFoundation/AVPlayerLayer.h>
100 #import <AVFoundation/AVTime.h>
101
102 #if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
103 #import "VideoFullscreenLayerManager.h"
104 #endif
105
106 #if PLATFORM(IOS)
107 #import "WAKAppKitStubs.h"
108 #import <CoreImage/CoreImage.h>
109 #import <mach/mach_port.h>
110 #else
111 #import <Foundation/NSGeometry.h>
112 #import <QuartzCore/CoreImage.h>
113 #endif
114
115 #if USE(VIDEOTOOLBOX)
116 #import <CoreVideo/CoreVideo.h>
117 #import <VideoToolbox/VideoToolbox.h>
118 #endif
119
120 #if USE(CFURLCONNECTION)
121 #include "CFNSURLConnectionSPI.h"
122 #endif
123
124 #import "CoreVideoSoftLink.h"
125
namespace std {
// Specialization so std algorithms that require iterator_traits::value_type can
// operate on WTF HashSet iterators — presumably because HashSet's iterator does
// not expose the nested typedefs std expects (NOTE(review): confirm against WTF).
template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
    typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
};
}
131
132 #if ENABLE(AVF_CAPTIONS)
133 // Note: This must be defined before our SOFT_LINK macros:
134 @class AVMediaSelectionOption;
135 @interface AVMediaSelectionOption (OutOfBandExtensions)
136 @property (nonatomic, readonly) NSString* outOfBandSource;
137 @property (nonatomic, readonly) NSString* outOfBandIdentifier;
138 @end
139 #endif
140
141 @interface AVURLAsset (WebKitExtensions)
142 @property (nonatomic, readonly) NSURL *resolvedURL;
143 @end
144
145 typedef AVPlayer AVPlayerType;
146 typedef AVPlayerItem AVPlayerItemType;
147 typedef AVPlayerItemLegibleOutput AVPlayerItemLegibleOutputType;
148 typedef AVPlayerItemVideoOutput AVPlayerItemVideoOutputType;
149 typedef AVMetadataItem AVMetadataItemType;
150 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
151 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
152 typedef AVAssetCache AVAssetCacheType;
153
154 #pragma mark - Soft Linking
155
156 // Soft-linking headers must be included last since they #define functions, constants, etc.
157 #import "CoreMediaSoftLink.h"
158
159 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
160
161 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
162
163 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayer)
164 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerItem)
165 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerItemVideoOutput)
166 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerLayer)
167 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVURLAsset)
168 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVAssetImageGenerator)
169 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVMetadataItem)
170 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVAssetCache)
171
172 SOFT_LINK_CLASS(CoreImage, CIContext)
173 SOFT_LINK_CLASS(CoreImage, CIImage)
174
175 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString*)
176 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString*)
177 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
178 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
179 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
180 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
181 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
182 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeMetadata, NSString *)
183 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
184 SOFT_LINK_POINTER(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
185 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
186 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
187 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
188 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
189 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
190
191 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
192 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetRequiresCustomURLLoadingKey, NSString *)
193 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetOutOfBandMIMETypeKey, NSString *)
194
195 #define AVPlayer initAVPlayer()
196 #define AVPlayerItem initAVPlayerItem()
197 #define AVPlayerLayer initAVPlayerLayer()
198 #define AVURLAsset initAVURLAsset()
199 #define AVAssetImageGenerator initAVAssetImageGenerator()
200 #define AVPlayerItemVideoOutput initAVPlayerItemVideoOutput()
201 #define AVMetadataItem initAVMetadataItem()
202 #define AVAssetCache initAVAssetCache()
203
204 #define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
205 #define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
206 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
207 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
208 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
209 #define AVMediaTypeVideo getAVMediaTypeVideo()
210 #define AVMediaTypeAudio getAVMediaTypeAudio()
211 #define AVMediaTypeMetadata getAVMediaTypeMetadata()
212 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
213 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
214 #define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
215 #define AVURLAssetRequiresCustomURLLoadingKey getAVURLAssetRequiresCustomURLLoadingKey()
216 #define AVURLAssetOutOfBandMIMETypeKey getAVURLAssetOutOfBandMIMETypeKey()
217 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
218 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
219 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
220 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
221 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
222
223 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
224 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
225 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
226
227 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
228 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
229 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
230
231 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
232 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
233 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
234 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
235
236 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
237 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
238 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
239 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
240 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
241 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
242 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
243 #endif
244
245 #if ENABLE(AVF_CAPTIONS)
246 SOFT_LINK_POINTER(AVFoundation, AVURLAssetCacheKey, NSString*)
247 SOFT_LINK_POINTER(AVFoundation, AVURLAssetHTTPCookiesKey, NSString*)
248 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
249 SOFT_LINK_POINTER(AVFoundation, AVURLAssetUsesNoPersistentCacheKey, NSString*)
250 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
251 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
252 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
253 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
254 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
255 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
256 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
257 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
258 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicIsAuxiliaryContent, NSString*)
259
260 #define AVURLAssetHTTPCookiesKey getAVURLAssetHTTPCookiesKey()
261 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
262 #define AVURLAssetCacheKey getAVURLAssetCacheKey()
263 #define AVURLAssetUsesNoPersistentCacheKey getAVURLAssetUsesNoPersistentCacheKey()
264 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
265 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
266 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
267 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
268 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
269 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
270 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
271 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
272 #define AVMediaCharacteristicIsAuxiliaryContent getAVMediaCharacteristicIsAuxiliaryContent()
273 #endif
274
275 #if ENABLE(DATACUE_VALUE)
276 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString*)
277 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMetadataKeySpaceISOUserData, NSString*)
278 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString*)
279 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceiTunes, NSString*)
280 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceID3, NSString*)
281
282 #define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
283 #define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
284 #define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
285 #define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
286 #define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
287 #endif
288
289 #if PLATFORM(IOS)
290 SOFT_LINK_POINTER(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
291 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
292 #endif
293
294 SOFT_LINK_FRAMEWORK(MediaToolbox)
295 SOFT_LINK_OPTIONAL(MediaToolbox, MTEnableCaption2015Behavior, Boolean, (), ())
296
297 using namespace WebCore;
298
// Context values passed as the `context` pointer when registering KVO observers
// (see e.g. createAVPlayerLayer(), which registers with
// MediaPlayerAVFoundationObservationContextAVPlayerLayer), letting the observer
// callback distinguish which kind of object produced the notification.
enum MediaPlayerAVFoundationObservationContext {
    MediaPlayerAVFoundationObservationContextPlayerItem,
    MediaPlayerAVFoundationObservationContextPlayerItemTrack,
    MediaPlayerAVFoundationObservationContextPlayer,
    MediaPlayerAVFoundationObservationContextAVPlayerLayer,
};
305
// Observer helper that receives KVO callbacks, NSNotifications, and (when
// legible output is supported) caption push-delegate callbacks, and forwards
// them to the owning MediaPlayerPrivateAVFoundationObjC.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
#else
@interface WebCoreAVFMovieObserver : NSObject
#endif
{
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Owning player; detached via -disconnect.
    int m_delayCallbacks;
}
-(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
-(void)disconnect;
-(void)metadataLoaded;
-(void)didEnd:(NSNotification *)notification;
// keyPath is now explicitly typed as (id); it was previously declared with an
// implicit type, which the compiler treats as id but is discouraged style.
-(void)observeValueForKeyPath:(id)keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;
#endif
@end
325
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// AVAssetResourceLoaderDelegate adapter: forwards resource-loading requests to
// the owning MediaPlayerPrivateAVFoundationObjC so WebKit can service them.
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Owning player; reset with -setCallback: during teardown.
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
@end
#endif
335
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// AVPlayerItemOutputPullDelegate adapter for the video output: notifies the
// owning player when new media data will become available or the output is flushed.
@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    MediaPlayerPrivateAVFoundationObjC *m_callback; // Owning player; reset with -setCallback: during teardown.
    dispatch_semaphore_t m_semaphore;
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
@end
#endif
347
348 namespace WebCore {
349
// Forward declarations for file-local helpers defined later in this file.
static NSArray *assetMetadataKeyNames();
static NSArray *itemKVOProperties();
static NSArray *assetTrackMetadataKeyNames();
static NSArray *playerKVOProperties();
static AVAssetTrack* firstEnabledTrack(NSArray* tracks);
355
#if !LOG_DISABLED
// Renders a bool as a human-readable C string for log messages.
static const char *boolString(bool val)
{
    if (val)
        return "true";
    return "false";
}
#endif
362
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Returns the process-wide serial queue on which resource-loader delegate
// callbacks are delivered; created once on first use.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_once_t creationToken;
    static dispatch_queue_t sharedQueue;
    dispatch_once(&creationToken, ^{
        sharedQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return sharedQueue;
}
#endif
374
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Returns the process-wide serial queue on which the video-output pull
// delegate's callbacks are delivered; created once on first use.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_once_t creationToken;
    static dispatch_queue_t sharedQueue;
    dispatch_once(&creationToken, ^{
        sharedQueue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return sharedQueue;
}
#endif
386
#if USE(CFURLCONNECTION)
// Bridges WebCore's AuthenticationClient interface to an NSURLAuthenticationChallenge:
// each AuthenticationClient callback is translated into the corresponding message
// on the challenge's sender.
class WebCoreNSURLAuthenticationChallengeClient : public RefCounted<WebCoreNSURLAuthenticationChallengeClient>, public AuthenticationClient {
public:
    static RefPtr<WebCoreNSURLAuthenticationChallengeClient> create(NSURLAuthenticationChallenge *challenge)
    {
        return adoptRef(new WebCoreNSURLAuthenticationChallengeClient(challenge));
    }

    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::ref;
    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::deref;

private:
    WebCoreNSURLAuthenticationChallengeClient(NSURLAuthenticationChallenge *challenge)
        : m_challenge(challenge)
    {
        ASSERT(m_challenge);
    }

    // AuthenticationClient ref-counting is forwarded to RefCounted.
    void refAuthenticationClient() override { ref(); }
    void derefAuthenticationClient() override { deref(); }

    void receivedCredential(const AuthenticationChallenge&, const Credential& credential) override
    {
        [[m_challenge sender] useCredential:credential.nsCredential() forAuthenticationChallenge:m_challenge.get()];
    }

    void receivedRequestToContinueWithoutCredential(const AuthenticationChallenge&) override
    {
        [[m_challenge sender] continueWithoutCredentialForAuthenticationChallenge:m_challenge.get()];
    }

    void receivedCancellation(const AuthenticationChallenge&) override
    {
        [[m_challenge sender] cancelAuthenticationChallenge:m_challenge.get()];
    }

    // The following two sender methods are optional protocol members, so probe
    // with respondsToSelector: before messaging.
    void receivedRequestToPerformDefaultHandling(const AuthenticationChallenge&) override
    {
        if ([[m_challenge sender] respondsToSelector:@selector(performDefaultHandlingForAuthenticationChallenge:)])
            [[m_challenge sender] performDefaultHandlingForAuthenticationChallenge:m_challenge.get()];
    }

    void receivedChallengeRejection(const AuthenticationChallenge&) override
    {
        if ([[m_challenge sender] respondsToSelector:@selector(rejectProtectionSpaceAndContinueWithChallenge:)])
            [[m_challenge sender] rejectProtectionSpaceAndContinueWithChallenge:m_challenge.get()];
    }

    RetainPtr<NSURLAuthenticationChallenge> m_challenge; // Retained; asserted non-null at construction.
};
#endif
438
// Registers this engine's factory and capability callbacks with the media
// engine registrar, then kicks off loading of the supported MIME-type cache.
// Does nothing when the (soft-linked) AVFoundation support is unavailable.
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player); },
            getSupportedTypes, supportsType, originsInMediaCache, clearMediaCache, clearMediaCacheForOrigins, supportsKeySystem);
    AVFoundationMIMETypeCache::singleton().loadTypes();
}
448
// Returns the AVAssetCache rooted at `path`. An empty path falls back to a
// "MediaCache" directory inside the system temporary directory.
static AVAssetCacheType *assetCacheForPath(const String& path)
{
    NSURL *cacheDirectoryURL = path.isEmpty()
        ? [[NSURL fileURLWithPath:NSTemporaryDirectory()] URLByAppendingPathComponent:@"MediaCache" isDirectory:YES]
        : [NSURL fileURLWithPath:path isDirectory:YES];

    return [initAVAssetCache() assetCacheWithURL:cacheDirectoryURL];
}
460
// Collects the security origins of every entry currently in the media cache at
// `path`. Cache keys that do not parse as valid URLs are skipped.
HashSet<RefPtr<SecurityOrigin>> MediaPlayerPrivateAVFoundationObjC::originsInMediaCache(const String& path)
{
    HashSet<RefPtr<SecurityOrigin>> origins;
    for (NSString *cacheKey in [assetCacheForPath(path) allKeys]) {
        URL cacheKeyURL = URL(URL(), cacheKey);
        if (!cacheKeyURL.isValid())
            continue;
        origins.add(SecurityOrigin::create(cacheKeyURL));
    }
    return origins;
}
471
// Converts an NSDate (seconds since the Unix epoch, per timeIntervalSince1970)
// into a std::chrono::system_clock::time_point. `date` must be non-null.
static std::chrono::system_clock::time_point toSystemClockTime(NSDate *date)
{
    ASSERT(date);
    std::chrono::duration<double> secondsSinceEpoch(date.timeIntervalSince1970);
    return std::chrono::system_clock::time_point(std::chrono::duration_cast<std::chrono::system_clock::duration>(secondsSinceEpoch));
}
479
// Removes cached media at `path` that was modified after `modifiedSince`.
// First removes matching AVAssetCache entries, then prunes loose
// "CachedMedia-" files left in the cache directory. A cutoff at or before the
// epoch means "clear everything" and deletes the whole directory.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCache(const String& path, std::chrono::system_clock::time_point modifiedSince)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::clearMediaCache()");
    
    AVAssetCacheType* assetCache = assetCacheForPath(path);
    
    // Pass 1: drop AVAssetCache entries newer than the cutoff.
    for (NSString *key in [assetCache allKeys]) {
        if (toSystemClockTime([assetCache lastModifiedDateOfEntryForKey:key]) > modifiedSince)
            [assetCache removeEntryForKey:key];
    }

    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSURL *baseURL = [assetCache URL];

    // Default-constructed (epoch) cutoff: remove the entire cache directory.
    if (modifiedSince <= std::chrono::system_clock::time_point { }) {
        [fileManager removeItemAtURL:baseURL error:nil];
        return;
    }
    
    NSArray *propertyKeys = @[NSURLNameKey, NSURLContentModificationDateKey, NSURLIsRegularFileKey];
    NSDirectoryEnumerator *enumerator = [fileManager enumeratorAtURL:baseURL includingPropertiesForKeys:
        propertyKeys options:NSDirectoryEnumerationSkipsSubdirectoryDescendants
        errorHandler:nil];
    
    // Pass 2: collect candidate files first, then delete, so the directory is
    // not mutated while the enumerator is walking it.
    RetainPtr<NSMutableArray> urlsToDelete = adoptNS([[NSMutableArray alloc] init]);
    for (NSURL *fileURL : enumerator) {
        NSDictionary *fileAttributes = [fileURL resourceValuesForKeys:propertyKeys error:nil];
    
        // Only regular files with the media-cache naming prefix are candidates.
        if (![fileAttributes[NSURLNameKey] hasPrefix:@"CachedMedia-"])
            continue;
        
        if (![fileAttributes[NSURLIsRegularFileKey] boolValue])
            continue;
        
        if (toSystemClockTime(fileAttributes[NSURLContentModificationDateKey]) <= modifiedSince)
            continue;
        
        [urlsToDelete addObject:fileURL];
    }
    
    for (NSURL *fileURL in urlsToDelete.get())
        [fileManager removeItemAtURL:fileURL error:nil];
}
523
// Removes every media-cache entry at `path` whose key's security origin is in
// the given set of origins. Keys that are not valid URLs are ignored.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCacheForOrigins(const String& path, const HashSet<RefPtr<SecurityOrigin>>& origins)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::clearMediaCacheForOrigins()");
    AVAssetCacheType* assetCache = assetCacheForPath(path);
    for (NSString *cacheKey in [assetCache allKeys]) {
        URL cacheKeyURL = URL(URL(), cacheKey);
        if (!cacheKeyURL.isValid())
            continue;
        if (!origins.contains(SecurityOrigin::create(cacheKeyURL)))
            continue;
        [assetCache removeEntryForKey:cacheKey];
    }
}
536
// Constructs the engine in an unloaded state. The Objective-C observer/delegate
// helpers are allocated eagerly and bound back to this object; the AVPlayer,
// AVPlayerItem, layers, and outputs are created lazily once loading starts.
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_weakPtrFactory(this)
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    , m_videoFullscreenLayerManager(VideoFullscreenLayerManager::create())
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
#endif
    // Receives KVO and notification-center callbacks on our behalf.
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
    , m_videoOutputSemaphore(nullptr)
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
#endif
    , m_currentTextTrack(0)
    , m_cachedRate(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_shouldBufferData(true)
    , m_cachedIsReadyForDisplay(false)
    , m_haveBeenAskedToCreateLayer(false)
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
}
571
// Destructor: detaches every delegate back-reference first so no pending
// callback can reach this object during destruction, then tears down the video
// layer and cancels any in-flight load (which also removes the KVO observers).
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Disconnect the resource-loader delegate and invalidate outstanding loads.
    [m_loaderDelegate.get() setCallback:0];
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];

    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Disconnect the pull delegate and release the semaphore it used.
    [m_videoOutputDelegate setCallback:0];
    [m_videoOutput setDelegate:nil queue:0];
    if (m_videoOutputSemaphore)
        dispatch_release(m_videoOutputSemaphore);
#endif

    if (m_videoLayer)
        destroyVideoLayer();

    cancelLoad();
}
593
// Cancels any in-progress load and releases all AVFoundation objects, removing
// every observer registration before nilling out the observed object. Load-state
// changes are suppressed for the duration so cancellation callbacks are ignored.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::cancelLoad(%p)", this);
    tearDownVideoRendering();

    // Detach the Objective-C observer from both the notification center and us.
    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Detach the legible (caption) output from the item before releasing it.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    // Unregister every KVO key path before releasing the player item / player;
    // releasing while still observed would leave dangling observations.
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];

        m_avPlayerItem = nil;
    }
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;

        for (NSString *keyName in playerKVOProperties())
            [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = MediaTime::zeroTime();

    // Each cached track carries an "enabled" observation that must be removed.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Disconnect the Web Audio source provider from the released item/track.
    if (m_provider) {
        m_provider->setPlayerItem(nullptr);
        m_provider->setAudioTrack(nullptr);
    }
#endif

    setIgnoreLoadStateChanges(false);
}
657
// True once createVideoLayer() has been asked to set up layer-backed rendering
// (the flag is set even before the asynchronous layer creation completes).
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_haveBeenAskedToCreateLayer;
}
662
// True when a context (software-paint) renderer exists: either the video
// output (when available) or the image-generator fallback.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (m_videoOutput)
        return true;
#endif
    return m_imageGenerator;
}
671
// Creates the renderer used for painting into a graphics context: the video
// output when the platform supports it, otherwise the image-generator fallback.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
680
// Lazily creates the AVAssetImageGenerator used as the software paint path.
// No-op until an asset exists, or if a generator was already created.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p)", this);

    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    // Zero tolerance in both directions so generated frames correspond exactly
    // to the requested time rather than a nearby keyframe.
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
}
697
// Tears down every context renderer: video output and OpenGL video output
// (when supported) plus the image-generator fallback.
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
    destroyOpenGLVideoOutput();
#endif
    destroyImageGenerator();
}
706
// Releases the image-generator software paint path, if one was created.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (!m_imageGenerator)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator(%p) - destroying  %p", this, m_imageGenerator.get());

    m_imageGenerator = nullptr;
}
716
// Requests creation of the AVPlayerLayer. The actual work is deferred to the
// main thread; m_haveBeenAskedToCreateLayer is set there so repeated calls
// (and the pre-dispatch check here) stay consistent.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
        return;

    callOnMainThread([this, weakThis = createWeakPtr()] {
        // The player may have been destroyed before this lambda runs.
        if (!weakThis)
            return;

        // Re-check: state can change between the dispatch and execution.
        if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
            return;
        m_haveBeenAskedToCreateLayer = true;

        if (!m_videoLayer)
            createAVPlayerLayer();

#if USE(VIDEOTOOLBOX)
        if (!m_videoOutput)
            createVideoOutput();
#endif

        // Let the client know rendering switched to the layer-backed path.
        player()->client().mediaPlayerRenderingModeChanged(player());
    });
}
741
// Creates and configures the AVPlayerLayer attached to m_avPlayer, registers
// KVO on readyForDisplay, and hands the layer to the fullscreen layer manager
// on platforms that have one.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
{
    if (!m_avPlayer)
        return;

    m_videoLayer = adoptNS([[AVPlayerLayer alloc] init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];

#ifndef NDEBUG
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
    // Observe readyForDisplay so hasAvailableVideoFrame() can answer from a
    // cached value; the observer is removed in destroyVideoLayer().
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
    IntSize defaultSize = snappedIntRect(player()->client().mediaPlayerContentBoxRect()).size();
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoLayer(%p) - returning %p", this, m_videoLayer.get());

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    // The fullscreen layer manager owns sizing of the inline layer here;
    // otherwise the frame is set directly below.
    m_videoFullscreenLayerManager->setVideoLayer(m_videoLayer.get(), defaultSize);

#if PLATFORM(IOS)
    [m_videoLayer setPIPModeEnabled:(player()->fullscreenMode() & MediaPlayer::VideoFullscreenModePictureInPicture)];
#endif
#else
    [m_videoLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
#endif
}
769
// Tears down the AVPlayerLayer: removes the KVO observer added in
// createAVPlayerLayer(), detaches the layer from its player, and informs the
// fullscreen layer manager before dropping the last reference.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer(%p) - destroying %p", this, m_videoLayer.get());

    [m_videoLayer removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer setPlayer:nil];

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    m_videoFullscreenLayerManager->didDestroyVideoLayer();
#endif

    m_videoLayer = nil;
}
786
// Returns the stream's start date in milliseconds, derived from the item's
// current date minus the current playback offset.
MediaTime MediaPlayerPrivateAVFoundationObjC::getStartDate() const
{
    // The item's date advances with playback, so subtracting the playback
    // offset recovers where the stream began.
    double dateMilliseconds = [[m_avPlayerItem currentDate] timeIntervalSince1970] * 1000;

    // AVFoundation reports 0 when the media carries no start date; no live
    // stream began exactly at the epoch (1970), so treat 0 as "no date".
    if (!dateMilliseconds)
        return MediaTime::invalidTime();

    double offsetMilliseconds = CMTimeGetSeconds([m_avPlayerItem currentTime]) * 1000;

    // Round away the sub-second error the subtraction can introduce.
    return MediaTime::createWithDouble(round(dateMilliseconds - offsetMilliseconds));
}
801
// Reports whether a video frame is available: the cached readyForDisplay
// state when rendering through a layer, otherwise whether a frame has been
// drawn into a context.
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    bool renderingToLayer = currentRenderingMode() == MediaRenderingToLayer;
    return renderingToLayer ? m_cachedIsReadyForDisplay : m_videoFrameHasDrawn;
}
809
810 #if ENABLE(AVF_CAPTIONS)
// Maps a WebCore text-track kind to the AVFoundation media-characteristic
// array used when registering out-of-band tracks. Uses modern array literals
// throughout, matching the literal already used for the manual-selection case.
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    // With the 2015 caption behavior enabled, every out-of-band track is
    // exposed as auxiliary content regardless of its kind.
    static bool manualSelectionMode = MTEnableCaption2015BehaviorPtr() && MTEnableCaption2015BehaviorPtr()();
    if (manualSelectionMode)
        return @[ AVMediaCharacteristicIsAuxiliaryContent ];

    // FIXME: Match these to correct types:
    if (kind == PlatformTextTrack::Caption)
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];

    if (kind == PlatformTextTrack::Subtitle)
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];

    if (kind == PlatformTextTrack::Description)
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility ];

    if (kind == PlatformTextTrack::Forced)
        return @[ AVMediaCharacteristicContainsOnlyForcedSubtitles ];

    return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];
}
832     
// Forwards an out-of-band text track mode change to the shared base-class
// handler.
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
837     
// Propagates the mode of each out-of-band track source on the player down to
// the matching platform text track, matching by unique identifier.
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    auto& trackSources = player()->outOfBandTrackSources();

    for (auto& textTrack : m_textTracks) {
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;

        auto* outOfBandTrack = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> selectionOption = outOfBandTrack->mediaSelectionOption();

        for (auto& source : trackSources) {
            // Track sources are matched to selection options via the string
            // form of their unique id.
            RetainPtr<CFStringRef> identifier = String::number(source->uniqueId()).createCFString();
            if (![[selectionOption.get() outOfBandIdentifier] isEqual: reinterpret_cast<const NSString*>(identifier.get())])
                continue;

            // Default to Hidden for any unrecognized source mode.
            InbandTextTrackPrivate::Mode trackMode = InbandTextTrackPrivate::Hidden;
            if (source->mode() == PlatformTextTrack::Disabled)
                trackMode = InbandTextTrackPrivate::Disabled;
            else if (source->mode() == PlatformTextTrack::Showing)
                trackMode = InbandTextTrackPrivate::Showing;

            textTrack->setMode(trackMode);
            break;
        }
    }
}
868 #endif
869
870
// Converts a WebCore URL string to an NSURL, canonicalized through
// NSURLProtocol so it matches what the network layer would actually request.
// Falls back to the plain conversion whenever canonicalization is unavailable.
static NSURL *canonicalURL(const String& url)
{
    NSURL *plainURL = URL(ParsedURLString, url);
    if (url.isEmpty())
        return plainURL;

    RetainPtr<NSURLRequest> originalRequest = adoptNS([[NSURLRequest alloc] initWithURL:plainURL]);
    if (!originalRequest)
        return plainURL;

    NSURLRequest *canonicalizedRequest = [NSURLProtocol canonicalRequestForRequest:originalRequest.get()];
    if (!canonicalizedRequest)
        return plainURL;

    return [canonicalizedRequest URL];
}
887
888 #if PLATFORM(IOS)
// Translates a WebCore Cookie into an NSHTTPCookie. Note cookie.expires is in
// milliseconds since the epoch, while NSDate takes seconds.
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    RetainPtr<NSMutableDictionary> properties = adoptNS([@{
        NSHTTPCookieName: cookie.name,
        NSHTTPCookieValue: cookie.value,
        NSHTTPCookieDomain: cookie.domain,
        NSHTTPCookiePath: cookie.path,
        NSHTTPCookieExpires: [NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)],
    } mutableCopy]);

    // The secure and session flags are only present in the dictionary when set.
    if (cookie.secure)
        [properties.get() setObject:@YES forKey:NSHTTPCookieSecure];
    if (cookie.session)
        [properties.get() setObject:@YES forKey:NSHTTPCookieDiscard];

    return [NSHTTPCookie cookieWithProperties:properties.get()];
}
906 #endif
907
// Builds the AVURLAsset for |url|, assembling the asset-creation options:
// reference restrictions, forwarded HTTP headers, out-of-band MIME type and
// text tracks, cookies (iOS), cache policy, and — where supported — a custom
// resource loader backed by WebCore's network stack.
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
{
    if (m_avAsset)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(%p) - url = %s", this, url.utf8().data());

    // Suppress notifications while the asset is being configured.
    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);    

    // Keep local and remote media from referencing each other.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    // Forward the page's Referer and User-Agent headers to media loads.
    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject:@YES forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

#if PLATFORM(IOS)
    // FIXME: rdar://problem/20354688
    // These keys are weak-linked SPI; only set them when they resolve.
    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty() && AVURLAssetClientBundleIdentifierKey)
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];

    if (AVURLAssetRequiresCustomURLLoadingKey)
        [options setObject:@YES forKey:AVURLAssetRequiresCustomURLLoadingKey];
#endif

    // Pass the declared MIME type (plus codecs, when known) out of band —
    // but not when the type was merely inferred from the file extension.
    auto type = player()->contentMIMEType();
    if (AVURLAssetOutOfBandMIMETypeKey && !type.isEmpty() && !player()->contentMIMETypeWasInferredFromExtension()) {
        auto codecs = player()->contentTypeCodecs();
        if (!codecs.isEmpty()) {
            NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)type, (NSString *)codecs];
            [options setObject:typeString forKey:AVURLAssetOutOfBandMIMETypeKey];
        } else
            [options setObject:(NSString *)type forKey:AVURLAssetOutOfBandMIMETypeKey];
    }

#if ENABLE(AVF_CAPTIONS)
    // Describe out-of-band text tracks so AVFoundation exposes them as
    // alternate media selection options.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: reinterpret_cast<const NSString*>(label.get()),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: reinterpret_cast<const NSString*>(language.get()),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: reinterpret_cast<const NSString*>(uniqueID.get()),
                AVOutOfBandAlternateTrackSourceKey: reinterpret_cast<const NSString*>(url.get()),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS)
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS)
    // Hand the document's cookies for this URL to AVFoundation, which makes
    // its own network requests.
    Vector<Cookie> cookies;
    if (player()->getRawCookies(URL(ParsedURLString, url), cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    bool usePersistentCache = player()->client().mediaPlayerShouldUsePersistentCache();
    [options setObject:@(!usePersistentCache) forKey:AVURLAssetUsesNoPersistentCacheKey];
    
    if (usePersistentCache)
        [options setObject:assetCacheForPath(player()->client().mediaPlayerMediaCacheDirectory()) forKey:AVURLAssetCacheKey];

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([[AVURLAsset alloc] initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Route resource loading through our loader delegate and, where the
    // selectors exist, an NSURLSession backed by WebCore's resource loader.
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    [resourceLoader setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];

#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED > 101100
    if (Settings::isAVFoundationNSURLSessionEnabled()
        && [resourceLoader respondsToSelector:@selector(setURLSession:)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegate)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegateQueue)]) {
        RefPtr<PlatformMediaResourceLoader> mediaResourceLoader = player()->createResourceLoader();
        if (mediaResourceLoader)
            resourceLoader.URLSession = (NSURLSession *)[[[WebCoreNSURLSession alloc] initWithResourceLoader:*mediaResourceLoader delegate:resourceLoader.URLSessionDataDelegate delegateQueue:resourceLoader.URLSessionDataDelegateQueue] autorelease];
    }
#endif

#endif

    // A fresh asset needs its playability re-checked (see checkPlayability()).
    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
1027
// Installs |item| as the player's current item, performing the replacement on
// the main thread.
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItemType *item)
{
    if (!m_avPlayer)
        return;

    if (pthread_main_np()) {
        [m_avPlayer replaceCurrentItemWithPlayerItem:item];
        return;
    }

    // Not on the main thread: retain both objects so they survive until the
    // asynchronous replacement runs.
    RetainPtr<AVPlayerType> protectedPlayer = m_avPlayer.get();
    RetainPtr<AVPlayerItemType> protectedItem = item;
    dispatch_async(dispatch_get_main_queue(), [protectedPlayer, protectedItem] {
        [protectedPlayer replaceCurrentItemWithPlayerItem:protectedItem.get()];
    });
}
1044
// Creates and configures the AVPlayer: registers KVO observers, applies
// platform-specific playback settings, re-applies state cached before the
// player existed (playback target, muted), and attaches any existing item.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayer(%p)", this);

    // Suppress notifications while the player is being configured.
    setDelayCallbacks(true);

    m_avPlayer = adoptNS([[AVPlayer alloc] init]);
    for (NSString *keyName in playerKVOProperties())
        [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // WebCore drives media selection explicitly rather than letting
    // AVFoundation pick tracks automatically.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET) && !PLATFORM(IOS)
    if (m_shouldPlayToPlaybackTarget) {
        // Clear m_shouldPlayToPlaybackTarget so doesn't return without doing anything.
        m_shouldPlayToPlaybackTarget = false;
        setShouldPlayToPlaybackTarget(true);
    }
#endif

#if PLATFORM(IOS) && !PLATFORM(IOS_SIMULATOR)
    setShouldDisableSleep(player()->shouldDisableSleep());
#endif

    if (m_muted) {
        // Re-apply the muted state cached before the player existed. Clear
        // m_muted first so setMuted() doesn't early-return as a no-op change;
        // passing the cleared value to -setMuted: would unmute the player.
        m_muted = false;
        setMuted(true);
    }

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
1093
// Creates the AVPlayerItem for the current asset, wires up end-of-playback
// notification and KVO, configures the pitch algorithm and (where supported)
// the legible output used for in-band WebVTT cues, and attaches the item to
// the player and Web Audio provider.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem(%p)", this);

    // Suppress notifications while the item is being configured.
    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([[AVPlayerItem alloc] initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // Observe every property in itemKVOProperties(), including prior values.
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    // Spectral preserves pitch across rate changes; Varispeed does not.
    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Receive WebVTT cues through a legible output so WebCore can render them
    // itself instead of having the player draw captions.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([[AVPlayerItemLegibleOutput alloc] initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio provider pointed at the new item and its first
    // enabled audio track.
    if (m_provider) {
        m_provider->setPlayerItem(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }
#endif

    setDelayCallbacks(false);
}
1139
// Asynchronously loads the asset's "playable" and "tracks" keys, then posts
// an AssetPlayabilityKnown notification on the main thread. Only runs once
// per asset; createAVAssetForURL() resets the flag.
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::checkPlayability(%p)", this);
    // The weak pointer guards against this object being destroyed before the
    // completion handler runs.
    auto weakThis = createWeakPtr();

    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObjects:@"playable", @"tracks", nil] completionHandler:^{
        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
1156
// Kicks off asynchronous loading of the asset's metadata keys and, once the
// tracks are available, each track's metadata keys. A dispatch group counts
// the asset-level load plus one entry per track so metadataLoaded is only
// delivered after everything has finished.
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);

    OSObjectPtr<dispatch_group_t> metadataLoadingGroup = adoptOSObject(dispatch_group_create());
    dispatch_group_enter(metadataLoadingGroup.get());
    auto weakThis = createWeakPtr();
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            // Start per-track metadata loads once the tracks key has loaded;
            // each one enters the group and leaves in its own handler.
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup.get());
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup.get());
                    }];
                }
            }
            // Balances the enter performed before the asset-level load began.
            dispatch_group_leave(metadataLoadingGroup.get());
        });
    }];

    // Fires once the group count drops to zero, i.e. all loads completed.
    dispatch_group_notify(metadataLoadingGroup.get(), dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });
    });
}
1186
// Translates the cached AVPlayerItem state into the cross-platform status
// enum. Failure/unknown states take precedence; the buffering flags are then
// consulted from most to least ready.
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    switch (m_cachedItemStatus) {
    case AVPlayerItemStatusUnknown:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    case AVPlayerItemStatusFailed:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    default:
        break;
    }

    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1205
// Exposes the underlying AVPlayer to clients via the PlatformMedia wrapper.
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformMedia(%p)", this);
    PlatformMedia pm;
    pm.type = PlatformMedia::AVFoundationMediaPlayerType;
    pm.media.avfMediaPlayer = m_avPlayer.get();
    return pm;
}
1214
// Returns the layer to composite into the page, or null until layer creation
// has been requested. On platforms with fullscreen presentation support the
// fullscreen layer manager supplies the inline layer.
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    return m_haveBeenAskedToCreateLayer ? m_videoFullscreenLayerManager->videoInlineLayer() : nullptr;
#else
    return m_haveBeenAskedToCreateLayer ? m_videoLayer.get() : nullptr;
#endif
}
1223
1224 #if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
// Moves the video into (or out of) the given fullscreen layer. The completion
// handler is always invoked, even when the layer is unchanged.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer, WTF::Function<void()>&& completionHandler)
{
    if (m_videoFullscreenLayerManager->videoFullscreenLayer() == videoFullscreenLayer) {
        completionHandler();
        return;
    }

    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler));

    // Re-parent the text track representation into the new fullscreen layer.
    if (m_videoFullscreenLayerManager->videoFullscreenLayer() && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayerManager->videoFullscreenLayer() addSublayer:m_textTrackRepresentationLayer.get()];
    }

    updateDisableExternalPlayback();
}
1241
// Updates the fullscreen layer's frame and keeps the text track overlay sized
// to match.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
    syncTextTrackBounds();
}
1247
// Translates the cross-platform gravity value into the corresponding
// AVPlayerLayer gravity constant, updating the layer only when it changes.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;

    if (!m_videoLayer)
        return;

    NSString *videoGravity = AVLayerVideoGravityResizeAspect;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        videoGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        videoGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
    }

    // Comparing the constant pointers is sufficient here, as in the original.
    if ([m_videoLayer videoGravity] == videoGravity)
        return;

    [m_videoLayer setVideoGravity:videoGravity];
    syncTextTrackBounds();
}
1271
// Only iOS reacts to fullscreen mode changes here, toggling the layer's
// picture-in-picture state; other platforms ignore the mode.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenMode(MediaPlayer::VideoFullscreenMode mode)
{
#if PLATFORM(IOS)
    [m_videoLayer setPIPModeEnabled:(mode & MediaPlayer::VideoFullscreenModePictureInPicture)];
    updateDisableExternalPlayback();
#else
    UNUSED_PARAM(mode);
#endif
}
1281
1282 #endif // PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
1283
1284 #if PLATFORM(IOS)
// Returns the most recently cached timed metadata, or nil when none exists.
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    // RetainPtr::get() already yields nil for an empty pointer, so no explicit
    // null check is needed.
    return m_currentMetaData.get();
}
1291
// Returns the player item's access log in its extended textual form, or the
// empty string when no item exists yet.
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemAccessLog *itemAccessLog = [m_avPlayerItem.get() accessLog];
    RetainPtr<NSString> extendedLog = adoptNS([[NSString alloc] initWithData:[itemAccessLog extendedLogData] encoding:[itemAccessLog extendedLogDataStringEncoding]]);

    return extendedLog.get();
}
1302
// Returns the player item's error log in its extended textual form, or the
// empty string when no item exists yet.
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *itemErrorLog = [m_avPlayerItem.get() errorLog];
    RetainPtr<NSString> extendedLog = adoptNS([[NSString alloc] initWithData:[itemErrorLog extendedLogData] encoding:[itemErrorLog extendedLogDataStringEncoding]]);

    return extendedLog.get();
}
1313 #endif
1314
// Flips the video layer's hidden state inside a transaction with implicit
// animations disabled so the visibility change is not animated.
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    if (m_videoLayer)
        [m_videoLayer.get() setHidden:!isVisible];
    [CATransaction commit];
}
1323     
// Starts playback by setting the player's rate to the requested rate, caching
// it so rate() can answer without querying the player. Callback delivery is
// delayed around the change.
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPlay(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = requestedRate();
    [m_avPlayer.get() setRate:requestedRate()];
    setDelayCallbacks(false);
}
1335
// Pauses playback by setting the player's rate to zero, mirroring
// platformPlay()'s rate caching and callback delay.
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPause(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = 0;
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
1347
// Returns the media duration, preferring the player item's value and falling
// back to the asset's. Indefinite durations map to positive infinity.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Asking the asset for its duration before it has loaded would fetch the
    // answer synchronously, so bail out early in that case.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();

    // Prefer the item's duration when it is ready to play; some assets never
    // report a duration of their own.
    CMTime duration;
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        duration = [m_avPlayerItem.get() duration];
    else
        duration = [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(duration))
        return toMediaTime(duration);

    if (CMTIME_IS_INDEFINITE(duration))
        return MediaTime::positiveInfiniteTime();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %s", this, toString(MediaTime::invalidTime()).utf8().data());
    return MediaTime::invalidTime();
}
1372
// Returns the current playback position. Non-numeric item times and negative
// values are reported as zero.
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime playerItemTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(playerItemTime))
        return MediaTime::zeroTime();

    return std::max(toMediaTime(playerItemTime), MediaTime::zeroTime());
}
1384
// Seeks the player item to |time| within the given tolerances, notifying
// seekCompleted() on the main thread when the seek finishes.
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // Partially-built metadata cues would be stale after the seek.
    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = toCMTime(time);
    CMTime cmBefore = toCMTime(negativeTolerance);
    CMTime cmAfter = toCMTime(positiveTolerance);

    // [AVPlayerItem seekToTime] will throw an exception if toleranceBefore is negative.
    if (CMTimeCompare(cmBefore, kCMTimeZero) < 0)
        cmBefore = kCMTimeZero;
    
    // The weak pointer guards against this object being destroyed before the
    // completion handler runs.
    auto weakThis = createWeakPtr();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::seekToTime(%p) - calling seekToTime", this);

    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        // Bounce completion handling to the main thread.
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1417
// Applies the requested volume to the AVPlayer. On iOS the volume is not set
// on the player and the parameter is ignored.
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS)
    UNUSED_PARAM(volume);
    return;
#else
    if (!m_avPlayer)
        return;

    [m_avPlayer.get() setVolume:volume];
#endif
}
1430
// Records the muted state and applies it to the AVPlayer. When no player
// exists yet, the cached value is applied later by createAVPlayer().
void MediaPlayerPrivateAVFoundationObjC::setMuted(bool muted)
{
    // No-op when the state is unchanged.
    if (m_muted == muted)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setMuted(%p) - set to %s", this, boolString(muted));

    m_muted = muted;

    if (!m_avPlayer)
        return;

    [m_avPlayer.get() setMuted:m_muted];
}
1445
// This override intentionally does nothing beyond logging; the parameter is
// otherwise unused here.
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - set to %s", this, boolString(closedCaptionsVisible));
}
1455
// Applies a new playback rate, caching it so rate() can answer without
// querying the player. Callback delivery is delayed around the change.
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    setDelayCallbacks(true);
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1463
// Returns the cached playback rate, or 0 until metadata is available.
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    return metaDataAvailable() ? m_cachedRate : 0;
}
1471
// Forwards to -[AVPlayerItem seekableTimeRangesLastModifiedTime] on OS
// versions that provide it (macOS 10.13 / iOS 11); older systems report 0.
double MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesLastModifiedTime() const
{
#if (PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101300) || (PLATFORM(IOS) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000)
    return [m_avPlayerItem seekableTimeRangesLastModifiedTime];
#else
    return 0;
#endif
}
1480
// Forwards to -[AVPlayerItem liveUpdateInterval] on OS versions that provide
// it (macOS 10.13 / iOS 11); older systems report 0.
double MediaPlayerPrivateAVFoundationObjC::liveUpdateInterval() const
{
#if (PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101300) || (PLATFORM(IOS) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000)
    return [m_avPlayerItem liveUpdateInterval];
#else
    return 0;
#endif
}
1489
void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
{
    // Without a player item there is nothing to configure yet.
    if (!m_avPlayerItem)
        return;

    // Spectral keeps pitch constant across rate changes; Varispeed lets it shift.
    NSString *algorithm = preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed;
    [m_avPlayerItem setAudioTimePitchAlgorithm:algorithm];
}
1495
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    // Translate the cached AVPlayerItem loaded ranges into PlatformTimeRanges.
    auto ranges = std::make_unique<PlatformTimeRanges>();

    if (!m_avPlayerItem)
        return ranges;

    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        // Skip invalid or zero-length ranges.
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;
        ranges->add(toMediaTime(range.start), toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return ranges;
}
1510
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    // Without any cached seekable ranges, the earliest seekable time is zero.
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    // Track the smallest start among the valid, non-empty ranges.
    bool foundValidRange = false;
    MediaTime earliest = MediaTime::positiveInfiniteTime();
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundValidRange = true;
        MediaTime rangeStart = toMediaTime(range.start);
        if (rangeStart < earliest)
            earliest = rangeStart;
    }

    // If every range was invalid or empty, fall back to zero.
    return foundValidRange ? earliest : MediaTime::zeroTime();
}
1530
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    // Lazily populate the cache from the player item if no observer update has
    // filled it in yet.
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    // The latest seekable time is the maximum end among valid, non-empty ranges.
    MediaTime latest;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        MediaTime rangeEnd = toMediaTime(CMTimeRangeGetEnd(range));
        if (latest < rangeEnd)
            latest = rangeEnd;
    }
    return latest;
}
1548
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    // The furthest loaded time is the maximum end among valid, non-empty ranges.
    MediaTime latest;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        MediaTime rangeEnd = toMediaTime(CMTimeRangeGetEnd(range));
        if (latest < rangeEnd)
            latest = rangeEnd;
    }

    return latest;
}
1567
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    // Sum each track's sample data length once, then serve the cached total.
    if (!m_cachedTotalBytes) {
        for (AVPlayerItemTrack *track in m_cachedTracks.get())
            m_cachedTotalBytes += [[track assetTrack] totalSampleDataLength];
    }

    return m_cachedTotalBytes;
}
1581
void MediaPlayerPrivateAVFoundationObjC::setAsset(RetainPtr<id> asset)
{
    // Take ownership of the by-value RetainPtr; moving avoids an extra
    // retain/release pair compared to copy-assignment.
    m_avAsset = WTFMove(asset);
}
1586
// Maps the load state of the asset's metadata keys (assetMetadataKeyNames())
// onto the engine's AssetStatus enumeration, additionally gating "playable"
// on the hardware-decode requirements of each track.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];
#if !LOG_DISABLED
        if (error)
            LOG(Media, "MediaPlayerPrivateAVFoundation::assetStatus - statusOfValueForKey failed for %s, error = %s", [keyName UTF8String], [[error localizedDescription] UTF8String]);
#endif

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading; // At least one key is not loaded yet.
        
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    // m_tracksArePlayable caches the hardware-decode check; the .value() call
    // below shows it is optional-like (unset until first computed here).
    if (!m_tracksArePlayable) {
        m_tracksArePlayable = true;
        for (AVAssetTrack *track in [m_avAsset tracks]) {
            if (!assetTrackMeetsHardwareDecodeRequirements(track, player()->mediaContentTypesRequiringHardwareSupport())) {
                m_tracksArePlayable = false;
                break;
            }
        }
    }

    // Playable only when AVFoundation says so AND every track met the
    // hardware-decode policy; otherwise merely "loaded".
    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue] && m_tracksArePlayable.value())
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1625
// Error code (if any) from loading the asset's "playable" key; 0 when there is
// no asset or no error (messaging a nil NSError returns 0).
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    if (!m_avAsset)
        return 0;

    NSError *error = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&error];
    return [error code];
}
1635
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    // Defer observer callbacks while painting, and trap any Objective-C
    // exceptions raised by AVFoundation so they don't unwind through WebCore.
    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Prefer the video-output path when it already has a frame; otherwise fall
    // back to the image-generator path.
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    m_videoFrameHasDrawn = true;
}
1656
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable())
        return;
    if (context.paintingDisabled())
        return;

    // Already rendering through a layer; painting here would be redundant.
    if (currentRenderingMode() == MediaRenderingToLayer)
        return;

    // paint() is best effort: only draw when an image generator or video
    // output already exists.
    if (!hasContextRenderer())
        return;

    paintCurrentFrameInContext(context, rect);
}
1672
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext& context, const FloatRect& rect)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(%p)", this);

    RetainPtr<CGImageRef> image = createImageForTimeInRect(currentTime(), rect);
    if (!image)
        return;

    GraphicsContextStateSaver stateSaver(context);

    // Flip the context vertically (translate + negative-y scale) so the
    // CGImage draws the right way up.
    context.translate(rect.x(), rect.y() + rect.height());
    context.scale(FloatSize(1.0f, -1.0f));
    context.setImageInterpolationQuality(InterpolationLow);

    IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context.platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
}
1687
// Snapshots a frame at |time| via the (lazily created) AVAssetImageGenerator
// and converts the result into the sRGB color space.
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    // Limit the generated image to the paint rect's size; 600 is the timescale
    // passed to CMTimeMakeWithSeconds.
    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), sRGBColorSpaceRef()));

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return image;
}
1709
// Reports the MIME types this engine supports, as cached from AVFoundation.
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& supportedTypes)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::getSupportedTypes");
    supportedTypes = AVFoundationMIMETypeCache::singleton().types();
} // Was missing this closing brace, leaving the function body unterminated.
1715
1716 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// Recognized legacy EME key systems: FairPlay Streaming (current and 1.0
// identifiers) and Clear Key.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringASCIICase(keySystem, "com.apple.fps")
        || equalIgnoringASCIICase(keySystem, "com.apple.fps.1_0")
        || equalIgnoringASCIICase(keySystem, "org.w3c.clearkey");
}
1723 #endif
1724
// Engine-level MIME type support query for the media engine registry.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(MEDIA_SOURCE)
    // MSE and MediaStream content is served by dedicated engines.
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif
#if ENABLE(MEDIA_STREAM)
    if (parameters.isMediaStream)
        return MediaPlayer::IsNotSupported;
#endif

    auto containerType = parameters.type.containerType();
    if (isUnsupportedMIMEType(containerType))
        return MediaPlayer::IsNotSupported;

    // The container must appear in the static list or in the set AVFoundation
    // reports as playable.
    if (!staticMIMETypeList().contains(containerType) && !AVFoundationMIMETypeCache::singleton().types().contains(containerType))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.type.codecs().isEmpty())
        return MediaPlayer::MayBeSupported;

    if (!contentTypeMeetsHardwareDecodeRequirements(parameters.type, parameters.contentTypesRequiringHardwareSupport))
        return MediaPlayer::IsNotSupported;

    // With codecs present, let AVFoundation judge the full extended MIME type.
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)containerType, (NSString *)parameters.type.parameter(ContentType::codecsParameter())];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1754
// Returns whether this engine can handle the given key system / MIME type
// combination. Always false when legacy EME is disabled.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (!keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        // (Previously returned MediaPlayer::IsNotSupported — an enum value in a
        // bool function. Its value is 0, so returning false preserves behavior
        // while fixing the type mismatch.)
        if (equalIgnoringASCIICase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringASCIICase(mimeType, "application/x-mpegurl"))
            return false;

        if (!keySystemIsSupported(keySystem))
            return false;

        if (!mimeType.isEmpty() && isUnsupportedMIMEType(mimeType))
            return false;

        // When a MIME type is given, it must be in the static list or in the
        // set AVFoundation reports as playable.
        if (!mimeType.isEmpty() && !staticMIMETypeList().contains(mimeType) && !AVFoundationMIMETypeCache::singleton().types().contains(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1780
1781 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1782 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// Completes an AVAssetResourceLoadingRequest with bytes from |keyData|,
// honoring the request's current offset and requested length.
static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
{
    if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
        [infoRequest setContentLength:keyData->byteLength()];
        [infoRequest setByteRangeAccessSupported:YES];
    }

    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
        long long start = [dataRequest currentOffset];
        // Clamp the end of the requested window to the available key data.
        long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);

        // Reject windows that start past the data or are otherwise out of range.
        if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
            [request finishLoadingWithError:nil];
            return;
        }

        ASSERT(start <= std::numeric_limits<int>::max());
        ASSERT(end <= std::numeric_limits<int>::max());
        RefPtr<ArrayBuffer> requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
        RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
        [dataRequest respondWithData:nsData.get()];
    }

    [request finishLoading];
}
1808 #endif
1809
// Resource-loader delegate entry point: decides whether WebCore will service
// this loading request. Returns true when the request will be handled
// asynchronously (key delivery or a WebCoreAVFResourceLoader), false when
// AVFoundation should proceed on its own.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (scheme == "skd") {
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        unsigned byteLength = initDataBuffer->byteLength();
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer.copyRef(), 0, byteLength);
        // Little-endian 32-bit size prefix.
        initDataView->set<uint32_t>(0, keyURISize, true);

        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer.copyRef(), 4, keyURI.length());
        // NOTE(review): "/ sizeof(unsigned char)" divides by 1 and has no
        // effect; presumably keyURI.length() UTF-16 units is intended — confirm.
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

        RefPtr<Uint8Array> initData = Uint8Array::create(WTFMove(initDataBuffer), 0, byteLength);
        // Ask the page for a key; if nobody handles it, don't hold the request.
        if (!player()->keyNeeded(initData.get()))
            return false;

        // Remember the request so the key, once delivered, can fulfill it.
        m_keyURIToRequestMap.set(keyURI, avRequest);
        return true;
    }

    if (scheme == "clearkey") {
        String keyID = [[[avRequest request] URL] resourceSpecifier];
        StringView keyIDView(keyID);
        CString utf8EncodedKeyId = UTF8Encoding().encode(keyIDView, URLEncodedEntitiesForUnencodables);

        RefPtr<Uint8Array> initData = Uint8Array::create(utf8EncodedKeyId.length());
        initData->setRange((JSC::Uint8Adaptor::Type*)utf8EncodedKeyId.data(), utf8EncodedKeyId.length(), 0);

        // Serve immediately from the cached key when available.
        auto keyData = player()->cachedKeyForKeyId(keyID);
        if (keyData) {
            fulfillRequestWithKeyData(avRequest, keyData.get());
            return false;
        }

        if (!player()->keyNeeded(initData.get()))
            return false;

        m_keyURIToRequestMap.set(keyID, avRequest);
        return true;
    }
#endif

    // All other requests are serviced by a WebCoreAVFResourceLoader, kept
    // alive in m_resourceLoaderMap until it stops or is cancelled.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1863
// Wraps the platform NSURLAuthenticationChallenge in a WebCore
// AuthenticationChallenge and lets the MediaPlayer client decide whether we
// should wait for a response.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForResponseToAuthenticationChallenge(NSURLAuthenticationChallenge* nsChallenge)
{
#if USE(CFURLCONNECTION)
    // CFNetwork builds need an explicit CF challenge plus a client shim.
    RefPtr<WebCoreNSURLAuthenticationChallengeClient> client = WebCoreNSURLAuthenticationChallengeClient::create(nsChallenge);
    RetainPtr<CFURLAuthChallengeRef> cfChallenge = adoptCF([nsChallenge _createCFAuthChallenge]);
    AuthenticationChallenge challenge(cfChallenge.get(), client.get());
#else
    AuthenticationChallenge challenge(nsChallenge);
#endif

    return player()->shouldWaitForResponseToAuthenticationChallenge(challenge);
}
1876
// AVFoundation cancelled the request: stop the resource loader servicing it,
// if any. (Requests handled inline, e.g. key requests, have no loader here.)
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    // Removed an unused local ("scheme") that was computed and never read.
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest))
        resourceLoader->stopLoading();
}
1886
// The request finished or was abandoned; drop the resource loader that was
// servicing it so it can be destroyed.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
1891 #endif
1892
// The engine is usable only when both the AVFoundation and CoreMedia
// frameworks can be loaded at runtime.
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    return AVFoundationLibrary() && isCoreMediaFrameworkAvailable();
}
1897
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    // Both paths currently return the input unchanged; the metaDataAvailable()
    // check is kept for when a real mapping becomes implementable.
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1906
// How long (in seconds) a cached current time may be served before re-querying
// the player: zero (always re-query) on iOS and macOS 10.10+, 5s otherwise.
double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 1010
    return 0;
#else
    return 5;
#endif
}
1915
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayerManager->videoFullscreenLayer())
        return;
#endif

    // Apply the gravity change inside a CATransaction with implicit
    // animations disabled.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    NSString* gravity = shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize;
    [m_videoLayer.get() setVideoGravity:gravity];
    [CATransaction commit];
}
1934
// Returns the first track in |tracks| whose isEnabled flag is set, or nil.
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    for (AVAssetTrack *track in tracks) {
        if ([track isEnabled])
            return track;
    }
    return nil;
}
1944
// Recomputes the cached hasVideo/hasAudio/hasCaptions state (and track lists)
// whenever the asset's or player item's tracks collection changes.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the previous primary-audio language so characteristicsChanged()
    // can fire below if it ends up different.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif
        auto size = firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize();
        // For videos with rotation tag set, the transformation above might return a CGSize instance with negative width or height.
        // See https://bugs.webkit.org/show_bug.cgi?id=172648.
        if (size.width() < 0)
            size.setWidth(-size.width());
        if (size.height() < 0)
            size.setHeight(-size.height());
        presentationSizeDidChange(size);
    } else {
        // With a player item, derive the flags from its enabled tracks.
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // Selected media-selection options also count toward audio/video presence.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
#endif
#endif

        // Always says we have video if the AVPlayerLayer is ready for display to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Legible media selection options are the caption source on platforms
    // that have selection groups.
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    LOG(Media, "MediaPlayerPrivateAVFoundation:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s",
        this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio source provider pointed at the current audible track.
    if (m_provider)
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
#endif

    setDelayCharacteristicsChangedNotification(false);
}
2056
2057 #if ENABLE(VIDEO_TRACK)
2058
// Diffs the AVPlayerItemTracks of media type |trackType| in |tracks| against
// the existing wrapper items in |oldItems|: creates wrappers for added tracks
// via |itemFactory|, updates |oldItems| in place, and notifies |player| of
// removals and additions.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // New tracks: those in |tracks| whose asset track matches |trackType|.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    // Partition the existing items into kept vs. removed wrappers.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    // Wrap each newly appearing track.
    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the player only after |oldItems| already reflects the new state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(*removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(*addedItem);
}
2102
2103 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2104
// Same diffing scheme as the NSArray overload, but driven by a
// MediaSelectionGroupAVFObjC: refreshes the group's options for the given
// characteristics, reconciles |oldItems| against the group's current options,
// and notifies |player| of removals and additions.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, const Vector<String>& characteristics, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions(characteristics);

    // Collect the group's options that have a backing AVMediaSelectionOption.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOptionType* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        newSelectionOptions.add(option);
    }

    // Options currently referenced by the existing wrapper items.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    // Partition the existing items into kept vs. removed wrappers; an item
    // with no selection option at all is treated as removed.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (!oldItem->mediaSelectionOption())
            removedItems.append(oldItem);
        else if (removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    // Wrap each newly appearing option.
    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);
    
    // Notify the player only after |oldItems| already reflects the new state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(*removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(*addedItem);
}
2164
2165 #endif
2166
// Reconciles m_audioTracks with the current player-item tracks (or, when
// available, the audible media selection group) and refreshes each wrapper.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Prefer the media-selection-group path, creating the audible group lazily
    // from the player item the first time it is seen.
    Vector<String> characteristics = player()->preferredAudioCharacteristics();
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, characteristics);
    }

    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, characteristics, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
    else
#endif
        determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

    // Refresh cached properties of the reconciled audio tracks.
    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateAudioTracks(%p) - audio track count was %lu, is %lu", this, count, m_audioTracks.size());
#endif
}
2193
// Reconciles m_videoTracks with the current player-item tracks (and, when
// available, the visual media selection group) and refreshes each wrapper.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Create the visual selection group lazily from the player item.
    if (!m_visualGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, Vector<String>());
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, Vector<String>(), &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
#endif

    // Refresh cached properties of the *video* tracks just reconciled.
    // (Previously iterated m_audioTracks — a copy/paste error from
    // updateAudioTracks() that left the video wrappers stale.)
    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateVideoTracks(%p) - video track count was %lu, is %lu", this, count, m_videoTracks.size());
#endif
}
2219
// True only while a video fullscreen layer is active, on platforms that
// support fullscreen presentation; always false elsewhere.
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    if (m_videoFullscreenLayerManager->videoFullscreenLayer())
        return true;
#endif
    return false;
}
2228
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    if (!m_videoFullscreenLayerManager->videoFullscreenLayer() || !m_textTrackRepresentationLayer)
        return;

    // Size the representation layer to the video rect when a video layer
    // exists, otherwise to the whole fullscreen frame.
    FloatRect videoFullscreenFrame = m_videoFullscreenLayerManager->videoFullscreenFrame();
    CGRect textFrame = m_videoLayer ? [m_videoLayer videoRect] : CGRectMake(0, 0, videoFullscreenFrame.width(), videoFullscreenFrame.height());
    [m_textTrackRepresentationLayer setFrame:textFrame];
#endif
}
2240
// Installs (or removes) the platform layer backing the text-track
// representation, attaching it under the fullscreen layer when one is active.
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    PlatformLayer* newRepresentationLayer = representation ? representation->platformLayer() : nil;

    // Same layer as before: just refresh its frame.
    if (newRepresentationLayer == m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        return;
    }

    // Detach the old layer before adopting the new one.
    if (m_textTrackRepresentationLayer)
        [m_textTrackRepresentationLayer removeFromSuperlayer];

    m_textTrackRepresentationLayer = newRepresentationLayer;

    // Attach the new layer beneath the fullscreen layer, if both exist.
    if (m_videoFullscreenLayerManager->videoFullscreenLayer() && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayerManager->videoFullscreenLayer() addSublayer:m_textTrackRepresentationLayer.get()];
    }

#else
    UNUSED_PARAM(representation);
#endif
}
2264
2265 #endif // ENABLE(VIDEO_TRACK)
2266
2267 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
2268
// Lazily creates the Web Audio source provider for the current player item and
// points it at the first enabled audio track.
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    if (m_provider)
        return m_provider.get();

    m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
    m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    return m_provider.get();
}
2277
2278 #endif
2279
// Publishes the cached presentation size as the natural size; without an
// asset there is nothing to report.
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (m_avAsset)
        setNaturalSize(m_cachedPresentationSize);
}
2287     
// Returns true when the asset's resolved URL and the originally requested URL
// share scheme/host/port. The resolved URL is only consulted once AVFoundation
// reports the "resolvedURL" key as loaded.
bool MediaPlayerPrivateAVFoundationObjC::hasSingleSecurityOrigin() const
{
    if (!m_avAsset)
        return false;
    if ([m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] != AVKeyValueStatusLoaded)
        return false;

    auto resolvedOrigin = SecurityOrigin::create(resolvedURL());
    auto requestedOrigin = SecurityOrigin::createFromString(assetURL());
    return resolvedOrigin->isSameSchemeHostPort(requestedOrigin.get());
}
2297
// CORS state is only tracked when media loads through our WebCoreNSURLSession;
// on other configurations the conservative answer is false.
bool MediaPlayerPrivateAVFoundationObjC::didPassCORSAccessCheck() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED > 101100
    if (!Settings::isAVFoundationNSURLSessionEnabled())
        return false;

    AVAssetResourceLoader *loader = m_avAsset.get().resourceLoader;
    if (![loader respondsToSelector:@selector(URLSession)])
        return false;

    WebCoreNSURLSession *session = (WebCoreNSURLSession *)loader.URLSession;
    if (![session isKindOfClass:[WebCoreNSURLSession class]])
        return false;

    return session.didPassCORSAccessChecks;
#else
    return false;
#endif
}
2312
2313 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
2314
// Creates the AVPlayerItemVideoOutput used to pull pixel buffers for painting
// and attaches it to the current player item. No-op without an item or when an
// output already exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // No pixel-format constraint when the VideoToolbox conformer is in use.
    NSDictionary *pixelBufferAttributes = nil;
#else
    NSDictionary *pixelBufferAttributes = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#endif
    m_videoOutput = adoptNS([[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:pixelBufferAttributes]);
    ASSERT(m_videoOutput);

    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];
    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p) - returning %p", this, m_videoOutput.get());
}
2336
// Detaches the video output from the player item (if the item still exists)
// and drops our reference to it.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput(%p) - destroying  %p", this, m_videoOutput.get());

    m_videoOutput = nil;
}
2348
// Pulls the pixel buffer for the item's current time from the video output,
// creating the output on demand. Returns null when no new buffer is available.
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::createPixelBuffer()
{
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

    CMTime itemTime = [m_avPlayerItem.get() currentTime];
    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:itemTime])
        return nullptr;

    return adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:itemTime itemTimeForDisplay:nil]);
}
2362
// True when a frame can be painted right now: either an image is already
// cached, or the (lazily created) video output has a new pixel buffer for the
// item's current time.
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;

    // A previously captured image counts as an available frame.
    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    CMTime itemTime = [m_avPlayerItem currentTime];
    return [m_videoOutput hasNewPixelBufferForItemTime:itemTime];
}
2376
// Converts the newest available pixel buffer into m_lastImage via the
// (lazily created) pixel-buffer conformer.
void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
{
    auto pixelBuffer = createPixelBuffer();

    // copyPixelBufferForItemTime:itemTimeForDisplay: may return nil when the
    // buffer for the requested time was already retrieved; in that case keep
    // showing the last valid image (if any).
    if (!pixelBuffer)
        return;

    if (!m_pixelBufferConformer) {
#if USE(VIDEOTOOLBOX)
        NSDictionary *conformerAttributes = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#else
        NSDictionary *conformerAttributes = nil;
#endif
        m_pixelBufferConformer = std::make_unique<PixelBufferConformerCV>((CFDictionaryRef)conformerAttributes);
    }

#if !LOG_DISABLED
    double startTime = monotonicallyIncreasingTime();
#endif

    m_lastImage = m_pixelBufferConformer->createImageFromPixelBuffer(pixelBuffer.get());

#if !LOG_DISABLED
    double elapsed = monotonicallyIncreasingTime() - startTime;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateLastImage(%p) - creating buffer took %.4f", this, narrowPrecisionToFloat(elapsed));
#endif
}
2407
// Paints the most recent video frame into the graphics context, honoring the
// video track's preferred transform.
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext& context, const FloatRect& outputRect)
{
    // Block (bounded — see waitForVideoOutputMediaDataWillChange()) for the
    // first frame when the output has nothing yet.
    if (m_videoOutput && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();

    updateLastImage();
    if (!m_lastImage)
        return;

    AVAssetTrack* videoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!videoTrack)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(%p)", this);

    GraphicsContextStateSaver stateSaver(context);

    // Draw in track coordinates: concatenate the preferred transform onto the
    // context and map the destination rect through its inverse.
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    AffineTransform videoTransform = [videoTrack preferredTransform];
    FloatRect transformedOutputRect = videoTransform.inverse().value_or(AffineTransform()).mapRect(outputRect);

    context.concatCTM(videoTransform);
    context.drawNativeImage(m_lastImage.get(), imageRect.size(), transformedOutputRect, imageRect);

    // An AVAssetImageGenerator created while the video output had no available
    // frame is no longer needed once we can paint from the output.
    if (m_imageGenerator)
        destroyImageGenerator();
}
2438
// Creates the IOSurface-backed video output used for GL texture uploads and
// attaches it to the current player item. No-op without an item or when the
// output already exists.
void MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_openGLVideoOutput)
        return;

    // Request IOSurface buffers compatible with the platform's GL flavor.
#if PLATFORM(IOS)
    NSDictionary *pixelBufferAttributes = @{(NSString *)kCVPixelBufferIOSurfaceOpenGLESFBOCompatibilityKey: @YES};
#else
    NSDictionary *pixelBufferAttributes = @{(NSString *)kCVPixelBufferIOSurfaceOpenGLFBOCompatibilityKey: @YES};
#endif
    m_openGLVideoOutput = adoptNS([[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:pixelBufferAttributes]);
    ASSERT(m_openGLVideoOutput);

    [m_avPlayerItem.get() addOutput:m_openGLVideoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput(%p) - returning %p", this, m_openGLVideoOutput.get());
}
2458
// Detaches the OpenGL video output from the player item (if the item still
// exists) and drops our reference to it.
void MediaPlayerPrivateAVFoundationObjC::destroyOpenGLVideoOutput()
{
    if (!m_openGLVideoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_openGLVideoOutput.get()];
    // FIX: previously logged m_videoOutput.get() (copy/paste from
    // destroyVideoOutput()), which is a different object from the one being
    // destroyed here.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyOpenGLVideoOutput(%p) - destroying %p", this, m_openGLVideoOutput.get());

    m_openGLVideoOutput = 0;
}
2470
// Captures the newest pixel buffer from the OpenGL video output, sampled at
// the item time corresponding to "now" on the host clock.
void MediaPlayerPrivateAVFoundationObjC::updateLastOpenGLImage()
{
    if (!m_openGLVideoOutput)
        return;

    CMTime itemTime = [m_openGLVideoOutput itemTimeForHostTime:CACurrentMediaTime()];
    if ([m_openGLVideoOutput hasNewPixelBufferForItemTime:itemTime])
        m_lastOpenGLImage = adoptCF([m_openGLVideoOutput copyPixelBufferForItemTime:itemTime itemTimeForDisplay:nil]);
}
2482
// Copies the current video frame into the caller-supplied GL texture.
// Returns false when the requested conversion (flip/premultiply) is
// unsupported or no frame/texture machinery is available.
bool MediaPlayerPrivateAVFoundationObjC::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    // Flipping and alpha premultiplication are not supported by this path.
    if (flipY || premultiplyAlpha)
        return false;

    ASSERT(context);

    if (!m_openGLVideoOutput)
        createOpenGLVideoOutput();

    updateLastOpenGLImage();
    if (!m_lastOpenGLImage)
        return false;

    size_t imageWidth = CVPixelBufferGetWidth(m_lastOpenGLImage.get());
    size_t imageHeight = CVPixelBufferGetHeight(m_lastOpenGLImage.get());

    // Lazily create the texture cache that wraps pixel buffers in GL textures.
    if (!m_textureCache) {
        m_textureCache = TextureCacheCV::create(*context);
        if (!m_textureCache)
            return false;
    }

    RetainPtr<CVOpenGLTextureRef> sourceTexture = m_textureCache->textureFromImage(m_lastOpenGLImage.get(), outputTarget, level, internalFormat, format, type);

    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierCV>(*context);

    return m_videoTextureCopier->copyVideoTextureToPlatformTexture(sourceTexture.get(), imageWidth, imageHeight, outputTexture, outputTarget, level, internalFormat, format, type, premultiplyAlpha, flipY);
}
2514
// Returns an image for the current item time. updateLastImage() leaves
// m_lastImage untouched when no new pixel buffer is available, so this may
// return the previously captured frame (or null if none was ever captured).
NativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage;
}
2520
// Blocks the caller until the video output reports new media data (signalled
// via outputMediaDataWillChange()), giving up after one second.
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    // Lazily create the semaphore that outputMediaDataWillChange() signals.
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for 1 second.
    if (dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(0, 1 * NSEC_PER_SEC)))
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange(%p) timed out", this);
}
2534
// Video-output delegate callback: wakes any thread parked in
// waitForVideoOutputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutputType *)
{
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
2539
2540 #endif
2541
2542 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
2543
// Removes and returns the pending resource-loading request for the given key
// URI; returns a null RetainPtr when no request is pending for that URI.
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
2548
// Fulfills every pending loading request whose key is now present in the
// player's key cache, then removes those entries from the map (the map cannot
// be mutated while iterating, hence the two-pass approach).
void MediaPlayerPrivateAVFoundationObjC::keyAdded()
{
    Vector<String> satisfiedKeyIds;

    for (auto& pair : m_keyURIToRequestMap) {
        auto keyData = player()->cachedKeyForKeyId(pair.key);
        if (!keyData)
            continue;

        fulfillRequestWithKeyData(pair.value.get(), keyData.get());
        satisfiedKeyIds.append(pair.key);
    }

    for (auto& keyId : satisfiedKeyIds)
        m_keyURIToRequestMap.remove(keyId);
}
2568
// Clears the weak reference to the CDM session; asserts (debug only) that the
// session being removed is the one we currently hold.
void MediaPlayerPrivateAVFoundationObjC::removeSession(CDMSession& session)
{
    ASSERT_UNUSED(session, &session == m_session);
    m_session = nullptr;
}
2574
// Creates a CDM session for a supported key system; the player keeps only a
// weak pointer, ownership transfers to the caller. Returns null for
// unsupported key systems.
std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem, CDMSessionClient* client)
{
    if (!keySystemIsSupported(keySystem))
        return nullptr;

    auto newSession = std::make_unique<CDMSessionAVFoundationObjC>(this, client);
    m_session = newSession->createWeakPtr();
    return WTFMove(newSession);
}
2583
// Forwards an HDCP/output-protection failure to the CDM session as an error,
// but only when output has just become obscured and a session exists.
void MediaPlayerPrivateAVFoundationObjC::outputObscuredDueToInsufficientExternalProtectionChanged(bool newValue)
{
    if (m_session && newValue)
        m_session->playerDidReceiveError([NSError errorWithDomain:@"com.apple.WebKit" code:'HDCP' userInfo:nil]);
}
2589
2590 #endif
2591
2592 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
2593
// Rebuilds the list of legacy closed-caption text tracks from the player
// item's cached tracks, reusing existing track objects where possible and
// reporting additions/removals via processNewAndRemovedTextTracks().
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        // Iterate backwards so remove(i - 1) does not disturb unvisited indices.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            // FIX: index into removedTextTracks, not m_textTracks. After the first
            // removal from removedTextTracks the two vectors no longer share indices,
            // so reading m_textTracks[i - 1] could inspect the wrong track.
            // (processMediaSelectionOptions() already indexes removedTextTracks here.)
            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(removedTextTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;

        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2628
2629 #endif
2630
// Returns the asset's audible tracks, or nil until the asset exists and its
// "tracks" key has finished loading.
NSArray* MediaPlayerPrivateAVFoundationObjC::safeAVAssetTracksForAudibleMedia()
{
    if (!m_avAsset || [m_avAsset.get() statusOfValueForKey:@"tracks" error:NULL] != AVKeyValueStatusLoaded)
        return nil;

    return [m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible];
}
2641
2642 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2643
// True once the asset exists and its media-selection metadata
// ("availableMediaCharacteristicsWithMediaSelectionOptions") has loaded.
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    if (!m_avAsset)
        return false;

    return [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2654
// The legible (subtitles/captions) selection group, or nil until the asset's
// selection metadata has loaded.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible] : nil;
}
2662
// The audible (audio tracks) selection group, or nil until the asset's
// selection metadata has loaded.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible] : nil;
}
2670
// The visual (video tracks) selection group, or nil until the asset's
// selection metadata has loaded.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual] : nil;
}
2678
// Diffs the legible media-selection group's playable options against the
// current text tracks: matched tracks survive, anything left in
// removedTextTracks afterwards is reported as removed, and unmatched options
// become new tracks.
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        // Iterate backwards so remove(i - 1) does not disturb unvisited indices.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy closed-caption tracks are not represented by selection options.
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            // Fetch the selection option backing this track; out-of-band tracks
            // store it on a different private class than in-band tracks.
            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2734
// Lazily creates the in-band metadata text track (created once and reused for
// the player's lifetime) and registers it with the MediaPlayer.
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    if (m_metadataTrack)
        return;

    m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
    m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
    player()->addTextTrack(*m_metadataTrack);
}
2744
// Forwards incoming cue data (attributed strings and/or native samples) to the
// currently selected text track; cues arriving with no track selected are
// dropped.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    ASSERT(time >= MediaTime::zeroTime());

    if (!m_currentTextTrack)
        return;

    m_currentTextTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), reinterpret_cast<CFArrayRef>(nativeSamples), time);
}
2754
// Discards any cues accumulated by the currently selected text track.
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::flushCues(%p)", this);

    if (m_currentTextTrack)
        m_currentTextTrack->resetCueValues();
}
2764
2765 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2766
// Makes the given track the active text track. Legacy closed-caption tracks
// are toggled through the deprecated player-wide closed-caption switch; all
// other tracks are chosen via the legible media-selection group. Passing null
// deselects both mechanisms.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");
        
    m_currentTextTrack = track;

    if (track) {
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#pragma clang diagnostic pop
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
        // Deselecting: clear both the media-selection choice and the legacy switch.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
#pragma clang diagnostic pop
    }

}
2801
// Returns the BCP-47-ish language identifier of the primary audio track,
// caching the answer in m_languageOfPrimaryAudioTrack. Resolution order:
// (1) cached value, (2) currently selected audible media-selection option,
// (3) the sole audio track's language when exactly one exists, else empty.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
#pragma clang diagnostic pop
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        m_languageOfPrimaryAudioTrack = emptyString();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - %lu audio tracks, returning emptyString()", this, static_cast<unsigned long>(tracks ? [tracks count] : 0));
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

#if !LOG_DISABLED
    if (m_languageOfPrimaryAudioTrack == emptyString())
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
    else
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
#endif

    return m_languageOfPrimaryAudioTrack;
}
2846
2847 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// True when playback is routed to a wireless target. On iOS this is simply
// the player's externalPlaybackActive state; on Mac it depends on the kind of
// configured playback target.
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
#if PLATFORM(IOS)
    bool wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
#else
    bool wirelessTarget = false;
    if (m_playbackTarget) {
        if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation)
            wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
        else
            wirelessTarget = m_shouldPlayToPlaybackTarget && m_playbackTarget->hasActiveRoute();
    }
#endif

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless(%p) - returning %s", this, boolString(wirelessTarget));

    return wirelessTarget;
}
2867
// Classifies the current wireless target. Without a player there is no
// target; on iOS the type is queried from the WebKit platform shim, on Mac
// the only wireless target type is AirPlay.
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

#if PLATFORM(IOS)
    switch (wkExernalDeviceTypeForPlayer(m_avPlayer.get())) {
    case wkExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case wkExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case wkExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;

#else
    return MediaPlayer::TargetTypeAirPlay;
#endif
}
2890
// Human-readable name of the wireless playback target, or the empty string
// when no player (or, on Mac, no target) exists.
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();

#if PLATFORM(IOS)
    String targetName = wkExernalDeviceDisplayNameForPlayer(m_avPlayer.get());
#else
    String targetName;
    if (m_playbackTarget)
        targetName = m_playbackTarget->deviceName();
#endif
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName(%p) - returning %s", this, targetName.utf8().data());

    return targetName;
}
2907
// Whether wireless (external) video playback is disabled. Refreshes the
// cached m_allowsWirelessVideoPlayback from the player when one exists;
// otherwise reports the cached value.
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    if (m_avPlayer) {
        m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled(%p) - returning %s", this, boolString(!m_allowsWirelessVideoPlayback));
    }

    return !m_allowsWirelessVideoPlayback;
}
2918
// Records the wireless-video setting (so it survives until a player exists)
// and, when a player is present, pushes it to AVPlayer with callbacks delayed
// around the change.
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(%p) - %s", this, boolString(disabled));
    m_allowsWirelessVideoPlayback = !disabled;
    if (!m_avPlayer)
        return;

    setDelayCallbacks(true);
    [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
    setDelayCallbacks(false);
}
2930
2931 #if !PLATFORM(IOS)
2932
// Adopts a new wireless playback target. An AVFoundation target supplies an
// AVOutputContext; any other target type clears it. Targets without an active
// route also stop playback-to-target.
void MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
{
    m_playbackTarget = WTFMove(target);

    m_outputContext = m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation ? toMediaPlaybackTargetMac(m_playbackTarget.get())->outputContext() : nullptr;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(%p) - target = %p, device name = %s", this, m_outputContext.get(), m_playbackTarget->deviceName().utf8().data());

    if (!m_playbackTarget->hasActiveRoute())
        setShouldPlayToPlaybackTarget(false);
}
2944
// Starts or stops playing to the configured playback target. For AVFoundation
// targets this installs/clears the AVOutputContext on the player (skipping
// redundant updates); for Mock targets it just schedules the wireless-change
// notification on the main thread via a weak pointer.
void MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(bool shouldPlay)
{
    if (m_shouldPlayToPlaybackTarget == shouldPlay)
        return;

    m_shouldPlayToPlaybackTarget = shouldPlay;

    if (!m_playbackTarget)
        return;

    if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation) {
        AVOutputContext *newContext = shouldPlay ? m_outputContext.get() : nil;

        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(%p) - target = %p, shouldPlay = %s", this, newContext, boolString(shouldPlay));

        if (!m_avPlayer)
            return;

        // Avoid touching the player when the context would not actually change.
        RetainPtr<AVOutputContext> currentContext = m_avPlayer.get().outputContext;
        if ((!newContext && !currentContext.get()) || [currentContext.get() isEqual:newContext])
            return;

        setDelayCallbacks(true);
        m_avPlayer.get().outputContext = newContext;
        setDelayCallbacks(false);

        return;
    }

    ASSERT(m_playbackTarget->targetType() == MediaPlaybackTarget::Mock);

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(%p) - target = {Mock}, shouldPlay = %s", this, boolString(shouldPlay));

    setDelayCallbacks(true);
    auto weakThis = createWeakPtr();
    // The weak pointer guards against this player being destroyed before the
    // notification runs on the main thread.
    scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis] {
        if (!weakThis)
            return;
        weakThis->playbackTargetIsWirelessDidChange();
    }));
    setDelayCallbacks(false);
}
2987
2988 #endif // !PLATFORM(IOS)
2989
// iOS only: keeps AVPlayer's external-playback-while-screen-active flag in
// sync with whether the element is in standard fullscreen mode. No-op on
// other platforms or without a player.
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
#if PLATFORM(IOS)
    if (!m_avPlayer)
        return;

    [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:player()->fullscreenMode() & MediaPlayer::VideoFullscreenModeStandard];
#endif
}
2999
3000 #endif
3001
// KVO sink: caches the AVPlayerItem status and re-derives the player state.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;

    updateStates();
}
3008
// Will-change half of the likelyToKeepUp KVO pair; the counter defers
// updateStates() until every pending did-change has arrived.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    m_pendingStatusChanges++;
}
3013
// Did-change half of the likelyToKeepUp KVO pair: caches the new value and
// re-derives player state once all pending will/did pairs have balanced out.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
3022
// Will-change half of the bufferEmpty KVO pair; see
// playbackLikelyToKeepUpWillChange() for the counter's purpose.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    m_pendingStatusChanges++;
}
3027
// Did-change half of the bufferEmpty KVO pair: caches the new value and
// re-derives player state once all pending will/did pairs have balanced out.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
3036
// Will-change half of the bufferFull KVO pair; see
// playbackLikelyToKeepUpWillChange() for the counter's purpose.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    m_pendingStatusChanges++;
}
3041
// Did-change half of the bufferFull KVO pair: caches the new value and
// re-derives player state once all pending will/did pairs have balanced out.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
3050