// Unreviewed, rolling out r244627.
// [WebKit-https.git] / Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2018 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27 #import "MediaPlayerPrivateAVFoundationObjC.h"
28
29 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
30
31 #import "AVAssetTrackUtilities.h"
32 #import "AVFoundationMIMETypeCache.h"
33 #import "AVTrackPrivateAVFObjCImpl.h"
34 #import "AudioSourceProviderAVFObjC.h"
35 #import "AudioTrackPrivateAVFObjC.h"
36 #import "AuthenticationChallenge.h"
37 #import "CDMInstanceFairPlayStreamingAVFObjC.h"
38 #import "CDMSessionAVFoundationObjC.h"
39 #import "Cookie.h"
40 #import "DeprecatedGlobalSettings.h"
41 #import "Extensions3D.h"
42 #import "FloatConversion.h"
43 #import "GraphicsContext.h"
44 #import "GraphicsContext3D.h"
45 #import "GraphicsContextCG.h"
46 #import "InbandMetadataTextTrackPrivateAVF.h"
47 #import "InbandTextTrackPrivateAVFObjC.h"
48 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
49 #import "Logging.h"
50 #import "MediaPlaybackTargetMac.h"
51 #import "MediaPlaybackTargetMock.h"
52 #import "MediaSelectionGroupAVFObjC.h"
53 #import "OutOfBandTextTrackPrivateAVF.h"
54 #import "PixelBufferConformerCV.h"
55 #import "PlatformTimeRanges.h"
56 #import "SecurityOrigin.h"
57 #import "SerializedPlatformRepresentationMac.h"
58 #import "SharedBuffer.h"
59 #import "TextEncoding.h"
60 #import "TextTrackRepresentation.h"
61 #import "TextureCacheCV.h"
62 #import "VideoFullscreenLayerManagerObjC.h"
63 #import "VideoTextureCopierCV.h"
64 #import "VideoTrackPrivateAVFObjC.h"
65 #import "WebCoreAVFResourceLoader.h"
66 #import "WebCoreCALayerExtras.h"
67 #import "WebCoreNSURLSession.h"
68 #import <JavaScriptCore/DataView.h>
69 #import <JavaScriptCore/JSCInlines.h>
70 #import <JavaScriptCore/TypedArrayInlines.h>
71 #import <JavaScriptCore/Uint16Array.h>
72 #import <JavaScriptCore/Uint32Array.h>
73 #import <JavaScriptCore/Uint8Array.h>
74 #import <functional>
75 #import <objc/runtime.h>
76 #import <pal/avfoundation/MediaTimeAVFoundation.h>
77 #import <pal/spi/cocoa/QuartzCoreSPI.h>
78 #import <pal/spi/mac/AVFoundationSPI.h>
79 #import <wtf/BlockObjCExceptions.h>
80 #import <wtf/ListHashSet.h>
81 #import <wtf/NeverDestroyed.h>
82 #import <wtf/OSObjectPtr.h>
83 #import <wtf/URL.h>
84 #import <wtf/text/CString.h>
85
86 #if ENABLE(AVF_CAPTIONS)
87 #include "TextTrack.h"
88 #endif
89
90 #import <AVFoundation/AVAssetImageGenerator.h>
91 #import <AVFoundation/AVAssetTrack.h>
92 #import <AVFoundation/AVMediaSelectionGroup.h>
93 #import <AVFoundation/AVMetadataItem.h>
94 #import <AVFoundation/AVPlayer.h>
95 #import <AVFoundation/AVPlayerItem.h>
96 #import <AVFoundation/AVPlayerItemOutput.h>
97 #import <AVFoundation/AVPlayerItemTrack.h>
98 #import <AVFoundation/AVPlayerLayer.h>
99 #import <AVFoundation/AVTime.h>
100
101 #if PLATFORM(IOS_FAMILY)
102 #import "WAKAppKitStubs.h"
103 #import <CoreImage/CoreImage.h>
104 #import <UIKit/UIDevice.h>
105 #import <mach/mach_port.h>
106 #import <pal/ios/UIKitSoftLink.h>
107 #else
108 #import <Foundation/NSGeometry.h>
109 #import <QuartzCore/CoreImage.h>
110 #endif
111
112 #if USE(VIDEOTOOLBOX)
113 #import <CoreVideo/CoreVideo.h>
114 #import <VideoToolbox/VideoToolbox.h>
115 #endif
116
117 #import "CoreVideoSoftLink.h"
118 #import "MediaRemoteSoftLink.h"
119
// WebCore's HashSet iterators do not declare the nested typedefs std::iterator_traits
// expects, so provide an explicit specialization to allow <algorithm>-style code to
// operate on HashSet<RefPtr<MediaSelectionOptionAVFObjC>> iterators.
namespace std {
template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
    typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
};
}
125
126 #if ENABLE(AVF_CAPTIONS)
127 // Note: This must be defined before our SOFT_LINK macros:
128 @class AVMediaSelectionOption;
129 @interface AVMediaSelectionOption (OutOfBandExtensions)
130 @property (nonatomic, readonly) NSString* outOfBandSource;
131 @property (nonatomic, readonly) NSString* outOfBandIdentifier;
132 @end
133 #endif
134
135 @interface AVURLAsset (WebKitExtensions)
136 @property (nonatomic, readonly) NSURL *resolvedURL;
137 @end
138
139 typedef AVPlayer AVPlayerType;
140 typedef AVPlayerItem AVPlayerItemType;
141 typedef AVPlayerItemLegibleOutput AVPlayerItemLegibleOutputType;
142 typedef AVPlayerItemVideoOutput AVPlayerItemVideoOutputType;
143 typedef AVMetadataItem AVMetadataItemType;
144 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
145 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
146 typedef AVAssetCache AVAssetCacheType;
147
148 #pragma mark - Soft Linking
149
150 // Soft-linking headers must be included last since they #define functions, constants, etc.
151 #import <pal/cf/CoreMediaSoftLink.h>
152
153 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
154
155 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
156
157 SOFT_LINK_CLASS(AVFoundation, AVPlayer)
158 SOFT_LINK_CLASS(AVFoundation, AVPlayerItem)
159 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemVideoOutput)
160 SOFT_LINK_CLASS(AVFoundation, AVPlayerLayer)
161 SOFT_LINK_CLASS(AVFoundation, AVURLAsset)
162 SOFT_LINK_CLASS(AVFoundation, AVAssetImageGenerator)
163 SOFT_LINK_CLASS(AVFoundation, AVMetadataItem)
164 SOFT_LINK_CLASS(AVFoundation, AVAssetCache)
165
166 SOFT_LINK_CLASS(CoreImage, CIContext)
167 SOFT_LINK_CLASS(CoreImage, CIImage)
168
169 SOFT_LINK_CONSTANT(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString *)
170 SOFT_LINK_CONSTANT(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString *)
171 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicVisual, NSString *)
172 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicAudible, NSString *)
173 SOFT_LINK_CONSTANT(AVFoundation, AVMediaTypeClosedCaption, NSString *)
174 SOFT_LINK_CONSTANT(AVFoundation, AVMediaTypeVideo, NSString *)
175 SOFT_LINK_CONSTANT(AVFoundation, AVMediaTypeAudio, NSString *)
176 SOFT_LINK_CONSTANT(AVFoundation, AVMediaTypeMetadata, NSString *)
177 SOFT_LINK_CONSTANT(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
178 SOFT_LINK_CONSTANT(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
179 SOFT_LINK_CONSTANT(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
180 SOFT_LINK_CONSTANT(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
181 SOFT_LINK_CONSTANT(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
182 SOFT_LINK_CONSTANT(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
183 SOFT_LINK_CONSTANT(AVFoundation, AVLayerVideoGravityResize, NSString *)
184 SOFT_LINK_CONSTANT(AVFoundation, AVStreamingKeyDeliveryContentKeyType, NSString *)
185
186 SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVURLAssetOutOfBandMIMETypeKey, NSString *)
187 SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVURLAssetUseClientURLLoadingExclusively, NSString *)
188
189 #define AVPlayer initAVPlayer()
190 #define AVPlayerItem initAVPlayerItem()
191 #define AVPlayerLayer initAVPlayerLayer()
192 #define AVURLAsset initAVURLAsset()
193 #define AVAssetImageGenerator initAVAssetImageGenerator()
194 #define AVPlayerItemVideoOutput initAVPlayerItemVideoOutput()
195 #define AVMetadataItem initAVMetadataItem()
196 #define AVAssetCache initAVAssetCache()
197
198 #define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
199 #define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
200 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
201 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
202 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
203 #define AVMediaTypeVideo getAVMediaTypeVideo()
204 #define AVMediaTypeAudio getAVMediaTypeAudio()
205 #define AVMediaTypeMetadata getAVMediaTypeMetadata()
206 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
207 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
208 #define AVURLAssetOutOfBandMIMETypeKey getAVURLAssetOutOfBandMIMETypeKey()
209 #define AVURLAssetUseClientURLLoadingExclusively getAVURLAssetUseClientURLLoadingExclusively()
210 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
211 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
212 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
213 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
214 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
215 #define AVStreamingKeyDeliveryContentKeyType getAVStreamingKeyDeliveryContentKeyType()
216
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

// Note: AVMediaSelectionGroupType and AVMediaSelectionOptionType are already
// typedef'd unconditionally earlier in this file; the redundant conditional
// typedefs that used to live here have been removed.

SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
SOFT_LINK_CLASS(AVFoundation, AVOutputContext)

SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicLegible, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVMediaTypeSubtitle, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)

#define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
#define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
#define AVMediaSelectionOption getAVMediaSelectionOptionClass()
#define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
#define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
#define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
#define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()

#endif
241
242 #if ENABLE(AVF_CAPTIONS)
243
244 SOFT_LINK_CONSTANT(AVFoundation, AVURLAssetCacheKey, NSString *)
245 SOFT_LINK_CONSTANT(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString *)
246 SOFT_LINK_CONSTANT(AVFoundation, AVURLAssetUsesNoPersistentCacheKey, NSString *)
247 SOFT_LINK_CONSTANT(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString *)
248 SOFT_LINK_CONSTANT(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString *)
249 SOFT_LINK_CONSTANT(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString *)
250 SOFT_LINK_CONSTANT(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString *)
251 SOFT_LINK_CONSTANT(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString *)
252 SOFT_LINK_CONSTANT(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString *)
253 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString *)
254 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString *)
255 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicIsAuxiliaryContent, NSString *)
256
257 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
258 #define AVURLAssetCacheKey getAVURLAssetCacheKey()
259 #define AVURLAssetUsesNoPersistentCacheKey getAVURLAssetUsesNoPersistentCacheKey()
260 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
261 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
262 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
263 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
264 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
265 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
266 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
267 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
268 #define AVMediaCharacteristicIsAuxiliaryContent getAVMediaCharacteristicIsAuxiliaryContent()
269
270 #endif
271
272 #if ENABLE(DATACUE_VALUE)
273
274 SOFT_LINK_CONSTANT(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString *)
275 SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVMetadataKeySpaceISOUserData, NSString *)
276 SOFT_LINK_CONSTANT(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString *)
277 SOFT_LINK_CONSTANT(AVFoundation, AVMetadataKeySpaceiTunes, NSString *)
278 SOFT_LINK_CONSTANT(AVFoundation, AVMetadataKeySpaceID3, NSString *)
279
280 #define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
281 #define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
282 #define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
283 #define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
284 #define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
285
286 #endif
287
288 #if PLATFORM(IOS_FAMILY)
289
290 SOFT_LINK_CONSTANT(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
291 SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
292 SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVURLAssetHTTPCookiesKey, NSString *)
293 SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVURLAssetRequiresCustomURLLoadingKey, NSString *)
294
295 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
296 #define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
297 #define AVURLAssetHTTPCookiesKey getAVURLAssetHTTPCookiesKey()
298 #define AVURLAssetRequiresCustomURLLoadingKey getAVURLAssetRequiresCustomURLLoadingKey()
299
300 #endif
301
302 SOFT_LINK_FRAMEWORK(MediaToolbox)
303 SOFT_LINK_OPTIONAL(MediaToolbox, MTEnableCaption2015Behavior, Boolean, (), ())
304
305 #if PLATFORM(IOS_FAMILY)
306
307 #if HAVE(CELESTIAL)
308 SOFT_LINK_PRIVATE_FRAMEWORK(Celestial)
309 SOFT_LINK_CONSTANT(Celestial, AVController_RouteDescriptionKey_RouteCurrentlyPicked, NSString *)
310 SOFT_LINK_CONSTANT(Celestial, AVController_RouteDescriptionKey_RouteName, NSString *)
311 SOFT_LINK_CONSTANT(Celestial, AVController_RouteDescriptionKey_AVAudioRouteName, NSString *)
312 #define AVController_RouteDescriptionKey_RouteCurrentlyPicked getAVController_RouteDescriptionKey_RouteCurrentlyPicked()
313 #define AVController_RouteDescriptionKey_RouteName getAVController_RouteDescriptionKey_RouteName()
314 #define AVController_RouteDescriptionKey_AVAudioRouteName getAVController_RouteDescriptionKey_AVAudioRouteName()
315 #endif // HAVE(CELESTIAL)
316
317 #endif // PLATFORM(IOS_FAMILY)
318
319 using namespace WebCore;
320
// KVO context values passed when registering observations, used by the observer's
// -observeValueForKeyPath:... to route a callback to the object it concerns
// (player item, item track, player, or player layer).
enum MediaPlayerAVFoundationObservationContext {
    MediaPlayerAVFoundationObservationContextPlayerItem,
    MediaPlayerAVFoundationObservationContextPlayerItemTrack,
    MediaPlayerAVFoundationObservationContextPlayer,
    MediaPlayerAVFoundationObservationContextAVPlayerLayer,
};
327
// Observer that receives AVFoundation notifications, KVO callbacks, and (when
// legible output is supported) caption payloads, and forwards them to the weakly
// held MediaPlayerPrivateAVFoundationObjC.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
#else
@interface WebCoreAVFMovieObserver : NSObject
#endif
{
    WeakPtr<MediaPlayerPrivateAVFoundationObjC> m_player; // Weak: the observer can outlive the player.
    GenericTaskQueue<Timer, std::atomic<unsigned>> m_taskQueue;
    int m_delayCallbacks;
}
-(id)initWithPlayer:(WeakPtr<MediaPlayerPrivateAVFoundationObjC>&&)callback;
-(void)disconnect;
-(void)metadataLoaded;
-(void)didEnd:(NSNotification *)notification;
// NOTE(review): keyPath is declared without a type (implicitly id); the standard KVO
// signature uses (NSString *). Left untouched because the out-of-view implementation
// must match — confirm before changing.
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;
#endif
@end
348
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Delegate for AVAssetResourceLoader: forwards resource-loading requests to the
// weakly held player so WebCore can decide whether/how to satisfy them.
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    WeakPtr<MediaPlayerPrivateAVFoundationObjC> m_player; // Weak back-pointer; may be null after the player is destroyed.
    GenericTaskQueue<Timer, std::atomic<unsigned>> m_taskQueue;
}
- (id)initWithPlayer:(WeakPtr<MediaPlayerPrivateAVFoundationObjC>&&)player;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
@end
#endif
358
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Pull delegate for AVPlayerItemVideoOutput: relays "new frame available" and
// flush notifications to the weakly held player.
@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    WeakPtr<MediaPlayerPrivateAVFoundationObjC> m_player; // Weak back-pointer; may be null after the player is destroyed.
}
- (id)initWithPlayer:(WeakPtr<MediaPlayerPrivateAVFoundationObjC>&&)player;
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
@end
#endif
368
369 namespace WebCore {
370 using namespace PAL;
371
372 static NSArray *assetMetadataKeyNames();
373 static NSArray *itemKVOProperties();
374 static NSArray *assetTrackMetadataKeyNames();
375 static NSArray *playerKVOProperties();
376 static AVAssetTrack* firstEnabledTrack(NSArray* tracks);
377
378 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Returns the process-wide serial queue on which all WebCoreAVFLoaderDelegate
// callbacks are delivered. Created lazily, exactly once.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t sharedQueue;
    static dispatch_once_t creationToken;
    dispatch_once(&creationToken, ^{
        sharedQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return sharedQueue;
}
388 #endif
389
390 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Returns the process-wide serial queue used for WebCoreAVFPullDelegate
// notifications. Created lazily, exactly once.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_queue_t sharedQueue;
    static dispatch_once_t creationToken;
    dispatch_once(&creationToken, ^{
        sharedQueue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return sharedQueue;
}
400 #endif
401
// Registers this engine with the media-engine registry. A no-op when the
// soft-linked frameworks this engine needs are unavailable.
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    // The factory lambda plus the static capability callbacks form the engine's
    // registration record.
    registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player); },
            getSupportedTypes, supportsType, originsInMediaCache, clearMediaCache, clearMediaCacheForOrigins, supportsKeySystem);
    ASSERT(AVFoundationMIMETypeCache::singleton().isAvailable());
}
411
// Returns the AVAssetCache rooted at |path|, or at a default "MediaCache"
// directory under the temporary directory when |path| is empty.
static AVAssetCacheType *assetCacheForPath(const String& path)
{
    NSURL *cacheDirectoryURL = path.isEmpty()
        ? [[NSURL fileURLWithPath:NSTemporaryDirectory()] URLByAppendingPathComponent:@"MediaCache" isDirectory:YES]
        : [NSURL fileURLWithPath:path isDirectory:YES];

    return [initAVAssetCache() assetCacheWithURL:cacheDirectoryURL];
}
423
// Collects the security origins represented in the media cache at |path|.
// Cache keys that parse as valid URLs are converted to SecurityOrigins.
HashSet<RefPtr<SecurityOrigin>> MediaPlayerPrivateAVFoundationObjC::originsInMediaCache(const String& path)
{
    HashSet<RefPtr<SecurityOrigin>> cachedOrigins;
    for (NSString *cacheKey in [assetCacheForPath(path) allKeys]) {
        URL cacheKeyURL = URL(URL(), cacheKey);
        if (!cacheKeyURL.isValid())
            continue;
        cachedOrigins.add(SecurityOrigin::create(cacheKeyURL));
    }
    return cachedOrigins;
}
434
// Converts an NSDate (seconds since the Unix epoch) to a WallTime.
static WallTime toSystemClockTime(NSDate *date)
{
    ASSERT(date);
    NSTimeInterval secondsSinceEpoch = [date timeIntervalSince1970];
    return WallTime::fromRawSeconds(secondsSinceEpoch);
}
440
// Removes media cached at |path| that was modified after |modifiedSince|.
// Two passes: clear matching AVAssetCache entries, then prune leftover
// "CachedMedia-" files on disk. A cutoff at/before the epoch wipes everything.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCache(const String& path, WallTime modifiedSince)
{
    AVAssetCacheType* assetCache = assetCacheForPath(path);
    
    // Pass 1: entries the cache itself knows about.
    for (NSString *key in [assetCache allKeys]) {
        if (toSystemClockTime([assetCache lastModifiedDateOfEntryForKey:key]) > modifiedSince)
            [assetCache removeEntryForKey:key];
    }

    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSURL *baseURL = [assetCache URL];

    // "Clear everything" case: delete the whole cache directory and stop.
    if (modifiedSince <= WallTime::fromRawSeconds(0)) {
        [fileManager removeItemAtURL:baseURL error:nil];
        return;
    }
    
    // Pass 2: enumerate the cache directory (top level only) looking for regular
    // "CachedMedia-" files newer than the cutoff.
    NSArray *propertyKeys = @[NSURLNameKey, NSURLContentModificationDateKey, NSURLIsRegularFileKey];
    NSDirectoryEnumerator *enumerator = [fileManager enumeratorAtURL:baseURL includingPropertiesForKeys:
        propertyKeys options:NSDirectoryEnumerationSkipsSubdirectoryDescendants
        errorHandler:nil];
    
    // Collect first, delete afterwards — deleting while enumerating the same
    // directory would invalidate the enumeration.
    RetainPtr<NSMutableArray> urlsToDelete = adoptNS([[NSMutableArray alloc] init]);
    for (NSURL *fileURL : enumerator) {
        NSDictionary *fileAttributes = [fileURL resourceValuesForKeys:propertyKeys error:nil];
    
        if (![fileAttributes[NSURLNameKey] hasPrefix:@"CachedMedia-"])
            continue;
        
        if (![fileAttributes[NSURLIsRegularFileKey] boolValue])
            continue;
        
        if (toSystemClockTime(fileAttributes[NSURLContentModificationDateKey]) <= modifiedSince)
            continue;
        
        [urlsToDelete addObject:fileURL];
    }
    
    for (NSURL *fileURL in urlsToDelete.get())
        [fileManager removeItemAtURL:fileURL error:nil];
}
482
// Removes every cache entry at |path| whose key parses to an origin contained
// in |origins|.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCacheForOrigins(const String& path, const HashSet<RefPtr<SecurityOrigin>>& origins)
{
    AVAssetCacheType* cache = assetCacheForPath(path);
    for (NSString *cacheKey in [cache allKeys]) {
        URL cacheKeyURL = URL(URL(), cacheKey);
        // Short-circuit keeps SecurityOrigin::create off invalid URLs.
        if (cacheKeyURL.isValid() && origins.contains(SecurityOrigin::create(cacheKeyURL)))
            [cache removeEntryForKey:cacheKey];
    }
}
494
// All AVFoundation objects (player, item, layer, outputs) are created lazily;
// the constructor only sets up helper objects and cached-state defaults.
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_videoFullscreenLayerManager(std::make_unique<VideoFullscreenLayerManagerObjC>())
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
    // Observer/delegate helpers hold weak pointers back to this object so any
    // callback arriving after destruction becomes a no-op.
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithPlayer:m_weakPtrFactory.createWeakPtr(*this)]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithPlayer:m_weakPtrFactory.createWeakPtr(*this)]))
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithPlayer:m_weakPtrFactory.createWeakPtr(*this)]))
#endif
    , m_currentTextTrack(0)
    , m_cachedRate(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_shouldBufferData(true)
    , m_cachedIsReadyForDisplay(false)
    , m_haveBeenAskedToCreateLayer(false)
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
}
525
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
    // Revoke weak pointers first so in-flight delegate/observer callbacks become no-ops.
    m_weakPtrFactory.revokeAll();

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Detach the resource-loader delegate and invalidate any outstanding loaders.
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];

    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    [m_videoOutput setDelegate:nil queue:0];
#endif

    if (m_videoLayer)
        destroyVideoLayer();

    // cancelLoad() performs the remaining teardown (KVO removal, clearing cached state).
    cancelLoad();
}
545
// Cancels any in-progress load and tears down the AVFoundation object graph:
// notifications, KVO observations, outputs, item, player, and cached state.
// Also called from the destructor; must be safe when objects were never created.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    INFO_LOG(LOGIDENTIFIER);
    tearDownVideoRendering();

    // Stop notification delivery and disconnect the observer before teardown.
    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Detach the legible (caption) output from the item before releasing it.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    // Remove every KVO observation registered on the player item, then drop it.
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        
        m_avPlayerItem = nil;
    }
    // Same for the player: periodic time observer, KVO properties, then release.
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;

        for (NSString *keyName in playerKVOProperties())
            [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];

        setShouldObserveTimeControlStatus(false);

        [m_avPlayer replaceCurrentItemWithPlayerItem:nil];
#if !PLATFORM(IOS_FAMILY)
        [m_avPlayer setOutputContext:nil];
#endif
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = MediaTime::zeroTime();

    // Each cached track had an "enabled" observation added; remove before clearing.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Detach the Web Audio source provider from the objects being destroyed.
    if (m_provider) {
        m_provider->setPlayerItem(nullptr);
        m_provider->setAudioTrack(nullptr);
    }
#endif

    // Teardown complete: resume normal load-state change handling.
    setIgnoreLoadStateChanges(false);
}
616
// True once layer creation has been requested via createVideoLayer(); the layer
// itself may not exist yet (creation completes asynchronously on the main thread).
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_haveBeenAskedToCreateLayer;
}
621
// True when a renderer capable of painting into a graphics context exists:
// either a video output (preferred) or an image generator.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    return m_videoOutput || m_imageGenerator;
#else
    return m_imageGenerator;
#endif
}
630
// Creates whichever context renderer this build supports: an
// AVPlayerItemVideoOutput when available, else an AVAssetImageGenerator.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
639
// Lazily creates the AVAssetImageGenerator used to snapshot frames when no
// video output is available. Requires an asset; idempotent.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    using namespace PAL;
    INFO_LOG(LOGIDENTIFIER);

    if (m_imageGenerator || !m_avAsset)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    // Request frames cropped to the clean aperture, with the track's preferred
    // transform applied, at exactly the requested time (zero tolerance).
    [m_imageGenerator setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator setAppliesPreferredTrackTransform:YES];
    [m_imageGenerator setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator setRequestedTimeToleranceAfter:kCMTimeZero];
}
655
// Tears down both possible context renderers; each destroy call is a no-op if
// the corresponding renderer was never created.
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
#endif
    destroyImageGenerator();
}
663
// Releases the image generator created by createImageGenerator(), if any.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (!m_imageGenerator)
        return;

    INFO_LOG(LOGIDENTIFIER);

    // Use nil rather than the integer literal 0, matching the clearing
    // convention used for the other RetainPtr members in this file.
    m_imageGenerator = nil;
}
673
// Requests creation of the AVPlayerLayer. The work hops to the main thread;
// the guard is re-checked inside the hop because state may have changed (or
// |this| may have been destroyed) before the lambda runs.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
        return;

    callOnMainThread([this, weakThis = makeWeakPtr(*this)] {
        if (!weakThis)
            return;

        // Re-check: another request may have won the race, or the player may be gone.
        if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
            return;
        m_haveBeenAskedToCreateLayer = true;

        if (!m_videoLayer)
            createAVPlayerLayer();

#if USE(VIDEOTOOLBOX) && HAVE(AVFOUNDATION_VIDEO_OUTPUT)
        // Also keep a video output available so frames can be painted to a context.
        if (!m_videoOutput)
            createVideoOutput();
#endif

        player()->client().mediaPlayerRenderingModeChanged(player());
    });
}
698
// Creates the AVPlayerLayer, wires it to the player, registers the
// readyForDisplay KVO observation, and hands it to the fullscreen layer manager.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
{
    if (!m_avPlayer)
        return;

    m_videoLayer = adoptNS([[AVPlayerLayer alloc] init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];

#ifndef NDEBUG
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
    // readyForDisplay tells us when the first video frame is presentable; removed
    // in destroyVideoLayer().
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
    IntSize defaultSize = snappedIntRect(player()->client().mediaPlayerContentBoxRect()).size();
    INFO_LOG(LOGIDENTIFIER);

    m_videoFullscreenLayerManager->setVideoLayer(m_videoLayer.get(), defaultSize);

#if PLATFORM(IOS_FAMILY) && !PLATFORM(WATCHOS)
    // -setPIPModeEnabled: is not declared in the SDK headers; probe before calling.
    if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(player()->fullscreenMode() & MediaPlayer::VideoFullscreenModePictureInPicture)];
#endif
}
723
// Mirrors createAVPlayerLayer(): removes the readyForDisplay observation,
// detaches the player, and notifies the fullscreen layer manager.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    INFO_LOG(LOGIDENTIFIER);

    [m_videoLayer removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer setPlayer:nil];
    m_videoFullscreenLayerManager->didDestroyVideoLayer();

    m_videoLayer = nil;
}
737
// Returns the stream's start date as milliseconds since the epoch (current date
// minus current playback offset), or an invalid MediaTime when the media
// carries no date metadata.
MediaTime MediaPlayerPrivateAVFoundationObjC::getStartDate() const
{
    // Date changes as the track's playback position changes. Must subtract currentTime (offset in seconds) from date offset to get date beginning
    double date = [[m_avPlayerItem currentDate] timeIntervalSince1970] * 1000;

    // No live streams were made during the epoch (1970). AVFoundation returns 0 if the media file doesn't have a start date
    if (!date)
        return MediaTime::invalidTime();

    double currentTime = CMTimeGetSeconds([m_avPlayerItem currentTime]) * 1000;

    // Rounding due to second offset error when subtracting.
    return MediaTime::createWithDouble(round(date - currentTime));
}
752
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    // When rendering through the AVPlayerLayer, the layer's readiness (cached
    // via KVO) is authoritative.
    if (currentRenderingMode() == MediaRenderingToLayer)
        return m_cachedIsReadyForDisplay;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // With a video output, a frame is available if we already pulled a pixel
    // buffer or the output has a new one queued for the current item time.
    if (m_videoOutput) {
        if (m_lastPixelBuffer)
            return true;
        if ([m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]])
            return true;
    }
#endif

    return m_videoFrameHasDrawn;
}
765
766 #if ENABLE(AVF_CAPTIONS)
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    // Map a platform text-track kind onto the AVFoundation media
    // characteristics used to describe an out-of-band track.
    //
    // In the 2015 manual caption selection mode, every out-of-band track is
    // tagged as auxiliary content regardless of its kind.
    static bool manualSelectionMode = MTEnableCaption2015BehaviorPtr() && MTEnableCaption2015BehaviorPtr()();
    if (manualSelectionMode)
        return @[ AVMediaCharacteristicIsAuxiliaryContent ];

    // FIXME: Match these to correct types:
    if (kind == PlatformTextTrack::Caption)
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];

    if (kind == PlatformTextTrack::Subtitle)
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];

    if (kind == PlatformTextTrack::Description)
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility ];

    if (kind == PlatformTextTrack::Forced)
        return @[ AVMediaCharacteristicContainsOnlyForcedSubtitles ];

    // Unknown kinds fall back to the caption/subtitle characteristic.
    return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];
}
788     
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    // Forward out-of-band text track mode changes to the shared handler.
    trackModeChanged();
}
793     
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    // Push the mode of each out-of-band track source from the player down
    // into the matching platform text track, matched by unique identifier.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();

    for (auto& textTrack : m_textTracks) {
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;

        RefPtr<OutOfBandTextTrackPrivateAVF> outOfBandTrack = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> selectionOption = outOfBandTrack->mediaSelectionOption();

        for (auto& source : outOfBandTrackSources) {
            RetainPtr<CFStringRef> sourceID = String::number(source->uniqueId()).createCFString();
            if (![[selectionOption.get() outOfBandIdentifier] isEqual:(__bridge NSString *)sourceID.get()])
                continue;

            // Translate the platform mode; anything else is treated as Hidden.
            auto mode = InbandTextTrackPrivate::Hidden;
            if (source->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (source->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;

            textTrack->setMode(mode);
            break;
        }
    }
}
824 #endif
825
826
static NSURL *canonicalURL(const URL& url)
{
    // Run the URL through NSURLProtocol canonicalization so it matches what
    // the URL loading system would ultimately request; fall back to the plain
    // conversion whenever any step fails.
    NSURL *cocoaURL = url;
    if (url.isEmpty())
        return cocoaURL;

    RetainPtr<NSURLRequest> request = adoptNS([[NSURLRequest alloc] initWithURL:cocoaURL]);
    if (!request)
        return cocoaURL;

    if (NSURLRequest *canonicalRequest = [NSURLProtocol canonicalRequestForRequest:request.get()])
        return [canonicalRequest URL];

    return cocoaURL;
}
843
844 #if PLATFORM(IOS_FAMILY)
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    // Convert a WebCore Cookie into the property dictionary NSHTTPCookie
    // expects. The expiry arrives in milliseconds; NSDate wants seconds.
    RetainPtr<NSMutableDictionary> properties = adoptNS([[NSMutableDictionary alloc] init]);
    [properties setDictionary:@{
        NSHTTPCookieName: cookie.name,
        NSHTTPCookieValue: cookie.value,
        NSHTTPCookieDomain: cookie.domain,
        NSHTTPCookiePath: cookie.path,
        NSHTTPCookieExpires: [NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)],
    }];
    // Secure and Discard (session) are only added when set.
    if (cookie.secure)
        properties.get()[NSHTTPCookieSecure] = @YES;
    if (cookie.session)
        properties.get()[NSHTTPCookieDiscard] = @YES;

    return [NSHTTPCookie cookieWithProperties:properties.get()];
}
862 #endif
863
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const URL& url)
{
    // Build the AVURLAsset for |url|, assembling the creation-options
    // dictionary (reference restrictions, HTTP headers, custom loading,
    // content type, out-of-band text tracks, cookies, caching) before handing
    // it to AVFoundation. Idempotent: returns early if an asset exists.
    if (m_avAsset)
        return;

    INFO_LOG(LOGIDENTIFIER);

    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);    

    // Forbid the asset from mixing local and remote sub-resource references.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    // Propagate the page's Referer and User-Agent to AVFoundation's loader.
    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject:@YES forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

    // Route media loading through WebKit's own loader where the SPI exists.
    if (canLoadAVURLAssetUseClientURLLoadingExclusively())
        [options setObject:@YES forKey:AVURLAssetUseClientURLLoadingExclusively];
#if PLATFORM(IOS_FAMILY)
    else if (canLoadAVURLAssetRequiresCustomURLLoadingKey())
        [options setObject:@YES forKey:AVURLAssetRequiresCustomURLLoadingKey];
    // FIXME: rdar://problem/20354688
    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty() && canLoadAVURLAssetClientBundleIdentifierKey())
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];
#endif

    // Pass along the declared MIME type (and codecs) so AVFoundation need not
    // sniff, but only when the type was not merely inferred from the extension.
    auto type = player()->contentMIMEType();
    if (canLoadAVURLAssetOutOfBandMIMETypeKey() && !type.isEmpty() && !player()->contentMIMETypeWasInferredFromExtension()) {
        auto codecs = player()->contentTypeCodecs();
        if (!codecs.isEmpty()) {
            NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)type, (NSString *)codecs];
            [options setObject:typeString forKey:AVURLAssetOutOfBandMIMETypeKey];
        } else
            [options setObject:(NSString *)type forKey:AVURLAssetOutOfBandMIMETypeKey];
    }

#if ENABLE(AVF_CAPTIONS)
    // Describe each out-of-band text track so AVFoundation exposes it as a
    // media selection option (matched back up in synchronizeTextTrackState()
    // via the unique identifier).
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: (__bridge NSString *)label.get(),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: (__bridge NSString *)language.get(),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: (__bridge NSString *)uniqueID.get(),
                AVOutOfBandAlternateTrackSourceKey: (__bridge NSString *)url.get(),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS_FAMILY)
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS_FAMILY)
    // Hand the document's cookies for this URL to AVFoundation's loader.
    Vector<Cookie> cookies;
    if (player()->getRawCookies(url, cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        if (canLoadAVURLAssetHTTPCookiesKey())
            [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    bool usePersistentCache = player()->client().mediaPlayerShouldUsePersistentCache();
    [options setObject:@(!usePersistentCache) forKey:AVURLAssetUsesNoPersistentCacheKey];
    
    if (usePersistentCache)
        [options setObject:assetCacheForPath(player()->client().mediaPlayerMediaCacheDirectory()) forKey:AVURLAssetCacheKey];

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([[AVURLAsset alloc] initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Let WebKit service resource loading for the asset; when the NSURLSession
    // SPI is available, attach a WebCoreNSURLSession backed by the player's
    // resource loader.
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    [resourceLoader setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];

    if (DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled()
        && [resourceLoader respondsToSelector:@selector(setURLSession:)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegate)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegateQueue)]) {
        RefPtr<PlatformMediaResourceLoader> mediaResourceLoader = player()->createResourceLoader();
        if (mediaResourceLoader)
            resourceLoader.URLSession = (NSURLSession *)[[[WebCoreNSURLSession alloc] initWithResourceLoader:*mediaResourceLoader delegate:resourceLoader.URLSessionDataDelegate delegateQueue:resourceLoader.URLSessionDataDelegateQueue] autorelease];
    }

#endif

    // Playability must be re-checked against the new asset.
    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
983
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItemType *item)
{
    if (!m_avPlayer)
        return;

    // -replaceCurrentItemWithPlayerItem: must run on the main thread. If we
    // are already there, do it inline; otherwise hop over, keeping the player
    // and item alive across the dispatch.
    if (pthread_main_np()) {
        [m_avPlayer replaceCurrentItemWithPlayerItem:item];
        return;
    }

    RetainPtr<AVPlayerType> protectedPlayer = m_avPlayer.get();
    RetainPtr<AVPlayerItemType> protectedItem = item;
    dispatch_async(dispatch_get_main_queue(), [protectedPlayer, protectedItem] {
        [protectedPlayer replaceCurrentItemWithPlayerItem:protectedItem.get()];
    });
}
1000
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    // Create and configure the AVPlayer: register KVO observers, apply cached
    // playback state (external-playback target, mute, rate observation), and
    // attach any already-created player item. Idempotent.
    if (m_avPlayer)
        return;

    INFO_LOG(LOGIDENTIFIER);

    setDelayCallbacks(true);

    m_avPlayer = adoptNS([[AVPlayer alloc] init]);
    for (NSString *keyName in playerKVOProperties())
        [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];

    setShouldObserveTimeControlStatus(true);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // WebKit drives media selection itself; disable AVFoundation's automatics.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET) && !PLATFORM(IOS_FAMILY)
    if (m_shouldPlayToPlaybackTarget) {
        // Clear m_shouldPlayToPlaybackTarget so doesn't return without doing anything.
        m_shouldPlayToPlaybackTarget = false;
        setShouldPlayToPlaybackTarget(true);
    }
#endif

#if PLATFORM(IOS_FAMILY) && !PLATFORM(IOS_FAMILY_SIMULATOR) && !PLATFORM(IOSMAC)
    setShouldDisableSleep(player()->shouldDisableSleep());
#endif

    if (m_muted) {
        // Clear m_muted so setMuted doesn't return without doing anything.
        m_muted = false;
        [m_avPlayer.get() setMuted:m_muted];
    }

    // Audio-only players skip the layer; it is created lazily for video.
    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
1051
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    // Create the AVPlayerItem from the asset, wire up end-of-playback and KVO
    // notifications, configure legible (caption) output, and connect the item
    // to any existing player, audio provider, and video output. Idempotent.
    if (m_avPlayerItem)
        return;

    INFO_LOG(LOGIDENTIFIER);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([[AVPlayerItem alloc] initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // Prior notifications let the observer see will-change as well as did-change.
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Deliver WebVTT cues to the observer on the main queue; WebKit renders
    // them itself, so suppress AVFoundation's own caption rendering.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([[AVPlayerItemLegibleOutput alloc] initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio source provider pointed at the new item/track.
    if (m_provider) {
        m_provider->setPlayerItem(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }
#endif

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#endif

    setDelayCallbacks(false);
}
1101
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    // Asynchronously load the asset's "playable" and "tracks" keys, at most
    // once per asset (the flag is reset in createAVAssetForURL()). Completion
    // is bounced to the main thread, guarded by a weak pointer in case this
    // object is destroyed first, and reported via AssetPlayabilityKnown.
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    INFO_LOG(LOGIDENTIFIER);
    auto weakThis = makeWeakPtr(*this);

    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObjects:@"playable", @"tracks", nil] completionHandler:^{
        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
1118
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    // Load the asset's metadata keys, then each track's metadata keys, and
    // only notify the observer once everything has finished. A dispatch group
    // joins the per-track loads: the group is entered once up front, once per
    // track, and the matching leaves release the notify block below.
    INFO_LOG(LOGIDENTIFIER);

    OSObjectPtr<dispatch_group_t> metadataLoadingGroup = adoptOSObject(dispatch_group_create());
    dispatch_group_enter(metadataLoadingGroup.get());
    auto weakThis = makeWeakPtr(*this);
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            // Per-track loads are only started when the tracks key loaded
            // successfully; otherwise the group drains immediately.
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup.get());
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup.get());
                    }];
                }
            }
            dispatch_group_leave(metadataLoadingGroup.get());
        });
    }];

    // Fires once all enters have been balanced; weakThis guards destruction.
    dispatch_group_notify(metadataLoadingGroup.get(), dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });
    });
}
1148
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    // Translate the cached AVPlayerItem state into the cross-platform status.
    // The checks are ordered by precedence: existence, then load status, then
    // buffering state.
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    switch (m_cachedItemStatus) {
    case AVPlayerItemStatusUnknown:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    case AVPlayerItemStatusFailed:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    default:
        break;
    }

    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1167
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    // The inline layer is owned by the fullscreen layer manager, which swaps
    // the video layer between inline and fullscreen hosting.
    return m_videoFullscreenLayerManager->videoInlineLayer();
}
1172
void MediaPlayerPrivateAVFoundationObjC::updateVideoFullscreenInlineImage()
{
    // Refresh the still image shown inline while the video layer is hosted in
    // the fullscreen layer. The synchronous update ensures the image matches
    // the current frame before it is handed over.
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    updateLastImage(UpdateType::UpdateSynchronously);
    m_videoFullscreenLayerManager->updateVideoFullscreenInlineImage(m_lastImage);
#endif
}
1180
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer, Function<void()>&& completionHandler)
{
    // Move the video layer into (or out of) the given fullscreen layer. When
    // video output is available, a synchronously-captured current frame is
    // passed along so the transition shows an up-to-date image; otherwise nil.
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    updateLastImage(UpdateType::UpdateSynchronously);
    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler), m_lastImage);
#else
    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler), nil);
#endif
    // Fullscreen state affects whether external (AirPlay) playback is allowed.
    updateDisableExternalPlayback();
}
1191
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    // Geometry is delegated entirely to the fullscreen layer manager.
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
}
1196
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    // Remember the requested gravity even without a layer; it is applied when
    // the layer exists.
    m_videoFullscreenGravity = gravity;

    if (!m_videoLayer)
        return;

    // Map the cross-platform gravity onto the AVPlayerLayer equivalent;
    // unknown values assert and fall back to aspect-fit.
    NSString *videoGravity;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        videoGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        videoGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    }

    if ([m_videoLayer videoGravity] == videoGravity)
        return;

    [m_videoLayer setVideoGravity:videoGravity];
    // Caption layout depends on the video's displayed bounds.
    syncTextTrackBounds();
}
1220
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenMode(MediaPlayer::VideoFullscreenMode mode)
{
    // On iOS-family (excluding watchOS), toggle the layer's PiP mode via SPI
    // when available and refresh the external-playback restriction; elsewhere
    // the mode has no platform effect.
#if PLATFORM(IOS_FAMILY) && !PLATFORM(WATCHOS)
    if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(mode & MediaPlayer::VideoFullscreenModePictureInPicture)];
    updateDisableExternalPlayback();
#else
    UNUSED_PARAM(mode);
#endif
}
1231     
void MediaPlayerPrivateAVFoundationObjC::videoFullscreenStandbyChanged()
{
    // Fullscreen standby affects whether external playback should be disabled.
#if PLATFORM(IOS_FAMILY) && !PLATFORM(WATCHOS)
    updateDisableExternalPlayback();
#endif
}
1238
1239 #if PLATFORM(IOS_FAMILY)
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    // Returns the most recently cached timed metadata, or nil when none has
    // been received yet.
    return m_currentMetaData ? m_currentMetaData.get() : nil;
}
1246
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    // Serialize the player item's access log using the encoding the log
    // itself advertises; empty when there is no item yet.
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemAccessLog *accessLog = [m_avPlayerItem.get() accessLog];
    RetainPtr<NSString> logText = adoptNS([[NSString alloc] initWithData:[accessLog extendedLogData] encoding:[accessLog extendedLogDataStringEncoding]]);

    return logText.get();
}
1257
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    // Serialize the player item's error log using the encoding the log itself
    // advertises; empty when there is no item yet.
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *errorLog = [m_avPlayerItem.get() errorLog];
    RetainPtr<NSString> logText = adoptNS([[NSString alloc] initWithData:[errorLog extendedLogData] encoding:[errorLog extendedLogDataStringEncoding]]);

    return logText.get();
}
1268 #endif
1269
void MediaPlayerPrivateAVFoundationObjC::didEnd()
{
    // Playback reached the end; clear the playing request before delegating
    // to the shared end-of-media handling.
    m_requestedPlaying = false;
    MediaPlayerPrivateAVFoundation::didEnd();
}
1275
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    // Toggle the video layer's visibility inside a transaction with implicit
    // animations disabled so the change takes effect immediately.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];

    if (m_videoLayer)
        [m_videoLayer setHidden:!isVisible];

    [CATransaction commit];
}
1284     
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    // Start playback by applying the last requested rate; ignored until
    // metadata has loaded.
    INFO_LOG(LOGIDENTIFIER);
    if (!metaDataAvailable())
        return;

    m_requestedPlaying = true;
    setPlayerRate(m_requestedRate);
}
1294
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    // Pause playback by setting the player rate to zero; ignored until
    // metadata has loaded.
    INFO_LOG(LOGIDENTIFIER);
    if (!metaDataAvailable())
        return;

    m_requestedPlaying = false;
    setPlayerRate(0);
}
1304
bool MediaPlayerPrivateAVFoundationObjC::platformPaused() const
{
    // Paused is derived from the cached AVPlayer timeControlStatus, which is
    // kept current via KVO (see setShouldObserveTimeControlStatus()).
    return m_cachedTimeControlStatus == AVPlayerTimeControlStatusPaused;
}
1309
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Asking the asset for its duration before its metadata has loaded would
    // fetch the answer synchronously, so bail out until the asset is ready.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();

    // Prefer the player item's duration when it is ready; some assets never
    // report a duration themselves.
    CMTime cmDuration = (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        ? [m_avPlayerItem.get() duration]
        : [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return PAL::toMediaTime(cmDuration);

    // An indefinite duration indicates a live stream.
    if (CMTIME_IS_INDEFINITE(cmDuration))
        return MediaTime::positiveInfiniteTime();

    INFO_LOG(LOGIDENTIFIER, "returning invalid time");
    return MediaTime::invalidTime();
}
1334
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime itemTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(itemTime))
        return MediaTime::zeroTime();

    // Clamp to zero so a slightly negative item time never leaks out.
    return std::max(PAL::toMediaTime(itemTime), MediaTime::zeroTime());
}
1346
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // Seek the player item to |time| within the given tolerances. Time-control
    // KVO is suspended for the duration of the seek and re-enabled in the
    // completion handler, which is bounced to the main thread and guarded by
    // a weak pointer in case this object is destroyed mid-seek.
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // A seek invalidates any partially-accumulated metadata cues.
    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = PAL::toCMTime(time);
    CMTime cmBefore = PAL::toCMTime(negativeTolerance);
    CMTime cmAfter = PAL::toCMTime(positiveTolerance);

    // [AVPlayerItem seekToTime] will throw an exception if toleranceBefore is negative.
    if (CMTimeCompare(cmBefore, kCMTimeZero) < 0)
        cmBefore = kCMTimeZero;
    
    auto weakThis = makeWeakPtr(*this);

    setShouldObserveTimeControlStatus(false);
    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->setShouldObserveTimeControlStatus(true);
            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1379
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
    // On iOS-family platforms volume is controlled by the system, not the
    // player, so the request is ignored.
#if PLATFORM(IOS_FAMILY)
    UNUSED_PARAM(volume);
    return;
#else

    if (!m_avPlayer)
        return;

    [m_avPlayer.get() setVolume:volume];
#endif
}
1393
void MediaPlayerPrivateAVFoundationObjC::setMuted(bool muted)
{
    if (muted == m_muted)
        return;

    INFO_LOG(LOGIDENTIFIER, muted);

    // Remember the state even before a player exists; createAVPlayer()
    // re-applies it when the player is created.
    m_muted = muted;

    if (m_avPlayer)
        [m_avPlayer.get() setMuted:m_muted];
}
1408
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    // Caption visibility is handled through the text track machinery rather
    // than the player itself; this override only logs the request.
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    INFO_LOG(LOGIDENTIFIER, closedCaptionsVisible);
}
1418
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    // Record the requested rate; it is only applied to the player while
    // playback has actually been requested (platformPlay() applies it later
    // otherwise).
    m_requestedRate = rate;
    if (m_requestedPlaying)
        setPlayerRate(rate);
}
1425
void MediaPlayerPrivateAVFoundationObjC::setPlayerRate(double rate)
{
    // Apply |rate| to the AVPlayer. Time-control KVO is suspended around the
    // change so our own rate change does not echo back as an observation; the
    // resulting timeControlStatus is sampled manually instead.
    setDelayCallbacks(true);
    m_cachedRate = rate;
    setShouldObserveTimeControlStatus(false);
    [m_avPlayer setRate:rate];
    m_cachedTimeControlStatus = [m_avPlayer timeControlStatus];
    setShouldObserveTimeControlStatus(true);
    setDelayCallbacks(false);
}
1436
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    // Before metadata is available there is no meaningful playback rate.
    return metaDataAvailable() ? m_cachedRate : 0;
}
1444
double MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesLastModifiedTime() const
{
    // -seekableTimeRangesLastModifiedTime is only available on macOS 10.13+ /
    // iOS 11+; older SDK targets report 0.
#if (PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101300) || (PLATFORM(IOS_FAMILY) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000)
    return [m_avPlayerItem seekableTimeRangesLastModifiedTime];
#else
    return 0;
#endif
}
1453
double MediaPlayerPrivateAVFoundationObjC::liveUpdateInterval() const
{
    // -liveUpdateInterval is only available on macOS 10.13+ / iOS 11+; older
    // SDK targets report 0.
#if (PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101300) || (PLATFORM(IOS_FAMILY) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000)
    return [m_avPlayerItem liveUpdateInterval];
#else
    return 0;
#endif
}
1462
void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
{
    // Spectral keeps the audio pitch constant across rate changes; Varispeed
    // lets it shift with the rate.
    if (!m_avPlayerItem)
        return;

    [m_avPlayerItem setAudioTimePitchAlgorithm:(preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];
}
1468
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    // Convert the cached loaded ranges into PlatformTimeRanges, skipping any
    // that are invalid or empty.
    auto bufferedRanges = std::make_unique<PlatformTimeRanges>();

    if (!m_avPlayerItem)
        return bufferedRanges;

    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        bufferedRanges->add(PAL::toMediaTime(range.start), PAL::toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return bufferedRanges;
}
1483
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    // Find the earliest start among the valid, non-empty cached seekable
    // ranges; zero when there is nothing seekable.
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    bool foundValidRange = false;
    MediaTime earliestStart = MediaTime::positiveInfiniteTime();
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundValidRange = true;
        earliestStart = std::min(earliestStart, PAL::toMediaTime(range.start));
    }
    return foundValidRange ? earliestStart : MediaTime::zeroTime();
}
1503
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    // Populate the cache on demand, then return the latest end among the
    // valid, non-empty seekable ranges (zero when there are none).
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    MediaTime latestEnd;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latestEnd = std::max(latestEnd, PAL::toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return latestEnd;
}
1521
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    // Furthest point the media has buffered to, i.e. the maximum end time
    // over all valid cached loaded ranges.
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    MediaTime furthestLoaded;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (CMTIMERANGE_IS_VALID(range) && !CMTIMERANGE_IS_EMPTY(range))
            furthestLoaded = std::max(furthestLoaded, PAL::toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return furthestLoaded;
}
1540
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    // Sum of totalSampleDataLength across all cached tracks. The result is
    // memoized in m_cachedTotalBytes (mutated from this const method, so
    // presumably declared mutable — confirm in the header).
    if (!metaDataAvailable())
        return 0;

    if (m_cachedTotalBytes)
        return m_cachedTotalBytes;

    for (AVPlayerItemTrack *thisTrack in m_cachedTracks.get())
        m_cachedTotalBytes += [[thisTrack assetTrack] totalSampleDataLength];

    return m_cachedTotalBytes;
}
1554
void MediaPlayerPrivateAVFoundationObjC::setAsset(RetainPtr<id>&& asset)
{
    // Take ownership of the (possibly nil) asset object for this player.
    m_avAsset = WTFMove(asset);
}
1559
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    // Translate the AVAsset's key-loading progress into the engine-neutral
    // AssetStatus enumeration.
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    // Every metadata key must reach AVKeyValueStatusLoaded; a single failed
    // or cancelled key is terminal for the whole asset.
    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];

        if (error)
            ERROR_LOG(LOGIDENTIFIER, "failed for ", [keyName UTF8String], ", error = ", [[error localizedDescription] UTF8String]);

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading;// At least one key is not loaded yet.
        
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    // Hardware-decode requirements only apply when the client asks for the
    // check; otherwise the tracks are assumed playable.
    if (!player()->shouldCheckHardwareSupport())
        m_tracksArePlayable = true;

    // Compute (and cache) the per-track hardware-decode verdict once.
    if (!m_tracksArePlayable) {
        m_tracksArePlayable = true;
        for (AVAssetTrack *track in [m_avAsset tracks]) {
            if (!assetTrackMeetsHardwareDecodeRequirements(track, player()->mediaContentTypesRequiringHardwareSupport())) {
                m_tracksArePlayable = false;
                break;
            }
        }
    }

    // "Playable" requires both AVFoundation's playable flag and our
    // hardware-decode verdict; otherwise the asset is merely loaded.
    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue] && m_tracksArePlayable.value())
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1600
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    // Error code AVFoundation recorded while resolving the asset's
    // "playable" key; 0 when there is no asset or no error.
    if (!m_avAsset)
        return 0;

    NSError *assetError = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&assetError];
    // Messaging nil returns 0, so a missing error naturally yields code 0.
    return [assetError code];
}
1610
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& rect)
{
    // Draw the current video frame into the graphics context, preferring the
    // AVPlayerItemVideoOutput path when it has a frame ready and falling back
    // to the AVAssetImageGenerator path otherwise.
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    // Delay notification callbacks while painting, and shield callers from
    // any Objective-C exceptions raised by AVFoundation.
    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    m_videoFrameHasDrawn = true;
}
1631
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    // Best-effort software paint. Painting is skipped when metadata is not
    // ready, the context is disabled, we are already rendering to a layer,
    // or no image generator / video output exists yet.
    bool canPaint = metaDataAvailable()
        && !context.paintingDisabled()
        && currentRenderingMode() != MediaRenderingToLayer
        && hasContextRenderer();

    if (canPaint)
        paintCurrentFrameInContext(context, rect);
}
1647
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext& context, const FloatRect& rect)
{
    INFO_LOG(LOGIDENTIFIER);

    // Snapshot the frame at the current time via AVAssetImageGenerator and
    // draw it flipped, since CGContextDrawImage uses a bottom-left origin.
    auto image = createImageForTimeInRect(currentTime(), rect);
    if (!image)
        return;

    GraphicsContextStateSaver stateSaver(context);
    context.translate(rect.x(), rect.y() + rect.height());
    context.scale(FloatSize(1.0f, -1.0f));
    context.setImageInterpolationQuality(InterpolationLow);
    IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context.platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
}
1662
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
{
    // Produce a CGImage of the frame at `time`, bounded by `rect`'s size and
    // converted to sRGB. Used by the image-generator painting path.
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !RELEASE_LOG_DISABLED
    MonotonicTime start = MonotonicTime::now();
#endif

    // Timescale 600 exactly represents common frame rates (24/25/30 fps).
    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), sRGBColorSpaceRef()));

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "creating image took ", (MonotonicTime::now() - start).seconds());
#endif

    return image;
}
1683
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& supportedTypes)
{
    // Report every MIME type AVFoundation can decode.
    // Bug fix: the function body was never closed before the following
    // preprocessor block; add the missing terminating brace.
    supportedTypes = AVFoundationMIMETypeCache::singleton().types();
}
1689 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
static bool keySystemIsSupported(const String& keySystem)
{
    // FairPlay (both legacy identifiers) and Clear Key are the only
    // recognized key systems.
    return equalIgnoringASCIICase(keySystem, "com.apple.fps")
        || equalIgnoringASCIICase(keySystem, "com.apple.fps.1_0")
        || equalIgnoringASCIICase(keySystem, "org.w3c.clearkey");
}
1696 #endif
1697
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
    // MIME-type support probe for this engine. MSE and MediaStream content
    // is served by other player engines.
#if ENABLE(MEDIA_SOURCE)
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif
#if ENABLE(MEDIA_STREAM)
    if (parameters.isMediaStream)
        return MediaPlayer::IsNotSupported;
#endif

    auto containerType = parameters.type.containerType();
    if (isUnsupportedMIMEType(containerType))
        return MediaPlayer::IsNotSupported;

    // The container must be on the static allow list or decodable per
    // AVFoundation's own MIME-type cache.
    if (!staticMIMETypeList().contains(containerType) && !AVFoundationMIMETypeCache::singleton().canDecodeType(containerType))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.type.codecs().isEmpty())
        return MediaPlayer::MayBeSupported;

    if (!contentTypeMeetsHardwareDecodeRequirements(parameters.type, parameters.contentTypesRequiringHardwareSupport))
        return MediaPlayer::IsNotSupported;

    // With explicit codecs available, ask AVFoundation for a definitive answer.
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)containerType, (NSString *)parameters.type.parameter(ContentType::codecsParameter())];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1727
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
    // Returns whether this engine supports the given EME key system,
    // optionally constrained by a container MIME type.
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (!keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS.
        // Bug fix: the original returned MediaPlayer::IsNotSupported (an enum
        // constant) from this bool-returning function; return false explicitly.
        if (equalIgnoringASCIICase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringASCIICase(mimeType, "application/x-mpegurl"))
            return false;

        if (!keySystemIsSupported(keySystem))
            return false;

        // An explicit MIME type must itself be one this engine can decode.
        if (!mimeType.isEmpty() && isUnsupportedMIMEType(mimeType))
            return false;

        if (!mimeType.isEmpty() && !staticMIMETypeList().contains(mimeType) && !AVFoundationMIMETypeCache::singleton().canDecodeType(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1753
1754 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1755 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
{
    // Satisfy an AVAssetResourceLoadingRequest directly from in-memory key
    // data, honoring the request's byte-range parameters.
    if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
        [infoRequest setContentLength:keyData->byteLength()];
        [infoRequest setByteRangeAccessSupported:YES];
    }

    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
        long long start = [dataRequest currentOffset];
        long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);

        // Reject ranges that fall outside the available key data.
        if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
            [request finishLoadingWithError:nil];
            return;
        }

        ASSERT(start <= std::numeric_limits<int>::max());
        ASSERT(end <= std::numeric_limits<int>::max());
        auto requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
        RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
        [dataRequest respondWithData:nsData.get()];
    }

    [request finishLoading];
}
1781 #endif
1782
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    // Decide whether WebCore will service this AVFoundation resource-loading
    // request. Returns true when the request will be fulfilled (possibly
    // later, once a key arrives), false when AVFoundation should fail it.
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
    // "skd" URLs are FairPlay Streaming key requests.
    if (scheme == "skd") {
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        auto initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        unsigned byteLength = initDataBuffer->byteLength();
        auto initDataView = JSC::DataView::create(initDataBuffer.copyRef(), 0, byteLength);
        initDataView->set<uint32_t>(0, keyURISize, true);

        auto keyURIArray = Uint16Array::create(initDataBuffer.copyRef(), 4, keyURI.length());
        // NOTE(review): dividing by sizeof(unsigned char) (== 1) is a no-op;
        // a UChar-count conversion may have been intended — confirm.
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

        auto initData = Uint8Array::create(WTFMove(initDataBuffer), 0, byteLength);
        if (!player()->keyNeeded(initData.ptr()))
            return false;
#endif

#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
        // With a modern CDM instance attached, AVContentKeySession handles
        // key delivery; just tag the request's content type and finish it.
        if (m_cdmInstance) {
            avRequest.contentInformationRequest.contentType = AVStreamingKeyDeliveryContentKeyType;
            [avRequest finishLoading];
            return true;
        }

        RetainPtr<NSData> keyURIData = [keyURI dataUsingEncoding:NSUTF8StringEncoding allowLossyConversion:YES];
        m_keyID = SharedBuffer::create(keyURIData.get());
        player()->initializationDataEncountered("skd"_s, m_keyID->tryCreateArrayBuffer());
        setWaitingForKey(true);
#endif
        // Park the request until a key for this URI is supplied.
        m_keyURIToRequestMap.set(keyURI, avRequest);

        return true;
    }

#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (scheme == "clearkey") {
        String keyID = [[[avRequest request] URL] resourceSpecifier];
        auto encodedKeyId = UTF8Encoding().encode(keyID, UnencodableHandling::URLEncodedEntities);

        auto initData = Uint8Array::create(encodedKeyId.size());
        initData->setRange(encodedKeyId.data(), encodedKeyId.size(), 0);

        // Serve immediately from the key cache when possible.
        auto keyData = player()->cachedKeyForKeyId(keyID);
        if (keyData) {
            fulfillRequestWithKeyData(avRequest, keyData.get());
            return false;
        }

        if (!player()->keyNeeded(initData.ptr()))
            return false;

        m_keyURIToRequestMap.set(keyID, avRequest);
        return true;
    }
#endif
#endif

    // All other requests are handed to a WebCoreAVFResourceLoader, keyed by
    // the request object so cancellation can find the loader again.
    auto resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add((__bridge CFTypeRef)avRequest, resourceLoader.copyRef());
    resourceLoader->startLoading();
    return true;
}
1852
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    // AVFoundation cancelled this resource request: stop the matching
    // in-flight loader, if any. (The map entry itself is removed separately
    // when loading stops.) The unused `scheme` local from the original has
    // been dropped.
    if (auto* resourceLoader = m_resourceLoaderMap.get((__bridge CFTypeRef)avRequest))
        resourceLoader->stopLoading();
}
1862
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    // Forget the finished (or cancelled) request's loader, releasing it.
    m_resourceLoaderMap.remove((__bridge CFTypeRef)avRequest);
}
1867 #endif
1868
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    // This engine requires both the AVFoundation and CoreMedia frameworks
    // to be loadable at runtime.
    return AVFoundationLibrary() && isCoreMediaFrameworkAvailable();
}
1873
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    // Intended to snap a time value onto the media's timescale; currently an
    // identity mapping in every case.
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1882
double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
{
    // Never serve a cached currentTime; always re-query AVFoundation.
    return 0;
}
1887
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    // Keep the video layer's gravity in sync with the aspect-ratio policy.
    // Skipped while in full screen mode; see setVideoFullscreenGravity().
    if (!m_videoLayer || m_videoFullscreenLayerManager->videoFullscreenLayer())
        return;

    NSString *newGravity = shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize;

    // Disable implicit actions so the gravity change is not animated.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    [m_videoLayer.get() setVideoGravity:newGravity];
    [CATransaction commit];
}
1904
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    // Return the first track whose enabled flag is set, or nil if none is.
    for (AVAssetTrack* candidate in tracks) {
        if ([candidate isEnabled])
            return candidate;
    }
    return nil;
}
1914
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Recompute the hasVideo/hasAudio/hasCaptions characteristics whenever
    // the track collection changes, consulting the player item when one
    // exists and the raw asset before that.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    // Batch characteristics-changed notifications until the end of this method.
    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif
        auto size = firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize();
        // For videos with rotation tag set, the transformation above might return a CGSize instance with negative width or height.
        // See https://bugs.webkit.org/show_bug.cgi?id=172648.
        if (size.width() < 0)
            size.setWidth(-size.width());
        if (size.height() < 0)
            size.setHeight(-size.height());
        presentationSizeDidChange(size);
    } else {
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        // Classify each enabled player-item track by its media type.
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // A selected option in a media-selection group also counts as a track.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
#endif
#endif

        // Always says we have video if the AVPlayerLayer is ready for display to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Caption availability comes from the legible selection group when the
    // platform provides one.
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    INFO_LOG(LOGIDENTIFIER, "has video = ", hasVideo(), ", has audio = ", hasAudio(), ", has captions = ", hasClosedCaptions());

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    if (m_provider)
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
#endif

    setDelayCharacteristicsChangedNotification(false);
}
2025
2026 #if ENABLE(VIDEO_TRACK)
2027
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Diff the AVPlayerItemTracks of `trackType` against the previously known
    // items in `oldItems`, rebuild `oldItems` in place, then notify the
    // MediaPlayer of every removal and addition.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Partition existing items into surviving and removed.
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    // Wrap each newly appearing AVPlayerItemTrack in a fresh item.
    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify only after `oldItems` is consistent: removals first, then additions.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(*removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(*addedItem);
}
2071
2072 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2073
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, const Vector<String>& characteristics, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Media-selection-group variant of the track diff: refresh the group's
    // options for the preferred characteristics, compute added/removed
    // options, rebuild `oldItems` in place, then notify the MediaPlayer.
    group->updateOptions(characteristics);

    // Collect the group's current valid options.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOptionType* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        newSelectionOptions.add(option);
    }

    // Collect the options backing the previously known items.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Items with no option, or with a removed option, are dropped.
    for (auto& oldItem : oldItems) {
        if (!oldItem->mediaSelectionOption())
            removedItems.append(oldItem);
        else if (removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify only after `oldItems` is consistent: removals first, then additions.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(*removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(*addedItem);
}
2133
2134 #endif
2135
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
    // Reconcile m_audioTracks with the current AVPlayerItem state, preferring
    // the audible media-selection group when the platform provides one.
#if !RELEASE_LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    Vector<String> characteristics = player()->preferredAudioCharacteristics();
    // Lazily wrap the platform's audible selection group.
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, characteristics);
    }

    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, characteristics, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
    else
#endif
        determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "track count was ", count, ", is ", m_audioTracks.size());
#endif
}
2162
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
    // Reconcile m_videoTracks with the current AVPlayerItem tracks (and the
    // visual media-selection group where available), notifying the
    // MediaPlayer of additions and removals.
#if !RELEASE_LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily wrap the platform's visual selection group.
    if (!m_visualGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, Vector<String>());
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, Vector<String>(), &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
#endif

    // Bug fix: refresh the *video* tracks' properties; the original iterated
    // m_audioTracks here (copy/paste from updateAudioTracks), leaving the
    // video tracks stale.
    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "track count was ", count, ", is ", m_videoTracks.size());
#endif
}
2188
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
    // Forwarded to the fullscreen layer manager, which tracks whether
    // captions need a WebCore-rendered representation.
    return m_videoFullscreenLayerManager->requiresTextTrackRepresentation();
}
2193
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
    // Forwarded to the fullscreen layer manager.
    m_videoFullscreenLayerManager->syncTextTrackBounds();
}
2198
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
    // Forwarded to the fullscreen layer manager.
    m_videoFullscreenLayerManager->setTextTrackRepresentation(representation);
}
2203
2204 #endif // ENABLE(VIDEO_TRACK)
2205
2206 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
2207
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    // Lazily create the Web Audio source provider, wiring it to the first
    // enabled audible track.
    if (m_provider)
        return m_provider.get();

    m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
    m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    return m_provider.get();
}
2216
2217 #endif
2218
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    // Propagate the cached presentation size once an asset exists.
    if (m_avAsset)
        setNaturalSize(m_cachedPresentationSize);
}
2226
void MediaPlayerPrivateAVFoundationObjC::resolvedURLChanged()
{
    // Mirror the asset's post-redirect URL (an empty URL when no asset exists).
    setResolvedURL(m_avAsset ? URL([m_avAsset resolvedURL]) : URL());
}
2231
bool MediaPlayerPrivateAVFoundationObjC::didPassCORSAccessCheck() const
{
    // CORS status is only knowable when WebCore's custom NSURLSession is
    // servicing the media loads.
    AVAssetResourceLoader *loader = m_avAsset.get().resourceLoader;
    bool canQuerySession = DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled()
        && [loader respondsToSelector:@selector(URLSession)];
    if (!canQuerySession)
        return false;

    WebCoreNSURLSession *session = (WebCoreNSURLSession *)loader.URLSession;
    if (![session isKindOfClass:[WebCoreNSURLSession class]])
        return false;

    return session.didPassCORSAccessChecks;
}
2245
Optional<bool> MediaPlayerPrivateAVFoundationObjC::wouldTaintOrigin(const SecurityOrigin& origin) const
{
    // Delegate the taint question to WebCore's custom NSURLSession when it
    // handles the media loads; nullopt when the session type is unexpected.
    AVAssetResourceLoader *loader = m_avAsset.get().resourceLoader;
    bool canQuerySession = DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled()
        && [loader respondsToSelector:@selector(URLSession)];
    if (!canQuerySession)
        return false;

    WebCoreNSURLSession *session = (WebCoreNSURLSession *)loader.URLSession;
    if ([session isKindOfClass:[WebCoreNSURLSession class]])
        return [session wouldTaintOrigin:origin];

    return WTF::nullopt;
}
2259
2260
2261 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
2262
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    INFO_LOG(LOGIDENTIFIER);

    // Create (once) the AVPlayerItemVideoOutput used to pull decoded frames,
    // and attach it to the player item.
    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // With VideoToolbox conversion available, accept the decoder's native format.
    NSDictionary* attributes = nil;
#else
    NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey, nil];
#endif
    m_videoOutput = adoptNS([[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];
}
2282
// Detaches and releases the AVPlayerItemVideoOutput created by
// createVideoOutput(). Safe to call when no output exists.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    // Remove the output from the item first (the item may already be gone).
    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];

    INFO_LOG(LOGIDENTIFIER);

    // Clear with nullptr rather than 0, matching how other smart pointers in
    // this file are reset (e.g. m_lastImage in updateLastPixelBuffer()).
    m_videoOutput = nullptr;
}
2295
// Pulls the pixel buffer for the item's current time into m_lastPixelBuffer.
// Returns true only when a new buffer was actually fetched.
bool MediaPlayerPrivateAVFoundationObjC::updateLastPixelBuffer()
{
    if (!m_avPlayerItem)
        return false;

    // The video output is created on demand.
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

    CMTime itemTime = [m_avPlayerItem.get() currentTime];
    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:itemTime])
        return false;

    // copyPixelBufferForItemTime: hands back a +1 reference, so adopt it.
    // The cached CGImage no longer matches the new buffer; drop it.
    m_lastPixelBuffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:itemTime itemTimeForDisplay:nil]);
    m_lastImage = nullptr;
    return true;
}
2314
// Returns whether a frame can be produced right now: either a previously
// decoded image is cached, or the video output has a fresh pixel buffer.
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;

    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    CMTime itemTime = [m_avPlayerItem currentTime];
    return [m_videoOutput hasNewPixelBufferForItemTime:itemTime];
}
2328
// Refreshes m_lastImage from the most recent pixel buffer. For synchronous
// updates this may block (up to 1s) waiting for the video output to produce
// a frame.
void MediaPlayerPrivateAVFoundationObjC::updateLastImage(UpdateType type)
{
#if HAVE(CORE_VIDEO)
    if (!m_avPlayerItem)
        return;

    // Synchronous callers (e.g. paintWithVideoOutput) need a frame now; wait
    // for the output's media-data notification when none is available yet.
    if (type == UpdateType::UpdateSynchronously && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();

    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
    // for the requested time has already been retrieved. In this case, the last valid image (if any)
    // should be displayed.
    if (!updateLastPixelBuffer() && (m_lastImage || !m_lastPixelBuffer))
        return;

    // Lazily create the conformer that turns pixel buffers into CGImages.
    if (!m_pixelBufferConformer) {
#if USE(VIDEOTOOLBOX)
        // With VideoToolbox the conversion to 32BGRA happens here rather than
        // in the video output (see createVideoOutput()).
        NSDictionary *attributes = @{ (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#else
        NSDictionary *attributes = nil;
#endif
        m_pixelBufferConformer = std::make_unique<PixelBufferConformerCV>((__bridge CFDictionaryRef)attributes);
    }

#if !RELEASE_LOG_DISABLED
    MonotonicTime start = MonotonicTime::now();
#endif

    m_lastImage = m_pixelBufferConformer->createImageFromPixelBuffer(m_lastPixelBuffer.get());

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "creating buffer took ", (MonotonicTime::now() - start).seconds());
#endif
#endif // HAVE(CORE_VIDEO)
}
2364
// Draws the current video frame into the given graphics context, honoring the
// video track's preferred transform (e.g. rotation).
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext& context, const FloatRect& outputRect)
{
    // Fetch a frame synchronously so there is something to paint.
    updateLastImage(UpdateType::UpdateSynchronously);
    if (!m_lastImage)
        return;

    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    INFO_LOG(LOGIDENTIFIER);

    GraphicsContextStateSaver stateSaver(context);
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    // Concatenate the track transform onto the CTM and map the destination
    // rect through its inverse, so the transformed drawing still lands in
    // outputRect. A non-invertible transform falls back to identity.
    AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
    FloatRect transformedOutputRect = videoTransform.inverse().valueOr(AffineTransform()).mapRect(outputRect);

    context.concatCTM(videoTransform);
    context.drawNativeImage(m_lastImage.get(), imageRect.size(), transformedOutputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();

}
2391
// Copies the most recent video frame into the caller-provided GL texture.
// Returns false when no frame has ever been produced.
bool MediaPlayerPrivateAVFoundationObjC::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    ASSERT(context);

    // Refresh m_lastPixelBuffer; keep whatever we already had if nothing new.
    updateLastPixelBuffer();
    if (!m_lastPixelBuffer)
        return false;

    // The texture copier is created once and reused across frames.
    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierCV>(*context);

    auto pixelBuffer = m_lastPixelBuffer.get();
    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);

    return m_videoTextureCopier->copyImageToPlatformTexture(pixelBuffer, width, height, outputTexture, outputTarget, level, internalFormat, format, type, premultiplyAlpha, flipY);
}
2408
// Returns the frame for the current time as a native image (may be null).
NativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    // Refresh the cached image, then hand back whatever we have.
    updateLastImage();
    return m_lastImage;
}
2414
// Blocks the calling thread (at most one second) until the video output
// reports new media data via outputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    if (!m_videoOutputSemaphore.waitFor(1_s))
        ERROR_LOG(LOGIDENTIFIER, "timed out");
}
2424
// Delegate callback (delivered on the pull-delegate queue, see
// createVideoOutput()); wakes waitForVideoOutputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutputType *)
{
    m_videoOutputSemaphore.signal();
}
2429
2430 #endif
2431
2432 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
2433
// Removes and returns the pending resource-loading request for the given key
// URI, or a null RetainPtr when none is pending.
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
2438
// Called when new key data becomes available: fulfills every pending loading
// request whose key the player now has cached, then forgets those requests.
void MediaPlayerPrivateAVFoundationObjC::keyAdded()
{
    Vector<String> satisfiedKeyIds;

    for (auto& entry : m_keyURIToRequestMap) {
        auto keyData = player()->cachedKeyForKeyId(entry.key);
        if (!keyData)
            continue;

        fulfillRequestWithKeyData(entry.value.get(), keyData.get());
        satisfiedKeyIds.append(entry.key);
    }

    // Remove after iterating; mutating the map mid-iteration is not safe.
    for (auto& keyId : satisfiedKeyIds)
        m_keyURIToRequestMap.remove(keyId);
}
2458
// Clears the attached legacy CDM session; only the currently attached session
// may be removed.
void MediaPlayerPrivateAVFoundationObjC::removeSession(LegacyCDMSession& session)
{
    ASSERT_UNUSED(session, &session == m_session);
    m_session = nullptr;
}
2464
// Creates a legacy CDM session for a supported key system. The caller owns the
// returned session; the player keeps only a weak reference to it.
std::unique_ptr<LegacyCDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem, LegacyCDMSessionClient* client)
{
    if (!keySystemIsSupported(keySystem))
        return nullptr;

    auto newSession = std::make_unique<CDMSessionAVFoundationObjC>(this, client);
    m_session = makeWeakPtr(*newSession);
    return WTFMove(newSession);
}
2473 #endif
2474
2475 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(LEGACY_ENCRYPTED_MEDIA)
// Notifies the active content-protection machinery that output became (or
// stopped being) obscured because external protection (e.g. HDCP) is missing.
void MediaPlayerPrivateAVFoundationObjC::outputObscuredDueToInsufficientExternalProtectionChanged(bool newValue)
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    // Legacy EME path: surface the condition as a player error on the session.
    if (m_session && newValue)
        m_session->playerDidReceiveError([NSError errorWithDomain:@"com.apple.WebKit" code:'HDCP' userInfo:nil]);
#endif

#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    // Modern EME path: forward the state change to the attached CDM instance.
    if (m_cdmInstance)
        m_cdmInstance->outputObscuredDueToInsufficientExternalProtectionChanged(newValue);
#elif !ENABLE(LEGACY_ENCRYPTED_MEDIA)
    UNUSED_PARAM(newValue);
#endif
}
2490 #endif
2491
2492 #if ENABLE(ENCRYPTED_MEDIA)
// Attaches a CDM instance to this player. Only FairPlay Streaming instances
// are supported; any previously attached instance is detached first.
void MediaPlayerPrivateAVFoundationObjC::cdmInstanceAttached(CDMInstance& instance)
{
#if HAVE(AVCONTENTKEYSESSION)
    if (!is<CDMInstanceFairPlayStreamingAVFObjC>(instance))
        return;

    auto& fpsInstance = downcast<CDMInstanceFairPlayStreamingAVFObjC>(instance);
    // Re-attaching the same instance is a no-op.
    if (&fpsInstance == m_cdmInstance)
        return;

    if (m_cdmInstance)
        cdmInstanceDetached(*m_cdmInstance);

    m_cdmInstance = &fpsInstance;
#else
    UNUSED_PARAM(instance);
#endif
}
2511
// Detaches the CDM instance. Only valid for the instance currently attached.
void MediaPlayerPrivateAVFoundationObjC::cdmInstanceDetached(CDMInstance& instance)
{
#if HAVE(AVCONTENTKEYSESSION)
    ASSERT_UNUSED(instance, m_cdmInstance && m_cdmInstance == &instance);
    m_cdmInstance = nullptr;
#else
    UNUSED_PARAM(instance);
#endif
}
2521
// Attempts to start decryption once both a key ID (parsed from the media) and
// an attached CDM instance are available.
void MediaPlayerPrivateAVFoundationObjC::attemptToDecryptWithInstance(CDMInstance&)
{
#if HAVE(AVCONTENTKEYSESSION)
    if (!m_keyID || !m_cdmInstance)
        return;

    auto instanceSession = m_cdmInstance->sessionForKeyIDs(Vector<Ref<SharedBuffer>>::from(*m_keyID));
    if (!instanceSession)
        return;

    // Let the content key session deliver keys directly to the asset.
    [instanceSession->contentKeySession() addContentKeyRecipient:m_avAsset.get()];

    // Finish all pending resource-loading requests, tagging each one as a
    // streaming content-key request so AVFoundation treats it accordingly.
    // The map is moved out first so it is left empty afterwards.
    auto keyURIToRequestMap = WTFMove(m_keyURIToRequestMap);
    for (auto& request : keyURIToRequestMap.values()) {
        if (auto *infoRequest = request.get().contentInformationRequest)
            infoRequest.contentType = AVStreamingKeyDeliveryContentKeyType;
        [request finishLoading];
    }
    setWaitingForKey(false);
#endif
}
2543
// Updates the waiting-for-key flag, notifying the client only on an actual
// state transition.
void MediaPlayerPrivateAVFoundationObjC::setWaitingForKey(bool waitingForKey)
{
    if (waitingForKey == m_waitingForKey)
        return;

    m_waitingForKey = waitingForKey;
    player()->waitingForKeyChanged();
}
2552 #endif
2553
2554 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
2555
// Reconciles the player's text-track list with the legacy closed-caption
// tracks currently present in m_cachedTracks, then reports additions/removals.
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    // Assume every existing track was removed; rescue each one that still has
    // a matching AVPlayerItemTrack. Whatever remains really was removed.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            // Index into removedTextTracks, not m_textTracks: once an element
            // has been removed from removedTextTracks (or a new track appended
            // to m_textTracks below) the two vectors no longer line up, and the
            // category check above was performed on removedTextTracks.
            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(removedTextTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;
        
        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2590
2591 #endif
2592
// Returns the asset's audible tracks, or nil while the asynchronously loaded
// "tracks" key is not yet available (hence "safe").
NSArray* MediaPlayerPrivateAVFoundationObjC::safeAVAssetTracksForAudibleMedia()
{
    if (!m_avAsset)
        return nil;

    bool tracksAreLoaded = [m_avAsset.get() statusOfValueForKey:@"tracks" error:NULL] == AVKeyValueStatusLoaded;
    if (!tracksAreLoaded)
        return nil;

    return [m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible];
}
2603
2604 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2605
// Returns whether the asset's media-selection metadata has finished loading,
// so the mediaSelectionGroup* accessors below can be used safely.
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    if (!m_avAsset)
        return false;

    return [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2616
// Legible (caption/subtitle) selection group, or nil until selection metadata
// has loaded.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible] : nil;
}
2624
// Audible selection group, or nil until selection metadata has loaded.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible] : nil;
}
2632
// Visual selection group, or nil until selection metadata has loaded.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual] : nil;
}
2640
// Reconciles the player's text-track list with the asset's legible media
// selection options, then reports additions/removals to the client.
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        INFO_LOG(LOGIDENTIFIER, "no mediaSelectionGroup");
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Assume every known track was removed; each option that matches an
    // existing track rescues it from the removal list.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy closed-caption tracks are reconciled elsewhere
            // (processLegacyClosedCaptionsTracks), not against options.
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            // Fetch the selection option backing this track; out-of-band and
            // in-band tracks store it on different concrete types.
            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2696
// Lazily creates the single in-band metadata text track and registers it with
// the client. Subsequent calls are no-ops.
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    if (m_metadataTrack)
        return;

    m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
    m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
    player()->addTextTrack(*m_metadataTrack);
}
2706
// Forwards incoming cue data to the selected text track; cues that arrive
// while no track is selected are dropped.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    ASSERT(time >= MediaTime::zeroTime());

    if (!m_currentTextTrack)
        return;

    m_currentTextTrack->processCue((__bridge CFArrayRef)attributedStrings, (__bridge CFArrayRef)nativeSamples, time);
}
2716
// Discards all cue state on the selected text track (if any).
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    INFO_LOG(LOGIDENTIFIER);

    if (!m_currentTextTrack)
        return;

    m_currentTextTrack->resetCueValues();
}
2726
2727 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2728
// Selects the given text track (or deselects all tracks when null), routing
// the selection to the appropriate AVFoundation mechanism for its category.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    INFO_LOG(LOGIDENTIFIER, "selecting track with language ", track ? track->language() : "");

    m_currentTextTrack = track;

    if (track) {
        // NOTE: the ALLOW_DEPRECATED_* macros wrap only the statement below; it
        // is still the single-statement body of this braceless if/else chain.
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
            ALLOW_DEPRECATED_DECLARATIONS_BEGIN
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
            ALLOW_DEPRECATED_DECLARATIONS_END
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
        // Deselect: clear the legible selection and disable legacy closed
        // captions.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        ALLOW_DEPRECATED_DECLARATIONS_BEGIN
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
        ALLOW_DEPRECATED_DECLARATIONS_END
    }

}
2761
// Determines (and caches) the language of the primary audio track. A null
// cached string means "not yet computed"; an empty string is a computed
// "unknown" answer.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    ALLOW_DEPRECATED_DECLARATIONS_BEGIN
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    ALLOW_DEPRECATED_DECLARATIONS_END
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        INFO_LOG(LOGIDENTIFIER, "language of selected audible option ", m_languageOfPrimaryAudioTrack);

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        // Zero or multiple audio tracks: no single primary language.
        m_languageOfPrimaryAudioTrack = emptyString();
        INFO_LOG(LOGIDENTIFIER, tracks ? [tracks count] : 0, " audio tracks, returning empty");
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

    INFO_LOG(LOGIDENTIFIER, "single audio track has language \"", m_languageOfPrimaryAudioTrack, "\"");

    return m_languageOfPrimaryAudioTrack;
}
2800
2801 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// Reports whether playback is currently going to a wireless (external) target.
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    bool wirelessTarget = false;

#if !PLATFORM(IOS_FAMILY)
    if (m_playbackTarget) {
        // For AVFoundation targets trust AVPlayer's externalPlaybackActive;
        // for other target types fall back to our own route bookkeeping.
        if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation)
            wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
        else
            wirelessTarget = m_shouldPlayToPlaybackTarget && m_playbackTarget->hasActiveRoute();
    }
#else
    wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
#endif

    INFO_LOG(LOGIDENTIFIER, wirelessTarget);

    return wirelessTarget;
}
2821
// Maps AVFoundation's notion of the external playback type onto the
// MediaPlayer wireless-target enum.
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

#if PLATFORM(IOS_FAMILY)
    if (!AVFoundationLibrary())
        return MediaPlayer::TargetTypeNone;

    switch ([m_avPlayer externalPlaybackType]) {
    case AVPlayerExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case AVPlayerExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case AVPlayerExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;

#else
    // Outside the iOS family this path only handles AirPlay targets.
    return MediaPlayer::TargetTypeAirPlay;
#endif
}
2847     
2848 #if PLATFORM(IOS_FAMILY)
// Returns a human-readable display name for the external device the player is
// playing to, or nil when none can be determined.
// NOTE(review): "exernal" looks like a typo for "external"; renaming would
// also require updating the caller in wirelessPlaybackTargetName().
static NSString *exernalDeviceDisplayNameForPlayer(AVPlayerType *player)
{
#if HAVE(CELESTIAL)
    if (!AVFoundationLibrary())
        return nil;

    // Preferred path: ask the shared audio-presentation output context, which
    // can represent one or more output devices.
    if ([getAVOutputContextClass() respondsToSelector:@selector(sharedAudioPresentationOutputContext)]) {
        AVOutputContext *outputContext = [getAVOutputContextClass() sharedAudioPresentationOutputContext];

        if (![outputContext respondsToSelector:@selector(supportsMultipleOutputDevices)]
            || ![outputContext supportsMultipleOutputDevices]
            || ![outputContext respondsToSelector:@selector(outputDevices)])
            return [outputContext deviceName];

        // Multiple simultaneous devices: join their names, e.g. "A + B".
        auto outputDeviceNames = adoptNS([[NSMutableArray alloc] init]);
        for (AVOutputDevice *outputDevice in [outputContext outputDevices]) {
ALLOW_DEPRECATED_DECLARATIONS_BEGIN
            auto outputDeviceName = adoptNS([[outputDevice name] copy]);
ALLOW_DEPRECATED_DECLARATIONS_END
            [outputDeviceNames addObject:outputDeviceName.get()];
        }

        return [outputDeviceNames componentsJoinedByString:@" + "];
    }

    // Fallback path: inspect MediaRemote's pickable routes. Only meaningful
    // while AirPlay external playback is active.
    if (player.externalPlaybackType != AVPlayerExternalPlaybackTypeAirPlay)
        return nil;

    NSArray *pickableRoutes = CFBridgingRelease(MRMediaRemoteCopyPickableRoutes());
    if (!pickableRoutes.count)
        return nil;

    NSString *displayName = nil;
    for (NSDictionary *pickableRoute in pickableRoutes) {
        if (![pickableRoute[AVController_RouteDescriptionKey_RouteCurrentlyPicked] boolValue])
            continue;

        displayName = pickableRoute[AVController_RouteDescriptionKey_RouteName];

        NSString *routeName = pickableRoute[AVController_RouteDescriptionKey_AVAudioRouteName];
        if (![routeName isEqualToString:@"Speaker"] && ![routeName isEqualToString:@"HDMIOutput"])
            break;

        // The route is a speaker or HDMI out, override the name to be the localized device model.
        NSString *localizedDeviceModel = [[PAL::getUIDeviceClass() currentDevice] localizedModel];

        // In cases where a route with that name already exists, prefix the name with the model.
        BOOL includeLocalizedDeviceModelName = NO;
        for (NSDictionary *otherRoute in pickableRoutes) {
            if (otherRoute == pickableRoute)
                continue;

            if ([otherRoute[AVController_RouteDescriptionKey_RouteName] rangeOfString:displayName].location != NSNotFound) {
                includeLocalizedDeviceModelName = YES;
                break;
            }
        }

        if (includeLocalizedDeviceModelName)
            displayName =  [NSString stringWithFormat:@"%@ %@", localizedDeviceModel, displayName];
        else
            displayName = localizedDeviceModel;

        break;
    }

    return displayName;
#else
    UNUSED_PARAM(player);
    return nil;
#endif
}
2921 #endif
2922
// Returns the display name of the current wireless playback target, or the
// empty string when there is no player.
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();

#if !PLATFORM(IOS_FAMILY)
    // The name comes from the explicitly set playback target, when present.
    return m_playbackTarget ? m_playbackTarget->deviceName() : String();
#else
    // iOS family: ask the system for the active route's display name.
    return exernalDeviceDisplayNameForPlayer(m_avPlayer.get());
#endif
}
2938
// Reports whether wireless (external) video playback is disabled, refreshing
// the cached flag from AVFoundation when a player exists.
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    // Without a player, answer from the last cached value.
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;

    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    INFO_LOG(LOGIDENTIFIER, !m_allowsWirelessVideoPlayback);

    return !m_allowsWirelessVideoPlayback;
}
2949
// Enables/disables wireless (external) video playback, remembering the value
// for when the AVPlayer is (re)created.
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    INFO_LOG(LOGIDENTIFIER, disabled);

    m_allowsWirelessVideoPlayback = !disabled;
    if (!m_avPlayer)
        return;

    // Suppress re-entrant callbacks while poking AVPlayer.
    setDelayCallbacks(true);
    [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
    setDelayCallbacks(false);
}
2961
2962 #if !PLATFORM(IOS_FAMILY)
2963
// Adopts a new wireless playback target, capturing its AVOutputContext when it
// is an AVFoundation target.
void MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
{
    m_playbackTarget = WTFMove(target);

    // Only AVFoundation targets carry an output context.
    if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation)
        m_outputContext = toMediaPlaybackTargetMac(m_playbackTarget.get())->outputContext();
    else
        m_outputContext = nullptr;

    INFO_LOG(LOGIDENTIFIER);

    // A target without an active route cannot be played to.
    if (!m_playbackTarget->hasActiveRoute())
        setShouldPlayToPlaybackTarget(false);
}
2975
// Starts or stops routing playback to the current wireless target.
void MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(bool shouldPlay)
{
    if (m_shouldPlayToPlaybackTarget == shouldPlay)
        return;

    m_shouldPlayToPlaybackTarget = shouldPlay;

    if (!m_playbackTarget)
        return;

    INFO_LOG(LOGIDENTIFIER, shouldPlay);

    if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation) {
        // Route to the stored output context when playing remotely, nil otherwise.
        AVOutputContext *newContext = shouldPlay ? m_outputContext.get() : nil;

        if (!m_avPlayer)
            return;

        // Avoid churn: only touch AVPlayer when the context actually changes.
        RetainPtr<AVOutputContext> currentContext = m_avPlayer.get().outputContext;
        if ((!newContext && !currentContext.get()) || [currentContext.get() isEqual:newContext])
            return;

        setDelayCallbacks(true);
        m_avPlayer.get().outputContext = newContext;
        setDelayCallbacks(false);

        return;
    }

    // Mock targets have no output context; just notify asynchronously (on the
    // main thread) that the wireless state may have changed.
    ASSERT(m_playbackTarget->targetType() == MediaPlaybackTarget::Mock);

    setDelayCallbacks(true);
    auto weakThis = makeWeakPtr(*this);
    scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis] {
        if (!weakThis)
            return;
        weakThis->playbackTargetIsWirelessDidChange();
    }));
    setDelayCallbacks(false);
}
3016
3017 #endif // !PLATFORM(IOS_FAMILY)
3018
// Keeps AVPlayer's external-playback-while-external-screen setting in sync
// with the element's fullscreen state (iOS family only).
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
#if PLATFORM(IOS_FAMILY)
    if (!m_avPlayer)
        return;

    // Use external playback while in standard fullscreen or fullscreen
    // standby; the selector check guards against older AVFoundation builds.
    if ([m_avPlayer respondsToSelector:@selector(setUsesExternalPlaybackWhileExternalScreenIsActive:)])
        [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:(player()->fullscreenMode() == MediaPlayer::VideoFullscreenModeStandard) || player()->isVideoFullscreenStandby()];
#endif
}
3029
3030 #endif
3031
// KVO-driven: caches the AVPlayerItem status and re-derives player state.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;
    updateStates();
}
3038
// A likely-to-keep-up change is in flight; defer state updates until the
// matching *DidChange call arrives.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    ++m_pendingStatusChanges;
}
3043
// Caches the new likely-to-keep-up value; recomputes state only once all
// in-flight status changes have completed.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
3052
// A buffer-empty change is in flight; defer state updates until the matching
// *DidChange call arrives.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    ++m_pendingStatusChanges;
}
3057
3058 void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
3059 {
3060     m_cachedBufferEmpty = bufferEmpty;
3061