758c5c48f2d3ba48a6ae7eb39b4c363f6e2b7874
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2018 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27 #import "MediaPlayerPrivateAVFoundationObjC.h"
28
29 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
30
31 #import "AVAssetTrackUtilities.h"
32 #import "AVFoundationMIMETypeCache.h"
33 #import "AVTrackPrivateAVFObjCImpl.h"
34 #import "AudioSourceProviderAVFObjC.h"
35 #import "AudioTrackPrivateAVFObjC.h"
36 #import "AuthenticationChallenge.h"
37 #import "CDMInstanceFairPlayStreamingAVFObjC.h"
38 #import "CDMSessionAVFoundationObjC.h"
39 #import "Cookie.h"
40 #import "DeprecatedGlobalSettings.h"
41 #import "Extensions3D.h"
42 #import "FloatConversion.h"
43 #import "GraphicsContext.h"
44 #import "GraphicsContext3D.h"
45 #import "GraphicsContextCG.h"
46 #import "InbandMetadataTextTrackPrivateAVF.h"
47 #import "InbandTextTrackPrivateAVFObjC.h"
48 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
49 #import "Logging.h"
50 #import "MediaPlaybackTargetMac.h"
51 #import "MediaPlaybackTargetMock.h"
52 #import "MediaSelectionGroupAVFObjC.h"
53 #import "OutOfBandTextTrackPrivateAVF.h"
54 #import "PixelBufferConformerCV.h"
55 #import "PlatformTimeRanges.h"
56 #import "SecurityOrigin.h"
57 #import "SerializedPlatformRepresentationMac.h"
58 #import "SharedBuffer.h"
59 #import "TextEncoding.h"
60 #import "TextTrackRepresentation.h"
61 #import "TextureCacheCV.h"
62 #import "VideoFullscreenLayerManagerObjC.h"
63 #import "VideoTextureCopierCV.h"
64 #import "VideoTrackPrivateAVFObjC.h"
65 #import "WebCoreAVFResourceLoader.h"
66 #import "WebCoreCALayerExtras.h"
67 #import "WebCoreNSURLSession.h"
68 #import <JavaScriptCore/DataView.h>
69 #import <JavaScriptCore/JSCInlines.h>
70 #import <JavaScriptCore/TypedArrayInlines.h>
71 #import <JavaScriptCore/Uint16Array.h>
72 #import <JavaScriptCore/Uint32Array.h>
73 #import <JavaScriptCore/Uint8Array.h>
74 #import <functional>
75 #import <objc/runtime.h>
76 #import <pal/avfoundation/MediaTimeAVFoundation.h>
77 #import <pal/spi/cocoa/QuartzCoreSPI.h>
78 #import <pal/spi/mac/AVFoundationSPI.h>
79 #import <wtf/BlockObjCExceptions.h>
80 #import <wtf/ListHashSet.h>
81 #import <wtf/NeverDestroyed.h>
82 #import <wtf/OSObjectPtr.h>
83 #import <wtf/URL.h>
84 #import <wtf/text/CString.h>
85
86 #if ENABLE(AVF_CAPTIONS)
87 #include "TextTrack.h"
88 #endif
89
90 #import <AVFoundation/AVAssetImageGenerator.h>
91 #import <AVFoundation/AVAssetTrack.h>
92 #import <AVFoundation/AVMediaSelectionGroup.h>
93 #import <AVFoundation/AVMetadataItem.h>
94 #import <AVFoundation/AVPlayer.h>
95 #import <AVFoundation/AVPlayerItem.h>
96 #import <AVFoundation/AVPlayerItemOutput.h>
97 #import <AVFoundation/AVPlayerItemTrack.h>
98 #import <AVFoundation/AVPlayerLayer.h>
99 #import <AVFoundation/AVTime.h>
100
101 #if PLATFORM(IOS_FAMILY)
102 #import "WAKAppKitStubs.h"
103 #import <CoreImage/CoreImage.h>
104 #import <UIKit/UIDevice.h>
105 #import <mach/mach_port.h>
106 #import <pal/ios/UIKitSoftLink.h>
107 #else
108 #import <Foundation/NSGeometry.h>
109 #import <QuartzCore/CoreImage.h>
110 #endif
111
112 #if USE(VIDEOTOOLBOX)
113 #import <CoreVideo/CoreVideo.h>
114 #import <VideoToolbox/VideoToolbox.h>
115 #endif
116
117 #import "CoreVideoSoftLink.h"
118 #import "MediaRemoteSoftLink.h"
119
namespace std {
// WTF's HashSet iterator does not provide the standard iterator member typedefs,
// so supply value_type here for std algorithms that consult iterator_traits.
// NOTE(review): presumably required by an <algorithm> use later in this file — confirm.
template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
    typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
};
}
125
126 #if ENABLE(AVF_CAPTIONS)
127 // Note: This must be defined before our SOFT_LINK macros:
128 @class AVMediaSelectionOption;
129 @interface AVMediaSelectionOption (OutOfBandExtensions)
130 @property (nonatomic, readonly) NSString* outOfBandSource;
131 @property (nonatomic, readonly) NSString* outOfBandIdentifier;
132 @end
133 #endif
134
135 @interface AVURLAsset (WebKitExtensions)
136 @property (nonatomic, readonly) NSURL *resolvedURL;
137 @end
138
139 typedef AVPlayer AVPlayerType;
140 typedef AVPlayerItem AVPlayerItemType;
141 typedef AVPlayerItemLegibleOutput AVPlayerItemLegibleOutputType;
142 typedef AVPlayerItemVideoOutput AVPlayerItemVideoOutputType;
143 typedef AVMetadataItem AVMetadataItemType;
144 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
145 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
146 typedef AVAssetCache AVAssetCacheType;
147
148 #pragma mark - Soft Linking
149
150 // Soft-linking headers must be included last since they #define functions, constants, etc.
151 #import <pal/cf/CoreMediaSoftLink.h>
152
153 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
154
155 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
156
157 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayer)
158 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerItem)
159 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerItemVideoOutput)
160 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerLayer)
161 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVURLAsset)
162 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVAssetImageGenerator)
163 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVMetadataItem)
164 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVAssetCache)
165
166 SOFT_LINK_CLASS(CoreImage, CIContext)
167 SOFT_LINK_CLASS(CoreImage, CIImage)
168
169 SOFT_LINK_CONSTANT(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString *)
170 SOFT_LINK_CONSTANT(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString *)
171 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicVisual, NSString *)
172 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicAudible, NSString *)
173 SOFT_LINK_CONSTANT(AVFoundation, AVMediaTypeClosedCaption, NSString *)
174 SOFT_LINK_CONSTANT(AVFoundation, AVMediaTypeVideo, NSString *)
175 SOFT_LINK_CONSTANT(AVFoundation, AVMediaTypeAudio, NSString *)
176 SOFT_LINK_CONSTANT(AVFoundation, AVMediaTypeMetadata, NSString *)
177 SOFT_LINK_CONSTANT(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
178 SOFT_LINK_CONSTANT(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
179 SOFT_LINK_CONSTANT(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
180 SOFT_LINK_CONSTANT(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
181 SOFT_LINK_CONSTANT(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
182 SOFT_LINK_CONSTANT(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
183 SOFT_LINK_CONSTANT(AVFoundation, AVLayerVideoGravityResize, NSString *)
184 SOFT_LINK_CONSTANT(AVFoundation, AVStreamingKeyDeliveryContentKeyType, NSString *)
185
186 SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVURLAssetOutOfBandMIMETypeKey, NSString *)
187 SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVURLAssetUseClientURLLoadingExclusively, NSString *)
188
189 #define AVPlayer initAVPlayer()
190 #define AVPlayerItem initAVPlayerItem()
191 #define AVPlayerLayer initAVPlayerLayer()
192 #define AVURLAsset initAVURLAsset()
193 #define AVAssetImageGenerator initAVAssetImageGenerator()
194 #define AVPlayerItemVideoOutput initAVPlayerItemVideoOutput()
195 #define AVMetadataItem initAVMetadataItem()
196 #define AVAssetCache initAVAssetCache()
197
198 #define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
199 #define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
200 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
201 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
202 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
203 #define AVMediaTypeVideo getAVMediaTypeVideo()
204 #define AVMediaTypeAudio getAVMediaTypeAudio()
205 #define AVMediaTypeMetadata getAVMediaTypeMetadata()
206 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
207 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
208 #define AVURLAssetOutOfBandMIMETypeKey getAVURLAssetOutOfBandMIMETypeKey()
209 #define AVURLAssetUseClientURLLoadingExclusively getAVURLAssetUseClientURLLoadingExclusively()
210 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
211 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
212 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
213 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
214 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
215 #define AVStreamingKeyDeliveryContentKeyType getAVStreamingKeyDeliveryContentKeyType()
216
217 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
218
219 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
220 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
221
222 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
223 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
224 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
225 SOFT_LINK_CLASS(AVFoundation, AVOutputContext)
226
227 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicLegible, NSString *)
228 SOFT_LINK_CONSTANT(AVFoundation, AVMediaTypeSubtitle, NSString *)
229 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
230 SOFT_LINK_CONSTANT(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
231
232 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
233 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
234 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
235 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
236 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
237 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
238 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
239
240 #endif
241
242 #if ENABLE(AVF_CAPTIONS)
243
244 SOFT_LINK_CONSTANT(AVFoundation, AVURLAssetCacheKey, NSString *)
245 SOFT_LINK_CONSTANT(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString *)
246 SOFT_LINK_CONSTANT(AVFoundation, AVURLAssetUsesNoPersistentCacheKey, NSString *)
247 SOFT_LINK_CONSTANT(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString *)
248 SOFT_LINK_CONSTANT(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString *)
249 SOFT_LINK_CONSTANT(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString *)
250 SOFT_LINK_CONSTANT(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString *)
251 SOFT_LINK_CONSTANT(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString *)
252 SOFT_LINK_CONSTANT(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString *)
253 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString *)
254 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString *)
255 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicIsAuxiliaryContent, NSString *)
256
257 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
258 #define AVURLAssetCacheKey getAVURLAssetCacheKey()
259 #define AVURLAssetUsesNoPersistentCacheKey getAVURLAssetUsesNoPersistentCacheKey()
260 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
261 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
262 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
263 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
264 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
265 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
266 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
267 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
268 #define AVMediaCharacteristicIsAuxiliaryContent getAVMediaCharacteristicIsAuxiliaryContent()
269
270 #endif
271
272 #if ENABLE(DATACUE_VALUE)
273
274 SOFT_LINK_CONSTANT(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString *)
275 SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVMetadataKeySpaceISOUserData, NSString *)
276 SOFT_LINK_CONSTANT(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString *)
277 SOFT_LINK_CONSTANT(AVFoundation, AVMetadataKeySpaceiTunes, NSString *)
278 SOFT_LINK_CONSTANT(AVFoundation, AVMetadataKeySpaceID3, NSString *)
279
280 #define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
281 #define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
282 #define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
283 #define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
284 #define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
285
286 #endif
287
288 #if PLATFORM(IOS_FAMILY)
289
290 SOFT_LINK_CONSTANT(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
291 SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
292 SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVURLAssetHTTPCookiesKey, NSString *)
293 SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVURLAssetRequiresCustomURLLoadingKey, NSString *)
294
295 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
296 #define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
297 #define AVURLAssetHTTPCookiesKey getAVURLAssetHTTPCookiesKey()
298 #define AVURLAssetRequiresCustomURLLoadingKey getAVURLAssetRequiresCustomURLLoadingKey()
299
300 #endif
301
302 SOFT_LINK_FRAMEWORK(MediaToolbox)
303 SOFT_LINK_OPTIONAL(MediaToolbox, MTEnableCaption2015Behavior, Boolean, (), ())
304
305 #if PLATFORM(IOS_FAMILY)
306
307 #if HAVE(CELESTIAL)
308
309 SOFT_LINK_PRIVATE_FRAMEWORK(Celestial)
310 SOFT_LINK_CONSTANT(Celestial, AVController_RouteDescriptionKey_RouteCurrentlyPicked, NSString *)
311 SOFT_LINK_CONSTANT(Celestial, AVController_RouteDescriptionKey_RouteName, NSString *)
312 SOFT_LINK_CONSTANT(Celestial, AVController_RouteDescriptionKey_AVAudioRouteName, NSString *)
313 #define AVController_RouteDescriptionKey_RouteCurrentlyPicked getAVController_RouteDescriptionKey_RouteCurrentlyPicked()
314 #define AVController_RouteDescriptionKey_RouteName getAVController_RouteDescriptionKey_RouteName()
315 #define AVController_RouteDescriptionKey_AVAudioRouteName getAVController_RouteDescriptionKey_AVAudioRouteName()
316
317 #endif // HAVE(CELESTIAL)
318
319 #endif // PLATFORM(IOS_FAMILY)
320
321 using namespace WebCore;
322
// Context values registered with KVO so the observer callback can tell which
// kind of object (player, item, item track, or layer) a change came from.
enum MediaPlayerAVFoundationObservationContext {
    MediaPlayerAVFoundationObservationContextPlayerItem,
    MediaPlayerAVFoundationObservationContextPlayerItemTrack,
    MediaPlayerAVFoundationObservationContextPlayer,
    MediaPlayerAVFoundationObservationContextAVPlayerLayer,
};
329
// Objective-C adapter that receives AVFoundation KVO notifications, end-of-playback
// notifications, and (when available) legible-output caption callbacks, and forwards
// them to the C++ MediaPlayerPrivateAVFoundationObjC it weakly references.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
#else
@interface WebCoreAVFMovieObserver : NSObject
#endif
{
    WeakPtr<MediaPlayerPrivateAVFoundationObjC> m_player; // weak back-reference; the observer never extends the player's lifetime
    GenericTaskQueue<Timer, std::atomic<unsigned>> m_taskQueue; // NOTE(review): presumably used to bounce callbacks to the main thread — confirm in the implementation
    int m_delayCallbacks;
}
-(id)initWithPlayer:(WeakPtr<MediaPlayerPrivateAVFoundationObjC>&&)callback;
-(void)disconnect;
-(void)metadataLoaded;
-(void)didEnd:(NSNotification *)notification;
// NOTE(review): keyPath is implicitly typed `id` here; NSObject declares it as NSString * — confirm against the implementation before changing.
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;
#endif
@end
350
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Resource-loader delegate that routes AVAssetResourceLoader requests to the
// weakly-referenced C++ player so WebKit can service custom media loads itself.
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    WeakPtr<MediaPlayerPrivateAVFoundationObjC> m_player; // weak back-reference to the owning player
    GenericTaskQueue<Timer, std::atomic<unsigned>> m_taskQueue; // NOTE(review): presumably serializes work onto the main thread — confirm
}
- (id)initWithPlayer:(WeakPtr<MediaPlayerPrivateAVFoundationObjC>&&)player;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
@end
#endif
360
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Pull delegate notified when the AVPlayerItemVideoOutput has (or flushed)
// pixel-buffer data; forwards to the weakly-referenced C++ player.
@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    WeakPtr<MediaPlayerPrivateAVFoundationObjC> m_player; // weak back-reference to the owning player
}
- (id)initWithPlayer:(WeakPtr<MediaPlayerPrivateAVFoundationObjC>&&)player;
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
@end
#endif
370
371 namespace WebCore {
372 using namespace PAL;
373
374 static NSArray *assetMetadataKeyNames();
375 static NSArray *itemKVOProperties();
376 static NSArray *assetTrackMetadataKeyNames();
377 static NSArray *playerKVOProperties();
378 static AVAssetTrack* firstEnabledTrack(NSArray* tracks);
379
380 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Returns the process-wide serial dispatch queue on which all
// WebCoreAVFLoaderDelegate callbacks are delivered. Created lazily, once.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t sharedQueue;
    static dispatch_once_t creationToken;
    dispatch_once(&creationToken, ^{
        sharedQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return sharedQueue;
}
390 #endif
391
392 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Returns the process-wide serial dispatch queue on which all
// WebCoreAVFPullDelegate callbacks are delivered. Created lazily, once.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_queue_t sharedQueue;
    static dispatch_once_t creationToken;
    dispatch_once(&creationToken, ^{
        sharedQueue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return sharedQueue;
}
402 #endif
403
// Registers this media engine with MediaPlayer, supplying a factory plus the
// static capability/cache-management entry points. No-op when AVFoundation
// (soft-linked) is unavailable at runtime.
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player); },
            getSupportedTypes, supportsType, originsInMediaCache, clearMediaCache, clearMediaCacheForOrigins, supportsKeySystem);
    // isAvailable() returning true implies the MIME type cache could load AVFoundation.
    ASSERT(AVFoundationMIMETypeCache::singleton().isAvailable());
}
413
// Returns the AVAssetCache rooted at the given directory path. An empty path
// falls back to a "MediaCache" directory inside the temporary directory.
static AVAssetCacheType *assetCacheForPath(const String& path)
{
    NSURL *cacheDirectoryURL = path.isEmpty()
        ? [[NSURL fileURLWithPath:NSTemporaryDirectory()] URLByAppendingPathComponent:@"MediaCache" isDirectory:YES]
        : [NSURL fileURLWithPath:path isDirectory:YES];

    return [initAVAssetCache() assetCacheWithURL:cacheDirectoryURL];
}
425
// Collects the security origins of every entry in the media cache at `path`.
// Cache keys are URL strings; invalid ones are ignored.
HashSet<RefPtr<SecurityOrigin>> MediaPlayerPrivateAVFoundationObjC::originsInMediaCache(const String& path)
{
    HashSet<RefPtr<SecurityOrigin>> cachedOrigins;
    for (NSString *cacheKey in [assetCacheForPath(path) allKeys]) {
        URL keyURL = URL(URL(), cacheKey);
        if (!keyURL.isValid())
            continue;
        cachedOrigins.add(SecurityOrigin::create(keyURL));
    }
    return cachedOrigins;
}
436
// Converts an NSDate (must be non-nil) into WTF's wall-clock representation.
static WallTime toSystemClockTime(NSDate *date)
{
    ASSERT(date);
    return WallTime::fromRawSeconds([date timeIntervalSince1970]);
}
442
// Removes media-cache content at `path` that was modified after `modifiedSince`.
// Works in two phases: (1) purge matching entries through the AVAssetCache API,
// (2) walk the backing directory and delete "CachedMedia-" files AVAssetCache
// does not account for. A non-positive cutoff wipes the whole directory.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCache(const String& path, WallTime modifiedSince)
{
    AVAssetCacheType* assetCache = assetCacheForPath(path);
    
    // Phase 1: remove cache entries newer than the cutoff via the cache API.
    for (NSString *key in [assetCache allKeys]) {
        if (toSystemClockTime([assetCache lastModifiedDateOfEntryForKey:key]) > modifiedSince)
            [assetCache removeEntryForKey:key];
    }

    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSURL *baseURL = [assetCache URL];

    // "Clear everything" case: delete the whole cache directory and stop.
    if (modifiedSince <= WallTime::fromRawSeconds(0)) {
        [fileManager removeItemAtURL:baseURL error:nil];
        return;
    }
    
    // Phase 2: scan the top level of the cache directory for loose cached-media
    // files newer than the cutoff. Collect first, delete after, so we do not
    // mutate the directory while the enumerator is walking it.
    NSArray *propertyKeys = @[NSURLNameKey, NSURLContentModificationDateKey, NSURLIsRegularFileKey];
    NSDirectoryEnumerator *enumerator = [fileManager enumeratorAtURL:baseURL includingPropertiesForKeys:
        propertyKeys options:NSDirectoryEnumerationSkipsSubdirectoryDescendants
        errorHandler:nil];
    
    RetainPtr<NSMutableArray> urlsToDelete = adoptNS([[NSMutableArray alloc] init]);
    for (NSURL *fileURL : enumerator) {
        NSDictionary *fileAttributes = [fileURL resourceValuesForKeys:propertyKeys error:nil];
    
        // Only regular files named "CachedMedia-*" belong to the media cache.
        if (![fileAttributes[NSURLNameKey] hasPrefix:@"CachedMedia-"])
            continue;
        
        if (![fileAttributes[NSURLIsRegularFileKey] boolValue])
            continue;
        
        // Keep files last modified at or before the cutoff.
        if (toSystemClockTime(fileAttributes[NSURLContentModificationDateKey]) <= modifiedSince)
            continue;
        
        [urlsToDelete addObject:fileURL];
    }
    
    for (NSURL *fileURL in urlsToDelete.get())
        [fileManager removeItemAtURL:fileURL error:nil];
}
484
// Removes every media-cache entry at `path` whose key maps to one of the given
// security origins. Keys that are not valid URLs are left untouched.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCacheForOrigins(const String& path, const HashSet<RefPtr<SecurityOrigin>>& origins)
{
    AVAssetCacheType* assetCache = assetCacheForPath(path);
    for (NSString *key in [assetCache allKeys]) {
        URL keyURL = URL(URL(), key);
        if (keyURL.isValid() && origins.contains(SecurityOrigin::create(keyURL)))
            [assetCache removeEntryForKey:key];
    }
}
496
// Constructs the AVFoundation-backed player. All Objective-C helper objects
// created here hold only weak references back to this instance, so they can
// never extend its lifetime.
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_videoFullscreenLayerManager(std::make_unique<VideoFullscreenLayerManagerObjC>())
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
    // KVO/notification observer; receives callbacks and forwards them to us.
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithPlayer:m_weakPtrFactory.createWeakPtr(*this)]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Pull delegate for the AVPlayerItemVideoOutput pixel-buffer pipeline.
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithPlayer:m_weakPtrFactory.createWeakPtr(*this)]))
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Delegate that lets WebKit service AVAssetResourceLoader requests itself.
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithPlayer:m_weakPtrFactory.createWeakPtr(*this)]))
#endif
    , m_currentTextTrack(0)
    , m_cachedRate(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_shouldBufferData(true)
    , m_cachedIsReadyForDisplay(false)
    , m_haveBeenAskedToCreateLayer(false)
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
}
527
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
    // Invalidate all weak pointers first so no in-flight delegate/observer
    // callback can reach a partially-destroyed player.
    m_weakPtrFactory.revokeAll();

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Detach the resource-loader delegate (messaging a nil m_avAsset is a no-op)
    // and invalidate every outstanding custom resource load.
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];

    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    [m_videoOutput setDelegate:nil queue:0];
#endif

    if (m_videoLayer)
        destroyVideoLayer();

    // Tears down the rest of the AVFoundation object graph (asset, item, player).
    cancelLoad();
}
547
// Cancels any in-progress load and tears down the entire AVFoundation object
// graph (asset, player item, player, outputs) plus all observer registrations.
// Teardown order matters: observers are disconnected before the objects they
// watch are released.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    INFO_LOG(LOGIDENTIFIER);
    tearDownVideoRendering();

    // Stop notification delivery through our Objective-C observer first so
    // nothing below can re-enter the player via a callback.
    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Detach the legible (caption) output from the item before dropping it.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    // Unregister every KVO key path previously registered on the player item;
    // the key list must match the one used at registration (itemKVOProperties()).
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        
        m_avPlayerItem = nil;
    }
    if (m_avPlayer) {
        // Remove the periodic time observer before releasing the player.
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;

        // Same contract as above, using playerKVOProperties().
        for (NSString *keyName in playerKVOProperties())
            [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];

        setShouldObserveTimeControlStatus(false);

        [m_avPlayer replaceCurrentItemWithPlayerItem:nil];
#if !PLATFORM(IOS_FAMILY)
        [m_avPlayer setOutputContext:nil];
#endif
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = MediaTime::zeroTime();

    // Each cached track was given an "enabled" observer when cached; remove it.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Detach the Web Audio source provider from the item/track it was reading.
    if (m_provider) {
        m_provider->setPlayerItem(nullptr);
        m_provider->setAudioTrack(nullptr);
    }
#endif

    setIgnoreLoadStateChanges(false);
}
618
// True once createVideoLayer() has been asked to set up layer-backed rendering
// (the flag is set even before the asynchronous layer creation completes).
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_haveBeenAskedToCreateLayer;
}
623
// True when a context-based (non-layer) rendering path exists: either a video
// output (preferred, when available) or the image-generator fallback.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (m_videoOutput)
        return true;
#endif
    return m_imageGenerator;
}
632
// Creates the context (non-layer) renderer: AVPlayerItemVideoOutput when the
// platform supports it, otherwise the AVAssetImageGenerator fallback.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
641
// Lazily creates the AVAssetImageGenerator used for software frame grabs.
// Requires a loaded asset; no-op if the generator already exists.
// Note: AVAssetImageGenerator here is a soft-link #define resolving to the
// dynamically loaded class.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    using namespace PAL;
    INFO_LOG(LOGIDENTIFIER);

    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    // Zero tolerance: request frames at exactly the asked-for timestamps.
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];
}
657
// Tears down both context-rendering paths; each destroy helper is a no-op if
// its renderer was never created.
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
#endif
    destroyImageGenerator();
}
665
// Releases the AVAssetImageGenerator created by createImageGenerator(), if any.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (!m_imageGenerator)
        return;

    INFO_LOG(LOGIDENTIFIER);

    // Clear the RetainPtr with nullptr (not the literal 0) for consistency with
    // how the other cached smart pointers are reset in this file.
    m_imageGenerator = nullptr;
}
675
// Requests creation of the layer-based rendering path. The actual work is
// bounced to the main thread; weakThis guards against the player being
// destroyed before the callback runs, and the preconditions are re-checked
// there because state may have changed in the meantime.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
        return;

    callOnMainThread([this, weakThis = makeWeakPtr(*this)] {
        if (!weakThis)
            return;

        // Re-validate on the main thread; another call may have won the race.
        if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
            return;
        m_haveBeenAskedToCreateLayer = true;

        if (!m_videoLayer)
            createAVPlayerLayer();

#if USE(VIDEOTOOLBOX) && HAVE(AVFOUNDATION_VIDEO_OUTPUT)
        // Also keep a video output alive so pixel buffers remain grabbable.
        if (!m_videoOutput)
            createVideoOutput();
#endif

        // Let the client know we switched to layer-backed rendering.
        player()->client().mediaPlayerRenderingModeChanged(player());
    });
}
700
// Creates the AVPlayerLayer that renders video for the current AVPlayer and
// hands it to the fullscreen layer manager. Note: AVPlayerLayer is a soft-link
// #define resolving to the dynamically loaded class.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
{
    if (!m_avPlayer)
        return;

    m_videoLayer = adoptNS([[AVPlayerLayer alloc] init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];

#ifndef NDEBUG
    // Debug-only layer name to aid layer-tree inspection.
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
    // Observe readiness so hasAvailableVideoFrame() can report layer state.
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
    IntSize defaultSize = snappedIntRect(player()->client().mediaPlayerContentBoxRect()).size();
    INFO_LOG(LOGIDENTIFIER);

    m_videoFullscreenLayerManager->setVideoLayer(m_videoLayer.get(), defaultSize);

#if PLATFORM(IOS_FAMILY) && !PLATFORM(WATCHOS)
    // setPIPModeEnabled: is not available on all OS versions; probe first.
    if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(player()->fullscreenMode() & MediaPlayer::VideoFullscreenModePictureInPicture)];
#endif
}
725
// Tears down the AVPlayerLayer: unregisters the KVO observer added in
// createAVPlayerLayer(), detaches the player, and tells the fullscreen layer
// manager the layer is gone.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    INFO_LOG(LOGIDENTIFIER);

    [m_videoLayer removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer setPlayer:nil];
    m_videoFullscreenLayerManager->didDestroyVideoLayer();

    m_videoLayer = nil;
}
739
MediaTime MediaPlayerPrivateAVFoundationObjC::getStartDate() const
{
    // The item's current date advances with playback, so the stream's start
    // date is (current date) - (current playback offset), both in milliseconds.
    double currentDateMilliseconds = [[m_avPlayerItem currentDate] timeIntervalSince1970] * 1000;

    // AVFoundation reports 0 when the media carries no start date; no real
    // live stream began exactly at the epoch, so treat 0 as "no date".
    if (!currentDateMilliseconds)
        return MediaTime::invalidTime();

    double playbackOffsetMilliseconds = CMTimeGetSeconds([m_avPlayerItem currentTime]) * 1000;

    // Round to absorb sub-second error introduced by the subtraction.
    return MediaTime::createWithDouble(round(currentDateMilliseconds - playbackOffsetMilliseconds));
}
754
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    // When rendering through the AVPlayerLayer, the KVO-cached readyForDisplay
    // flag is authoritative.
    if (currentRenderingMode() == MediaRenderingToLayer)
        return m_cachedIsReadyForDisplay;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Otherwise a frame is available if we still hold the last pixel buffer or
    // the video output has a new one for the current item time.
    if (m_videoOutput && (m_lastPixelBuffer || [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]]))
        return true;
#endif

    // Fall back to whether a frame was ever painted via the drawing path.
    return m_videoFrameHasDrawn;
}
767
768 #if ENABLE(AVF_CAPTIONS)
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    // Under the 2015 caption-selection behavior, every out-of-band track is
    // tagged as auxiliary content and selection is fully manual.
    static bool manualSelectionMode = MTEnableCaption2015BehaviorPtr() && MTEnableCaption2015BehaviorPtr()();
    if (manualSelectionMode)
        return @[ AVMediaCharacteristicIsAuxiliaryContent ];

    // FIXME: Match these to correct types:
    switch (kind) {
    case PlatformTextTrack::Caption:
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];
    case PlatformTextTrack::Subtitle:
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];
    case PlatformTextTrack::Description:
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility ];
    case PlatformTextTrack::Forced:
        return @[ AVMediaCharacteristicContainsOnlyForcedSubtitles ];
    default:
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];
    }
}
790     
// Out-of-band caption API entry point: forwards a platform text track mode
// change to the shared trackModeChanged() handling.
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
795     
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    // Push the mode of each out-of-band source supplied by the page onto the
    // matching out-of-band track, pairing them by unique identifier.
    const Vector<RefPtr<PlatformTextTrack>>& trackSources = player()->outOfBandTrackSources();

    for (auto& textTrack : m_textTracks) {
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;

        auto* outOfBandTrack = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> selectionOption = outOfBandTrack->mediaSelectionOption();

        for (auto& source : trackSources) {
            RetainPtr<CFStringRef> identifier = String::number(source->uniqueId()).createCFString();

            if (![[selectionOption.get() outOfBandIdentifier] isEqual:(__bridge NSString *)identifier.get()])
                continue;

            // Unrecognized source modes fall back to Hidden.
            InbandTextTrackPrivate::Mode newMode = InbandTextTrackPrivate::Hidden;
            if (source->mode() == PlatformTextTrack::Disabled)
                newMode = InbandTextTrackPrivate::Disabled;
            else if (source->mode() == PlatformTextTrack::Showing)
                newMode = InbandTextTrackPrivate::Showing;

            textTrack->setMode(newMode);
            break;
        }
    }
}
826 #endif
827
828
static NSURL *canonicalURL(const URL& url)
{
    // Run the URL through NSURLProtocol's canonicalization so it matches what
    // the loader will ultimately request; fall back to the original on failure.
    NSURL *originalURL = url;
    if (url.isEmpty())
        return originalURL;

    RetainPtr<NSURLRequest> request = adoptNS([[NSURLRequest alloc] initWithURL:originalURL]);
    if (!request)
        return originalURL;

    NSURLRequest *canonicalized = [NSURLProtocol canonicalRequestForRequest:request.get()];
    return canonicalized ? [canonicalized URL] : originalURL;
}
845
846 #if PLATFORM(IOS_FAMILY)
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    // Build an NSHTTPCookie properties dictionary from a WebCore Cookie.
    // cookie.expires is in milliseconds; NSDate wants seconds.
    RetainPtr<NSMutableDictionary> properties = adoptNS([@{
        NSHTTPCookieName: cookie.name,
        NSHTTPCookieValue: cookie.value,
        NSHTTPCookieDomain: cookie.domain,
        NSHTTPCookiePath: cookie.path,
        NSHTTPCookieExpires: [NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)],
    } mutableCopy]);
    if (cookie.secure)
        [properties setObject:@YES forKey:NSHTTPCookieSecure];
    // Session cookies map to the "discard" property.
    if (cookie.session)
        [properties setObject:@YES forKey:NSHTTPCookieDiscard];

    return [NSHTTPCookie cookieWithProperties:properties.get()];
}
864 #endif
865
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const URL& url)
{
    // Builds the AVURLAsset for |url|, assembling the full options dictionary
    // (reference restrictions, HTTP headers, MIME hints, out-of-band captions,
    // cookies, caching policy) before handing it to AVFoundation. No-op if an
    // asset already exists.
    if (m_avAsset)
        return;

    INFO_LOG(LOGIDENTIFIER);

    // Suppress re-entrant notifications while the asset is being set up.
    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);    

    // Disallow cross-origin local<->remote references inside the media file.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    // Forward the page's Referer and User-Agent with media requests.
    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject:@YES forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

    // Prefer routing all media loads through WebKit's own loader when the SPI
    // keys are available (canLoad* checks for the soft-linked constants).
    if (canLoadAVURLAssetUseClientURLLoadingExclusively())
        [options setObject:@YES forKey:AVURLAssetUseClientURLLoadingExclusively];
#if PLATFORM(IOS_FAMILY)
    else if (canLoadAVURLAssetRequiresCustomURLLoadingKey())
        [options setObject:@YES forKey:AVURLAssetRequiresCustomURLLoadingKey];
    // FIXME: rdar://problem/20354688
    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty() && canLoadAVURLAssetClientBundleIdentifierKey())
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];
#endif

    // Pass the declared MIME type (with codecs, when known) as an out-of-band
    // hint — but only if it was explicitly provided, not guessed from the
    // file extension.
    auto type = player()->contentMIMEType();
    if (canLoadAVURLAssetOutOfBandMIMETypeKey() && !type.isEmpty() && !player()->contentMIMETypeWasInferredFromExtension()) {
        auto codecs = player()->contentTypeCodecs();
        if (!codecs.isEmpty()) {
            NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)type, (NSString *)codecs];
            [options setObject:typeString forKey:AVURLAssetOutOfBandMIMETypeKey];
        } else
            [options setObject:(NSString *)type forKey:AVURLAssetOutOfBandMIMETypeKey];
    }

#if ENABLE(AVF_CAPTIONS)
    // Describe each page-supplied out-of-band text track so AVFoundation can
    // expose it as a media selection option.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: (__bridge NSString *)label.get(),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: (__bridge NSString *)language.get(),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: (__bridge NSString *)uniqueID.get(),
                AVOutOfBandAlternateTrackSourceKey: (__bridge NSString *)url.get(),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS_FAMILY)
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS_FAMILY)
    // Hand the page's cookies for this URL to AVFoundation's loader.
    Vector<Cookie> cookies;
    if (player()->getRawCookies(url, cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        if (canLoadAVURLAssetHTTPCookiesKey())
            [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    bool usePersistentCache = player()->client().mediaPlayerShouldUsePersistentCache();
    [options setObject:@(!usePersistentCache) forKey:AVURLAssetUsesNoPersistentCacheKey];
    
    if (usePersistentCache)
        [options setObject:assetCacheForPath(player()->client().mediaPlayerMediaCacheDirectory()) forKey:AVURLAssetCacheKey];

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([[AVURLAsset alloc] initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    [resourceLoader setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];

    // When enabled, route resource loading through an NSURLSession bridged to
    // WebKit's PlatformMediaResourceLoader (the selectors are SPI; probe first).
    if (DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled()
        && [resourceLoader respondsToSelector:@selector(setURLSession:)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegate)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegateQueue)]) {
        RefPtr<PlatformMediaResourceLoader> mediaResourceLoader = player()->createResourceLoader();
        if (mediaResourceLoader)
            resourceLoader.URLSession = (NSURLSession *)[[[WebCoreNSURLSession alloc] initWithResourceLoader:*mediaResourceLoader delegate:resourceLoader.URLSessionDataDelegate delegateQueue:resourceLoader.URLSessionDataDelegateQueue] autorelease];
    }

#endif

    // A fresh asset's playability has not been checked yet.
    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
985
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItemType *item)
{
    if (!m_avPlayer)
        return;

    // Replace synchronously when already on the main thread.
    if (pthread_main_np()) {
        [m_avPlayer replaceCurrentItemWithPlayerItem:item];
        return;
    }

    // Otherwise hop to the main queue, keeping both objects alive until the
    // replacement has run.
    RetainPtr<AVPlayerType> strongPlayer = m_avPlayer.get();
    RetainPtr<AVPlayerItemType> strongItem = item;
    dispatch_async(dispatch_get_main_queue(), [strongPlayer, strongItem] {
        [strongPlayer replaceCurrentItemWithPlayerItem:strongItem.get()];
    });
}
1002
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    // Create and configure the AVPlayer, registering KVO observers and
    // re-applying any state (mute, playback target) that was cached before the
    // player existed. No-op if a player already exists.
    if (m_avPlayer)
        return;

    INFO_LOG(LOGIDENTIFIER);

    setDelayCallbacks(true);

    m_avPlayer = adoptNS([[AVPlayer alloc] init]);
    // Observe every player property we mirror into cached state.
    for (NSString *keyName in playerKVOProperties())
        [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];

    setShouldObserveTimeControlStatus(true);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // WebCore drives media selection itself.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET) && !PLATFORM(IOS_FAMILY)
    if (m_shouldPlayToPlaybackTarget) {
        // Clear m_shouldPlayToPlaybackTarget so doesn't return without doing anything.
        m_shouldPlayToPlaybackTarget = false;
        setShouldPlayToPlaybackTarget(true);
    }
#endif

#if PLATFORM(IOS_FAMILY) && !PLATFORM(IOS_FAMILY_SIMULATOR) && !PLATFORM(IOSMAC)
    setShouldDisableSleep(player()->shouldDisableSleep());
#endif

    if (m_muted) {
        // Clear m_muted so setMuted doesn't return without doing anything, then
        // re-apply the cached mute state to the newly created player. (The old
        // code pushed the just-cleared value — [m_avPlayer setMuted:m_muted]
        // with m_muted == false — which dropped the cached mute state.)
        m_muted = false;
        setMuted(true);
    }

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
1053
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    // Create the AVPlayerItem for the current asset, wire up end-of-playback
    // and KVO notifications, configure pitch correction, legible (caption)
    // output, Web Audio, and video output. No-op if an item already exists.
    if (m_avPlayerItem)
        return;

    INFO_LOG(LOGIDENTIFIER);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([[AVPlayerItem alloc] initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // NSKeyValueObservingOptionPrior delivers will-change callbacks too.
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    // Spectral preserves pitch across rate changes; Varispeed lets it drift.
    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Deliver WebVTT cues to the observer on the main queue, slightly ahead of
    // playback, without letting AVFoundation render them itself.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([[AVPlayerItemLegibleOutput alloc] initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep an existing Web Audio provider pointed at the new item.
    if (m_provider) {
        m_provider->setPlayerItem(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }
#endif

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#endif

    setDelayCallbacks(false);
}
1103
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    // Kick off asynchronous loading of the asset's "playable" and "tracks"
    // keys exactly once; completion is bounced to the main thread through a
    // weak pointer so a destroyed player is never notified.
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    INFO_LOG(LOGIDENTIFIER);
    auto weakThis = makeWeakPtr(*this);

    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObjects:@"playable", @"tracks", nil] completionHandler:^{
        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
1120
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    // Load asset-level metadata asynchronously, then per-track metadata for
    // each track. A dispatch group joins all the loads; metadataLoaded fires
    // on the main thread once everything completes.
    INFO_LOG(LOGIDENTIFIER);

    OSObjectPtr<dispatch_group_t> metadataLoadingGroup = adoptOSObject(dispatch_group_create());
    dispatch_group_enter(metadataLoadingGroup.get());
    auto weakThis = makeWeakPtr(*this);
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            // Only chase track metadata if we still exist and the tracks loaded.
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup.get());
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup.get());
                    }];
                }
            }
            // Balances the enter before the asset-level load above.
            dispatch_group_leave(metadataLoadingGroup.get());
        });
    }];

    dispatch_group_notify(metadataLoadingGroup.get(), dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });
    });
}
1150
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    // Map the KVO-cached AVPlayerItem state onto the cross-platform ItemStatus.
    // The checks are ordered by precedence: hard status first, then buffering
    // state, with ReadyToPlay as the fallback.
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    if (m_cachedItemStatus == AVPlayerItemStatusUnknown)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    if (m_cachedItemStatus == AVPlayerItemStatusFailed)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1169
// Returns the layer WebCore should composite for inline playback.
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    return m_videoFullscreenLayerManager->videoInlineLayer();
}
1174
void MediaPlayerPrivateAVFoundationObjC::updateVideoFullscreenInlineImage()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Synchronously grab the latest frame so the inline placeholder shown
    // while the video is fullscreen stays current.
    updateLastImage(UpdateType::UpdateSynchronously);
    m_videoFullscreenLayerManager->updateVideoFullscreenInlineImage(m_lastImage);
#endif
}
1182
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer, Function<void()>&& completionHandler)
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Capture an up-to-date frame first so the layer swap does not flash.
    updateLastImage(UpdateType::UpdateSynchronously);
    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler), m_lastImage);
#else
    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler), nil);
#endif
    updateDisableExternalPlayback();
}
1193
// Forwards the fullscreen layer's frame to the layer manager.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
}
1198
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;

    if (!m_videoLayer)
        return;

    // Translate WebCore's gravity into the AVFoundation constant; the
    // aspect-preserving fit is the fallback.
    NSString *layerGravity;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        layerGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        layerGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        layerGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
        layerGravity = AVLayerVideoGravityResizeAspect;
        break;
    }

    // Avoid redundant layer mutations and text track re-layout.
    if ([m_videoLayer videoGravity] == layerGravity)
        return;

    [m_videoLayer setVideoGravity:layerGravity];
    syncTextTrackBounds();
}
1222
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenMode(MediaPlayer::VideoFullscreenMode mode)
{
#if PLATFORM(IOS_FAMILY) && !PLATFORM(WATCHOS)
    // setPIPModeEnabled: is not universally available; probe before calling.
    if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(mode & MediaPlayer::VideoFullscreenModePictureInPicture)];
    updateDisableExternalPlayback();
#else
    UNUSED_PARAM(mode);
#endif
}
1233     
// Fullscreen standby only affects whether external playback is disabled, and
// only on iOS-family (non-watchOS) platforms.
void MediaPlayerPrivateAVFoundationObjC::videoFullscreenStandbyChanged()
{
#if PLATFORM(IOS_FAMILY) && !PLATFORM(WATCHOS)
    updateDisableExternalPlayback();
#endif
}
1240
1241 #if PLATFORM(IOS_FAMILY)
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    // RetainPtr::get() already yields nil when no metadata has been cached,
    // so no explicit null check is needed.
    return m_currentMetaData.get();
}
1248
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    // Returns the player item's extended access log as text, or the empty
    // string when there is no item or no log data.
    if (!m_avPlayerItem)
        return emptyString();

    // -accessLog returns nil when no access log events have accumulated.
    AVPlayerItemAccessLog *log = [m_avPlayerItem.get() accessLog];
    if (!log)
        return emptyString();

    // -initWithData:encoding: requires non-nil data and may return nil when
    // decoding fails; guard both so we never hand back a null String.
    RetainPtr<NSString> logString = adoptNS([[NSString alloc] initWithData:[log extendedLogData] encoding:[log extendedLogDataStringEncoding]]);
    if (!logString)
        return emptyString();

    return logString.get();
}
1259
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    // Returns the player item's extended error log as text, or the empty
    // string when there is no item or no log data.
    if (!m_avPlayerItem)
        return emptyString();

    // -errorLog returns nil when no error log events have accumulated.
    AVPlayerItemErrorLog *log = [m_avPlayerItem.get() errorLog];
    if (!log)
        return emptyString();

    // -initWithData:encoding: requires non-nil data and may return nil when
    // decoding fails; guard both so we never hand back a null String.
    RetainPtr<NSString> logString = adoptNS([[NSString alloc] initWithData:[log extendedLogData] encoding:[log extendedLogDataStringEncoding]]);
    if (!logString)
        return emptyString();

    return logString.get();
}
1270 #endif
1271
void MediaPlayerPrivateAVFoundationObjC::didEnd()
{
    // Reaching the end implicitly stops playback; record that before the
    // shared end-of-media handling runs.
    m_requestedPlaying = false;
    MediaPlayerPrivateAVFoundation::didEnd();
}
1277
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    // Flip the video layer's hidden flag inside a transaction with implicit
    // CA animations disabled, so visibility changes take effect immediately.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    if (m_videoLayer)
        [m_videoLayer setHidden:!isVisible];
    [CATransaction commit];
}
1286     
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    // Playing is expressed by pushing the page-requested rate to the AVPlayer.
    INFO_LOG(LOGIDENTIFIER);
    if (!metaDataAvailable())
        return;

    m_requestedPlaying = true;
    setPlayerRate(m_requestedRate);
}
1296
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    // Pausing is expressed as a rate of 0 on the AVPlayer.
    INFO_LOG(LOGIDENTIFIER);
    if (!metaDataAvailable())
        return;

    m_requestedPlaying = false;
    setPlayerRate(0);
}
1306
// Reports paused based on the KVO-cached timeControlStatus.
bool MediaPlayerPrivateAVFoundationObjC::platformPaused() const
{
    return m_cachedTimeControlStatus == AVPlayerTimeControlStatusPaused;
}
1311
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Asking the asset for its duration before it finishes loading would fetch
    // the answer synchronously, so bail out until the asset is loaded.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();

    // Prefer the player item's duration when it is ready to play — some assets
    // never report a duration of their own.
    bool itemIsUsable = m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay;
    CMTime cmDuration = itemIsUsable ? [m_avPlayerItem.get() duration] : [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return PAL::toMediaTime(cmDuration);

    // An indefinite duration indicates a live stream.
    if (CMTIME_IS_INDEFINITE(cmDuration))
        return MediaTime::positiveInfiniteTime();

    INFO_LOG(LOGIDENTIFIER, "returning invalid time");
    return MediaTime::invalidTime();
}
1336
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    // Without metadata or a player item there is no meaningful position.
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime itemTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(itemTime))
        return MediaTime::zeroTime();

    // Clamp to zero so small negative times never leak out.
    return std::max(PAL::toMediaTime(itemTime), MediaTime::zeroTime());
}
1348
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // Partially accumulated metadata cues would straddle the seek point.
    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = PAL::toCMTime(time);
    CMTime cmBefore = PAL::toCMTime(negativeTolerance);
    CMTime cmAfter = PAL::toCMTime(positiveTolerance);

    // [AVPlayerItem seekToTime] will throw an exception if toleranceBefore is negative.
    if (CMTimeCompare(cmBefore, kCMTimeZero) < 0)
        cmBefore = kCMTimeZero;
    
    auto weakThis = makeWeakPtr(*this);

    // Suppress timeControlStatus KVO during the seek; it is re-enabled in the
    // completion handler (on the main thread, guarded by the weak pointer).
    setShouldObserveTimeControlStatus(false);
    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->setShouldObserveTimeControlStatus(true);
            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1381
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS_FAMILY)
    // Volume is not set programmatically on iOS-family platforms.
    UNUSED_PARAM(volume);
    return;
#else
    if (!m_avPlayer)
        return;

    [m_avPlayer.get() setVolume:volume];
#endif
}
1394
void MediaPlayerPrivateAVFoundationObjC::setMuted(bool muted)
{
    // Record the requested muted state; short-circuit when nothing changes.
    if (muted == m_muted)
        return;

    INFO_LOG(LOGIDENTIFIER, "- ", muted);

    m_muted = muted;

    // Without a player yet, the cached value is applied on player creation.
    if (m_avPlayer)
        [m_avPlayer setMuted:m_muted];
}
1409
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    // Intentionally does nothing beyond logging in this backend.
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    INFO_LOG(LOGIDENTIFIER, "- ", closedCaptionsVisible);
}
1419
// Remembers the page-requested rate; it is only pushed to the AVPlayer while
// playback has been requested (pause is implemented as rate 0 elsewhere).
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    m_requestedRate = rate;
    if (m_requestedPlaying)
        setPlayerRate(rate);
}
1426
void MediaPlayerPrivateAVFoundationObjC::setPlayerRate(double rate)
{
    // Apply a rate change while suppressing our own timeControlStatus KVO,
    // then resynchronize the cached status before re-enabling observation.
    setDelayCallbacks(true);
    m_cachedRate = rate;
    setShouldObserveTimeControlStatus(false);
    [m_avPlayer setRate:rate];
    m_cachedTimeControlStatus = [m_avPlayer timeControlStatus];
    setShouldObserveTimeControlStatus(true);
    setDelayCallbacks(false);
}
1437
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    // The rate is cached via KVO; it is only meaningful once metadata exists.
    return metaDataAvailable() ? m_cachedRate : 0;
}
1445
double MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesLastModifiedTime() const
{
    // Only available on macOS 10.13+ / iOS 11+; report 0 elsewhere.
#if (PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101300) || (PLATFORM(IOS_FAMILY) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000)
    return [m_avPlayerItem seekableTimeRangesLastModifiedTime];
#else
    return 0;
#endif
}
1454
double MediaPlayerPrivateAVFoundationObjC::liveUpdateInterval() const
{
    // Only available on macOS 10.13+ / iOS 11+; report 0 elsewhere.
#if (PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101300) || (PLATFORM(IOS_FAMILY) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000)
    return [m_avPlayerItem liveUpdateInterval];
#else
    return 0;
#endif
}
1463
void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
{
    // Spectral keeps pitch constant across rate changes; Varispeed lets pitch
    // follow the rate. A no-op until a player item exists.
    if (!m_avPlayerItem)
        return;

    NSString *algorithm = preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed;
    [m_avPlayerItem setAudioTimePitchAlgorithm:algorithm];
}
1469
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    // Convert the KVO-cached loaded ranges into PlatformTimeRanges, skipping
    // any invalid or empty CMTimeRange entries.
    auto ranges = std::make_unique<PlatformTimeRanges>();

    if (!m_avPlayerItem)
        return ranges;

    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;
        ranges->add(PAL::toMediaTime(range.start), PAL::toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return ranges;
}
1484
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    // Return the smallest start time across the cached seekable ranges; with
    // no valid range, the answer is zero.
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    bool foundValidRange = false;
    MediaTime earliestStart = MediaTime::positiveInfiniteTime();
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundValidRange = true;
        MediaTime rangeStart = PAL::toMediaTime(range.start);
        if (rangeStart < earliestStart)
            earliestStart = rangeStart;
    }
    return foundValidRange ? earliestStart : MediaTime::zeroTime();
}
1504
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    // Refresh the cached seekable ranges on demand, then return the latest end
    // time among the valid ones (zero when there are none).
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    MediaTime latestEnd;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        MediaTime rangeEnd = PAL::toMediaTime(CMTimeRangeGetEnd(range));
        if (latestEnd < rangeEnd)
            latestEnd = rangeEnd;
    }
    return latestEnd;
}
1522
// Returns the latest end time among the cached loaded (buffered) ranges,
// or zero when nothing has been loaded.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    MediaTime latest;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        MediaTime rangeEnd = PAL::toMediaTime(CMTimeRangeGetEnd(range));
        if (rangeEnd > latest)
            latest = rangeEnd;
    }
    return latest;
}
1541
// Sums the total sample data length of every cached track. The result is
// memoized in m_cachedTotalBytes (mutable) so repeated queries are cheap.
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    if (!m_cachedTotalBytes) {
        for (AVPlayerItemTrack *track in m_cachedTracks.get())
            m_cachedTotalBytes += [[track assetTrack] totalSampleDataLength];
    }

    return m_cachedTotalBytes;
}
1555
// Takes ownership of the AVAsset (or AVURLAsset) created during loading.
void MediaPlayerPrivateAVFoundationObjC::setAsset(RetainPtr<id>&& asset)
{
    m_avAsset = WTFMove(asset);
}
1560
// Aggregates the loading status of the asset's metadata keys into a single
// AssetStatus value for the cross-platform layer. Loading/Failed/Cancelled on
// any key short-circuits; otherwise playability decides Playable vs Loaded.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];

        if (error)
            ERROR_LOG(LOGIDENTIFIER, "failed for ", [keyName UTF8String], ", error = ", [[error localizedDescription] UTF8String]);

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading;// At least one key is not loaded yet.
        
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    // Hardware-decode requirements are only checked when the client asks for
    // it; otherwise all tracks are assumed playable.
    if (!player()->shouldCheckHardwareSupport())
        m_tracksArePlayable = true;

    // Lazily evaluate (and cache) whether every track meets the hardware
    // decode requirements; one failing track marks the asset unplayable.
    if (!m_tracksArePlayable) {
        m_tracksArePlayable = true;
        for (AVAssetTrack *track in [m_avAsset tracks]) {
            if (!assetTrackMeetsHardwareDecodeRequirements(track, player()->mediaContentTypesRequiringHardwareSupport())) {
                m_tracksArePlayable = false;
                break;
            }
        }
    }

    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue] && m_tracksArePlayable.value())
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1601
// Returns the NSError code produced while loading the asset's "playable"
// property, or 0 when there is no asset or no error (messaging nil yields 0).
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    if (!m_avAsset)
        return 0;

    NSError *error = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&error];
    if (!error)
        return 0;
    return [error code];
}
1611
// Draws the current video frame into the given graphics context, preferring
// the AVPlayerItemVideoOutput path and falling back to the image generator.
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    // Defer observer callbacks while inside AVFoundation painting code, and
    // guard against Objective-C exceptions crossing into C++.
    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    // Record that at least one frame has been rendered.
    m_videoFrameHasDrawn = true;
}
1632
// Best-effort paint entry point: only draws when we are not already rendering
// to a layer and a context renderer (image generator or video output) exists.
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    // Rendering to a layer makes painting here redundant, and without a
    // context renderer there is nothing to paint from.
    if (currentRenderingMode() == MediaRenderingToLayer || !hasContextRenderer())
        return;

    paintCurrentFrameInContext(context, rect);
}
1648
// Renders a snapshot produced by the AVAssetImageGenerator into the context.
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext& context, const FloatRect& rect)
{
    INFO_LOG(LOGIDENTIFIER);

    RetainPtr<CGImageRef> image = createImageForTimeInRect(currentTime(), rect);
    if (!image)
        return;

    GraphicsContextStateSaver stateSaver(context);
    // Flip the context vertically: CGImage drawing uses a bottom-left origin
    // while the destination rect is specified top-left.
    context.translate(rect.x(), rect.y() + rect.height());
    context.scale(FloatSize(1.0f, -1.0f));
    context.setImageInterpolationQuality(InterpolationLow);
    IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context.platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
}
1663
// Creates a CGImage snapshot of the media at the given time, sized to fit
// the given rect, converted into the sRGB color space.
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !RELEASE_LOG_DISABLED
    MonotonicTime start = MonotonicTime::now();
#endif

    // 600 is a common timescale evenly divisible by typical frame rates.
    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), sRGBColorSpaceRef()));

#if !RELEASE_LOG_DISABLED
    DEBUG_LOG(LOGIDENTIFIER, "creating image took ", (MonotonicTime::now() - start).seconds());
#endif

    return image;
}
1684
// Reports every MIME type AVFoundation can handle, as cached by
// AVFoundationMIMETypeCache.
// Fix: the function body was missing its closing brace, which would swallow
// the following declarations into this function and break compilation.
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& supportedTypes)
{
    supportedTypes = AVFoundationMIMETypeCache::singleton().types();
}

#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// FairPlay Streaming ("com.apple.fps", "com.apple.fps.1_0") and ClearKey
// ("org.w3c.clearkey") are the only key systems this engine understands.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringASCIICase(keySystem, "com.apple.fps")
        || equalIgnoringASCIICase(keySystem, "com.apple.fps.1_0")
        || equalIgnoringASCIICase(keySystem, "org.w3c.clearkey");
}
#endif
1698
// Engine-selection query: can AVFoundation play content described by
// `parameters`? Answers IsSupported / MayBeSupported / IsNotSupported.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(MEDIA_SOURCE)
    // MSE content is handled by the MediaSource-specific engine.
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif
#if ENABLE(MEDIA_STREAM)
    // MediaStream content is handled by the MediaStream-specific engine.
    if (parameters.isMediaStream)
        return MediaPlayer::IsNotSupported;
#endif

    auto containerType = parameters.type.containerType();
    if (isUnsupportedMIMEType(containerType))
        return MediaPlayer::IsNotSupported;

    if (!staticMIMETypeList().contains(containerType) && !AVFoundationMIMETypeCache::singleton().canDecodeType(containerType))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.type.codecs().isEmpty())
        return MediaPlayer::MayBeSupported;

    if (!contentTypeMeetsHardwareDecodeRequirements(parameters.type, parameters.contentTypesRequiringHardwareSupport))
        return MediaPlayer::IsNotSupported;

    // Ask AVFoundation about the full 'type; codecs="..."' string for a
    // definitive answer.
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)containerType, (NSString *)parameters.type.parameter(ContentType::codecsParameter())];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1728
// Returns whether this engine can handle the given EME key system, optionally
// constrained by `mimeType`. Only meaningful with LEGACY_ENCRYPTED_MEDIA.
// Fix: the Clear-Key/HLS branch returned the enum value
// MediaPlayer::IsNotSupported from a bool function, relying on that
// enumerator happening to be zero; return an explicit `false` instead.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (!keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        if (equalIgnoringASCIICase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringASCIICase(mimeType, "application/x-mpegurl"))
            return false;

        if (!keySystemIsSupported(keySystem))
            return false;

        if (!mimeType.isEmpty() && isUnsupportedMIMEType(mimeType))
            return false;

        if (!mimeType.isEmpty() && !staticMIMETypeList().contains(mimeType) && !AVFoundationMIMETypeCache::singleton().canDecodeType(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1754
1755 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1756 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// Completes an AVAssetResourceLoadingRequest using already-available key
// data: fills in the content information request, then answers the data
// request with the requested byte range of the key.
static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
{
    if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
        [infoRequest setContentLength:keyData->byteLength()];
        [infoRequest setByteRangeAccessSupported:YES];
    }

    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
        long long start = [dataRequest currentOffset];
        // Clamp the end of the requested range to the size of the key data.
        long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);

        // A range starting beyond the key data cannot be satisfied.
        if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
            [request finishLoadingWithError:nil];
            return;
        }

        ASSERT(start <= std::numeric_limits<int>::max());
        ASSERT(end <= std::numeric_limits<int>::max());
        auto requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
        RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
        [dataRequest respondWithData:nsData.get()];
    }

    [request finishLoading];
}
1782 #endif
1783
// Intercepts AVFoundation resource loading requests. Key requests ("skd" for
// FairPlay, "clearkey" for Clear Key) are routed through the EME machinery;
// everything else is handed to a WebCoreAVFResourceLoader. Returns true when
// AVFoundation should wait for us to satisfy the request asynchronously.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
    if (scheme == "skd") {
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        auto initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        unsigned byteLength = initDataBuffer->byteLength();
        auto initDataView = JSC::DataView::create(initDataBuffer.copyRef(), 0, byteLength);
        initDataView->set<uint32_t>(0, keyURISize, true);

        auto keyURIArray = Uint16Array::create(initDataBuffer.copyRef(), 4, keyURI.length());
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

        auto initData = Uint8Array::create(WTFMove(initDataBuffer), 0, byteLength);
        // If the client does not need a key, there is nothing to wait for.
        if (!player()->keyNeeded(initData.ptr()))
            return false;
#endif
        // Remember the pending request so it can be completed once the key
        // arrives.
        m_keyURIToRequestMap.set(keyURI, avRequest);
#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
        // When a modern CDM instance is attached, AVContentKeySession handles
        // the request instead of the resource loader path.
        if (m_cdmInstance)
            return false;

        RetainPtr<NSData> keyURIData = [keyURI dataUsingEncoding:NSUTF8StringEncoding allowLossyConversion:YES];
        m_keyID = SharedBuffer::create(keyURIData.get());
        player()->initializationDataEncountered("skd"_s, m_keyID->tryCreateArrayBuffer());
        setWaitingForKey(true);
#endif
        return true;
    }

#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (scheme == "clearkey") {
        String keyID = [[[avRequest request] URL] resourceSpecifier];
        auto encodedKeyId = UTF8Encoding().encode(keyID, UnencodableHandling::URLEncodedEntities);

        auto initData = Uint8Array::create(encodedKeyId.size());
        initData->setRange(encodedKeyId.data(), encodedKeyId.size(), 0);

        // If the key is already cached, answer the request immediately.
        auto keyData = player()->cachedKeyForKeyId(keyID);
        if (keyData) {
            fulfillRequestWithKeyData(avRequest, keyData.get());
            return false;
        }

        if (!player()->keyNeeded(initData.ptr()))
            return false;

        m_keyURIToRequestMap.set(keyID, avRequest);
        return true;
    }
#endif
#endif

    // Not a key request: load the resource through WebCore's loader.
    auto resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add((__bridge CFTypeRef)avRequest, resourceLoader.copyRef());
    resourceLoader->startLoading();
    return true;
}
1848
// Called when AVFoundation abandons a resource loading request; stops the
// corresponding WebCore resource loader if one is still active.
// Fix: removed the unused local `scheme`, which performed dead URL work on
// every cancellation.
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get((__bridge CFTypeRef)avRequest))
        resourceLoader->stopLoading();
}
1858
// Drops the bookkeeping entry for a loading request that has finished or
// been torn down; the mapped loader is destroyed with it.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove((__bridge CFTypeRef)avRequest);
}
1863 #endif
1864
// The engine is usable only when both AVFoundation and CoreMedia can be
// soft-linked at runtime.
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    return AVFoundationLibrary() && isCoreMediaFrameworkAvailable();
}
1869
// Would snap a requested time to the nearest sample boundary; currently a
// pass-through because the required AVFoundation API does not exist.
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1878
// Cached current-time is never considered fresh: always re-query AVFoundation.
double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
{
    return 0;
}
1883
// Applies the aspect-ratio policy to the AVPlayerLayer's video gravity.
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayerManager->videoFullscreenLayer())
        return;

    // Wrap the change in a transaction with implicit actions disabled so the
    // layer does not animate between gravities.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    [m_videoLayer.get() setVideoGravity:(shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize)];
    [CATransaction commit];
}
1900
// Returns the first track in `tracks` whose isEnabled flag is set, or nil
// when none is enabled.
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    for (AVAssetTrack* track in tracks) {
        if ([track isEnabled])
            return track;
    }
    return nil;
}
1910
// Recomputes cached hasVideo/hasAudio/hasClosedCaptions state and the
// presentation size whenever the track collection changes, updates the
// audio/video track lists, and fires characteristic-changed notifications.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the previous primary audio language so we can detect a change
    // after the track lists are rebuilt.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    // Batch characteristic-changed notifications until the end of this update.
    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif
        auto size = firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize();
        // For videos with rotation tag set, the transformation above might return a CGSize instance with negative width or height.
        // See https://bugs.webkit.org/show_bug.cgi?id=172648.
        if (size.width() < 0)
            size.setWidth(-size.width());
        if (size.height() < 0)
            size.setHeight(-size.height());
        presentationSizeDidChange(size);
    } else {
        // With a player item, classify each enabled track by its media type.
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // A selected option in a media-selection group also counts as a track.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
#endif
#endif

        // Always says we have video if the AVPlayerLayer is ready for display to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Prefer the legible media-selection group for caption detection.
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

    // Without legible output support, fall back to legacy CC track handling.
#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    INFO_LOG(LOGIDENTIFIER, "has video = ", hasVideo(), ", has audio = ", hasAudio(), ", has captions = ", hasClosedCaptions());

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio provider pointed at the current audible track.
    if (m_provider)
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
#endif

    setDelayCharacteristicsChangedNotification(false);
}
2021
2022 #if ENABLE(VIDEO_TRACK)
2023
// Diffs the AVPlayerItemTracks of the given media type against the previously
// known wrapper items: creates wrappers for newly appearing tracks, drops
// wrappers for vanished tracks, updates `oldItems` in place, and notifies the
// MediaPlayer via the supplied removed/added member-function pointers.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Restrict the incoming tracks to the requested media type.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Partition old items into kept vs removed.
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    // Wrap each newly appearing track.
    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the player only after oldItems reflects the new state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(*removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(*addedItem);
}
2067
2068 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2069
// Media-selection-group flavor of the diffing helper above: refreshes the
// group's options for the given characteristics, then computes added/removed
// MediaSelectionOptions, rebuilds `oldItems`, and notifies the MediaPlayer.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, const Vector<String>& characteristics, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions(characteristics);

    // Collect the group's current options, skipping null entries.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOptionType* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        newSelectionOptions.add(option);
    }

    // Collect the options referenced by the existing wrapper items.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Items without an option, or whose option vanished, are removed.
    for (auto& oldItem : oldItems) {
        if (!oldItem->mediaSelectionOption())
            removedItems.append(oldItem);
        else if (removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the player only after oldItems reflects the new state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(*removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(*addedItem);
}
2129
2130 #endif
2131
// Rebuilds the audio track list, preferring the audible media-selection group
// when available and falling back to the player item's raw tracks otherwise.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !RELEASE_LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    Vector<String> characteristics = player()->preferredAudioCharacteristics();
    // Lazily create the audible selection-group wrapper.
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, characteristics);
    }

    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, characteristics, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
    else
#endif
        determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

    // Refresh cached properties (label, language, enabled) on every track.
    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "track count was ", count, ", is ", m_audioTracks.size());
#endif
}
2158
// Rebuilds the video track list from the player item's tracks and, when
// available, the visual media-selection group.
// Fix: the property-refresh loop iterated m_audioTracks instead of
// m_videoTracks, leaving video track properties stale after an update.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !RELEASE_LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create the visual selection-group wrapper.
    if (!m_visualGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, Vector<String>());
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, Vector<String>(), &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
#endif

    // Refresh cached properties on the *video* tracks (was m_audioTracks).
    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "track count was ", count, ", is ", m_videoTracks.size());
#endif
}
2184
// Forwards to the fullscreen layer manager, which knows whether captions
// must be rendered into a separate representation layer.
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
    return m_videoFullscreenLayerManager->requiresTextTrackRepresentation();
}
2189
// Keeps the caption representation layer's bounds in sync with the video.
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
    m_videoFullscreenLayerManager->syncTextTrackBounds();
}
2194
// Installs (or clears, when null) the caption representation layer.
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
    m_videoFullscreenLayerManager->setTextTrackRepresentation(representation);
}
2199
2200 #endif // ENABLE(VIDEO_TRACK)
2201
2202 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
2203
// Lazily creates the Web Audio source provider wired to the current player
// item and its first enabled audible track.
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    if (m_provider)
        return m_provider.get();

    m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
    m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    return m_provider.get();
}
2212
2213 #endif
2214
// Propagates the cached presentation size to the cross-platform natural size
// once an asset exists.
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (!m_avAsset)
        return;

    setNaturalSize(m_cachedPresentationSize);
}
2222
// Mirrors the asset's post-redirect URL into the cross-platform layer;
// clears it when there is no asset.
void MediaPlayerPrivateAVFoundationObjC::resolvedURLChanged()
{
    if (m_avAsset)
        setResolvedURL(URL([m_avAsset resolvedURL]));
    else
        setResolvedURL(URL());
}
2227
// Reports whether every load made through our WebCoreNSURLSession passed its
// CORS checks. Answers false when the NSURLSession path is unavailable.
bool MediaPlayerPrivateAVFoundationObjC::didPassCORSAccessCheck() const
{
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    if (!DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled())
        return false;
    if (![resourceLoader respondsToSelector:@selector(URLSession)])
        return false;

    WebCoreNSURLSession *session = (WebCoreNSURLSession *)resourceLoader.URLSession;
    if (![session isKindOfClass:[WebCoreNSURLSession class]])
        return false;

    return session.didPassCORSAccessChecks;
}
2241
// Asks our WebCoreNSURLSession whether loads performed for this media would
// taint the given origin. Returns false when the session path is disabled or
// unavailable, and nullopt when the session is not ours to inspect.
Optional<bool> MediaPlayerPrivateAVFoundationObjC::wouldTaintOrigin(const SecurityOrigin& origin) const
{
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    if (!DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled())
        return false;
    if (![resourceLoader respondsToSelector:@selector(URLSession)])
        return false;

    WebCoreNSURLSession *session = (WebCoreNSURLSession *)resourceLoader.URLSession;
    if (![session isKindOfClass:[WebCoreNSURLSession class]])
        return WTF::nullopt;

    return [session wouldTaintOrigin:origin];
}
2255
2256
2257 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
2258
// Creates the AVPlayerItemVideoOutput used to pull decoded pixel buffers and
// attaches it to the player item. No-op when there is no item or an output
// already exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    INFO_LOG(LOGIDENTIFIER);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // With VideoToolbox, let AVFoundation pick the native pixel format.
    NSDictionary* attributes = nil;
#else
    // Otherwise request 32BGRA, which the painting path can consume directly.
    NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey, nil];
#endif
    m_videoOutput = adoptNS([[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];
}
2278
// Detaches the video output from the player item (if any) and releases it.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];

    INFO_LOG(LOGIDENTIFIER);

    // Clear the RetainPtr with nullptr (matches m_lastImage handling elsewhere
    // in this file) rather than the integer literal 0.
    m_videoOutput = nullptr;
}
2291
// Pulls the pixel buffer for the item's current time from m_videoOutput.
// Returns true only when a *new* buffer was available; on success the cached
// CGImage is invalidated so it will be recreated from the fresh buffer.
bool MediaPlayerPrivateAVFoundationObjC::updateLastPixelBuffer()
{
    if (!m_avPlayerItem)
        return false;

    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return false;

    // copyPixelBufferForItemTime: follows the CF copy rule, so adoptCF takes ownership.
    m_lastPixelBuffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
    m_lastImage = nullptr;
    return true;
}
2310
// Returns true when either a decoded image is already cached, or the video
// output (created on demand) has a pixel buffer ready for the current time.
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;

    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    CMTime itemTime = [m_avPlayerItem currentTime];
    return [m_videoOutput hasNewPixelBufferForItemTime:itemTime];
}
2324
// Refreshes m_lastImage from the newest available pixel buffer. With
// UpdateType::UpdateSynchronously this may block (up to one second) waiting
// for the video output to produce a frame.
void MediaPlayerPrivateAVFoundationObjC::updateLastImage(UpdateType type)
{
#if HAVE(CORE_VIDEO)
    if (!m_avPlayerItem)
        return;

    if (type == UpdateType::UpdateSynchronously && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();

    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
    // for the requested time has already been retrieved. In this case, the last valid image (if any)
    // should be displayed.
    if (!updateLastPixelBuffer() && (m_lastImage || !m_lastPixelBuffer))
        return;

    // Lazily create the conformer that converts CVPixelBuffers into CGImages.
    if (!m_pixelBufferConformer) {
#if USE(VIDEOTOOLBOX)
        NSDictionary *attributes = @{ (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#else
        NSDictionary *attributes = nil;
#endif
        m_pixelBufferConformer = std::make_unique<PixelBufferConformerCV>((__bridge CFDictionaryRef)attributes);
    }

#if !RELEASE_LOG_DISABLED
    MonotonicTime start = MonotonicTime::now();
#endif

    m_lastImage = m_pixelBufferConformer->createImageFromPixelBuffer(m_lastPixelBuffer.get());

#if !RELEASE_LOG_DISABLED
    DEBUG_LOG(LOGIDENTIFIER, "creating buffer took ", (MonotonicTime::now() - start).seconds());
#endif
#endif // HAVE(CORE_VIDEO)
}
2360
// Paints the current video frame into `context`, honoring the video track's
// preferredTransform (e.g. rotation metadata baked into the movie).
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext& context, const FloatRect& outputRect)
{
    updateLastImage(UpdateType::UpdateSynchronously);
    if (!m_lastImage)
        return;

    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    INFO_LOG(LOGIDENTIFIER);

    GraphicsContextStateSaver stateSaver(context);
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
    // The CTM below applies videoTransform, so map the destination rect through
    // the inverse (falling back to identity for a non-invertible transform).
    FloatRect transformedOutputRect = videoTransform.inverse().valueOr(AffineTransform()).mapRect(outputRect);

    context.concatCTM(videoTransform);
    context.drawNativeImage(m_lastImage.get(), imageRect.size(), transformedOutputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();

}
2387
// Uploads the most recent video pixel buffer into the caller's GL texture.
// Returns false when no pixel buffer is available yet.
bool MediaPlayerPrivateAVFoundationObjC::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    ASSERT(context);

    updateLastPixelBuffer();
    if (!m_lastPixelBuffer)
        return false;

    // The copier is created once per GL context and reused across frames.
    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierCV>(*context);

    size_t bufferWidth = CVPixelBufferGetWidth(m_lastPixelBuffer.get());
    size_t bufferHeight = CVPixelBufferGetHeight(m_lastPixelBuffer.get());
    return m_videoTextureCopier->copyImageToPlatformTexture(m_lastPixelBuffer.get(), bufferWidth, bufferHeight, outputTexture, outputTarget, level, internalFormat, format, type, premultiplyAlpha, flipY);
}
2404
// Returns the CGImage for the current time after a (non-blocking) refresh;
// may be null when no frame has been produced yet.
NativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage;
}
2410
// Blocks the calling thread until the video output signals that new media
// data is available, giving up after one second.
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // The delegate signals m_videoOutputSemaphore from outputMediaDataWillChange().
    if (!m_videoOutputSemaphore.waitFor(1_s))
        ERROR_LOG(LOGIDENTIFIER, "timed out");
}
2420
// Called on the video-output delegate queue; wakes any thread blocked in
// waitForVideoOutputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutputType *)
{
    m_videoOutputSemaphore.signal();
}
2425
2426 #endif
2427
2428 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
2429
// Removes and returns the pending resource-loading request for keyURI,
// or a null RetainPtr if no request is outstanding for that key.
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
2434
// Walks the outstanding key requests and fulfills any whose key data the
// page has since supplied, then forgets the fulfilled entries.
void MediaPlayerPrivateAVFoundationObjC::keyAdded()
{
    Vector<String> fulfilledKeyIds;

    for (auto& pair : m_keyURIToRequestMap) {
        auto keyData = player()->cachedKeyForKeyId(pair.key);
        if (!keyData)
            continue;

        fulfillRequestWithKeyData(pair.value.get(), keyData.get());
        fulfilledKeyIds.append(pair.key);
    }

    // Removal is deferred so the map is not mutated while being iterated.
    for (auto& keyId : fulfilledKeyIds)
        m_keyURIToRequestMap.remove(keyId);
}
2454
// Clears the weak back-pointer when the legacy CDM session is torn down.
void MediaPlayerPrivateAVFoundationObjC::removeSession(LegacyCDMSession& session)
{
    ASSERT_UNUSED(session, &session == m_session);
    m_session = nullptr;
}
2460
// Creates a legacy EME session for keySystem, or nullptr when unsupported.
// The player retains only a weak pointer; the caller owns the session.
std::unique_ptr<LegacyCDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem, LegacyCDMSessionClient* client)
{
    if (!keySystemIsSupported(keySystem))
        return nullptr;
    auto session = std::make_unique<CDMSessionAVFoundationObjC>(this, client);
    m_session = makeWeakPtr(*session);
    return WTFMove(session);
}
2469 #endif
2470
2471 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(LEGACY_ENCRYPTED_MEDIA)
// Reacts to output-protection changes (e.g. HDCP loss on an external display):
// reports a synthetic 'HDCP' error to the legacy session and/or forwards the
// change to the modern CDM instance, depending on which EME flavor is built.
void MediaPlayerPrivateAVFoundationObjC::outputObscuredDueToInsufficientExternalProtectionChanged(bool newValue)
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (m_session && newValue)
        m_session->playerDidReceiveError([NSError errorWithDomain:@"com.apple.WebKit" code:'HDCP' userInfo:nil]);
#endif

#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    if (m_cdmInstance)
        m_cdmInstance->outputObscuredDueToInsufficientExternalProtectionChanged(newValue);
#elif !ENABLE(LEGACY_ENCRYPTED_MEDIA)
    UNUSED_PARAM(newValue);
#endif
}
2486 #endif
2487
2488 #if ENABLE(ENCRYPTED_MEDIA)
// Adopts `instance` as the active FairPlay Streaming CDM instance, detaching
// any previously attached instance first. Non-FairPlay instances are ignored.
void MediaPlayerPrivateAVFoundationObjC::cdmInstanceAttached(CDMInstance& instance)
{
#if HAVE(AVCONTENTKEYSESSION)
    if (!is<CDMInstanceFairPlayStreamingAVFObjC>(instance))
        return;

    auto& fpsInstance = downcast<CDMInstanceFairPlayStreamingAVFObjC>(instance);
    if (&fpsInstance == m_cdmInstance)
        return;

    // At most one instance may be attached at a time.
    if (m_cdmInstance)
        cdmInstanceDetached(*m_cdmInstance);

    m_cdmInstance = &fpsInstance;
#else
    UNUSED_PARAM(instance);
#endif
}
2507
// Drops the reference to the attached CDM instance; asserts it matches.
void MediaPlayerPrivateAVFoundationObjC::cdmInstanceDetached(CDMInstance& instance)
{
#if HAVE(AVCONTENTKEYSESSION)
    ASSERT_UNUSED(instance, m_cdmInstance && m_cdmInstance == &instance);
    m_cdmInstance = nullptr;
#else
    UNUSED_PARAM(instance);
#endif
}
2517
// Attempts decryption with the attached FairPlay CDM instance: registers the
// asset as a content-key recipient with the matching session and completes
// all parked resource-loading requests.
void MediaPlayerPrivateAVFoundationObjC::attemptToDecryptWithInstance(CDMInstance&)
{
#if HAVE(AVCONTENTKEYSESSION)
    if (!m_keyID || !m_cdmInstance)
        return;

    auto instanceSession = m_cdmInstance->sessionForKeyIDs(Vector<Ref<SharedBuffer>>::from(*m_keyID));
    if (!instanceSession)
        return;

    [instanceSession->contentKeySession() addContentKeyRecipient:m_avAsset.get()];

    // Finish every queued request; marking the content type tells AVFoundation
    // the payload is a streaming content key. The map is moved so new requests
    // arriving during iteration are not affected.
    auto keyURIToRequestMap = WTFMove(m_keyURIToRequestMap);
    for (auto& request : keyURIToRequestMap.values()) {
        if (auto *infoRequest = request.get().contentInformationRequest)
            infoRequest.contentType = AVStreamingKeyDeliveryContentKeyType;
        [request finishLoading];
    }
    setWaitingForKey(false);
#endif
}
2539
// Updates the waiting-for-key flag, notifying the player only on a change.
void MediaPlayerPrivateAVFoundationObjC::setWaitingForKey(bool waitingForKey)
{
    if (m_waitingForKey == waitingForKey)
        return;

    m_waitingForKey = waitingForKey;
    player()->waitingForKeyChanged();
}
2548 #endif
2549
2550 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
2551
// Rebuilds the set of legacy closed-caption tracks from the player item's
// cached tracks: creates wrappers for newly appearing CC tracks and reports
// tracks that disappeared via processNewAndRemovedTextTracks().
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    // Assume every existing text track disappeared; matches below remove entries.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            // Index into removedTextTracks, not m_textTracks: once entries have been
            // removed here (or new tracks appended below) the two vectors no longer
            // line up. This matches processMediaSelectionOptions().
            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(removedTextTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;

        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2586
2587 #endif
2588
// Returns the asset's audible tracks, or nil until the "tracks" key has
// finished loading — so we never trigger synchronous property loading.
NSArray* MediaPlayerPrivateAVFoundationObjC::safeAVAssetTracksForAudibleMedia()
{
    if (!m_avAsset || [m_avAsset.get() statusOfValueForKey:@"tracks" error:NULL] != AVKeyValueStatusLoaded)
        return nil;

    return [m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible];
}
2599
2600 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2601
// True once the asset's availableMediaCharacteristicsWithMediaSelectionOptions
// key has loaded, i.e. selection groups can be queried without blocking.
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    return m_avAsset && [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2612
// Legible (caption/subtitle) selection group, or nil until groups have loaded.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible] : nil;
}
2620
// Audible selection group, or nil until groups have loaded.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible] : nil;
}
2628
// Visual selection group, or nil until groups have loaded.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual] : nil;
}
2636
// Reconciles our text-track list with the asset's legible media-selection
// options: creates tracks for new options (in-band or out-of-band) and
// reports tracks whose options disappeared.
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        INFO_LOG(LOGIDENTIFIER, "no mediaSelectionGroup");
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Assume every existing track disappeared; matches below remove entries.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy closed-caption tracks are handled elsewhere, not by media selection.
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    // Anything left in removedTextTracks no longer has a matching option.
    processNewAndRemovedTextTracks(removedTextTracks);
}
2692
// Lazily creates the single in-band timed-metadata text track and
// registers it with the player.
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    if (m_metadataTrack)
        return;

    m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
    m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
    player()->addTextTrack(*m_metadataTrack);
}
2702
// Forwards a batch of cues (attributed strings and/or native samples) at
// `time` to the currently selected text track, if any.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    ASSERT(time >= MediaTime::zeroTime());

    if (!m_currentTextTrack)
        return;

    m_currentTextTrack->processCue((__bridge CFArrayRef)attributedStrings, (__bridge CFArrayRef)nativeSamples, time);
}
2712
// Drops all cues accumulated on the currently selected text track.
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    INFO_LOG(LOGIDENTIFIER);

    if (!m_currentTextTrack)
        return;
    
    m_currentTextTrack->resetCueValues();
}
2722
2723 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2724
// Switches caption rendering to `track` (or turns captions off when track is
// null), routing through either the deprecated closed-caption-display API or
// AVFoundation media selection depending on the track's category.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    INFO_LOG(LOGIDENTIFIER, "selecting track with language ", track ? track->language() : "");

    m_currentTextTrack = track;

    if (track) {
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
            ALLOW_DEPRECATED_DECLARATIONS_BEGIN
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
            ALLOW_DEPRECATED_DECLARATIONS_END
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
        // Deselect any legible option and disable legacy closed-caption display.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        ALLOW_DEPRECATED_DECLARATIONS_BEGIN
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
        ALLOW_DEPRECATED_DECLARATIONS_END
    }

}
2757
// Best-effort language of the primary audio track. The result is cached in
// the (mutable) m_languageOfPrimaryAudioTrack member; an empty (non-null)
// string means "determined to be unknown" and is also cached.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    ALLOW_DEPRECATED_DECLARATIONS_BEGIN
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    ALLOW_DEPRECATED_DECLARATIONS_END
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        INFO_LOG(LOGIDENTIFIER, "language of selected audible option ", m_languageOfPrimaryAudioTrack);

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        m_languageOfPrimaryAudioTrack = emptyString();
        INFO_LOG(LOGIDENTIFIER, tracks ? [tracks count] : 0, " audio tracks, returning empty");
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

    INFO_LOG(LOGIDENTIFIER, "single audio track has language \"", m_languageOfPrimaryAudioTrack, "\"");

    return m_languageOfPrimaryAudioTrack;
}
2796
2797 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// True when media is being (or would be) routed to a wireless/external target.
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    bool wirelessTarget = false;

#if !PLATFORM(IOS_FAMILY)
    if (m_playbackTarget) {
        // AVFoundation targets report via the player; mock targets via route state.
        if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation)
            wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
        else
            wirelessTarget = m_shouldPlayToPlaybackTarget && m_playbackTarget->hasActiveRoute();
    }
#else
    wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
#endif

    INFO_LOG(LOGIDENTIFIER, "- ", wirelessTarget);

    return wirelessTarget;
}
2817
// Maps AVPlayer's external-playback type to the cross-platform enum.
// On non-iOS platforms external playback is always AirPlay.
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

#if PLATFORM(IOS_FAMILY)
    if (!AVFoundationLibrary())
        return MediaPlayer::TargetTypeNone;

    switch ([m_avPlayer externalPlaybackType]) {
    case AVPlayerExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case AVPlayerExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case AVPlayerExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;

#else
    return MediaPlayer::TargetTypeAirPlay;
#endif
}
2843     
2844 #if PLATFORM(IOS_FAMILY)
// Returns a human-readable name for the external device the player is routed
// to, or nil when unavailable. Uses AVOutputContext when present, otherwise
// falls back to MediaRemote's pickable-route information.
// FIXME: function name misspells "external" ("exernal"); renaming requires
// updating its caller as well.
static NSString *exernalDeviceDisplayNameForPlayer(AVPlayerType *player)
{
#if HAVE(CELESTIAL)
    if (!AVFoundationLibrary())
        return nil;

    if ([getAVOutputContextClass() respondsToSelector:@selector(sharedAudioPresentationOutputContext)]) {
        AVOutputContext *outputContext = [getAVOutputContextClass() sharedAudioPresentationOutputContext];

        // Single-device contexts (or older SDKs) expose just a device name.
        if (![outputContext respondsToSelector:@selector(supportsMultipleOutputDevices)]
            || ![outputContext supportsMultipleOutputDevices]
            || ![outputContext respondsToSelector:@selector(outputDevices)])
            return [outputContext deviceName];

        // Multiple simultaneous devices: join their names for display.
        auto outputDeviceNames = adoptNS([[NSMutableArray alloc] init]);
        for (AVOutputDevice *outputDevice in [outputContext outputDevices]) {
ALLOW_DEPRECATED_DECLARATIONS_BEGIN
            auto outputDeviceName = adoptNS([[outputDevice name] copy]);
ALLOW_DEPRECATED_DECLARATIONS_END
            [outputDeviceNames addObject:outputDeviceName.get()];
        }

        return [outputDeviceNames componentsJoinedByString:@" + "];
    }

    // MediaRemote fallback only applies to AirPlay routing.
    if (player.externalPlaybackType != AVPlayerExternalPlaybackTypeAirPlay)
        return nil;

    NSArray *pickableRoutes = CFBridgingRelease(MRMediaRemoteCopyPickableRoutes());
    if (!pickableRoutes.count)
        return nil;

    NSString *displayName = nil;
    for (NSDictionary *pickableRoute in pickableRoutes) {
        if (![pickableRoute[AVController_RouteDescriptionKey_RouteCurrentlyPicked] boolValue])
            continue;

        displayName = pickableRoute[AVController_RouteDescriptionKey_RouteName];

        NSString *routeName = pickableRoute[AVController_RouteDescriptionKey_AVAudioRouteName];
        if (![routeName isEqualToString:@"Speaker"] && ![routeName isEqualToString:@"HDMIOutput"])
            break;

        // The route is a speaker or HDMI out, override the name to be the localized device model.
        NSString *localizedDeviceModel = [[PAL::getUIDeviceClass() currentDevice] localizedModel];

        // In cases where a route with that name already exists, prefix the name with the model.
        BOOL includeLocalizedDeviceModelName = NO;
        for (NSDictionary *otherRoute in pickableRoutes) {
            if (otherRoute == pickableRoute)
                continue;

            if ([otherRoute[AVController_RouteDescriptionKey_RouteName] rangeOfString:displayName].location != NSNotFound) {
                includeLocalizedDeviceModelName = YES;
                break;
            }
        }

        if (includeLocalizedDeviceModelName)
            displayName =  [NSString stringWithFormat:@"%@ %@", localizedDeviceModel, displayName];
        else
            displayName = localizedDeviceModel;

        break;
    }

    return displayName;
#else
    UNUSED_PARAM(player);
    return nil;
#endif
}
2918
// Display name of the wireless playback target, or the empty string when
// there is no player (and therefore no route).
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();

    String wirelessTargetName;
#if !PLATFORM(IOS_FAMILY)
    if (m_playbackTarget)
        wirelessTargetName = m_playbackTarget->deviceName();
#else
    wirelessTargetName = exernalDeviceDisplayNameForPlayer(m_avPlayer.get());
#endif

    return wirelessTargetName;
}
2934
// Whether external (wireless) video playback is disallowed. When a player
// exists, the mutable cache m_allowsWirelessVideoPlayback is refreshed from
// AVPlayer.allowsExternalPlayback; otherwise the last cached value is used.
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;

    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    INFO_LOG(LOGIDENTIFIER, "- ", !m_allowsWirelessVideoPlayback);

    return !m_allowsWirelessVideoPlayback;
}
2945
// Records the policy and pushes it to AVPlayer.allowsExternalPlayback. The
// change is recorded even without a player so it applies to one created later.
// The setter is bracketed by setDelayCallbacks, matching this file's other
// AVPlayer mutations.
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    INFO_LOG(LOGIDENTIFIER, "- ", disabled);
    m_allowsWirelessVideoPlayback = !disabled;
    if (!m_avPlayer)
        return;

    setDelayCallbacks(true);
    [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
    setDelayCallbacks(false);
}
2957
2958 #if !PLATFORM(IOS_FAMILY)
2959
// Adopts a new wireless playback target, caching its AVOutputContext when the
// target is AVFoundation-backed. If the target has no active route, playback
// to it is turned off immediately.
void MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
{
    m_playbackTarget = WTFMove(target);

    m_outputContext = m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation ? toMediaPlaybackTargetMac(m_playbackTarget.get())->outputContext() : nullptr;

    INFO_LOG(LOGIDENTIFIER);

    if (!m_playbackTarget->hasActiveRoute())
        setShouldPlayToPlaybackTarget(false);
}
2971
// Starts or stops routing playback to the current target. For AVFoundation
// targets this swaps the AVPlayer's outputContext; for mock targets it just
// schedules a wireless-state-changed notification.
void MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(bool shouldPlay)
{
    if (m_shouldPlayToPlaybackTarget == shouldPlay)
        return;

    m_shouldPlayToPlaybackTarget = shouldPlay;

    if (!m_playbackTarget)
        return;

    INFO_LOG(LOGIDENTIFIER, "- ", shouldPlay);

    if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation) {
        AVOutputContext *newContext = shouldPlay ? m_outputContext.get() : nil;

        if (!m_avPlayer)
            return;

        // Avoid redundant context churn when the player is already routed there.
        RetainPtr<AVOutputContext> currentContext = m_avPlayer.get().outputContext;
        if ((!newContext && !currentContext.get()) || [currentContext.get() isEqual:newContext])
            return;

        setDelayCallbacks(true);
        m_avPlayer.get().outputContext = newContext;
        setDelayCallbacks(false);

        return;
    }

    ASSERT(m_playbackTarget->targetType() == MediaPlaybackTarget::Mock);

    // Mock target: no real routing; just notify asynchronously on the main thread.
    setDelayCallbacks(true);
    auto weakThis = makeWeakPtr(*this);
    scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis] {
        if (!weakThis)
            return;
        weakThis->playbackTargetIsWirelessDidChange();
    }));
    setDelayCallbacks(false);
}
3012
3013 #endif // !PLATFORM(IOS_FAMILY)
3014
// iOS-only: keeps AVPlayer's "use external playback while an external screen
// is active" flag in sync with standard-fullscreen/standby presentation state.
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
#if PLATFORM(IOS_FAMILY)
    if (!m_avPlayer)
        return;

    // Selector check guards against SDKs lacking this SPI.
    if ([m_avPlayer respondsToSelector:@selector(setUsesExternalPlaybackWhileExternalScreenIsActive:)])
        [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:(player()->fullscreenMode() == MediaPlayer::VideoFullscreenModeStandard) || player()->isVideoFullscreenStandby()];
#endif
}
3025
3026 #endif
3027
// KVO-driven: caches the AVPlayerItem status and re-evaluates player state.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;

    updateStates();
}
3034
// "Will change" opens a pending-status bracket so updateStates() only runs
// once the matching "did change" notification arrives.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    m_pendingStatusChanges++;
}
3039
// Caches the likely-to-keep-up flag and closes the pending-status bracket
// opened by playbackLikelyToKeepUpWillChange().
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    // Only recompute state once no other will/did-change pair is still open.
    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
3048
// "Will change" opens a pending-status bracket (see playbackBufferEmptyDidChange).
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    m_pendingStatusChanges++;
}
3053
// Caches the buffer-empty flag and closes the pending-status bracket opened
// by playbackBufferEmptyWillChange().
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    // Only recompute state once no other will/did-change pair is still open.
    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
3062
3063 void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
3064 {