0fb1308d9a8a1170694d2b3216946d4432ac06ee
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2018 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27 #import "MediaPlayerPrivateAVFoundationObjC.h"
28
29 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
30
31 #import "AVAssetTrackUtilities.h"
32 #import "AVFoundationMIMETypeCache.h"
33 #import "AVTrackPrivateAVFObjCImpl.h"
34 #import "AudioSourceProviderAVFObjC.h"
35 #import "AudioTrackPrivateAVFObjC.h"
36 #import "AuthenticationChallenge.h"
37 #import "CDMInstanceFairPlayStreamingAVFObjC.h"
38 #import "CDMSessionAVFoundationObjC.h"
39 #import "Cookie.h"
40 #import "DeprecatedGlobalSettings.h"
41 #import "Extensions3D.h"
42 #import "FloatConversion.h"
43 #import "GraphicsContext.h"
44 #import "GraphicsContext3D.h"
45 #import "GraphicsContextCG.h"
46 #import "InbandMetadataTextTrackPrivateAVF.h"
47 #import "InbandTextTrackPrivateAVFObjC.h"
48 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
49 #import "Logging.h"
50 #import "MediaPlaybackTargetMac.h"
51 #import "MediaPlaybackTargetMock.h"
52 #import "MediaSelectionGroupAVFObjC.h"
53 #import "OutOfBandTextTrackPrivateAVF.h"
54 #import "PixelBufferConformerCV.h"
55 #import "PlatformTimeRanges.h"
56 #import "SecurityOrigin.h"
57 #import "SerializedPlatformRepresentationMac.h"
58 #import "SharedBuffer.h"
59 #import "TextEncoding.h"
60 #import "TextTrackRepresentation.h"
61 #import "TextureCacheCV.h"
62 #import "URL.h"
63 #import "VideoFullscreenLayerManagerObjC.h"
64 #import "VideoTextureCopierCV.h"
65 #import "VideoTrackPrivateAVFObjC.h"
66 #import "WebCoreAVFResourceLoader.h"
67 #import "WebCoreCALayerExtras.h"
68 #import "WebCoreNSURLSession.h"
69 #import <JavaScriptCore/DataView.h>
70 #import <JavaScriptCore/JSCInlines.h>
71 #import <JavaScriptCore/TypedArrayInlines.h>
72 #import <JavaScriptCore/Uint16Array.h>
73 #import <JavaScriptCore/Uint32Array.h>
74 #import <JavaScriptCore/Uint8Array.h>
75 #import <functional>
76 #import <objc/runtime.h>
77 #import <pal/avfoundation/MediaTimeAVFoundation.h>
78 #import <pal/spi/cocoa/QuartzCoreSPI.h>
79 #import <pal/spi/mac/AVFoundationSPI.h>
80 #import <wtf/BlockObjCExceptions.h>
81 #import <wtf/ListHashSet.h>
82 #import <wtf/NeverDestroyed.h>
83 #import <wtf/OSObjectPtr.h>
84 #import <wtf/text/CString.h>
85
86 #if ENABLE(AVF_CAPTIONS)
87 #include "TextTrack.h"
88 #endif
89
90 #import <AVFoundation/AVAssetImageGenerator.h>
91 #import <AVFoundation/AVAssetTrack.h>
92 #import <AVFoundation/AVMediaSelectionGroup.h>
93 #import <AVFoundation/AVMetadataItem.h>
94 #import <AVFoundation/AVPlayer.h>
95 #import <AVFoundation/AVPlayerItem.h>
96 #import <AVFoundation/AVPlayerItemOutput.h>
97 #import <AVFoundation/AVPlayerItemTrack.h>
98 #import <AVFoundation/AVPlayerLayer.h>
99 #import <AVFoundation/AVTime.h>
100
101 #if PLATFORM(IOS)
102 #import "WAKAppKitStubs.h"
103 #import <CoreImage/CoreImage.h>
104 #import <UIKit/UIDevice.h>
105 #import <mach/mach_port.h>
106 #else
107 #import <Foundation/NSGeometry.h>
108 #import <QuartzCore/CoreImage.h>
109 #endif
110
111 #if USE(VIDEOTOOLBOX)
112 #import <CoreVideo/CoreVideo.h>
113 #import <VideoToolbox/VideoToolbox.h>
114 #endif
115
116 #import "CoreVideoSoftLink.h"
117 #import "MediaRemoteSoftLink.h"
118
119 namespace std {
120 template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
121     typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
122 };
123 }
124
125 #if ENABLE(AVF_CAPTIONS)
126 // Note: This must be defined before our SOFT_LINK macros:
127 @class AVMediaSelectionOption;
128 @interface AVMediaSelectionOption (OutOfBandExtensions)
129 @property (nonatomic, readonly) NSString* outOfBandSource;
130 @property (nonatomic, readonly) NSString* outOfBandIdentifier;
131 @end
132 #endif
133
134 @interface AVURLAsset (WebKitExtensions)
135 @property (nonatomic, readonly) NSURL *resolvedURL;
136 @end
137
138 typedef AVPlayer AVPlayerType;
139 typedef AVPlayerItem AVPlayerItemType;
140 typedef AVPlayerItemLegibleOutput AVPlayerItemLegibleOutputType;
141 typedef AVPlayerItemVideoOutput AVPlayerItemVideoOutputType;
142 typedef AVMetadataItem AVMetadataItemType;
143 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
144 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
145 typedef AVAssetCache AVAssetCacheType;
146
147 #pragma mark - Soft Linking
148
149 // Soft-linking headers must be included last since they #define functions, constants, etc.
150 #import <pal/cf/CoreMediaSoftLink.h>
151
152 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
153
154 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
155
156 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayer)
157 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerItem)
158 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerItemVideoOutput)
159 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerLayer)
160 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVURLAsset)
161 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVAssetImageGenerator)
162 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVMetadataItem)
163 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVAssetCache)
164
165 SOFT_LINK_CLASS(CoreImage, CIContext)
166 SOFT_LINK_CLASS(CoreImage, CIImage)
167
168 SOFT_LINK_CONSTANT(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString *)
169 SOFT_LINK_CONSTANT(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString *)
170 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicVisual, NSString *)
171 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicAudible, NSString *)
172 SOFT_LINK_CONSTANT(AVFoundation, AVMediaTypeClosedCaption, NSString *)
173 SOFT_LINK_CONSTANT(AVFoundation, AVMediaTypeVideo, NSString *)
174 SOFT_LINK_CONSTANT(AVFoundation, AVMediaTypeAudio, NSString *)
175 SOFT_LINK_CONSTANT(AVFoundation, AVMediaTypeMetadata, NSString *)
176 SOFT_LINK_CONSTANT(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
177 SOFT_LINK_CONSTANT(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
178 SOFT_LINK_CONSTANT(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
179 SOFT_LINK_CONSTANT(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
180 SOFT_LINK_CONSTANT(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
181 SOFT_LINK_CONSTANT(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
182 SOFT_LINK_CONSTANT(AVFoundation, AVLayerVideoGravityResize, NSString *)
183 SOFT_LINK_CONSTANT(AVFoundation, AVStreamingKeyDeliveryContentKeyType, NSString *)
184
185 SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVURLAssetOutOfBandMIMETypeKey, NSString *)
186 SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVURLAssetUseClientURLLoadingExclusively, NSString *)
187
188 #define AVPlayer initAVPlayer()
189 #define AVPlayerItem initAVPlayerItem()
190 #define AVPlayerLayer initAVPlayerLayer()
191 #define AVURLAsset initAVURLAsset()
192 #define AVAssetImageGenerator initAVAssetImageGenerator()
193 #define AVPlayerItemVideoOutput initAVPlayerItemVideoOutput()
194 #define AVMetadataItem initAVMetadataItem()
195 #define AVAssetCache initAVAssetCache()
196
197 #define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
198 #define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
199 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
200 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
201 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
202 #define AVMediaTypeVideo getAVMediaTypeVideo()
203 #define AVMediaTypeAudio getAVMediaTypeAudio()
204 #define AVMediaTypeMetadata getAVMediaTypeMetadata()
205 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
206 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
207 #define AVURLAssetOutOfBandMIMETypeKey getAVURLAssetOutOfBandMIMETypeKey()
208 #define AVURLAssetUseClientURLLoadingExclusively getAVURLAssetUseClientURLLoadingExclusively()
209 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
210 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
211 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
212 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
213 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
214 #define AVStreamingKeyDeliveryContentKeyType getAVStreamingKeyDeliveryContentKeyType()
215
216 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
217
218 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
219 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
220
221 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
222 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
223 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
224 SOFT_LINK_CLASS(AVFoundation, AVOutputContext)
225
226 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicLegible, NSString *)
227 SOFT_LINK_CONSTANT(AVFoundation, AVMediaTypeSubtitle, NSString *)
228 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
229 SOFT_LINK_CONSTANT(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
230
231 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
232 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
233 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
234 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
235 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
236 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
237 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
238
239 #endif
240
241 #if ENABLE(AVF_CAPTIONS)
242
243 SOFT_LINK_CONSTANT(AVFoundation, AVURLAssetCacheKey, NSString *)
244 SOFT_LINK_CONSTANT(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString *)
245 SOFT_LINK_CONSTANT(AVFoundation, AVURLAssetUsesNoPersistentCacheKey, NSString *)
246 SOFT_LINK_CONSTANT(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString *)
247 SOFT_LINK_CONSTANT(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString *)
248 SOFT_LINK_CONSTANT(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString *)
249 SOFT_LINK_CONSTANT(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString *)
250 SOFT_LINK_CONSTANT(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString *)
251 SOFT_LINK_CONSTANT(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString *)
252 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString *)
253 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString *)
254 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicIsAuxiliaryContent, NSString *)
255
256 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
257 #define AVURLAssetCacheKey getAVURLAssetCacheKey()
258 #define AVURLAssetUsesNoPersistentCacheKey getAVURLAssetUsesNoPersistentCacheKey()
259 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
260 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
261 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
262 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
263 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
264 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
265 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
266 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
267 #define AVMediaCharacteristicIsAuxiliaryContent getAVMediaCharacteristicIsAuxiliaryContent()
268
269 #endif
270
271 #if ENABLE(DATACUE_VALUE)
272
273 SOFT_LINK_CONSTANT(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString *)
274 SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVMetadataKeySpaceISOUserData, NSString *)
275 SOFT_LINK_CONSTANT(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString *)
276 SOFT_LINK_CONSTANT(AVFoundation, AVMetadataKeySpaceiTunes, NSString *)
277 SOFT_LINK_CONSTANT(AVFoundation, AVMetadataKeySpaceID3, NSString *)
278
279 #define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
280 #define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
281 #define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
282 #define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
283 #define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
284
285 #endif
286
287 #if PLATFORM(IOS)
288
289 SOFT_LINK_CONSTANT(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
290 SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
291 SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVURLAssetHTTPCookiesKey, NSString *)
292 SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVURLAssetRequiresCustomURLLoadingKey, NSString *)
293
294 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
295 #define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
296 #define AVURLAssetHTTPCookiesKey getAVURLAssetHTTPCookiesKey()
297 #define AVURLAssetRequiresCustomURLLoadingKey getAVURLAssetRequiresCustomURLLoadingKey()
298
299 #endif
300
301 SOFT_LINK_FRAMEWORK(MediaToolbox)
302 SOFT_LINK_OPTIONAL(MediaToolbox, MTEnableCaption2015Behavior, Boolean, (), ())
303
304 #if PLATFORM(IOS)
305
306 #if HAVE(CELESTIAL)
307
308 SOFT_LINK_PRIVATE_FRAMEWORK(Celestial)
309 SOFT_LINK_CONSTANT(Celestial, AVController_RouteDescriptionKey_RouteCurrentlyPicked, NSString *)
310 SOFT_LINK_CONSTANT(Celestial, AVController_RouteDescriptionKey_RouteName, NSString *)
311 SOFT_LINK_CONSTANT(Celestial, AVController_RouteDescriptionKey_AVAudioRouteName, NSString *)
312 #define AVController_RouteDescriptionKey_RouteCurrentlyPicked getAVController_RouteDescriptionKey_RouteCurrentlyPicked()
313 #define AVController_RouteDescriptionKey_RouteName getAVController_RouteDescriptionKey_RouteName()
314 #define AVController_RouteDescriptionKey_AVAudioRouteName getAVController_RouteDescriptionKey_AVAudioRouteName()
315
316 #endif // HAVE(CELESTIAL)
317
318 SOFT_LINK_FRAMEWORK(UIKit)
319 SOFT_LINK_CLASS(UIKit, UIDevice)
320 #define UIDevice getUIDeviceClass()
321
322 #endif // PLATFORM(IOS)
323
324 using namespace WebCore;
325
326 enum MediaPlayerAVFoundationObservationContext {
327     MediaPlayerAVFoundationObservationContextPlayerItem,
328     MediaPlayerAVFoundationObservationContextPlayerItemTrack,
329     MediaPlayerAVFoundationObservationContextPlayer,
330     MediaPlayerAVFoundationObservationContextAVPlayerLayer,
331 };
332
333 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
334 @interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
335 #else
336 @interface WebCoreAVFMovieObserver : NSObject
337 #endif
338 {
339     MediaPlayerPrivateAVFoundationObjC* m_callback;
340     int m_delayCallbacks;
341 }
342 -(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
343 -(void)disconnect;
344 -(void)metadataLoaded;
345 -(void)didEnd:(NSNotification *)notification;
346 -(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
347 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
348 - (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
349 - (void)outputSequenceWasFlushed:(id)output;
350 #endif
351 @end
352
353 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
354 @interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
355     MediaPlayerPrivateAVFoundationObjC* m_callback;
356 }
357 - (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
358 - (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
359 - (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
360 @end
361 #endif
362
363 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
364 @interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
365     MediaPlayerPrivateAVFoundationObjC *m_callback;
366     dispatch_semaphore_t m_semaphore;
367 }
368 - (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
369 - (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
370 - (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
371 - (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
372 @end
373 #endif
374
375 namespace WebCore {
376 using namespace PAL;
377
378 static NSArray *assetMetadataKeyNames();
379 static NSArray *itemKVOProperties();
380 static NSArray *assetTrackMetadataKeyNames();
381 static NSArray *playerKVOProperties();
382 static AVAssetTrack* firstEnabledTrack(NSArray* tracks);
383
384 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Lazily-created serial queue on which all WebCoreAVFLoaderDelegate
// resource-loading callbacks are delivered.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_once_t creationOnce;
    static dispatch_queue_t loaderQueue;
    dispatch_once(&creationOnce, ^{
        loaderQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return loaderQueue;
}
394 #endif
395
396 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Lazily-created serial queue on which WebCoreAVFPullDelegate video-output
// callbacks are delivered.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_once_t creationOnce;
    static dispatch_queue_t pullQueue;
    dispatch_once(&creationOnce, ^{
        pullQueue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return pullQueue;
}
406 #endif
407
// Registers this engine with the MediaPlayer factory, but only when the
// soft-linked AVFoundation framework is actually present on this system.
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    auto constructor = [](MediaPlayer* player) {
        return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player);
    };
    registrar(constructor, getSupportedTypes, supportsType, originsInMediaCache, clearMediaCache, clearMediaCacheForOrigins, supportsKeySystem);
    ASSERT(AVFoundationMIMETypeCache::singleton().isAvailable());
}
417
// Returns the AVAssetCache rooted at |path|; with an empty path, falls back
// to a "MediaCache" directory under the system temporary directory.
static AVAssetCacheType *assetCacheForPath(const String& path)
{
    NSURL *cacheDirectoryURL;
    if (path.isEmpty())
        cacheDirectoryURL = [[NSURL fileURLWithPath:NSTemporaryDirectory()] URLByAppendingPathComponent:@"MediaCache" isDirectory:YES];
    else
        cacheDirectoryURL = [NSURL fileURLWithPath:path isDirectory:YES];

    return [initAVAssetCache() assetCacheWithURL:cacheDirectoryURL];
}
429
// Collects the security origins of every valid-URL key currently present in
// the asset cache at |path|.
HashSet<RefPtr<SecurityOrigin>> MediaPlayerPrivateAVFoundationObjC::originsInMediaCache(const String& path)
{
    HashSet<RefPtr<SecurityOrigin>> cachedOrigins;
    for (NSString *cacheKey in [assetCacheForPath(path) allKeys]) {
        URL keyAsURL = URL(URL(), cacheKey);
        if (!keyAsURL.isValid())
            continue;
        cachedOrigins.add(SecurityOrigin::create(keyAsURL));
    }
    return cachedOrigins;
}
440
// Converts an NSDate into a WallTime (seconds since the Unix epoch).
static WallTime toSystemClockTime(NSDate *date)
{
    ASSERT(date);
    NSTimeInterval secondsSinceEpoch = date.timeIntervalSince1970;
    return WallTime::fromRawSeconds(secondsSinceEpoch);
}
446
// Removes cached media at |path| that was modified after |modifiedSince|.
// A non-positive cutoff clears the entire cache directory wholesale.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCache(const String& path, WallTime modifiedSince)
{
    AVAssetCacheType* assetCache = assetCacheForPath(path);

    // Phase 1: evict entries AVAssetCache itself tracks, newest-first by cutoff.
    for (NSString *key in [assetCache allKeys]) {
        if (toSystemClockTime([assetCache lastModifiedDateOfEntryForKey:key]) > modifiedSince)
            [assetCache removeEntryForKey:key];
    }

    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSURL *baseURL = [assetCache URL];

    // "Clear everything" fast path: delete the whole cache directory and stop.
    if (modifiedSince <= WallTime::fromRawSeconds(0)) {
        [fileManager removeItemAtURL:baseURL error:nil];
        return;
    }

    // Phase 2: walk the cache directory (top level only) for loose
    // "CachedMedia-" files newer than the cutoff.
    NSArray *propertyKeys = @[NSURLNameKey, NSURLContentModificationDateKey, NSURLIsRegularFileKey];
    NSDirectoryEnumerator *enumerator = [fileManager enumeratorAtURL:baseURL includingPropertiesForKeys:
        propertyKeys options:NSDirectoryEnumerationSkipsSubdirectoryDescendants
        errorHandler:nil];

    // Collect first, delete afterwards — deleting while enumerating the same
    // directory could invalidate the enumerator.
    RetainPtr<NSMutableArray> urlsToDelete = adoptNS([[NSMutableArray alloc] init]);
    for (NSURL *fileURL : enumerator) {
        NSDictionary *fileAttributes = [fileURL resourceValuesForKeys:propertyKeys error:nil];

        if (![fileAttributes[NSURLNameKey] hasPrefix:@"CachedMedia-"])
            continue;

        if (![fileAttributes[NSURLIsRegularFileKey] boolValue])
            continue;

        if (toSystemClockTime(fileAttributes[NSURLContentModificationDateKey]) <= modifiedSince)
            continue;

        [urlsToDelete addObject:fileURL];
    }

    for (NSURL *fileURL in urlsToDelete.get())
        [fileManager removeItemAtURL:fileURL error:nil];
}
488
// Evicts every asset-cache entry whose key's origin appears in |origins|.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCacheForOrigins(const String& path, const HashSet<RefPtr<SecurityOrigin>>& origins)
{
    AVAssetCacheType* assetCache = assetCacheForPath(path);
    for (NSString *cacheKey in [assetCache allKeys]) {
        URL keyAsURL = URL(URL(), cacheKey);
        if (!keyAsURL.isValid())
            continue;
        if (!origins.contains(SecurityOrigin::create(keyAsURL)))
            continue;
        [assetCache removeEntryForKey:cacheKey];
    }
}
500
// Constructs the AVFoundation-backed media player. The heavyweight platform
// objects (AVPlayer, AVPlayerItem, layers, outputs) are created lazily during
// load; here we only allocate the Obj-C helper delegates and reset cached state.
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_videoFullscreenLayerManager(std::make_unique<VideoFullscreenLayerManagerObjC>())
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
    // Receives KVO and NSNotification callbacks on this object's behalf.
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Pull delegate for AVPlayerItemVideoOutput; the semaphore is created on demand.
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
    , m_videoOutputSemaphore(nullptr)
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Handles AVAssetResourceLoader requests for custom URL loading.
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
#endif
    , m_currentTextTrack(0)
    , m_cachedRate(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_shouldBufferData(true)
    , m_cachedIsReadyForDisplay(false)
    , m_haveBeenAskedToCreateLayer(false)
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
}
532
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Detach the loader delegate first so late AVAssetResourceLoader callbacks
    // cannot re-enter this dying object, then abort in-flight resource loads.
    [m_loaderDelegate.get() setCallback:0];
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];

    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Likewise sever the video-output pull delegate, and release the semaphore
    // used to wait for new pixel buffers (created lazily, so may be null).
    [m_videoOutputDelegate setCallback:0];
    [m_videoOutput setDelegate:nil queue:0];
    if (m_videoOutputSemaphore)
        dispatch_release(m_videoOutputSemaphore);
#endif

    if (m_videoLayer)
        destroyVideoLayer();

    // cancelLoad() performs the remaining teardown: KVO deregistration,
    // notification removal, and release of the player/item.
    cancelLoad();
}
554
// Stops loading and tears down all AVFoundation state: rendering, observers,
// the asset, the player item, and the player. Safe to call repeatedly; every
// step is guarded on the relevant member still being non-null.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    INFO_LOG(LOGIDENTIFIER);
    tearDownVideoRendering();

    // Stop receiving notifications (e.g. AVPlayerItemDidPlayToEndTime) and
    // sever the observer's back-pointer to this object.
    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Detach the legible (caption) output from the item before dropping it.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    // Deregister every KVO observation added on the player item...
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];

        m_avPlayerItem = nil;
    }
    // ...and on the player itself, including the periodic time observer.
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;

        for (NSString *keyName in playerKVOProperties())
            [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];

        [m_avPlayer replaceCurrentItemWithPlayerItem:nil];
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = MediaTime::zeroTime();

    // Each cached track was observed for "enabled" changes; remove those too.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Disconnect the Web Audio source provider from the now-released item/track.
    if (m_provider) {
        m_provider->setPlayerItem(nullptr);
        m_provider->setAudioTrack(nullptr);
    }
#endif

    setIgnoreLoadStateChanges(false);
}
620
// A layer renderer is considered present once layer creation has been
// requested, even though the layer itself is created asynchronously.
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_haveBeenAskedToCreateLayer;
}
625
// A context renderer exists when either the video output or the image
// generator has been created.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    return m_videoOutput || m_imageGenerator;
#else
    return m_imageGenerator;
#endif
}
634
// Creates whichever context renderer this configuration supports: an
// AVPlayerItemVideoOutput when available, otherwise an AVAssetImageGenerator.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
643
// Lazily creates the AVAssetImageGenerator used to paint frames when no
// video output is available. Requires a loaded asset; idempotent.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    using namespace PAL;
    INFO_LOG(LOGIDENTIFIER);

    if (m_imageGenerator || !m_avAsset)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    // Honor the track's preferred transform, crop to the clean aperture, and
    // request exact-time frames (zero tolerance on both sides).
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];
}
659
// Tears down both possible context renderers; each destroy call is a no-op
// if the corresponding renderer was never created.
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
#endif
    destroyImageGenerator();
}
667
// Releases the image generator, if one exists.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (!m_imageGenerator)
        return;

    INFO_LOG(LOGIDENTIFIER);
    m_imageGenerator = 0;
}
677
// Requests creation of the AVPlayerLayer. The actual work is deferred to the
// main thread; preconditions are re-checked there since state may change in
// the interim, and a weak pointer guards against this object dying first.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
        return;

    callOnMainThread([this, weakThis = makeWeakPtr(*this)] {
        if (!weakThis)
            return;

        // Re-validate: another caller may have created the layer, or the
        // player may have been torn down, before this task ran.
        if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
            return;
        m_haveBeenAskedToCreateLayer = true;

        if (!m_videoLayer)
            createAVPlayerLayer();

#if USE(VIDEOTOOLBOX) && HAVE(AVFOUNDATION_VIDEO_OUTPUT)
        // Also create the video output so frames can be painted to a context.
        if (!m_videoOutput)
            createVideoOutput();
#endif

        // Let the client know rendering switched to the layer path.
        player()->client().mediaPlayerRenderingModeChanged(player());
    });
}
702
// Creates the AVPlayerLayer, attaches it to the player, and hands it to the
// fullscreen layer manager. Must only run once a player exists.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
{
    if (!m_avPlayer)
        return;

    m_videoLayer = adoptNS([[AVPlayerLayer alloc] init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];

#ifndef NDEBUG
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
    // Observe readyForDisplay so hasAvailableVideoFrame() can report layer
    // readiness; removed again in destroyVideoLayer().
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
    IntSize defaultSize = snappedIntRect(player()->client().mediaPlayerContentBoxRect()).size();
    INFO_LOG(LOGIDENTIFIER);

    m_videoFullscreenLayerManager->setVideoLayer(m_videoLayer.get(), defaultSize);

#if PLATFORM(IOS) && !PLATFORM(WATCHOS)
    // -setPIPModeEnabled: is not available on every OS version; probe first.
    if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(player()->fullscreenMode() & MediaPlayer::VideoFullscreenModePictureInPicture)];
#endif
}
727
// Detaches and releases the AVPlayerLayer. The KVO observation added in
// createAVPlayerLayer() must be removed before the layer is released.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    INFO_LOG(LOGIDENTIFIER);

    [m_videoLayer removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer setPlayer:nil];
    m_videoFullscreenLayerManager->didDestroyVideoLayer();

    m_videoLayer = nil;
}
741
// Computes the stream's start date in milliseconds since the epoch. The item's
// currentDate advances with playback, so the start date is currentDate minus the
// current playback offset.
MediaTime MediaPlayerPrivateAVFoundationObjC::getStartDate() const
{
    double dateMilliseconds = [[m_avPlayerItem currentDate] timeIntervalSince1970] * 1000;

    // AVFoundation reports 0 when the media file carries no start date; no live
    // stream was produced at the 1970 epoch, so 0 means "no date available".
    if (!dateMilliseconds)
        return MediaTime::invalidTime();

    double playbackOffsetMilliseconds = CMTimeGetSeconds([m_avPlayerItem currentTime]) * 1000;

    // Round to compensate for sub-second error introduced by the subtraction.
    return MediaTime::createWithDouble(round(dateMilliseconds - playbackOffsetMilliseconds));
}
756
// Answers whether a video frame could be displayed right now.
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    // When rendering to a layer, the KVO-cached readyForDisplay value is authoritative.
    if (currentRenderingMode() == MediaRenderingToLayer)
        return m_cachedIsReadyForDisplay;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Otherwise a frame is available when the video output already produced a pixel
    // buffer, or has a new one for the item's current time.
    if (m_videoOutput && (m_lastPixelBuffer || [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]]))
        return true;
#endif

    return m_videoFrameHasDrawn;
}
769
770 #if ENABLE(AVF_CAPTIONS)
// Maps an out-of-band text track kind to the AVFoundation media-characteristic
// array used when describing the track to AVURLAsset. In "2015 captions behavior"
// manual-selection mode, every track is tagged as auxiliary content instead.
// Uses modern ObjC array literals; behavior is identical to the legacy
// arrayWithObjects: form (both return autoreleased NSArrays).
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    static bool manualSelectionMode = MTEnableCaption2015BehaviorPtr() && MTEnableCaption2015BehaviorPtr()();
    if (manualSelectionMode)
        return @[ AVMediaCharacteristicIsAuxiliaryContent ];

    // FIXME: Match these to correct types:
    if (kind == PlatformTextTrack::Caption)
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];

    if (kind == PlatformTextTrack::Subtitle)
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];

    if (kind == PlatformTextTrack::Description)
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility ];

    if (kind == PlatformTextTrack::Forced)
        return @[ AVMediaCharacteristicContainsOnlyForcedSubtitles ];

    return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];
}
792     
// Forwards out-of-band track mode changes to trackModeChanged().
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
797     
// Pushes the modes of the client's out-of-band track sources onto the matching
// out-of-band text tracks, pairing them via the uniqueId that createAVAssetForURL()
// stored as the AVMediaSelectionOption's identifier.
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    
    for (auto& textTrack : m_textTracks) {
        // Only out-of-band tracks are synchronized here.
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;
        
        RefPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> currentOption = trackPrivate->mediaSelectionOption();
        
        for (auto& track : outOfBandTrackSources) {
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();
            
            if (![[currentOption.get() outOfBandIdentifier] isEqual:(__bridge NSString *)uniqueID.get()])
                continue;
            
            // Unrecognized source modes fall back to Hidden.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;
            
            textTrack->setMode(mode);
            break;
        }
    }
}
828 #endif
829
830
// Returns the canonical form of |url| as produced by the registered NSURLProtocol
// handlers, or the trivially-converted NSURL when no canonical form is available.
static NSURL *canonicalURL(const URL& url)
{
    NSURL *originalURL = url;
    if (url.isEmpty())
        return originalURL;

    auto request = adoptNS([[NSURLRequest alloc] initWithURL:originalURL]);
    if (!request)
        return originalURL;

    // Let NSURLProtocol normalize the request; use the canonical request's URL when
    // one is produced.
    if (NSURLRequest *canonicalRequest = [NSURLProtocol canonicalRequestForRequest:request.get()])
        return [canonicalRequest URL];

    return originalURL;
}
847
848 #if PLATFORM(IOS)
// Converts a WebCore Cookie into an NSHTTPCookie. The required attributes are
// assembled up front; the optional secure/discard flags are added afterwards.
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    // -mutableCopy returns a +1 object, which adoptNS takes ownership of.
    RetainPtr<NSMutableDictionary> properties = adoptNS([@{
        NSHTTPCookieName: cookie.name,
        NSHTTPCookieValue: cookie.value,
        NSHTTPCookieDomain: cookie.domain,
        NSHTTPCookiePath: cookie.path,
        NSHTTPCookieExpires: [NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)],
    } mutableCopy]);

    if (cookie.secure)
        [properties setObject:@YES forKey:NSHTTPCookieSecure];
    if (cookie.session)
        [properties setObject:@YES forKey:NSHTTPCookieDiscard];

    return [NSHTTPCookie cookieWithProperties:properties.get()];
}
866 #endif
867
// Creates the AVURLAsset for |url|, assembling its options dictionary from player
// state: reference restrictions, HTTP header fields (Referer/User-Agent), custom
// loading keys, the out-of-band MIME type, out-of-band text tracks, cookies, and
// cache policy. Also installs the resource loader delegate and, when enabled, a
// WebCore-backed NSURLSession for media loading.
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const URL& url)
{
    if (m_avAsset)
        return;

    INFO_LOG(LOGIDENTIFIER);

    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);    

    // Disallow references between local and remote media in either direction.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    // Only attach the header dictionary when it actually has entries.
    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject:@YES forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

    // Prefer routing loads through the client's URL loading when the keys are available.
    if (canLoadAVURLAssetUseClientURLLoadingExclusively())
        [options setObject:@YES forKey:AVURLAssetUseClientURLLoadingExclusively];
#if PLATFORM(IOS)
    else if (canLoadAVURLAssetRequiresCustomURLLoadingKey())
        [options setObject:@YES forKey:AVURLAssetRequiresCustomURLLoadingKey];
    // FIXME: rdar://problem/20354688
    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty() && canLoadAVURLAssetClientBundleIdentifierKey())
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];
#endif

    // Pass the declared MIME type (plus codecs, when present) out of band, but only
    // when it was explicitly provided rather than inferred from the file extension.
    auto type = player()->contentMIMEType();
    if (canLoadAVURLAssetOutOfBandMIMETypeKey() && !type.isEmpty() && !player()->contentMIMETypeWasInferredFromExtension()) {
        auto codecs = player()->contentTypeCodecs();
        if (!codecs.isEmpty()) {
            NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)type, (NSString *)codecs];
            [options setObject:typeString forKey:AVURLAssetOutOfBandMIMETypeKey];
        } else
            [options setObject:(NSString *)type forKey:AVURLAssetOutOfBandMIMETypeKey];
    }

#if ENABLE(AVF_CAPTIONS)
    // Describe each out-of-band text track so AVFoundation exposes it as a media
    // selection option. The uniqueId stored under the identifier key is later used
    // by synchronizeTextTrackState() to pair sources with tracks.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: (__bridge NSString *)label.get(),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: (__bridge NSString *)language.get(),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: (__bridge NSString *)uniqueID.get(),
                AVOutOfBandAlternateTrackSourceKey: (__bridge NSString *)url.get(),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS)
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS)
    // Forward the document's cookies so media requests carry the same credentials
    // as other loads for this URL.
    Vector<Cookie> cookies;
    if (player()->getRawCookies(url, cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        if (canLoadAVURLAssetHTTPCookiesKey())
            [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    bool usePersistentCache = player()->client().mediaPlayerShouldUsePersistentCache();
    [options setObject:@(!usePersistentCache) forKey:AVURLAssetUsesNoPersistentCacheKey];
    
    if (usePersistentCache)
        [options setObject:assetCacheForPath(player()->client().mediaPlayerMediaCacheDirectory()) forKey:AVURLAssetCacheKey];

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([[AVURLAsset alloc] initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    [resourceLoader setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];

#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED > 101100
    // When the (S)PI is present, hand the resource loader a WebCoreNSURLSession so
    // media data is fetched through WebCore's resource loader.
    if (DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled()
        && [resourceLoader respondsToSelector:@selector(setURLSession:)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegate)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegateQueue)]) {
        RefPtr<PlatformMediaResourceLoader> mediaResourceLoader = player()->createResourceLoader();
        if (mediaResourceLoader)
            resourceLoader.URLSession = (NSURLSession *)[[[WebCoreNSURLSession alloc] initWithResourceLoader:*mediaResourceLoader delegate:resourceLoader.URLSessionDataDelegate delegateQueue:resourceLoader.URLSessionDataDelegateQueue] autorelease];
    }
#endif

#endif

    // A new asset's playability has not been checked yet.
    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
989
// Replaces the AVPlayer's current item. When called off the main thread, the
// replacement is dispatched asynchronously to the main queue, with the player and
// item kept alive by the captured RetainPtrs.
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItemType *item)
{
    if (!m_avPlayer)
        return;

    if (pthread_main_np()) {
        [m_avPlayer replaceCurrentItemWithPlayerItem:item];
        return;
    }

    RetainPtr<AVPlayerType> strongPlayer = m_avPlayer.get();
    RetainPtr<AVPlayerItemType> strongItem = item;
    dispatch_async(dispatch_get_main_queue(), [strongPlayer, strongItem] {
        [strongPlayer replaceCurrentItemWithPlayerItem:strongItem.get()];
    });
}
1006
// Creates and configures the AVPlayer: registers KVO observers, applies cached
// state (media selection policy, external playback, playback target, sleep
// suppression, muted), creates the video layer when the element is video, and
// attaches any already-created player item.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    INFO_LOG(LOGIDENTIFIER);

    setDelayCallbacks(true);

    m_avPlayer = adoptNS([[AVPlayer alloc] init]);
    for (NSString *keyName in playerKVOProperties())
        [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Media selection is driven from WebKit, not by AVFoundation's automatic criteria.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET) && !PLATFORM(IOS)
    if (m_shouldPlayToPlaybackTarget) {
        // Clear m_shouldPlayToPlaybackTarget so setShouldPlayToPlaybackTarget()
        // doesn't return without doing anything.
        m_shouldPlayToPlaybackTarget = false;
        setShouldPlayToPlaybackTarget(true);
    }
#endif

#if PLATFORM(IOS) && !PLATFORM(IOS_SIMULATOR) && !PLATFORM(IOSMAC)
    setShouldDisableSleep(player()->shouldDisableSleep());
#endif

    if (m_muted) {
        // Clear m_muted so setMuted() doesn't return without doing anything, then
        // re-apply the muted state to the newly created player. (Previously this
        // called -setMuted: with the just-cleared value, which unmuted the player
        // and lost the cached muted state.)
        m_muted = false;
        setMuted(true);
    }

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
1055
// Creates the AVPlayerItem for the current asset and wires up everything that
// depends on it: the did-play-to-end notification, per-key KVO observers, the
// audio pitch algorithm, the legible (caption) output, the Web Audio provider,
// and the pixel-buffer video output.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    INFO_LOG(LOGIDENTIFIER);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([[AVPlayerItem alloc] initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // Request both "new" and "prior" change notifications for each observed key.
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Captions are rendered by WebKit: request WebVTT native samples, suppress
    // AVFoundation's own rendering, and deliver cues to the observer on the main
    // queue with a two-second lead time.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([[AVPlayerItemLegibleOutput alloc] initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Point an existing Web Audio provider at the new item and its first enabled audio track.
    if (m_provider) {
        m_provider->setPlayerItem(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }
#endif

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#endif

    setDelayCallbacks(false);
}
1105
// Starts asynchronous loading of the asset's "playable" and "tracks" keys, at most
// once per asset; completion is reported through the AssetPlayabilityKnown
// notification on the main thread.
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    INFO_LOG(LOGIDENTIFIER);
    auto weakThis = makeWeakPtr(*this);

    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObjects:@"playable", @"tracks", nil] completionHandler:^{
        callOnMainThread([weakThis] {
            // The player may have been destroyed before the keys finished loading.
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
1122
// Starts asynchronous loading of the asset-level metadata keys and, once the track
// list is available, of each track's metadata keys. A dispatch group tracks all
// outstanding loads; when it drains, the observer's metadataLoaded is invoked on
// the main thread.
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    INFO_LOG(LOGIDENTIFIER);

    OSObjectPtr<dispatch_group_t> metadataLoadingGroup = adoptOSObject(dispatch_group_create());
    // Enter once for the asset-level load; balanced in the completion lambda below.
    dispatch_group_enter(metadataLoadingGroup.get());
    auto weakThis = makeWeakPtr(*this);
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            // Fan out to per-track loads only when the "tracks" key actually loaded
            // (and the player still exists).
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup.get());
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup.get());
                    }];
                }
            }
            // Balances the enter performed before the asset-level load started.
            dispatch_group_leave(metadataLoadingGroup.get());
        });
    }];

    dispatch_group_notify(metadataLoadingGroup.get(), dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });
    });
}
1152
1153 MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
1154 {
1155     if (!m_avPlayerItem)
1156         return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;
1157
1158     if (m_cachedItemStatus == AVPlayerItemStatusUnknown)
1159         return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
1160     if (m_cachedItemStatus == AVPlayerItemStatusFailed)
1161         return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
1162     if (m_cachedLikelyToKeepUp)
1163         return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
1164     if (m_cachedBufferFull)
1165         return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
1166     if (m_cachedBufferEmpty)
1167         return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;
1168
1169     return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
1170 }
1171
// Returns the inline video layer managed by the fullscreen layer manager.
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    return m_videoFullscreenLayerManager->videoInlineLayer();
}
1176
// Refreshes the still image shown inline while the video is fullscreen, using a
// synchronously captured last frame. No-op without video-output support.
void MediaPlayerPrivateAVFoundationObjC::updateVideoFullscreenInlineImage()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    updateLastImage(UpdateType::UpdateSynchronously);
    m_videoFullscreenLayerManager->updateVideoFullscreenInlineImage(m_lastImage);
#endif
}
1184
// Moves the video into (or out of) the given fullscreen layer. When video output
// is available, the last frame is captured synchronously so the inline placeholder
// has an up-to-date image; otherwise nil is passed.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer, Function<void()>&& completionHandler)
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    updateLastImage(UpdateType::UpdateSynchronously);
    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler), m_lastImage);
#else
    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler), nil);
#endif
    updateDisableExternalPlayback();
}
1195
// Forwards the fullscreen frame geometry to the layer manager.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
}
1200
// Records the requested fullscreen gravity and applies the corresponding
// AVPlayerLayer gravity string when a video layer exists.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;

    if (!m_videoLayer)
        return;

    // Map the cross-platform gravity onto AVFoundation's layer gravity constants,
    // falling back to aspect-fit for unexpected values.
    NSString *videoGravity;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        videoGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        videoGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    }

    // Avoid touching the layer (and the text track geometry) when nothing changed.
    if ([m_videoLayer videoGravity] == videoGravity)
        return;

    [m_videoLayer setVideoGravity:videoGravity];
    syncTextTrackBounds();
}
1224
// Propagates the fullscreen mode. On iOS (excluding watchOS) this toggles the
// layer's picture-in-picture mode (when the selector is available) and refreshes
// external playback; other platforms ignore the mode.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenMode(MediaPlayer::VideoFullscreenMode mode)
{
#if PLATFORM(IOS) && !PLATFORM(WATCHOS)
    if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(mode & MediaPlayer::VideoFullscreenModePictureInPicture)];
    updateDisableExternalPlayback();
#else
    UNUSED_PARAM(mode);
#endif
}
1235
1236 #if PLATFORM(IOS)
// Returns the most recently cached timed metadata, or nil when none exists.
// RetainPtr::get() already returns nil for an empty pointer, so no explicit null
// check is needed.
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    return m_currentMetaData.get();
}
1243
// Returns the player item's access log rendered as a String; empty when there is
// no player item yet.
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    if (!m_avPlayerItem)
        return emptyString();
    
    AVPlayerItemAccessLog *accessLog = [m_avPlayerItem.get() accessLog];
    auto logText = adoptNS([[NSString alloc] initWithData:[accessLog extendedLogData] encoding:[accessLog extendedLogDataStringEncoding]]);

    return logText.get();
}
1254
// Returns the player item's error log rendered as a String; empty when there is
// no player item yet.
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *errorLog = [m_avPlayerItem.get() errorLog];
    auto logText = adoptNS([[NSString alloc] initWithData:[errorLog extendedLogData] encoding:[errorLog extendedLogDataStringEncoding]]);

    return logText.get();
}
1265 #endif
1266
// Shows or hides the video layer, with implicit Core Animation transitions disabled
// so the change takes effect immediately.
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    if (m_videoLayer)
        [m_videoLayer.get() setHidden:!isVisible];
    [CATransaction commit];
}
1275     
// Starts playback by applying the requested rate to the AVPlayer; the rate is also
// cached so rate() can answer without querying the player. Callbacks triggered by
// the rate change are delayed across the update.
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    INFO_LOG(LOGIDENTIFIER);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = requestedRate();
    [m_avPlayer.get() setRate:requestedRate()];
    setDelayCallbacks(false);
}
1287
// Pauses playback by setting the rate to zero, mirroring platformPlay()'s
// cache-then-apply pattern with callbacks delayed across the update.
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    INFO_LOG(LOGIDENTIFIER);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = 0;
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
1299
// Returns the media duration: numeric CMTimes map to a MediaTime, indefinite maps
// to positive infinity (live streams), anything else is invalid.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will fetch the
    // answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();
    
    CMTime cmDuration;
    
    // Check the AVItem if we have one and it has loaded duration, some assets never report duration.
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        cmDuration = [m_avPlayerItem.get() duration];
    else
        cmDuration = [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return PAL::toMediaTime(cmDuration);

    if (CMTIME_IS_INDEFINITE(cmDuration))
        return MediaTime::positiveInfiniteTime();

    INFO_LOG(LOGIDENTIFIER, "returning invalid time");
    return MediaTime::invalidTime();
}
1324
// Returns the current playback position, clamped to zero. Non-numeric item times
// and the absence of metadata or a player item all report zero.
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime playerItemTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(playerItemTime))
        return MediaTime::zeroTime();

    // Clamp: AVFoundation can report slightly negative times near the start.
    return std::max(PAL::toMediaTime(playerItemTime), MediaTime::zeroTime());
}
1336
// Performs the seek on the AVPlayerItem with the given tolerances; seekCompleted()
// is invoked on the main thread when AVFoundation reports the seek finished.
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // Discard any partially-delivered metadata cues before moving the playhead.
    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = PAL::toCMTime(time);
    CMTime cmBefore = PAL::toCMTime(negativeTolerance);
    CMTime cmAfter = PAL::toCMTime(positiveTolerance);

    // [AVPlayerItem seekToTime] will throw an exception if toleranceBefore is negative.
    if (CMTimeCompare(cmBefore, kCMTimeZero) < 0)
        cmBefore = kCMTimeZero;
    
    auto weakThis = makeWeakPtr(*this);

    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            // The player may have been destroyed before the seek completed.
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1367
// Applies the volume to the AVPlayer. On iOS this is intentionally a no-op.
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS)
    UNUSED_PARAM(volume);
    return;
#else
    if (!m_avPlayer)
        return;

    [m_avPlayer.get() setVolume:volume];
#endif
}
1380
// Updates the cached muted state and applies it to the AVPlayer. The state is
// cached even before a player exists; createAVPlayer() re-applies it.
void MediaPlayerPrivateAVFoundationObjC::setMuted(bool muted)
{
    if (m_muted == muted)
        return;

    INFO_LOG(LOGIDENTIFIER, "- ", muted);

    m_muted = muted;

    if (!m_avPlayer)
        return;

    [m_avPlayer.get() setMuted:m_muted];
}
1395
// The visibility parameter is unused here; this override only checks metadata
// availability and logs the request.
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    INFO_LOG(LOGIDENTIFIER, "- ", closedCaptionsVisible);
}
1405
// Applies a new playback rate, caching it so rate() can answer without querying
// the player. Callbacks triggered by the rate change are delayed across the update.
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    setDelayCallbacks(true);
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1413
// Returns the cached playback rate, which is only meaningful once metadata has loaded.
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    return metaDataAvailable() ? m_cachedRate : 0;
}
1421
// Forwards AVPlayerItem's seekableTimeRangesLastModifiedTime where the API exists
// (macOS 10.13+ / iOS 11+); returns 0 elsewhere.
double MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesLastModifiedTime() const
{
#if (PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101300) || (PLATFORM(IOS) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000)
    return [m_avPlayerItem seekableTimeRangesLastModifiedTime];
#else
    return 0;
#endif
}
1430
// Forwards AVPlayerItem's liveUpdateInterval where the API exists
// (macOS 10.13+ / iOS 11+); returns 0 elsewhere.
double MediaPlayerPrivateAVFoundationObjC::liveUpdateInterval() const
{
#if (PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101300) || (PLATFORM(IOS) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000)
    return [m_avPlayerItem liveUpdateInterval];
#else
    return 0;
#endif
}
1439
// Selects the audio pitch algorithm (Spectral preserves pitch across rate changes;
// Varispeed lets pitch follow rate), matching the mapping used in
// createAVPlayerItem(). No-op until a player item exists.
void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
{
    if (m_avPlayerItem)
        [m_avPlayerItem setAudioTimePitchAlgorithm:(preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];
}
1445
// Builds a PlatformTimeRanges from the KVO-cached loaded ranges, skipping invalid
// or empty entries. Returns an empty object when there is no player item.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto timeRanges = std::make_unique<PlatformTimeRanges>();

    if (!m_avPlayerItem)
        return timeRanges;

    for (NSValue *thisRangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (CMTIMERANGE_IS_VALID(timeRange) && !CMTIMERANGE_IS_EMPTY(timeRange))
            timeRanges->add(PAL::toMediaTime(timeRange.start), PAL::toMediaTime(CMTimeRangeGetEnd(timeRange)));
    }
    return timeRanges;
}
1460
// Returns the earliest start time among the cached valid, non-empty seekable
// ranges, or zero when no such range exists.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    // Track whether any range was usable so a fully-invalid list still yields zero.
    MediaTime minTimeSeekable = MediaTime::positiveInfiniteTime();
    bool hasValidRange = false;
    for (NSValue *thisRangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(timeRange) || CMTIMERANGE_IS_EMPTY(timeRange))
            continue;

        hasValidRange = true;
        MediaTime startOfRange = PAL::toMediaTime(timeRange.start);
        if (minTimeSeekable > startOfRange)
            minTimeSeekable = startOfRange;
    }
    return hasValidRange ? minTimeSeekable : MediaTime::zeroTime();
}
1480
// Returns the latest end time among the valid, non-empty seekable ranges. If the
// cache is empty it is refreshed synchronously from the player item first.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    MediaTime maxTimeSeekable;
    for (NSValue *thisRangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(timeRange) || CMTIMERANGE_IS_EMPTY(timeRange))
            continue;
        
        MediaTime endOfRange = PAL::toMediaTime(CMTimeRangeGetEnd(timeRange));
        if (maxTimeSeekable < endOfRange)
            maxTimeSeekable = endOfRange;
    }
    return maxTimeSeekable;
}
1498
// Returns the furthest end point among the cached loaded ranges; the ranges need
// not be contiguous. Zero when nothing has been buffered yet.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    MediaTime maxTimeLoaded;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange loadedRange = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(loadedRange) || CMTIMERANGE_IS_EMPTY(loadedRange))
            continue;

        maxTimeLoaded = std::max(maxTimeLoaded, PAL::toMediaTime(CMTimeRangeGetEnd(loadedRange)));
    }

    return maxTimeLoaded;
}
1517
// Sums the sample data length of every cached track, memoizing the result in
// m_cachedTotalBytes. Returns 0 until metadata is available.
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    // Serve the memoized total when we have already summed the tracks.
    if (m_cachedTotalBytes)
        return m_cachedTotalBytes;

    unsigned long long total = 0;
    for (AVPlayerItemTrack *itemTrack in m_cachedTracks.get())
        total += [[itemTrack assetTrack] totalSampleDataLength];

    m_cachedTotalBytes = total;
    return m_cachedTotalBytes;
}
1531
// Stores the retained AVAsset (or subclass) this player operates on; passing
// a null RetainPtr clears the current asset.
void MediaPlayerPrivateAVFoundationObjC::setAsset(RetainPtr<id> asset)
{
    m_avAsset = asset;
}
1536
// Maps the loading state of the asset's requested metadata keys onto the
// engine-independent AssetStatus enumeration, and caches whether every track
// satisfies the hardware-decode requirements (m_tracksArePlayable).
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    // The aggregate status is only as far along as the least-loaded key.
    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];

        if (error)
            ERROR_LOG(LOGIDENTIFIER, "failed for ", [keyName UTF8String], ", error = ", [[error localizedDescription] UTF8String]);

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading; // At least one key is not loaded yet.

        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    // When hardware support need not be checked, consider the tracks playable.
    if (!player()->shouldCheckHardwareSupport())
        m_tracksArePlayable = true;

    // Lazily evaluate (and cache) whether every asset track meets the
    // hardware decode requirements; any failing track makes all unplayable.
    if (!m_tracksArePlayable) {
        m_tracksArePlayable = true;
        for (AVAssetTrack *track in [m_avAsset tracks]) {
            if (!assetTrackMeetsHardwareDecodeRequirements(track, player()->mediaContentTypesRequiringHardwareSupport())) {
                m_tracksArePlayable = false;
                break;
            }
        }
    }

    // Fully loaded: "playable" only when AVFoundation agrees and the hardware check passed.
    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue] && m_tracksArePlayable.value())
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1577
// Returns the NSError code AVFoundation reported for the "playable" key, or
// 0 when there is no asset or no error.
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    if (!m_avAsset)
        return 0;

    NSError *error = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&error];
    // Messaging nil would also yield 0, but be explicit about the no-error case.
    return error ? [error code] : 0;
}
1587
// Paints the current video frame into the given context/rect, preferring the
// AVPlayerItemVideoOutput path when it already has a decoded frame available
// and falling back to the AVAssetImageGenerator path otherwise.
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    // Delay delegate callbacks while making blocking calls into AVFoundation.
    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    // Record that at least one frame has been rendered.
    m_videoFrameHasDrawn = true;
}
1608
// Best-effort paint entry point: skips painting when it is unnecessary
// (layer rendering active) or impossible (no renderer yet).
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    // We can ignore the request if metadata is missing, painting is disabled,
    // we are already rendering to a layer, or no image generator / video
    // output exists yet (paint() is best effort and must not create one).
    bool shouldSkipPaint = !metaDataAvailable()
        || context.paintingDisabled()
        || currentRenderingMode() == MediaRenderingToLayer
        || !hasContextRenderer();
    if (shouldSkipPaint)
        return;

    paintCurrentFrameInContext(context, rect);
}
1624
// Snapshots the current time via AVAssetImageGenerator and draws the result
// into the context, flipping vertically for CG's bottom-left origin.
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext& context, const FloatRect& rect)
{
    INFO_LOG(LOGIDENTIFIER);

    RetainPtr<CGImageRef> image = createImageForTimeInRect(currentTime(), rect);
    if (!image)
        return;

    GraphicsContextStateSaver stateSaver(context);
    // Flip the coordinate system vertically so the CGImage draws upright.
    context.translate(rect.x(), rect.y() + rect.height());
    context.scale(FloatSize(1.0f, -1.0f));
    context.setImageInterpolationQuality(InterpolationLow);
    IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context.platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
}
1639
// Creates a CGImage for the given media time, sized to fit |rect|, converted
// to the sRGB color space. Returns null when the generator produced no image.
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !RELEASE_LOG_DISABLED
    MonotonicTime start = MonotonicTime::now();
#endif

    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    // Errors are deliberately ignored; a failed snapshot simply yields nil.
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    // FIX: bail out before handing a NULL image to CGImageCreateCopyWithColorSpace,
    // which does not accept a NULL image parameter.
    if (!rawImage)
        return nullptr;
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), sRGBColorSpaceRef()));

#if !RELEASE_LOG_DISABLED
    DEBUG_LOG(LOGIDENTIFIER, "creating image took ", (MonotonicTime::now() - start).seconds());
#endif

    return image;
}
1660
// Reports the set of MIME types AVFoundation can decode on this system.
// FIX: the function body was missing its closing brace, which would make the
// following declarations nest inside this function and fail to compile.
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& supportedTypes)
{
    supportedTypes = AVFoundationMIMETypeCache::singleton().types();
}
1664
1665
1666 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// FairPlay Streaming (both historical spellings) and Clear Key are the only
// key systems this engine recognizes.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringASCIICase(keySystem, "com.apple.fps")
        || equalIgnoringASCIICase(keySystem, "com.apple.fps.1_0")
        || equalIgnoringASCIICase(keySystem, "org.w3c.clearkey");
}
1673 #endif
1674
// Answers MediaPlayer's "can this engine play this type?" query using only
// the MIME container type and codecs string; no media is actually loaded.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(MEDIA_SOURCE)
    // MSE and MediaStream content is handled by other media engines.
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif
#if ENABLE(MEDIA_STREAM)
    if (parameters.isMediaStream)
        return MediaPlayer::IsNotSupported;
#endif

    auto containerType = parameters.type.containerType();
    if (isUnsupportedMIMEType(containerType))
        return MediaPlayer::IsNotSupported;

    // The container must either be on the static list or decodable by AVFoundation.
    if (!staticMIMETypeList().contains(containerType) && !AVFoundationMIMETypeCache::singleton().canDecodeType(containerType))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.type.codecs().isEmpty())
        return MediaPlayer::MayBeSupported;

    if (!contentTypeMeetsHardwareDecodeRequirements(parameters.type, parameters.contentTypesRequiringHardwareSupport))
        return MediaPlayer::IsNotSupported;

    // With explicit codecs present, ask AVFoundation for a definitive answer.
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)containerType, (NSString *)parameters.type.parameter(ContentType::codecsParameter())];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1704
// Returns whether the given EME key system (optionally constrained by MIME
// type) is supported. Only meaningful when legacy EME is compiled in.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (!keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        // FIX: this returned MediaPlayer::IsNotSupported (an enum, value 0)
        // from a bool function; it only worked by accidental conversion.
        if (equalIgnoringASCIICase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringASCIICase(mimeType, "application/x-mpegurl"))
            return false;

        if (!keySystemIsSupported(keySystem))
            return false;

        if (!mimeType.isEmpty() && isUnsupportedMIMEType(mimeType))
            return false;

        // The MIME type, when given, must also be decodable by this engine.
        if (!mimeType.isEmpty() && !staticMIMETypeList().contains(mimeType) && !AVFoundationMIMETypeCache::singleton().canDecodeType(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1730
1731 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1732 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// Satisfies an AVAssetResourceLoadingRequest directly from already-obtained
// key data: fills in the content-information request (when present), then
// answers the data request with the requested byte range of |keyData|.
static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
{
    if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
        [infoRequest setContentLength:keyData->byteLength()];
        [infoRequest setByteRangeAccessSupported:YES];
    }

    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
        long long start = [dataRequest currentOffset];
        // Clamp the end of the requested range to the available key data.
        long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);

        // An out-of-bounds request cannot be satisfied; finish with a generic error.
        if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
            [request finishLoadingWithError:nil];
            return;
        }

        // ArrayBuffer::slice() takes ints; key payloads are expected to be small.
        ASSERT(start <= std::numeric_limits<int>::max());
        ASSERT(end <= std::numeric_limits<int>::max());
        RefPtr<ArrayBuffer> requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
        RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
        [dataRequest respondWithData:nsData.get()];
    }

    [request finishLoading];
}
1758 #endif
1759
// AVAssetResourceLoader delegate entry point: decides whether WebKit will
// satisfy this loading request. Returns true when the request will be handled
// asynchronously (key request or WebCoreAVFResourceLoader), false to let
// AVFoundation proceed/fail on its own.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
    // FairPlay Streaming key requests arrive with the "skd" URL scheme.
    if (scheme == "skd") {
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        unsigned byteLength = initDataBuffer->byteLength();
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer.copyRef(), 0, byteLength);
        initDataView->set<uint32_t>(0, keyURISize, true);

        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer.copyRef(), 4, keyURI.length());
        // NOTE(review): "keyURI.length() / sizeof(unsigned char)" equals keyURI.length();
        // the divisor looks like a leftover — confirm the intended element count.
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

        RefPtr<Uint8Array> initData = Uint8Array::create(WTFMove(initDataBuffer), 0, byteLength);
        if (!player()->keyNeeded(initData.get()))
            return false;
#endif
        // Remember the pending request so a later key update can fulfill it.
        m_keyURIToRequestMap.set(keyURI, avRequest);
#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
        // With a modern CDM instance attached, key delivery is handled elsewhere.
        if (m_cdmInstance)
            return false;

        RetainPtr<NSData> keyURIData = [keyURI dataUsingEncoding:NSUTF8StringEncoding allowLossyConversion:YES];
        auto keyURIBuffer = SharedBuffer::create(keyURIData.get());
        player()->initializationDataEncountered("skd"_s, keyURIBuffer->tryCreateArrayBuffer());
#endif
        return true;
    }

#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    // Clear Key requests carry the key id in the URL's resource specifier.
    if (scheme == "clearkey") {
        String keyID = [[[avRequest request] URL] resourceSpecifier];
        auto encodedKeyId = UTF8Encoding().encode(keyID, UnencodableHandling::URLEncodedEntities);

        auto initData = Uint8Array::create(encodedKeyId.size());
        initData->setRange(encodedKeyId.data(), encodedKeyId.size(), 0);

        // If the key is already cached, answer the request immediately.
        auto keyData = player()->cachedKeyForKeyId(keyID);
        if (keyData) {
            fulfillRequestWithKeyData(avRequest, keyData.get());
            return false;
        }

        if (!player()->keyNeeded(initData.get()))
            return false;

        m_keyURIToRequestMap.set(keyID, avRequest);
        return true;
    }
#endif
#endif

    // All other requests are serviced through a WebCoreAVFResourceLoader,
    // keyed by the request object for later cancellation/removal.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add((__bridge CFTypeRef)avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1823
// Stops (but does not remove) the resource loader servicing |avRequest|;
// the map entry is removed later in didStopLoadingRequest().
// FIX: removed the unused local 'scheme', which was computed and never read.
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get((__bridge CFTypeRef)avRequest);
    if (resourceLoader)
        resourceLoader->stopLoading();
}
1833
// Drops the bookkeeping entry mapping this request to its resource loader.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove((__bridge CFTypeRef)avRequest);
}
1838 #endif
1839
// This engine is usable only when both the AVFoundation and CoreMedia
// frameworks can be soft-linked at runtime.
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    return AVFoundationLibrary() && isCoreMediaFrameworkAvailable();
}
1844
// Intended to snap a time value to the media timescale; currently an identity
// mapping in both branches (see the FIXME below).
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1853
// How long (in seconds) the cross-platform layer may serve a cached current
// time before re-querying AVFoundation: 0 on iOS and macOS 10.10+, 5 on
// older macOS.
double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 1010
    return 0;
#else
    return 5;
#endif
}
1862
// Applies the aspect-ratio policy to the video layer's gravity inside an
// actions-disabled CATransaction so no implicit animation occurs.
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayerManager->videoFullscreenLayer())
        return;

    NSString *gravity = shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize;
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    [m_videoLayer.get() setVideoGravity:gravity];
    [CATransaction commit];
}
1879
// Returns the first track in |tracks| whose isEnabled flag is set, or nil
// when no track is enabled.
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    for (AVAssetTrack *track in tracks) {
        if ([track isEnabled])
            return track;
    }
    return nil;
}
1889
// Re-derives hasVideo/hasAudio/hasClosedCaptions whenever the track
// collection changes, refreshes the published track lists, and fires
// characteristicsChanged() if the primary audio language changed.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the previous primary audio language so a change can be detected below.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    // Batch characteristic-change notifications until the end of this method.
    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif
        auto size = firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize();
        // For videos with rotation tag set, the transformation above might return a CGSize instance with negative width or height.
        // See https://bugs.webkit.org/show_bug.cgi?id=172648.
        if (size.width() < 0)
            size.setWidth(-size.width());
        if (size.height() < 0)
            size.setHeight(-size.height());
        presentationSizeDidChange(size);
    } else {
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        // Derive the characteristics from the enabled AVPlayerItemTracks.
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // A selected option in a media-selection group also counts as audio/video.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
#endif
#endif

        // Always says we have video if the AVPlayerLayer is ready for display to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Prefer the legible media-selection group for captions when available.
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    INFO_LOG(LOGIDENTIFIER, "has video = ", hasVideo(), ", has audio = ", hasAudio(), ", has captions = ", hasClosedCaptions());

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio provider pointed at the (possibly new) audible track.
    if (m_provider)
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
#endif

    setDelayCharacteristicsChangedNotification(false);
}
2000
2001 #if ENABLE(VIDEO_TRACK)
2002
// Diffs the AVPlayerItemTracks of |trackType| in |tracks| against |oldItems|
// (the previously published private-track objects), notifies |player| of
// removed and added tracks, and updates |oldItems| in place.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Collect the current AVPlayerItemTracks of the requested media type.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    // Partition the old items into survivors and those whose track vanished.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    // Wrap each newly appeared track in a fresh private-track object.
    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the client only after oldItems reflects the new state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(*removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(*addedItem);
}
2046
2047 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2048
// Media-selection-group flavor of the track diffing helper: refreshes the
// group's options, diffs them against |oldItems|, notifies |player| of
// removed and added option-backed tracks, and updates |oldItems| in place.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, const Vector<String>& characteristics, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions(characteristics);

    // Gather the group's options that are backed by a real AVMediaSelectionOption.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOptionType* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        newSelectionOptions.add(option);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    // Partition the old items into survivors and those whose option disappeared.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (!oldItem->mediaSelectionOption())
            removedItems.append(oldItem);
        else if (removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the client only after oldItems reflects the new state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(*removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(*addedItem);
}
2108
2109 #endif
2110
// Rebuilds m_audioTracks from the current media state and notifies the
// MediaPlayer of added/removed audio tracks. Prefers the media-selection-group
// path when available, falling back to AVPlayerItemTrack diffing.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !RELEASE_LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    Vector<String> characteristics = player()->preferredAudioCharacteristics();
    // Lazily create the audible media-selection group on first use.
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, characteristics);
    }

    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, characteristics, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
    else
#endif
        determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

    // Re-sync the cached properties of every surviving track.
    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "track count was ", count, ", is ", m_audioTracks.size());
#endif
}
2137
// Rebuilds m_videoTracks from the current media state and notifies the
// MediaPlayer of added/removed video tracks, mirroring updateAudioTracks().
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !RELEASE_LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create the visual media-selection group on first use.
    if (!m_visualGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, Vector<String>());
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, Vector<String>(), &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
#endif

    // FIX: iterate m_videoTracks here — the original iterated m_audioTracks,
    // a copy/paste slip from updateAudioTracks(), so the refreshed video
    // tracks never had their properties re-synced.
    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "track count was ", count, ", is ", m_videoTracks.size());
#endif
}
2163
// Forwards to the fullscreen layer manager, which knows whether captions
// need a separate text-track representation layer.
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
    return m_videoFullscreenLayerManager->requiresTextTrackRepresentation();
}
2168
// Forwards to the fullscreen layer manager to resize the caption layer.
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
    m_videoFullscreenLayerManager->syncTextTrackBounds();
}
2173
// Hands the text-track representation over to the fullscreen layer manager.
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
    m_videoFullscreenLayerManager->setTextTrackRepresentation(representation);
}
2178
2179 #endif // ENABLE(VIDEO_TRACK)
2180
2181 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
2182
// Lazily creates the AudioSourceProviderAVFObjC that feeds Web Audio from
// this player item, wiring it to the first enabled audible asset track.
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    if (!m_provider) {
        m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }
    return m_provider.get();
}
2191
2192 #endif
2193
// Publishes the cached presentation size as the natural size; ignored until
// an asset exists.
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (!m_avAsset)
        return;

    setNaturalSize(m_cachedPresentationSize);
}
2201
// Mirrors the AVAsset's resolved URL into the cross-platform player state;
// clears it when there is no asset.
void MediaPlayerPrivateAVFoundationObjC::resolvedURLChanged()
{
    setResolvedURL(m_avAsset ? URL([m_avAsset resolvedURL]) : URL());
}
2206
// Reports whether every load performed for this asset passed its CORS
// checks. Only answerable when the WebCoreNSURLSession-backed loader is in
// use; otherwise conservatively returns false.
bool MediaPlayerPrivateAVFoundationObjC::didPassCORSAccessCheck() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED > 101100
    AVAssetResourceLoader *loader = m_avAsset.get().resourceLoader;
    bool canUseSession = DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled()
        && [loader respondsToSelector:@selector(URLSession)];
    if (!canUseSession)
        return false;

    WebCoreNSURLSession *loaderSession = (WebCoreNSURLSession *)loader.URLSession;
    if ([loaderSession isKindOfClass:[WebCoreNSURLSession class]])
        return loaderSession.didPassCORSAccessChecks;
#endif
    return false;
}
2221
// Asks the WebCoreNSURLSession whether loads for this asset would taint
// |origin|. Returns nullopt when the answer cannot be determined.
std::optional<bool> MediaPlayerPrivateAVFoundationObjC::wouldTaintOrigin(const SecurityOrigin& origin) const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED > 101100
    AVAssetResourceLoader *loader = m_avAsset.get().resourceLoader;
    bool canUseSession = DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled()
        && [loader respondsToSelector:@selector(URLSession)];
    if (!canUseSession)
        return false;

    WebCoreNSURLSession *loaderSession = (WebCoreNSURLSession *)loader.URLSession;
    if ([loaderSession isKindOfClass:[WebCoreNSURLSession class]])
        return [loaderSession wouldTaintOrigin:origin];
#endif
    // Unable to determine; let the caller decide how to treat the unknown case.
    return std::nullopt;
}
2236
2237
2238 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
2239
// Creates the AVPlayerItemVideoOutput used for painting and attaches it to
// the player item. No-op when there is no player item yet or when an output
// already exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    INFO_LOG(LOGIDENTIFIER);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // With VideoToolbox a pixel-buffer conformer performs format conversion
    // later, so let AVFoundation choose its preferred pixel format here.
    NSDictionary* attributes = nil;
#else
    NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey, nil];
#endif
    m_videoOutput = adoptNS([[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    // Deliver "media data ready" callbacks on the shared pull-delegate queue.
    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];
}
2259
// Detaches the video output from the player item (when one exists) and drops
// our reference to it.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];

    INFO_LOG(LOGIDENTIFIER);

    // FIX: clear the RetainPtr with nullptr rather than the integer literal 0.
    m_videoOutput = nullptr;
}
2272
// Pulls the pixel buffer for the player item's current time from the video
// output into m_lastPixelBuffer. Returns true when a new buffer was fetched
// (which also invalidates the cached m_lastImage), false otherwise.
bool MediaPlayerPrivateAVFoundationObjC::updateLastPixelBuffer()
{
    if (!m_avPlayerItem)
        return false;

    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    // Nothing to do when the output has no frame newer than the last one copied.
    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return false;

    m_lastPixelBuffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
    // The cached CGImage no longer matches the new pixel buffer.
    m_lastImage = nullptr;
    return true;
}
2291
// True when a frame can be displayed: either an image has already been
// converted, or the video output holds a new pixel buffer for "now".
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;

    // An already-converted image counts as an available frame.
    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    CMTime itemTime = [m_avPlayerItem currentTime];
    return [m_videoOutput hasNewPixelBufferForItemTime:itemTime];
}
2305
// Converts the most recent pixel buffer into m_lastImage. With
// UpdateSynchronously, first blocks (bounded wait) until the video output
// reports new media data if no frame is available yet.
void MediaPlayerPrivateAVFoundationObjC::updateLastImage(UpdateType type)
{
#if HAVE(CORE_VIDEO)
    if (!m_avPlayerItem)
        return;

    if (type == UpdateType::UpdateSynchronously && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();

    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
    // for the requested time has already been retrieved. In this case, the last valid image (if any)
    // should be displayed.
    if (!updateLastPixelBuffer() && (m_lastImage || !m_lastPixelBuffer))
        return;

    // Lazily create the converter from CVPixelBuffer to native image.
    if (!m_pixelBufferConformer) {
#if USE(VIDEOTOOLBOX)
        NSDictionary *attributes = @{ (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#else
        // The video output already produced 32BGRA (see createVideoOutput),
        // so no conformance attributes are needed here.
        NSDictionary *attributes = nil;
#endif
        m_pixelBufferConformer = std::make_unique<PixelBufferConformerCV>((__bridge CFDictionaryRef)attributes);
    }

#if !RELEASE_LOG_DISABLED
    MonotonicTime start = MonotonicTime::now();
#endif

    m_lastImage = m_pixelBufferConformer->createImageFromPixelBuffer(m_lastPixelBuffer.get());

#if !RELEASE_LOG_DISABLED
    DEBUG_LOG(LOGIDENTIFIER, "creating buffer took ", (MonotonicTime::now() - start).seconds());
#endif
#endif // HAVE(CORE_VIDEO)
}
2341
// Paints the current frame into the graphics context, applying the video
// track's preferred transform so rotated/flipped media renders upright.
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext& context, const FloatRect& outputRect)
{
    updateLastImage(UpdateType::UpdateSynchronously);
    if (!m_lastImage)
        return;

    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    INFO_LOG(LOGIDENTIFIER);

    GraphicsContextStateSaver stateSaver(context);
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
    // Map the destination through the inverse so that, after concatenating
    // videoTransform below, the image lands exactly in outputRect.
    FloatRect transformedOutputRect = videoTransform.inverse().value_or(AffineTransform()).mapRect(outputRect);

    context.concatCTM(videoTransform);
    context.drawNativeImage(m_lastImage.get(), imageRect.size(), transformedOutputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();

}
2368
// Copies the current video frame's pixel buffer into the given GL texture.
// Returns false when no frame has ever been produced.
bool MediaPlayerPrivateAVFoundationObjC::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Dint type, bool premultiplyAlpha, bool flipY)
{
    ASSERT(context);

    // Refresh m_lastPixelBuffer; on failure we may still have a previous one.
    updateLastPixelBuffer();
    if (!m_lastPixelBuffer)
        return false;

    // The copier is created lazily and bound to this GL context.
    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierCV>(*context);

    size_t bufferWidth = CVPixelBufferGetWidth(m_lastPixelBuffer.get());
    size_t bufferHeight = CVPixelBufferGetHeight(m_lastPixelBuffer.get());
    return m_videoTextureCopier->copyImageToPlatformTexture(m_lastPixelBuffer.get(), bufferWidth, bufferHeight, outputTexture, outputTarget, level, internalFormat, format, type, premultiplyAlpha, flipY);
}
2385
// Returns the cached image for the current time, refreshing it first
// (asynchronous variant: does not block waiting for a new frame).
NativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage;
}
2391
// Blocks the calling thread until outputMediaDataWillChange() signals from
// the pull-delegate queue, or until the 1-second timeout elapses.
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    // Lazily create the semaphore shared with outputMediaDataWillChange().
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for at most 1 second. Use DISPATCH_TIME_NOW rather than a bare 0
    // so the base-time argument is self-documenting.
    long result = dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(DISPATCH_TIME_NOW, 1 * NSEC_PER_SEC));

    if (result)
        ERROR_LOG(LOGIDENTIFIER, "timed out");
}
2405
// Delegate callback (runs on the pull-delegate queue) fired when new media
// data is available; wakes waitForVideoOutputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutputType *)
{
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
2410
2411 #endif
2412
2413 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
2414
// Removes and returns the pending loading request for the given key URI;
// returns a null RetainPtr when none is pending.
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
2419
// Fulfills every pending key-loading request for which the player now has
// cached key data, then drops the fulfilled entries from the request map.
void MediaPlayerPrivateAVFoundationObjC::keyAdded()
{
    Vector<String> satisfiedKeyURIs;
    for (auto& entry : m_keyURIToRequestMap) {
        auto keyData = player()->cachedKeyForKeyId(entry.key);
        if (!keyData)
            continue;

        fulfillRequestWithKeyData(entry.value.get(), keyData.get());
        satisfiedKeyURIs.append(entry.key);
    }

    // Removal is deferred so the map is not mutated while being iterated.
    for (auto& keyURI : satisfiedKeyURIs)
        m_keyURIToRequestMap.remove(keyURI);
}
2439
// Clears the weak reference to the legacy CDM session being torn down.
// The caller owns the session; we only ever hold it weakly.
void MediaPlayerPrivateAVFoundationObjC::removeSession(LegacyCDMSession& session)
{
    ASSERT_UNUSED(session, &session == m_session);
    m_session = nullptr;
}
2445
// Creates a legacy CDM session for the given key system, or nullptr when
// the key system is unsupported. Ownership passes to the caller; this
// object keeps only a weak pointer (cleared via removeSession()).
std::unique_ptr<LegacyCDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem, LegacyCDMSessionClient* client)
{
    if (!keySystemIsSupported(keySystem))
        return nullptr;
    auto session = std::make_unique<CDMSessionAVFoundationObjC>(this, client);
    m_session = makeWeakPtr(*session);
    return WTFMove(session);
}
2454 #endif
2455
2456 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(LEGACY_ENCRYPTED_MEDIA)
// Propagates output-protection (HDCP) changes to whichever encrypted-media
// implementation is active: the legacy session and/or the CDM instance.
void MediaPlayerPrivateAVFoundationObjC::outputObscuredDueToInsufficientExternalProtectionChanged(bool newValue)
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    // 'HDCP' is used as a four-character error code in the NSError.
    if (m_session && newValue)
        m_session->playerDidReceiveError([NSError errorWithDomain:@"com.apple.WebKit" code:'HDCP' userInfo:nil]);
#endif

#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    if (m_cdmInstance)
        m_cdmInstance->outputObscuredDueToInsufficientExternalProtectionChanged(newValue);
#elif !ENABLE(LEGACY_ENCRYPTED_MEDIA)
    UNUSED_PARAM(newValue);
#endif
}
2471 #endif
2472
2473 #if ENABLE(ENCRYPTED_MEDIA)
// Attaches a FairPlay Streaming CDM instance, detaching any previous one
// and registering the asset as a content-key recipient with the instance's
// AVContentKeySession. Non-FairPlay instances are ignored.
void MediaPlayerPrivateAVFoundationObjC::cdmInstanceAttached(CDMInstance& instance)
{
#if HAVE(AVCONTENTKEYSESSION)
    if (!is<CDMInstanceFairPlayStreamingAVFObjC>(instance))
        return;

    auto& fpsInstance = downcast<CDMInstanceFairPlayStreamingAVFObjC>(instance);
    if (&fpsInstance == m_cdmInstance)
        return;

    // Only one instance may be attached at a time.
    if (m_cdmInstance)
        cdmInstanceDetached(*m_cdmInstance);

    m_cdmInstance = &fpsInstance;
    [m_cdmInstance->contentKeySession() addContentKeyRecipient:m_avAsset.get()];
#else
    UNUSED_PARAM(instance);
#endif
}
2493
// Detaches the currently attached CDM instance, unregistering the asset
// from its content-key session. Must be balanced with cdmInstanceAttached().
void MediaPlayerPrivateAVFoundationObjC::cdmInstanceDetached(CDMInstance& instance)
{
#if HAVE(AVCONTENTKEYSESSION)
    ASSERT_UNUSED(instance, m_cdmInstance && m_cdmInstance == &instance);
    [m_cdmInstance->contentKeySession() removeContentKeyRecipient:m_avAsset.get()];
    m_cdmInstance = nullptr;
#else
    UNUSED_PARAM(instance);
#endif
}
2504
// Finishes all pending key-loading requests, marking their content
// information as streaming key-delivery content-key data first.
void MediaPlayerPrivateAVFoundationObjC::attemptToDecryptWithInstance(CDMInstance&)
{
    // Take the whole map so requests arriving during iteration start fresh.
    auto keyURIToRequestMap = WTFMove(m_keyURIToRequestMap);
    for (auto& request : keyURIToRequestMap.values()) {
        if (auto *infoRequest = request.get().contentInformationRequest)
            infoRequest.contentType = AVStreamingKeyDeliveryContentKeyType;
        [request finishLoading];
    }
}
2514 #endif
2515
2516 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
2517
// Rebuilds text tracks for legacy closed-caption tracks on platforms
// without AVFoundation legible output, matching existing tracks by
// AVPlayerItemTrack identity and reporting additions/removals.
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    // Assume every current track was removed; matches found below are taken
    // back out of this list.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        // Iterate backwards so remove(i - 1) does not disturb unvisited entries.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            // Index into removedTextTracks, not m_textTracks: once an entry
            // has been removed from removedTextTracks the two vectors no
            // longer share indices (matches processMediaSelectionOptions()).
            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(removedTextTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;

        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2552
2553 #endif
2554
// Returns the asset's audible tracks, or nil until the asynchronous
// "tracks" key has finished loading (querying earlier is unsafe).
NSArray* MediaPlayerPrivateAVFoundationObjC::safeAVAssetTracksForAudibleMedia()
{
    if (!m_avAsset || [m_avAsset.get() statusOfValueForKey:@"tracks" error:NULL] != AVKeyValueStatusLoaded)
        return nil;

    return [m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible];
}
2565
2566 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2567
// True once the asset's media-selection information has finished its
// asynchronous load and can be queried safely.
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    return m_avAsset && [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2578
// The asset's legible selection group, or nil until selection data loads.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible] : nil;
}
2586
// The asset's audible selection group, or nil until selection data loads.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible] : nil;
}
2594
// The asset's visual selection group, or nil until selection data loads.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual] : nil;
}
2602
// Rebuilds the set of in-band (and, with AVF_CAPTIONS, out-of-band) text
// tracks from the asset's legible media-selection group, then reports
// additions and removals.
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        INFO_LOG(LOGIDENTIFIER, "no mediaSelectionGroup");
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Assume every current track was removed; matches found below are taken
    // back out of removedTextTracks.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        // Iterate backwards so remove(i - 1) does not disturb unvisited entries.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy closed-caption tracks are matched elsewhere.
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2658
// Lazily creates the in-band metadata text track (dispatch type
// "com.apple.streaming") and registers it with the player.
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    if (m_metadataTrack)
        return;

    m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
    m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
    player()->addTextTrack(*m_metadataTrack);
}
2668
// Forwards cue payloads (attributed strings and/or native samples) to the
// currently selected text track; dropped when no track is selected.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    ASSERT(time >= MediaTime::zeroTime());

    if (!m_currentTextTrack)
        return;

    m_currentTextTrack->processCue((__bridge CFArrayRef)attributedStrings, (__bridge CFArrayRef)nativeSamples, time);
}
2678
// Resets any partially-delivered cue state on the current text track.
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    INFO_LOG(LOGIDENTIFIER);

    if (!m_currentTextTrack)
        return;
    
    m_currentTextTrack->resetCueValues();
}
2688
2689 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2690
// Switches caption rendering to the given track (or off when track is
// null), via the legacy closed-caption toggle or media selection depending
// on the track's category.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    INFO_LOG(LOGIDENTIFIER, "selecting track with language ", track ? track->language() : "");

    m_currentTextTrack = track;

    if (track) {
        // NOTE: the ALLOW_DEPRECATED_* macros expand to compiler pragmas,
        // not statements, so this braceless if/else chain binds as written.
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
            ALLOW_DEPRECATED_DECLARATIONS_BEGIN
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
            ALLOW_DEPRECATED_DECLARATIONS_END
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        ALLOW_DEPRECATED_DECLARATIONS_BEGIN
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
        ALLOW_DEPRECATED_DECLARATIONS_END
    }

}
2723
// Determines the language of the primary audio track, caching the result
// in the mutable member m_languageOfPrimaryAudioTrack (so a const method
// can memoize). Returns the empty string when no single answer exists.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    ALLOW_DEPRECATED_DECLARATIONS_BEGIN
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    ALLOW_DEPRECATED_DECLARATIONS_END
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        INFO_LOG(LOGIDENTIFIER, "language of selected audible option ", m_languageOfPrimaryAudioTrack);

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        m_languageOfPrimaryAudioTrack = emptyString();
        INFO_LOG(LOGIDENTIFIER, tracks ? [tracks count] : 0, " audio tracks, returning empty");
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

    INFO_LOG(LOGIDENTIFIER, "single audio track has language \"", m_languageOfPrimaryAudioTrack, "\"");

    return m_languageOfPrimaryAudioTrack;
}
2762
2763 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// True when playback is routed to an external target: on iOS, whenever the
// AVPlayer reports external playback; on macOS, based on the explicit
// playback target and whether we opted into playing to it.
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    bool wirelessTarget = false;

#if !PLATFORM(IOS)
    if (m_playbackTarget) {
        if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation)
            wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
        else
            wirelessTarget = m_shouldPlayToPlaybackTarget && m_playbackTarget->hasActiveRoute();
    }
#else
    wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
#endif

    INFO_LOG(LOGIDENTIFIER, "- ", wirelessTarget);

    return wirelessTarget;
}
2783
// Maps the AVPlayer's external playback type to the MediaPlayer target
// type. On macOS the only supported wireless target is AirPlay.
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

#if PLATFORM(IOS)
    // AVFoundation is soft-linked; without it no target type can be read.
    if (!AVFoundationLibrary())
        return MediaPlayer::TargetTypeNone;

    switch ([m_avPlayer externalPlaybackType]) {
    case AVPlayerExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case AVPlayerExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case AVPlayerExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;

#else
    return MediaPlayer::TargetTypeAirPlay;
#endif
}
2809     
2810 #if PLATFORM(IOS)
// Returns a human-readable name for the external playback device(s), or
// nil when none can be determined. Prefers the shared audio-presentation
// output context (joining multiple device names with " + "); otherwise
// falls back to the MediaRemote pickable-route list for AirPlay.
// NOTE(review): "exernal" is a long-standing typo for "external"; renaming
// would require updating the caller as well.
static NSString *exernalDeviceDisplayNameForPlayer(AVPlayerType *player)
{
#if HAVE(CELESTIAL)
    if (!AVFoundationLibrary())
        return nil;

#if __IPHONE_OS_VERSION_MAX_ALLOWED >= 110000
    if ([getAVOutputContextClass() respondsToSelector:@selector(sharedAudioPresentationOutputContext)]) {
        AVOutputContext *outputContext = [getAVOutputContextClass() sharedAudioPresentationOutputContext];

        // Older SDKs lack the multiple-output-device API; use the single
        // device name in that case.
        if (![outputContext respondsToSelector:@selector(supportsMultipleOutputDevices)]
            || ![outputContext supportsMultipleOutputDevices]
            || ![outputContext respondsToSelector:@selector(outputDevices)])
            return [outputContext deviceName];

        auto outputDeviceNames = adoptNS([[NSMutableArray alloc] init]);
        for (AVOutputDevice *outputDevice in [outputContext outputDevices]) {
ALLOW_DEPRECATED_DECLARATIONS_BEGIN
            auto outputDeviceName = adoptNS([[outputDevice name] copy]);
ALLOW_DEPRECATED_DECLARATIONS_END
            [outputDeviceNames addObject:outputDeviceName.get()];
        }

        return [outputDeviceNames componentsJoinedByString:@" + "];
    }
#endif

    if (player.externalPlaybackType != AVPlayerExternalPlaybackTypeAirPlay)
        return nil;

    NSArray *pickableRoutes = CFBridgingRelease(MRMediaRemoteCopyPickableRoutes());
    if (!pickableRoutes.count)
        return nil;

    NSString *displayName = nil;
    for (NSDictionary *pickableRoute in pickableRoutes) {
        // Only the currently-picked route contributes a name.
        if (![pickableRoute[AVController_RouteDescriptionKey_RouteCurrentlyPicked] boolValue])
            continue;

        displayName = pickableRoute[AVController_RouteDescriptionKey_RouteName];

        NSString *routeName = pickableRoute[AVController_RouteDescriptionKey_AVAudioRouteName];
        if (![routeName isEqualToString:@"Speaker"] && ![routeName isEqualToString:@"HDMIOutput"])
            break;

        // The route is a speaker or HDMI out, override the name to be the localized device model.
        NSString *localizedDeviceModel = [[UIDevice currentDevice] localizedModel];

        // In cases where a route with that name already exists, prefix the name with the model.
        BOOL includeLocalizedDeviceModelName = NO;
        for (NSDictionary *otherRoute in pickableRoutes) {
            if (otherRoute == pickableRoute)
                continue;

            if ([otherRoute[AVController_RouteDescriptionKey_RouteName] rangeOfString:displayName].location != NSNotFound) {
                includeLocalizedDeviceModelName = YES;
                break;
            }
        }

        if (includeLocalizedDeviceModelName)
            displayName =  [NSString stringWithFormat:@"%@ %@", localizedDeviceModel, displayName];
        else
            displayName = localizedDeviceModel;

        break;
    }

    return displayName;
#else
    UNUSED_PARAM(player);
    return nil;
#endif
}
2885 #endif
2886
// The display name of the wireless playback target: from the explicit
// playback target on macOS, or derived from the active route on iOS.
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();

    String wirelessTargetName;
#if !PLATFORM(IOS)
    if (m_playbackTarget)
        wirelessTargetName = m_playbackTarget->deviceName();
#else
    wirelessTargetName = exernalDeviceDisplayNameForPlayer(m_avPlayer.get());
#endif

    return wirelessTargetName;
}
2902
// True when external (wireless) video playback is disallowed. Refreshes
// the cached mutable flag from the AVPlayer when one exists; otherwise
// answers from the last cached value.
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;

    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    INFO_LOG(LOGIDENTIFIER, "- ", !m_allowsWirelessVideoPlayback);

    return !m_allowsWirelessVideoPlayback;
}
2913
// Records the wireless-video preference and pushes it to the AVPlayer (if
// created); callbacks are delayed around the KVO-visible mutation.
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    INFO_LOG(LOGIDENTIFIER, "- ", disabled);
    m_allowsWirelessVideoPlayback = !disabled;
    if (!m_avPlayer)
        return;

    setDelayCallbacks(true);
    [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
    setDelayCallbacks(false);
}
2925
2926 #if !PLATFORM(IOS)
2927
// Adopts a new playback target, caching its AVOutputContext (only for
// AVFoundation-backed targets) and dropping should-play state when the
// target has no active route.
void MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
{
    m_playbackTarget = WTFMove(target);

    m_outputContext = m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation ? toMediaPlaybackTargetMac(m_playbackTarget.get())->outputContext() : nullptr;

    INFO_LOG(LOGIDENTIFIER);

    if (!m_playbackTarget->hasActiveRoute())
        setShouldPlayToPlaybackTarget(false);
}
2939
// Starts or stops routing playback to the current target. For AVFoundation
// targets this sets/clears the AVPlayer's output context; for mock targets
// it just notifies that the wireless state changed.
void MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(bool shouldPlay)
{
    if (m_shouldPlayToPlaybackTarget == shouldPlay)
        return;

    m_shouldPlayToPlaybackTarget = shouldPlay;

    if (!m_playbackTarget)
        return;

    INFO_LOG(LOGIDENTIFIER, "- ", shouldPlay);

    if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation) {
        AVOutputContext *newContext = shouldPlay ? m_outputContext.get() : nil;

        if (!m_avPlayer)
            return;

        // Avoid redundant (and KVO-visible) assignments of the same context.
        RetainPtr<AVOutputContext> currentContext = m_avPlayer.get().outputContext;
        if ((!newContext && !currentContext.get()) || [currentContext.get() isEqual:newContext])
            return;

        setDelayCallbacks(true);
        m_avPlayer.get().outputContext = newContext;
        setDelayCallbacks(false);

        return;
    }

    ASSERT(m_playbackTarget->targetType() == MediaPlaybackTarget::Mock);

    // Mock targets have no output context; just report the state change on
    // the main thread (guarded by a weak pointer in case we are destroyed).
    setDelayCallbacks(true);
    auto weakThis = makeWeakPtr(*this);
    scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis] {
        if (!weakThis)
            return;
        weakThis->playbackTargetIsWirelessDidChange();
    }));
    setDelayCallbacks(false);
}
2980
2981 #endif // !PLATFORM(IOS)
2982
// iOS only: keeps the AVPlayer's "use external playback while an external
// screen is active" flag in sync with standard-fullscreen mode.
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
#if PLATFORM(IOS)
    if (!m_avPlayer)
        return;

    // The selector is availability-checked because it is not public API on
    // all supported SDKs.
    if ([m_avPlayer respondsToSelector:@selector(setUsesExternalPlaybackWhileExternalScreenIsActive:)])
        [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:player()->fullscreenMode() & MediaPlayer::VideoFullscreenModeStandard];
#endif
}
2993
2994 #endif
2995
// KVO handler: caches the AVPlayerItem status and re-evaluates the
// player's network/ready state.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;

    updateStates();
}
3002
// Balanced with playbackLikelyToKeepUpDidChange(); counts in-flight KVO
// changes so updateStates() runs only after they all land.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    m_pendingStatusChanges++;
}
3007
// Caches playbackLikelyToKeepUp; defers updateStates() until every pending
// will/did-change pair has completed.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
3016
// Balanced with playbackBufferEmptyDidChange(); see m_pendingStatusChanges.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    m_pendingStatusChanges++;
}
3021
// Caches playbackBufferEmpty; defers updateStates() until every pending
// will/did-change pair has completed.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
3030
// Balanced with playbackBufferFullDidChange(); see m_pendingStatusChanges.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    m_pendingStatusChanges++;
}
3035
// Caches playbackBufferFull; defers updateStates() until every pending
// will/did-change pair has completed.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
3044
// KVO handler: caches the item's seekable ranges and notifies/re-evaluates.
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray> seekableRanges)
{
    m_cachedSeekableRanges = seekableRanges;

    seekableTimeRangesChanged();
    updateStates();
}
3052
// KVO handler: caches the item's loaded (buffered) ranges and notifies.
void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray> loadedRanges)
{
    m_cachedLoadedRanges = loadedRanges;

    loadedTimeRangesChanged();
    updateStates();
}
3060
// KVO handler for isReadyForDisplay. Becoming ready before any video track
// was observed implies the track set changed, so re-scan tracks.
void MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange(bool isReady)
{
    m_cachedIsReadyForDisplay = isReady;
    if (!hasVideo() && isReady)
        tracksChanged();
    updateStates();
}
3068
// KVO handler: a track was enabled/disabled; re-scan tracks and states.
void MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange(bool)
{
    tracksChanged();
    updateStates();
}
3074
// Enables/disables buffering by attaching or detaching the player item
// from the AVPlayer (a detached item stops downloading).
void MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(bool shouldBuffer)
{
    INFO_LOG(LOGIDENTIFIER, "- ", shouldBuffer);

    if (m_shouldBufferData == shouldBuffer)
        return;

    m_shouldBufferData = shouldBuffer;
    
    if (!m_avPlayer)
        return;

    setAVPlayerItem(shouldBuffer ? m_avPlayerItem.get() : nil);
    updateStates();
}
3090
3091 #if ENABLE(DATACUE_VALUE)
3092
// Maps an AVFoundation metadata key space to the corresponding reverse-DNS
// metadata type identifier. Returns the empty atom for unknown key spaces.
static const AtomicString& metadataType(NSString *avMetadataKeySpace)
{
    static NeverDestroyed<const AtomicString> quickTimeUserData("com.apple.quicktime.udta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> isoUserData("org.mp4ra", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> quickTimeMetadata("com.apple.quicktime.mdta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> iTunesMetadata("com.apple.itunes", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> id3Metadata("org.id3", AtomicString::ConstructFromLiteral);

    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeUserData])
        return quickTimeUserData;
    // The ISOUserData constant may be unavailable at runtime; the canLoad
    // check guards against dereferencing a missing soft-linked symbol.
    if (canLoadAVMetadataKeySpaceISOUserData() && [avMetadataKeySpace isEqualToString:AVMetadataKeySpaceISOUserData])
        return isoUserData;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeMetadata])
        return quickTimeMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceiTunes])
        return iTunesMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceID3])
        return id3Metadata;

    return emptyAtom();
}
3114
3115 #endif
3116
// KVO handler for the AVPlayerItem's "timedMetadata" property. Caches the new
// metadata (treating NSNull as "no metadata") and, when DATACUE_VALUE is
// enabled, converts each AVMetadataItem into a data cue on the metadata track.
void MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(RetainPtr<NSArray> metadata, const MediaTime& mediaTime)
{
    // AVFoundation can deliver NSNull to indicate the absence of metadata.
    m_currentMetaData = metadata && ![metadata isKindOfClass:[NSNull class]] ? metadata : nil;

    DEBUG_LOG(LOGIDENTIFIER, "adding ", m_currentMetaData ? [m_currentMetaData.get() count] : 0, " at time ", mediaTime);

#if ENABLE(DATACUE_VALUE)
    // Metadata delivered mid-seek refers to the pre-seek position; drop it.
    if (seeking())
        return;

    // Lazily create the metadata text track the cues will be added to.
    if (!m_metadataTrack)
        processMetadataTrack();

    // No new metadata: close out any still-open cues at the current time.
    if (!metadata || [metadata isKindOfClass:[NSNull class]]) {
        m_metadataTrack->updatePendingCueEndTimes(mediaTime);
        return;
    }

    // Set the duration of all incomplete cues before adding new ones.
    MediaTime earliestStartTime = MediaTime::positiveInfiniteTime();
    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = std::max(PAL::toMediaTime(item.time), MediaTime::zeroTime());
        if (start < earliestStartTime)
            earliestStartTime = start;
    }
    m_metadataTrack->updatePendingCueEndTimes(earliestStartTime);

    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = std::max(PAL::toMediaTime(item.time), MediaTime::zeroTime());
        // Items without a valid duration produce open-ended cues whose end times
        // are resolved by a later updatePendingCueEndTimes() call.
        MediaTime end = MediaTime::positiveInfiniteTime();
        if (CMTIME_IS_VALID(item.duration))
            end = start + PAL::toMediaTime(item.duration);

        AtomicString type = nullAtom();
        if (item.keySpace)
            type = metadataType(item.keySpace);

        m_metadataTrack->addDataCue(start, end, SerializedPlatformRepresentationMac::create(item), type);
    }
#endif
}
3158
// KVO handler for the AVPlayerItem's "tracks" property. Re-registers the
// "enabled" observers on the new track set, filters out streaming tracks that
// are already represented by a media selection group, and notifies the base
// class that track information changed.
void MediaPlayerPrivateAVFoundationObjC::tracksDidChange(RetainPtr<NSArray> tracks)
{
    // Stop observing the outgoing tracks before replacing the cached set, so
    // every addObserver: below is balanced by exactly one removeObserver:.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];

    NSArray *assetTracks = [m_avAsset tracks];

    m_cachedTracks = [tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id obj, NSUInteger, BOOL*) {
        AVAssetTrack* assetTrack = [obj assetTrack];

        // Tracks belonging directly to the asset are always kept.
        if ([assetTracks containsObject:assetTrack])
            return YES;

        // Track is a streaming track. Omit if it belongs to a valid AVMediaSelectionGroup.
        if (!hasLoadedMediaSelectionGroups())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicAudible] && safeMediaSelectionGroupForAudibleMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicVisual] && safeMediaSelectionGroupForVisualMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible] && safeMediaSelectionGroupForLegibleMedia())
            return NO;

        return YES;
    }]];

    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track addObserver:m_objcObserver.get() forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayerItemTrack];

    // The cached size is derived from the track list, so it must be recomputed.
    m_cachedTotalBytes = 0;

    tracksChanged();
    updateStates();
}
3196
// KVO handler for the AVPlayerItem's "hasEnabledAudio" property.
void MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange(bool hasEnabledAudio)
{
    m_cachedHasEnabledAudio = hasEnabledAudio;

    tracksChanged();
    updateStates();
}
3204
// KVO handler for the AVPlayerItem's "presentationSize" property.
void MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange(FloatSize size)
{
    m_cachedPresentationSize = size;

    sizeChanged();
    updateStates();
}
3212
// KVO handler for the AVPlayerItem's "duration" property. Caches the new value
// and invalidates the base class's cached duration so it is re-fetched.
void MediaPlayerPrivateAVFoundationObjC::durationDidChange(const MediaTime& duration)
{
    m_cachedDuration = duration;

    invalidateCachedDuration();
}
3219
// KVO handler for the AVPlayer's "rate" property. States are refreshed before
// the rate-change notification is dispatched.
void MediaPlayerPrivateAVFoundationObjC::rateDidChange(double rate)
{
    m_cachedRate = rate;

    updateStates();
    rateChanged();
}
3227
3228 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
3229
// KVO handler for the AVPlayer's "externalPlaybackActive" / "allowsExternalPlayback"
// properties; forwards to the cross-platform wireless-target notification.
void MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange()
{
    playbackTargetIsWirelessChanged();
}
3234
3235 #endif
3236
// KVO handler for the AVPlayerItem's "canPlayFastForward" property.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange(bool newValue)
{
    m_cachedCanPlayFastForward = newValue;
}
3241
// KVO handler for the AVPlayerItem's "canPlayFastReverse" property.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange(bool newValue)
{
    m_cachedCanPlayFastReverse = newValue;
}
3246
// Prevents (or allows) device sleep during video playback on iOS hardware,
// via deprecated AVPlayer SPI; a no-op on other platforms.
void MediaPlayerPrivateAVFoundationObjC::setShouldDisableSleep(bool flag)
{
#if PLATFORM(IOS) && !PLATFORM(IOS_SIMULATOR) && !PLATFORM(IOSMAC)
    ALLOW_DEPRECATED_DECLARATIONS_BEGIN
    [m_avPlayer _setPreventsSleepDuringVideoPlayback:flag];
    ALLOW_DEPRECATED_DECLARATIONS_END
#else
    UNUSED_PARAM(flag);
#endif
}
3257
// Keys asynchronously loaded on the AVAsset before it is considered usable.
// The array is created once and intentionally never released.
NSArray* assetMetadataKeyNames()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:
        @"duration",
        @"naturalSize",
        @"preferredTransform",
        @"preferredVolume",
        @"preferredRate",
        @"playable",
        @"resolvedURL",
        @"tracks",
        @"availableMediaCharacteristicsWithMediaSelectionOptions",
    nil];
    return keys;
}
3273
// Key paths observed on the AVPlayerItem; each has a matching *DidChange
// handler dispatched from -observeValueForKeyPath:. The array is created once
// and intentionally never released.
NSArray* itemKVOProperties()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:
        @"presentationSize",
        @"status",
        @"asset",
        @"tracks",
        @"seekableTimeRanges",
        @"loadedTimeRanges",
        @"playbackLikelyToKeepUp",
        @"playbackBufferFull",
        @"playbackBufferEmpty",
        @"duration",
        @"hasEnabledAudio",
        @"timedMetadata",
        @"canPlayFastForward",
        @"canPlayFastReverse",
    nil];
    return keys;
}
3294
// Keys asynchronously loaded on each AVAssetTrack. The array is created once
// and intentionally never released.
NSArray* assetTrackMetadataKeyNames()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"totalSampleDataLength", @"mediaType", @"enabled", @"preferredTransform", @"naturalSize", nil];
    return keys;
}
3300
// Key paths observed on the AVPlayer itself; conditional entries depend on the
// build's feature flags. The array is created once and intentionally never
// released.
NSArray* playerKVOProperties()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:
        @"rate",
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
        @"externalPlaybackActive",
        @"allowsExternalPlayback",
#endif
#if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
        @"outputObscuredDueToInsufficientExternalProtection",
#endif
    nil];
    return keys;
}
3315 } // namespace WebCore
3316
3317 @implementation WebCoreAVFMovieObserver
3318
// Stores a raw back-pointer to the owning player; the owner clears it via
// -disconnect before it is destroyed.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    if (!(self = [super init]))
        return nil;

    m_callback = callback;
    return self;
}
3327
// Severs the link to the owning player. Pending performSelector requests are
// cancelled first so none can fire against a null m_callback.
- (void)disconnect
{
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    m_callback = nil;
}
3333
// Invoked when asynchronous loading of the asset's metadata keys completes;
// forwards the event to the player on the main thread (if still connected).
- (void)metadataLoaded
{
    if (m_callback)
        m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
}
3340
// Notification handler for the player item reaching its end time; forwards the
// event to the player on the main thread (if still connected).
- (void)didEnd:(NSNotification *)unusedNotification
{
    UNUSED_PARAM(unusedNotification);
    if (m_callback)
        m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
}
3348
// Central KVO dispatcher. Binds the key path + observation context to the
// matching MediaPlayerPrivateAVFoundationObjC handler, then schedules that
// handler on the main thread. NSKeyValueObservingOptionPrior observations
// arrive twice; the "prior" delivery (willChange) only dispatches the
// *WillChange handlers. Note: keyPath is deliberately untyped (id) to match
// how it is compared below.
- (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context
{
    UNUSED_PARAM(object);
    id newValue = [change valueForKey:NSKeyValueChangeNewKey];

    if (!m_callback)
        return;

    // True for the "prior" half of an NSKeyValueObservingOptionPrior pair.
    bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];
    bool shouldLogValue = !willChange;
    WTF::Function<void ()> function;

    if (context == MediaPlayerAVFoundationObservationContextAVPlayerLayer) {
        if ([keyPath isEqualToString:@"readyForDisplay"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange, m_callback, [newValue boolValue]);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItemTrack) {
        if ([keyPath isEqualToString:@"enabled"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange, m_callback, [newValue boolValue]);
    }

    // "Will change" notifications for buffering-related item properties.
    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && willChange) {
        if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange, m_callback);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && !willChange) {
        // A value changed for an AVPlayerItem
        if ([keyPath isEqualToString:@"status"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange, m_callback, [newValue intValue]);
        else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"asset"]) {
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::setAsset, m_callback, RetainPtr<id>(newValue));
            // Asset descriptions are large and uninformative; skip value logging.
            shouldLogValue = false;
        } else if ([keyPath isEqualToString:@"loadedTimeRanges"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"seekableTimeRanges"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"tracks"]) {
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::tracksDidChange, m_callback, RetainPtr<NSArray>(newValue));
            shouldLogValue = false;
        } else if ([keyPath isEqualToString:@"hasEnabledAudio"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"presentationSize"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange, m_callback, FloatSize([newValue sizeValue]));
        else if ([keyPath isEqualToString:@"duration"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::durationDidChange, m_callback, PAL::toMediaTime([newValue CMTimeValue]));
        else if ([keyPath isEqualToString:@"timedMetadata"] && newValue) {
            // Capture the item's current time here so the metadata is stamped
            // with the playback position at delivery, not at dispatch.
            MediaTime now;
            CMTime itemTime = [(AVPlayerItemType *)object currentTime];
            if (CMTIME_IS_NUMERIC(itemTime))
                now = std::max(PAL::toMediaTime(itemTime), MediaTime::zeroTime());
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::metadataDidArrive, m_callback, RetainPtr<NSArray>(newValue), now);
            shouldLogValue = false;
        } else if ([keyPath isEqualToString:@"canPlayFastReverse"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"canPlayFastForward"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange, m_callback, [newValue boolValue]);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayer && !willChange) {
        // A value changed for an AVPlayer.
        if ([keyPath isEqualToString:@"rate"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::rateDidChange, m_callback, [newValue doubleValue]);
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
        else if ([keyPath isEqualToString:@"externalPlaybackActive"] || [keyPath isEqualToString:@"allowsExternalPlayback"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange, m_callback);
#endif
#if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
        else if ([keyPath isEqualToString:@"outputObscuredDueToInsufficientExternalProtection"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::outputObscuredDueToInsufficientExternalProtectionChanged, m_callback, [newValue boolValue]);
#endif
    }

#if !RELEASE_LOG_DISABLED
    // Debug-level logging of the observed change; the high-frequency time-range
    // key paths are excluded to keep the log readable.
    if (m_callback->logger().willLog(m_callback->logChannel(), WTFLogLevelDebug) && !([keyPath isEqualToString:@"loadedTimeRanges"] || [keyPath isEqualToString:@"seekableTimeRanges"])) {
        auto identifier = Logger::LogSiteIdentifier("MediaPlayerPrivateAVFoundation", "observeValueForKeyPath", m_callback->logIdentifier());

        if (shouldLogValue) {
            if ([keyPath isEqualToString:@"duration"])
                m_callback->logger().debug(m_callback->logChannel(), identifier, "did change '", [keyPath UTF8String], "' to ", PAL::toMediaTime([newValue CMTimeValue]));
            else {
                RetainPtr<NSString> valueString = adoptNS([[NSString alloc] initWithFormat:@"%@", newValue]);
                m_callback->logger().debug(m_callback->logChannel(), identifier, "did change '", [keyPath UTF8String], "' to ", [valueString.get() UTF8String]);
            }
        } else
            m_callback->logger().debug(m_callback->logChannel(), identifier, willChange ? "will" : "did", " change '", [keyPath UTF8String], "'");
    }
#endif

    if (!function)
        return;

    auto weakThis = makeWeakPtr(*m_callback);
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis, function = WTFMove(function)]{
        // weakThis and function both refer to the same MediaPlayerPrivateAVFoundationObjC instance. If the WeakPtr has
        // been cleared, the underlying object has been destroyed, and it is unsafe to call function().
        if (!weakThis)
            return;
        function();
    }));
}
3461
3462 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
3463
// AVPlayerItemLegibleOutput delegate callback, delivered off the main thread.
// Retains self and the payload arrays, then hops to the main thread to process
// the attributed caption strings.
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime
{
    UNUSED_PARAM(output);
    UNUSED_PARAM(nativeSamples);

    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFMovieObserver> protectedSelf = self;
    RetainPtr<NSArray> protectedStrings = strings;
    RetainPtr<NSArray> protectedNativeSamples = nativeSamples;
    callOnMainThread([protectedSelf = WTFMove(protectedSelf), protectedStrings = WTFMove(protectedStrings), protectedNativeSamples = WTFMove(protectedNativeSamples), itemTime] {
        // m_callback must be re-checked on the main thread: -disconnect may
        // have run between the dispatch and this lambda executing.
        MediaPlayerPrivateAVFoundationObjC* callback = protectedSelf->m_callback;
        if (!callback)
            return;
        MediaTime time = std::max(PAL::toMediaTime(itemTime), MediaTime::zeroTime());
        callback->processCue(protectedStrings.get(), protectedNativeSamples.get(), time);
    });
}
3483
// AVPlayerItemLegibleOutput delegate callback indicating previously delivered
// caption output is stale (e.g. after a seek); flushes the cues on the main
// thread.
- (void)outputSequenceWasFlushed:(id)output
{
    UNUSED_PARAM(output);

    if (!m_callback)
        return;

    // Re-check m_callback on the main thread in case -disconnect ran meanwhile.
    callOnMainThread([protectedSelf = RetainPtr<WebCoreAVFMovieObserver>(self)] {
        if (MediaPlayerPrivateAVFoundationObjC* callback = protectedSelf->m_callback)
            callback->flushCues();
    });
}
3496
3497 #endif
3498
3499 @end
3500
3501 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
3502
3503 @implementation WebCoreAVFLoaderDelegate
3504
3505 - (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback