Allow clients to specify a list of codecs which should require hardware decode support.
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27 #import "MediaPlayerPrivateAVFoundationObjC.h"
28
29 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
30
31 #import "AVAssetTrackUtilities.h"
32 #import "AVFoundationMIMETypeCache.h"
33 #import "AVFoundationSPI.h"
34 #import "AVTrackPrivateAVFObjCImpl.h"
35 #import "AudioSourceProviderAVFObjC.h"
36 #import "AudioTrackPrivateAVFObjC.h"
37 #import "AuthenticationChallenge.h"
38 #import "CDMSessionAVFoundationObjC.h"
39 #import "Cookie.h"
40 #import "Extensions3D.h"
41 #import "FloatConversion.h"
42 #import "FloatConversion.h"
43 #import "GraphicsContext.h"
44 #import "GraphicsContext3D.h"
45 #import "GraphicsContextCG.h"
46 #import "InbandMetadataTextTrackPrivateAVF.h"
47 #import "InbandTextTrackPrivateAVFObjC.h"
48 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
49 #import "Logging.h"
50 #import "MediaPlaybackTargetMac.h"
51 #import "MediaPlaybackTargetMock.h"
52 #import "MediaSelectionGroupAVFObjC.h"
53 #import "MediaTimeAVFoundation.h"
54 #import "OutOfBandTextTrackPrivateAVF.h"
55 #import "PixelBufferConformerCV.h"
56 #import "PlatformTimeRanges.h"
57 #import "QuartzCoreSPI.h"
58 #import "SecurityOrigin.h"
59 #import "SerializedPlatformRepresentationMac.h"
60 #import "Settings.h"
61 #import "TextEncoding.h"
62 #import "TextTrackRepresentation.h"
63 #import "TextureCacheCV.h"
64 #import "URL.h"
65 #import "VideoTextureCopierCV.h"
66 #import "VideoTrackPrivateAVFObjC.h"
67 #import "WebCoreAVFResourceLoader.h"
68 #import "WebCoreCALayerExtras.h"
69 #import "WebCoreNSURLSession.h"
70 #import "WebCoreSystemInterface.h"
71 #import <functional>
72 #import <map>
73 #import <objc/runtime.h>
74 #import <runtime/DataView.h>
75 #import <runtime/JSCInlines.h>
76 #import <runtime/TypedArrayInlines.h>
77 #import <runtime/Uint16Array.h>
78 #import <runtime/Uint32Array.h>
79 #import <runtime/Uint8Array.h>
80 #import <wtf/BlockObjCExceptions.h>
81 #import <wtf/CurrentTime.h>
82 #import <wtf/ListHashSet.h>
83 #import <wtf/NeverDestroyed.h>
84 #import <wtf/OSObjectPtr.h>
85 #import <wtf/text/CString.h>
86
87 #if ENABLE(AVF_CAPTIONS)
88 #include "TextTrack.h"
89 #endif
90
91 #import <AVFoundation/AVAssetImageGenerator.h>
92 #import <AVFoundation/AVAssetTrack.h>
93 #import <AVFoundation/AVMediaSelectionGroup.h>
94 #import <AVFoundation/AVMetadataItem.h>
95 #import <AVFoundation/AVPlayer.h>
96 #import <AVFoundation/AVPlayerItem.h>
97 #import <AVFoundation/AVPlayerItemOutput.h>
98 #import <AVFoundation/AVPlayerItemTrack.h>
99 #import <AVFoundation/AVPlayerLayer.h>
100 #import <AVFoundation/AVTime.h>
101
102 #if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
103 #import "VideoFullscreenLayerManager.h"
104 #endif
105
106 #if PLATFORM(IOS)
107 #import "WAKAppKitStubs.h"
108 #import <CoreImage/CoreImage.h>
109 #import <mach/mach_port.h>
110 #else
111 #import <Foundation/NSGeometry.h>
112 #import <QuartzCore/CoreImage.h>
113 #endif
114
115 #if USE(VIDEOTOOLBOX)
116 #import <CoreVideo/CoreVideo.h>
117 #import <VideoToolbox/VideoToolbox.h>
118 #endif
119
120 #if USE(CFURLCONNECTION)
121 #include "CFNSURLConnectionSPI.h"
122 #endif
123
124 #import "CoreVideoSoftLink.h"
125
namespace std {
// WTF::HashSet iterators do not declare the standard iterator typedefs, so
// provide a minimal iterator_traits specialization to let generic code that
// consults iterator_traits work with this particular HashSet iterator.
template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
    typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
};
}
131
#if ENABLE(AVF_CAPTIONS)
// Note: This must be defined before our SOFT_LINK macros:
// Category exposing SPI properties on AVMediaSelectionOption that identify
// out-of-band (WebVTT sourced) caption tracks injected via
// AVURLAssetOutOfBandAlternateTracksKey below.
@class AVMediaSelectionOption;
@interface AVMediaSelectionOption (OutOfBandExtensions)
@property (nonatomic, readonly) NSString* outOfBandSource;
@property (nonatomic, readonly) NSString* outOfBandIdentifier;
@end
#endif
140
// Category exposing the SPI resolvedURL property (the URL after redirects)
// on AVURLAsset.
@interface AVURLAsset (WebKitExtensions)
@property (nonatomic, readonly) NSURL *resolvedURL;
@end
144
145 typedef AVPlayer AVPlayerType;
146 typedef AVPlayerItem AVPlayerItemType;
147 typedef AVPlayerItemLegibleOutput AVPlayerItemLegibleOutputType;
148 typedef AVPlayerItemVideoOutput AVPlayerItemVideoOutputType;
149 typedef AVMetadataItem AVMetadataItemType;
150 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
151 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
152 typedef AVAssetCache AVAssetCacheType;
153
154 #pragma mark - Soft Linking
155
156 // Soft-linking headers must be included last since they #define functions, constants, etc.
157 #import "CoreMediaSoftLink.h"
158
159 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
160
161 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
162
163 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayer)
164 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerItem)
165 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerItemVideoOutput)
166 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerLayer)
167 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVURLAsset)
168 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVAssetImageGenerator)
169 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVMetadataItem)
170 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVAssetCache)
171
172 SOFT_LINK_CLASS(CoreImage, CIContext)
173 SOFT_LINK_CLASS(CoreImage, CIImage)
174
175 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString*)
176 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString*)
177 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
178 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
179 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
180 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
181 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
182 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeMetadata, NSString *)
183 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
184 SOFT_LINK_POINTER(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
185 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
186 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
187 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
188 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
189 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
190
191 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
192 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetRequiresCustomURLLoadingKey, NSString *)
193 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetOutOfBandMIMETypeKey, NSString *)
194
195 #define AVPlayer initAVPlayer()
196 #define AVPlayerItem initAVPlayerItem()
197 #define AVPlayerLayer initAVPlayerLayer()
198 #define AVURLAsset initAVURLAsset()
199 #define AVAssetImageGenerator initAVAssetImageGenerator()
200 #define AVPlayerItemVideoOutput initAVPlayerItemVideoOutput()
201 #define AVMetadataItem initAVMetadataItem()
202 #define AVAssetCache initAVAssetCache()
203
204 #define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
205 #define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
206 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
207 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
208 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
209 #define AVMediaTypeVideo getAVMediaTypeVideo()
210 #define AVMediaTypeAudio getAVMediaTypeAudio()
211 #define AVMediaTypeMetadata getAVMediaTypeMetadata()
212 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
213 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
214 #define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
215 #define AVURLAssetRequiresCustomURLLoadingKey getAVURLAssetRequiresCustomURLLoadingKey()
216 #define AVURLAssetOutOfBandMIMETypeKey getAVURLAssetOutOfBandMIMETypeKey()
217 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
218 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
219 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
220 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
221 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
222
223 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
224 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
225 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
226
227 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
228 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
229 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
230
231 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
232 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
233 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
234 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
235
236 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
237 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
238 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
239 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
240 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
241 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
242 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
243 #endif
244
245 #if ENABLE(AVF_CAPTIONS)
246 SOFT_LINK_POINTER(AVFoundation, AVURLAssetCacheKey, NSString*)
247 SOFT_LINK_POINTER(AVFoundation, AVURLAssetHTTPCookiesKey, NSString*)
248 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
249 SOFT_LINK_POINTER(AVFoundation, AVURLAssetUsesNoPersistentCacheKey, NSString*)
250 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
251 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
252 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
253 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
254 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
255 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
256 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
257 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
258 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicIsAuxiliaryContent, NSString*)
259
260 #define AVURLAssetHTTPCookiesKey getAVURLAssetHTTPCookiesKey()
261 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
262 #define AVURLAssetCacheKey getAVURLAssetCacheKey()
263 #define AVURLAssetUsesNoPersistentCacheKey getAVURLAssetUsesNoPersistentCacheKey()
264 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
265 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
266 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
267 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
268 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
269 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
270 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
271 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
272 #define AVMediaCharacteristicIsAuxiliaryContent getAVMediaCharacteristicIsAuxiliaryContent()
273 #endif
274
275 #if ENABLE(DATACUE_VALUE)
276 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString*)
277 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMetadataKeySpaceISOUserData, NSString*)
278 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString*)
279 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceiTunes, NSString*)
280 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceID3, NSString*)
281
282 #define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
283 #define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
284 #define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
285 #define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
286 #define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
287 #endif
288
289 #if PLATFORM(IOS)
290 SOFT_LINK_POINTER(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
291 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
292 #endif
293
294 SOFT_LINK_FRAMEWORK(MediaToolbox)
295 SOFT_LINK_OPTIONAL(MediaToolbox, MTEnableCaption2015Behavior, Boolean, (), ())
296
297 using namespace WebCore;
298
// KVO context values used by WebCoreAVFMovieObserver to route
// -observeValueForKeyPath:... notifications to the kind of object being
// observed (player item, item track, player, or player layer).
enum MediaPlayerAVFoundationObservationContext {
    MediaPlayerAVFoundationObservationContextPlayerItem,
    MediaPlayerAVFoundationObservationContextPlayerItemTrack,
    MediaPlayerAVFoundationObservationContextPlayer,
    MediaPlayerAVFoundationObservationContextAVPlayerLayer,
};
305
// Observer object that bridges AVFoundation KVO notifications, end-of-playback
// notifications, and (when available) legible-output caption callbacks back to
// the owning MediaPlayerPrivateAVFoundationObjC.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
#else
@interface WebCoreAVFMovieObserver : NSObject
#endif
{
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Back-pointer; cleared via -disconnect before the player is destroyed.
    int m_delayCallbacks;
}
-(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
-(void)disconnect;
-(void)metadataLoaded;
-(void)didEnd:(NSNotification *)notification;
// NOTE(review): keyPath is declared without a type and therefore defaults to
// id; presumably it is always an NSString * — confirm against the implementation.
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;
#endif
@end
325
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// AVAssetResourceLoader delegate that forwards custom-URL loading requests
// (e.g. blob:, media-source keys) to the owning player object.
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Cleared via -setCallback:0 in the player's destructor.
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
@end
#endif
335
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// AVPlayerItemVideoOutput pull delegate: notifies the player when new video
// frames become available for pulling from the item output.
@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    MediaPlayerPrivateAVFoundationObjC *m_callback; // Cleared via -setCallback:0 in the player's destructor.
    dispatch_semaphore_t m_semaphore;
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
@end
#endif
347
348 namespace WebCore {
349
350 static NSArray *assetMetadataKeyNames();
351 static NSArray *itemKVOProperties();
352 static NSArray *assetTrackMetadataKeyNames();
353 static NSArray *playerKVOProperties();
354 static AVAssetTrack* firstEnabledTrack(NSArray* tracks);
355
356 #if !LOG_DISABLED
// Map a boolean to a printable C string for LOG() output.
static const char *boolString(bool val)
{
    if (val)
        return "true";
    return "false";
}
361 #endif
362
363 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Lazily creates (exactly once, thread-safely) the serial dispatch queue on
// which all WebCoreAVFLoaderDelegate callbacks are delivered.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t loaderQueue;
    static dispatch_once_t loaderQueueOnceToken;
    dispatch_once(&loaderQueueOnceToken, ^{
        loaderQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return loaderQueue;
}
373 #endif
374
375 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Lazily creates (exactly once, thread-safely) the serial dispatch queue on
// which WebCoreAVFPullDelegate video-output callbacks are delivered.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_queue_t pullDelegateQueue;
    static dispatch_once_t pullDelegateQueueOnceToken;
    dispatch_once(&pullDelegateQueueOnceToken, ^{
        pullDelegateQueue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return pullDelegateQueue;
}
385 #endif
386
387 #if USE(CFURLCONNECTION)
// Adapter that lets WebCore's AuthenticationClient machinery answer an
// NSURLAuthenticationChallenge: each AuthenticationClient callback is
// forwarded to the corresponding selector on the challenge's sender.
class WebCoreNSURLAuthenticationChallengeClient : public RefCounted<WebCoreNSURLAuthenticationChallengeClient>, public AuthenticationClient {
public:
    static RefPtr<WebCoreNSURLAuthenticationChallengeClient> create(NSURLAuthenticationChallenge *challenge)
    {
        return adoptRef(new WebCoreNSURLAuthenticationChallengeClient(challenge));
    }

    // Expose RefCounted's ref/deref so AuthenticationClient's abstract
    // ref-counting hooks below can delegate to them.
    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::ref;
    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::deref;

private:
    WebCoreNSURLAuthenticationChallengeClient(NSURLAuthenticationChallenge *challenge)
        : m_challenge(challenge)
    {
        ASSERT(m_challenge);
    }

    void refAuthenticationClient() override { ref(); }
    void derefAuthenticationClient() override { deref(); }

    // The page supplied a credential; hand it to the challenge sender.
    void receivedCredential(const AuthenticationChallenge&, const Credential& credential) override
    {
        [[m_challenge sender] useCredential:credential.nsCredential() forAuthenticationChallenge:m_challenge.get()];
    }

    // The page declined to supply a credential; continue unauthenticated.
    void receivedRequestToContinueWithoutCredential(const AuthenticationChallenge&) override
    {
        [[m_challenge sender] continueWithoutCredentialForAuthenticationChallenge:m_challenge.get()];
    }

    void receivedCancellation(const AuthenticationChallenge&) override
    {
        [[m_challenge sender] cancelAuthenticationChallenge:m_challenge.get()];
    }

    // The following two sender selectors are optional protocol methods, so
    // guard with respondsToSelector: before messaging.
    void receivedRequestToPerformDefaultHandling(const AuthenticationChallenge&) override
    {
        if ([[m_challenge sender] respondsToSelector:@selector(performDefaultHandlingForAuthenticationChallenge:)])
            [[m_challenge sender] performDefaultHandlingForAuthenticationChallenge:m_challenge.get()];
    }

    void receivedChallengeRejection(const AuthenticationChallenge&) override
    {
        if ([[m_challenge sender] respondsToSelector:@selector(rejectProtectionSpaceAndContinueWithChallenge:)])
            [[m_challenge sender] rejectProtectionSpaceAndContinueWithChallenge:m_challenge.get()];
    }

    RetainPtr<NSURLAuthenticationChallenge> m_challenge;
};
437 #endif
438
// Registers this media engine with WebCore's MediaPlayer factory, supplying a
// constructor lambda plus the static type/cache query functions, then primes
// the AVFoundation MIME type cache.
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    // Do nothing when AVFoundation cannot be soft-linked at runtime.
    if (!isAvailable())
        return;

    registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player); },
            getSupportedTypes, supportsType, originsInMediaCache, clearMediaCache, clearMediaCacheForOrigins, supportsKeySystem);
    AVFoundationMIMETypeCache::singleton().loadTypes();
}
448
// Returns the AVAssetCache rooted at the given directory path; when the path
// is empty, falls back to a "MediaCache" directory under the temp directory.
static AVAssetCacheType *assetCacheForPath(const String& path)
{
    NSURL *cacheDirectoryURL;

    if (path.isEmpty())
        cacheDirectoryURL = [[NSURL fileURLWithPath:NSTemporaryDirectory()] URLByAppendingPathComponent:@"MediaCache" isDirectory:YES];
    else
        cacheDirectoryURL = [NSURL fileURLWithPath:path isDirectory:YES];

    return [initAVAssetCache() assetCacheWithURL:cacheDirectoryURL];
}
460
// Collects the set of security origins that currently have entries in the
// media cache at the given path. Cache keys are URL strings; keys that do not
// parse as valid URLs are ignored.
HashSet<RefPtr<SecurityOrigin>> MediaPlayerPrivateAVFoundationObjC::originsInMediaCache(const String& path)
{
    HashSet<RefPtr<SecurityOrigin>> cachedOrigins;
    for (NSString *key in [assetCacheForPath(path) allKeys]) {
        URL cacheKeyAsURL = URL(URL(), key);
        if (!cacheKeyAsURL.isValid())
            continue;
        cachedOrigins.add(SecurityOrigin::create(cacheKeyAsURL));
    }
    return cachedOrigins;
}
471
// Converts an NSDate to a std::chrono::system_clock::time_point by way of its
// seconds-since-epoch value.
static std::chrono::system_clock::time_point toSystemClockTime(NSDate *date)
{
    ASSERT(date);

    std::chrono::duration<double> secondsSinceEpoch(date.timeIntervalSince1970);
    return std::chrono::system_clock::time_point(std::chrono::duration_cast<std::chrono::system_clock::duration>(secondsSinceEpoch));
}
479
// Removes media cache entries modified after `modifiedSince`. Works in two
// phases: first evicts matching entries through the AVAssetCache API, then
// sweeps the backing directory for leftover "CachedMedia-" files. Passing a
// zero (epoch) time point clears the entire cache directory.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCache(const String& path, std::chrono::system_clock::time_point modifiedSince)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::clearMediaCache()");
    
    AVAssetCacheType* assetCache = assetCacheForPath(path);
    
    // Phase 1: evict entries newer than the cutoff via the cache API.
    for (NSString *key in [assetCache allKeys]) {
        if (toSystemClockTime([assetCache lastModifiedDateOfEntryForKey:key]) > modifiedSince)
            [assetCache removeEntryForKey:key];
    }

    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSURL *baseURL = [assetCache URL];

    // "Clear everything" case: a cutoff at (or before) the epoch means delete
    // the whole cache directory and stop.
    if (modifiedSince <= std::chrono::system_clock::time_point { }) {
        [fileManager removeItemAtURL:baseURL error:nil];
        return;
    }
    
    // Phase 2: sweep the top level of the cache directory for regular
    // "CachedMedia-" files newer than the cutoff that the API pass missed.
    NSArray *propertyKeys = @[NSURLNameKey, NSURLContentModificationDateKey, NSURLIsRegularFileKey];
    NSDirectoryEnumerator *enumerator = [fileManager enumeratorAtURL:baseURL includingPropertiesForKeys:
        propertyKeys options:NSDirectoryEnumerationSkipsSubdirectoryDescendants
        errorHandler:nil];
    
    // Collect first, delete after: avoids mutating the directory while the
    // enumerator is still walking it.
    RetainPtr<NSMutableArray> urlsToDelete = adoptNS([[NSMutableArray alloc] init]);
    for (NSURL *fileURL : enumerator) {
        NSDictionary *fileAttributes = [fileURL resourceValuesForKeys:propertyKeys error:nil];
    
        if (![fileAttributes[NSURLNameKey] hasPrefix:@"CachedMedia-"])
            continue;
        
        if (![fileAttributes[NSURLIsRegularFileKey] boolValue])
            continue;
        
        if (toSystemClockTime(fileAttributes[NSURLContentModificationDateKey]) <= modifiedSince)
            continue;
        
        [urlsToDelete addObject:fileURL];
    }
    
    for (NSURL *fileURL in urlsToDelete.get())
        [fileManager removeItemAtURL:fileURL error:nil];
}
523
// Removes every media cache entry at `path` whose key (a URL string) belongs
// to one of the given security origins.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCacheForOrigins(const String& path, const HashSet<RefPtr<SecurityOrigin>>& origins)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::clearMediaCacheForOrigins()");
    AVAssetCacheType* assetCache = assetCacheForPath(path);
    for (NSString *key in [assetCache allKeys]) {
        URL keyAsURL = URL(URL(), key);
        if (keyAsURL.isValid() && origins.contains(SecurityOrigin::create(keyAsURL)))
            [assetCache removeEntryForKey:key];
    }
}
536
// Constructor: wires up the Objective-C observer/delegate helper objects and
// zeroes the cached-KVO-state members. Member initializer order must match the
// declaration order in the header.
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_weakPtrFactory(this)
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    , m_videoFullscreenLayerManager(VideoFullscreenLayerManager::create())
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
#endif
    // KVO / notification observer bridging AVFoundation callbacks to this object.
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
    , m_videoOutputSemaphore(nullptr)
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
#endif
    , m_currentTextTrack(0)
    // Cached copies of AVPlayer/AVPlayerItem state, kept current via KVO so
    // they can be read without messaging AVFoundation.
    , m_cachedRate(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_shouldBufferData(true)
    , m_cachedIsReadyForDisplay(false)
    , m_haveBeenAskedToCreateLayer(false)
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
}
571
// Destructor: detaches every delegate/callback first so that late
// AVFoundation callbacks cannot reach this dying object, then tears down the
// video layer and cancels any in-flight loading.
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    [m_loaderDelegate.get() setCallback:0];
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];

    // Abort any resource loads still outstanding for this player.
    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    [m_videoOutputDelegate setCallback:0];
    [m_videoOutput setDelegate:nil queue:0];
    // The semaphore was retained with dispatch_retain/create; balance it here.
    if (m_videoOutputSemaphore)
        dispatch_release(m_videoOutputSemaphore);
#endif

    if (m_videoLayer)
        destroyVideoLayer();

    cancelLoad();
}
593
// Cancels any in-progress load and tears down all AVFoundation objects:
// removes every KVO registration this object added (observers must be removed
// before the observed objects are released), releases the asset/item/player,
// and resets the cached KVO-mirrored state.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::cancelLoad(%p)", this);
    tearDownVideoRendering();

    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Detach the legible (caption) output from the item before releasing it.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    // Unregister every item KVO property we observe, then drop the item.
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        
        m_avPlayerItem = nil;
    }
    // Likewise for the player: periodic time observer first, then KVO.
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;

        for (NSString *keyName in playerKVOProperties())
            [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = MediaTime::zeroTime();

    // Each cached track was given an "enabled" observer when it was cached.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Disconnect the Web Audio source provider from the now-dead item/track.
    if (m_provider) {
        m_provider->setPlayerItem(nullptr);
        m_provider->setAudioTrack(nullptr);
    }
#endif

    setIgnoreLoadStateChanges(false);
}
657
// True once layer creation has been requested; the layer itself may still be
// created asynchronously (see createVideoLayer).
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_haveBeenAskedToCreateLayer;
}
662
// True when either rendering path that can paint into a graphics context
// exists: the AVPlayerItemVideoOutput (when available) or the image generator.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    return m_videoOutput || m_imageGenerator;
#else
    return m_imageGenerator;
#endif
}
671
// Creates the context-painting renderer, preferring the video output path
// when the platform supports AVPlayerItemVideoOutput and falling back to an
// AVAssetImageGenerator otherwise.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
680
// Lazily creates the AVAssetImageGenerator used to snapshot video frames for
// painting. No-op until an asset exists or if a generator was already made.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p)", this);

    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    // Zero tolerance: request exact-time frames rather than nearby keyframes.
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
}
697
// Tears down every context-painting renderer this player may have created
// (video output, OpenGL video output, image generator).
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
    destroyOpenGLVideoOutput();
#endif
    destroyImageGenerator();
}
706
// Releases the AVAssetImageGenerator, if one was created.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (!m_imageGenerator)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator(%p) - destroying  %p", this, m_imageGenerator.get());

    // Use nullptr (not 0) for smart-pointer reset, matching the convention
    // used elsewhere in this file (e.g. m_cachedTracks in cancelLoad()).
    m_imageGenerator = nullptr;
}
716
// Requests creation of the AVPlayerLayer. The actual creation is deferred to
// the main thread via callOnMainThread, guarded by a weak pointer in case the
// player is destroyed before the task runs; m_haveBeenAskedToCreateLayer is
// re-checked inside the task to collapse duplicate requests.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
        return;

    callOnMainThread([this, weakThis = createWeakPtr()] {
        if (!weakThis)
            return;

        // Re-check: state may have changed between scheduling and execution.
        if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
            return;
        m_haveBeenAskedToCreateLayer = true;

        if (!m_videoLayer)
            createAVPlayerLayer();

#if USE(VIDEOTOOLBOX)
        if (!m_videoOutput)
            createVideoOutput();
#endif

        // Let the client know painting now goes through the layer.
        player()->client().mediaPlayerRenderingModeChanged(player());
    });
}
741
// Creates and configures the AVPlayerLayer used for inline video rendering,
// and hands it to the fullscreen layer manager on platforms that use one.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
{
    if (!m_avPlayer)
        return;

    m_videoLayer = adoptNS([[AVPlayerLayer alloc] init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];
    [m_videoLayer setBackgroundColor:cachedCGColor(Color::black)];

#ifndef NDEBUG
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
    // Observe readyForDisplay so hasAvailableVideoFrame() can report when the
    // layer has its first displayable frame (see m_cachedIsReadyForDisplay).
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
    IntSize defaultSize = snappedIntRect(player()->client().mediaPlayerContentBoxRect()).size();
    // The log previously named the wrong function (createVideoLayer).
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer(%p) - returning %p", this, m_videoLayer.get());

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    m_videoFullscreenLayerManager->setVideoLayer(m_videoLayer.get(), defaultSize);

#if PLATFORM(IOS)
    [m_videoLayer setPIPModeEnabled:(player()->fullscreenMode() & MediaPlayer::VideoFullscreenModePictureInPicture)];
#endif
#else
    [m_videoLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
#endif
}
770
// Tears down the inline AVPlayerLayer: removes the KVO observation, detaches
// the layer from the player, and releases it.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer(%p) - destroying %p", this, m_videoLayer.get());

    // Balance the readyForDisplay observation added in createAVPlayerLayer().
    [m_videoLayer removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer setPlayer:nil];

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    m_videoFullscreenLayerManager->didDestroyVideoLayer();
#endif

    m_videoLayer = nil;
}
787
// Returns the wall-clock date (in milliseconds) at which the media began, or
// an invalid time when the media carries no start date.
MediaTime MediaPlayerPrivateAVFoundationObjC::getStartDate() const
{
    // The item's currentDate advances with playback, so subtracting the
    // current playback offset yields the date at which playback began.
    double dateMilliseconds = [[m_avPlayerItem currentDate] timeIntervalSince1970] * 1000;

    // AVFoundation reports 0 when the media file has no start date, and no
    // live stream originated at the 1970 epoch, so 0 means "no date".
    if (!dateMilliseconds)
        return MediaTime::invalidTime();

    double playbackOffsetMilliseconds = CMTimeGetSeconds([m_avPlayerItem currentTime]) * 1000;

    // Round to absorb sub-second error introduced by the subtraction.
    return MediaTime::createWithDouble(round(dateMilliseconds - playbackOffsetMilliseconds));
}
802
// Returns whether a displayable video frame exists. When rendering through
// the AVPlayerLayer this is the layer's cached readyForDisplay state;
// otherwise it is whether we have painted at least one frame.
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    return currentRenderingMode() == MediaRenderingToLayer ? m_cachedIsReadyForDisplay : m_videoFrameHasDrawn;
}
810
811 #if ENABLE(AVF_CAPTIONS)
// Maps a WebCore text track kind to the AVFoundation media characteristics
// describing an out-of-band track. Uses modern array literals, consistent
// with the dictionary/number literals already used in this file.
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    // With the 2015 caption behavior, tracks are selected manually, so every
    // out-of-band track is tagged as auxiliary content regardless of kind.
    static bool manualSelectionMode = MTEnableCaption2015BehaviorPtr() && MTEnableCaption2015BehaviorPtr()();
    if (manualSelectionMode)
        return @[ AVMediaCharacteristicIsAuxiliaryContent ];

    // FIXME: Match these to correct types:
    if (kind == PlatformTextTrack::Caption)
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];

    if (kind == PlatformTextTrack::Subtitle)
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];

    if (kind == PlatformTextTrack::Description)
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility ];

    if (kind == PlatformTextTrack::Forced)
        return @[ AVMediaCharacteristicContainsOnlyForcedSubtitles ];

    return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];
}
833     
// Called when an out-of-band text track's mode changes; forwards to the base
// class so track state can be re-synchronized.
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
838     
// Pushes the mode (hidden/disabled/showing) of each out-of-band track source
// onto the matching platform text track. Tracks are matched by the unique id
// stored in the media selection option's outOfBandIdentifier.
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();

    for (auto& textTrack : m_textTracks) {
        // Only out-of-band tracks are driven by the track sources.
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;

        RefPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> currentOption = trackPrivate->mediaSelectionOption();

        for (auto& track : outOfBandTrackSources) {
            // The unique id was attached when the track was registered with
            // AVFoundation; use it to find the source for this option.
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();

            if (![[currentOption.get() outOfBandIdentifier] isEqual: reinterpret_cast<const NSString*>(uniqueID.get())])
                continue;

            // Translate the source's mode to the inband-track enumeration.
            // Unlisted modes fall back to Hidden.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;

            textTrack->setMode(mode);
            break;
        }
    }
}
869 #endif
870
871
// Converts a WebCore URL string to an NSURL, canonicalized the same way the
// URL loading system would canonicalize it. Falls back to the plain
// conversion whenever canonicalization is not possible.
static NSURL *canonicalURL(const String& url)
{
    NSURL *result = URL(ParsedURLString, url);
    if (url.isEmpty())
        return result;

    RetainPtr<NSURLRequest> request = adoptNS([[NSURLRequest alloc] initWithURL:result]);
    if (!request)
        return result;

    // Run the URL through NSURLProtocol so it matches what a loader would
    // ultimately request.
    NSURLRequest *canonicalRequest = [NSURLProtocol canonicalRequestForRequest:request.get()];
    return canonicalRequest ? [canonicalRequest URL] : result;
}
888
889 #if PLATFORM(IOS)
// Translates a WebCore Cookie into an NSHTTPCookie via the property
// dictionary form NSHTTPCookie understands. cookie.expires is in
// milliseconds, so it is converted to seconds for NSDate.
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    NSMutableDictionary *properties = [NSMutableDictionary dictionary];
    properties[NSHTTPCookieName] = cookie.name;
    properties[NSHTTPCookieValue] = cookie.value;
    properties[NSHTTPCookieDomain] = cookie.domain;
    properties[NSHTTPCookiePath] = cookie.path;
    properties[NSHTTPCookieExpires] = [NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)];

    // Optional attributes are only present when set on the WebCore cookie.
    if (cookie.secure)
        properties[NSHTTPCookieSecure] = @YES;
    if (cookie.session)
        properties[NSHTTPCookieDiscard] = @YES;

    return [NSHTTPCookie cookieWithProperties:properties];
}
907 #endif
908
// Creates the AVURLAsset for the given URL, translating player state
// (referrer, user agent, content type, out-of-band text tracks, cookies and
// caching policy) into AVURLAsset creation options.
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
{
    if (m_avAsset)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(%p) - url = %s", this, url.utf8().data());

    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);

    // Forbid cross-references between local and remote media resources.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    // Propagate the document's Referer and User-Agent headers to media loads.
    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject:@YES forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

#if PLATFORM(IOS)
    // FIXME: rdar://problem/20354688
    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty() && AVURLAssetClientBundleIdentifierKey)
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];

    // NOTE(review): presumably forces loading through the resource-loader
    // delegate configured below — confirm against the AVFoundation SPI.
    if (AVURLAssetRequiresCustomURLLoadingKey)
        [options setObject:@YES forKey:AVURLAssetRequiresCustomURLLoadingKey];
#endif

    // Hand the server-provided MIME type (plus codecs, when known) to
    // AVFoundation, but only when it was not merely inferred from the file
    // extension.
    auto& type = player()->contentMIMEType();
    if (AVURLAssetOutOfBandMIMETypeKey && !type.isEmpty() && !player()->contentMIMETypeWasInferredFromExtension()) {
        auto& codecs = player()->contentTypeCodecs();
        if (!codecs.isEmpty()) {
            NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)type, (NSString *)codecs];
            [options setObject:typeString forKey:AVURLAssetOutOfBandMIMETypeKey];
        } else
            [options setObject:(NSString *)type forKey:AVURLAssetOutOfBandMIMETypeKey];
    }

#if ENABLE(AVF_CAPTIONS)
    // Describe each out-of-band text track so AVFoundation exposes it as a
    // media selection option. The unique id lets us match options back to
    // their WebCore track sources later (see synchronizeTextTrackState()).
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: reinterpret_cast<const NSString*>(label.get()),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: reinterpret_cast<const NSString*>(language.get()),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: reinterpret_cast<const NSString*>(uniqueID.get()),
                AVOutOfBandAlternateTrackSourceKey: reinterpret_cast<const NSString*>(url.get()),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS)
    // Pin media loads to a specific network interface when one is requested.
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS)
    // Mirror WebCore's cookies for this URL so media requests carry the same
    // session state as the page.
    Vector<Cookie> cookies;
    if (player()->getRawCookies(URL(ParsedURLString, url), cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    // Opt in or out of AVFoundation's persistent media cache based on the
    // client's policy, pointing it at the client-provided cache directory.
    bool usePersistentCache = player()->client().mediaPlayerShouldUsePersistentCache();
    [options setObject:@(!usePersistentCache) forKey:AVURLAssetUsesNoPersistentCacheKey];

    if (usePersistentCache)
        [options setObject:assetCacheForPath(player()->client().mediaPlayerMediaCacheDirectory()) forKey:AVURLAssetCacheKey];

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([[AVURLAsset alloc] initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    [resourceLoader setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];

#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED > 101100
    // When the SPI is available, give the resource loader an NSURLSession
    // backed by WebCore's loader so media traffic goes through WebKit's
    // network stack.
    if (Settings::isAVFoundationNSURLSessionEnabled()
        && [resourceLoader respondsToSelector:@selector(setURLSession:)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegate)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegateQueue)]) {
        RefPtr<PlatformMediaResourceLoader> mediaResourceLoader = player()->createResourceLoader();
        if (mediaResourceLoader)
            resourceLoader.URLSession = (NSURLSession *)[[[WebCoreNSURLSession alloc] initWithResourceLoader:*mediaResourceLoader delegate:resourceLoader.URLSessionDataDelegate delegateQueue:resourceLoader.URLSessionDataDelegateQueue] autorelease];
    }
#endif

#endif

    // A new asset needs a fresh playability check (see checkPlayability()).
    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
1028
// Installs |item| (which may be nil) as the AVPlayer's current item. The
// replacement is performed directly when already on the main thread and is
// otherwise dispatched asynchronously to the main queue.
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItemType *item)
{
    if (!m_avPlayer)
        return;

    if (pthread_main_np()) {
        [m_avPlayer replaceCurrentItemWithPlayerItem:item];
        return;
    }

    // Retain both objects so they outlive this call until the asynchronous
    // replacement runs on the main queue.
    RetainPtr<AVPlayerType> strongPlayer = m_avPlayer.get();
    RetainPtr<AVPlayerItemType> strongItem = item;
    dispatch_async(dispatch_get_main_queue(), [strongPlayer, strongItem] {
        [strongPlayer replaceCurrentItemWithPlayerItem:strongItem.get()];
    });
}
1045
// Lazily creates the AVPlayer, registers KVO on its observed properties, and
// replays any state (mute, playback target, player item) that was set before
// the player existed.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayer(%p)", this);

    // Suppress notification callbacks while the player is being configured.
    setDelayCallbacks(true);

    m_avPlayer = adoptNS([[AVPlayer alloc] init]);
    for (NSString *keyName in playerKVOProperties())
        [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // WebCore drives media selection itself; disable AVFoundation's
    // automatic selection.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET) && !PLATFORM(IOS)
    if (m_shouldPlayToPlaybackTarget) {
        // Clear m_shouldPlayToPlaybackTarget so doesn't return without doing anything.
        m_shouldPlayToPlaybackTarget = false;
        setShouldPlayToPlaybackTarget(true);
    }
#endif

#if PLATFORM(IOS) && !PLATFORM(IOS_SIMULATOR)
    setShouldDisableSleep(player()->shouldDisableSleep());
#endif

    if (m_muted) {
        // Clear m_muted so setMuted doesn't return without doing anything.
        m_muted = false;
        [m_avPlayer.get() setMuted:m_muted];
    }

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    // Attach a player item that was created before the player existed.
    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
1094
// Lazily creates the AVPlayerItem for the current asset, wires up KVO and the
// did-play-to-end notification, configures pitch handling and the legible
// (caption) output, and attaches the item to the player when one exists.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem(%p)", this);

    // Suppress notification callbacks while the item is being configured.
    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([[AVPlayerItem alloc] initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // Observe "prior" values too so callbacks can see will-change events.
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Receive caption cues (WebVTT native representation) via a legible
    // output; WebCore renders them, so player rendering is suppressed.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([[AVPlayerItemLegibleOutput alloc] initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep an existing audio source provider pointed at the new item.
    if (m_provider) {
        m_provider->setPlayerItem(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }
#endif

    setDelayCallbacks(false);
}
1140
// Asynchronously loads the asset's "playable" and "tracks" keys (at most once
// per asset) and schedules an AssetPlayabilityKnown notification on the main
// thread when the load completes.
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::checkPlayability(%p)", this);
    auto weakThis = createWeakPtr();

    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObjects:@"playable", @"tracks", nil] completionHandler:^{
        // The completion handler can run on any thread; hop to the main
        // thread and bail if the player has been destroyed in the meantime.
        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
1157
// Kicks off asynchronous loading of asset-level metadata keys and, once the
// track list is known, of each track's metadata keys. A dispatch group joins
// all of the loads; metadataLoaded is delivered on the main thread only after
// every outstanding load has finished.
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);

    OSObjectPtr<dispatch_group_t> metadataLoadingGroup = adoptOSObject(dispatch_group_create());
    // Enter once for the asset-level load; left at the end of the main-thread
    // lambda below after any per-track loads have been entered.
    dispatch_group_enter(metadataLoadingGroup.get());
    auto weakThis = createWeakPtr();
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                // Enter the group once per track before starting that track's
                // asynchronous metadata load.
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup.get());
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup.get());
                    }];
                }
            }
            dispatch_group_leave(metadataLoadingGroup.get());
        });
    }];

    // Fires once all enters have been balanced by leaves.
    dispatch_group_notify(metadataLoadingGroup.get(), dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });
    });
}
1187
// Maps the cached AVPlayerItem state onto the cross-platform ItemStatus
// enumeration. The checks are ordered by priority (unknown/failed first,
// then buffering states), so the order of these early returns is significant.
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    if (m_cachedItemStatus == AVPlayerItemStatusUnknown)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    if (m_cachedItemStatus == AVPlayerItemStatusFailed)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    // None of the special states apply, so the item is ready to play.
    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1206
// Packages the underlying AVPlayer so clients can identify and reach the
// platform media backend.
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformMedia(%p)", this);

    PlatformMedia platformMedia;
    platformMedia.type = PlatformMedia::AVFoundationMediaPlayerType;
    platformMedia.media.avfMediaPlayer = m_avPlayer.get();
    return platformMedia;
}
1215
// Returns the layer used for inline rendering, or null until layer creation
// has been requested via createVideoLayer().
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    if (!m_haveBeenAskedToCreateLayer)
        return nullptr;

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    // The fullscreen layer manager owns the inline layer on these platforms.
    return m_videoFullscreenLayerManager->videoInlineLayer();
#else
    return m_videoLayer.get();
#endif
}
1224
1225 #if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
// Installs (or removes, when null) the layer hosting fullscreen video. The
// completion handler always runs, immediately when the layer is unchanged.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer, std::function<void()> completionHandler)
{
    if (m_videoFullscreenLayerManager->videoFullscreenLayer() == videoFullscreenLayer) {
        completionHandler();
        return;
    }

    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, completionHandler);

    // Move the text track (caption) representation into the new fullscreen
    // layer so captions follow the video.
    if (m_videoFullscreenLayerManager->videoFullscreenLayer() && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayerManager->videoFullscreenLayer() addSublayer:m_textTrackRepresentationLayer.get()];
    }

    updateDisableExternalPlayback();
}
1242
// Updates the fullscreen video frame and keeps the text track (caption)
// overlay bounds in sync with the new geometry.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
    syncTextTrackBounds();
}
1248
// Records the requested fullscreen video gravity and applies the matching
// AVFoundation layer gravity to the video layer, if one exists.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;

    if (!m_videoLayer)
        return;

    // Translate the WebCore gravity value into AVFoundation's layer gravity,
    // defaulting to aspect-preserving resize.
    NSString *videoGravity;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        videoGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        videoGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    }

    // Skip the layer update (and text track resync) when nothing changed.
    if ([m_videoLayer videoGravity] == videoGravity)
        return;

    [m_videoLayer setVideoGravity:videoGravity];
    syncTextTrackBounds();
}
1272
// Applies the requested fullscreen mode. Only iOS reacts, toggling the video
// layer's picture-in-picture flag; other platforms ignore the mode here.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenMode(MediaPlayer::VideoFullscreenMode mode)
{
#if PLATFORM(IOS)
    [m_videoLayer setPIPModeEnabled:(mode & MediaPlayer::VideoFullscreenModePictureInPicture)];
    updateDisableExternalPlayback();
#else
    UNUSED_PARAM(mode);
#endif
}
1282
1283 #endif // PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
1284
1285 #if PLATFORM(IOS)
// Returns the most recently cached timed metadata, or nil when none exists.
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    // RetainPtr::get() already yields nil for an empty pointer, so no
    // explicit null check is needed.
    return m_currentMetaData.get();
}
1292
// Returns the player item's access log serialized as a string, or the empty
// string when there is no player item.
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    // Serialize the log using the string encoding AVFoundation reports for it.
    AVPlayerItemAccessLog *itemLog = [m_avPlayerItem.get() accessLog];
    RetainPtr<NSString> serializedLog = adoptNS([[NSString alloc] initWithData:[itemLog extendedLogData] encoding:[itemLog extendedLogDataStringEncoding]]);

    return serializedLog.get();
}
1303
// Returns the player item's error log serialized as a string, or the empty
// string when there is no player item.
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    // Serialize the log using the string encoding AVFoundation reports for it.
    AVPlayerItemErrorLog *itemLog = [m_avPlayerItem.get() errorLog];
    RetainPtr<NSString> serializedLog = adoptNS([[NSString alloc] initWithData:[itemLog extendedLogData] encoding:[itemLog extendedLogDataStringEncoding]]);

    return serializedLog.get();
}
1314 #endif
1315
// Shows or hides the video layer without any implicit Core Animation
// transitions.
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    if (m_videoLayer)
        [m_videoLayer setHidden:!isVisible];
    [CATransaction commit];
}
1324     
// Starts playback by applying the requested rate to the AVPlayer. Callbacks
// are suppressed while the rate changes so intermediate KVO notifications do
// not leak out.
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPlay(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    double rate = requestedRate();
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1336
// Pauses playback. Pausing is modeled as setting the rate to zero; callbacks
// are suppressed while the rate changes so intermediate KVO notifications do
// not leak out.
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPause(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = 0;
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
1348
// Returns the media duration, preferring the player item's value (some assets
// never report a duration themselves) and falling back to the asset.
// Indefinite CMTime values map to positive infinity (e.g. live streams),
// and anything non-numeric maps to an invalid time.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will fetch the
    // answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();

    CMTime cmDuration;

    // Check the AVItem if we have one and it has loaded duration, some assets never report duration.
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        cmDuration = [m_avPlayerItem.get() duration];
    else
        cmDuration = [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return toMediaTime(cmDuration);

    if (CMTIME_IS_INDEFINITE(cmDuration))
        return MediaTime::positiveInfiniteTime();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %s", this, toString(MediaTime::invalidTime()).utf8().data());
    return MediaTime::invalidTime();
}
1373
// Returns the current playback position, clamped so a position is never
// negative; non-numeric item times are reported as zero.
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime playerItemTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(playerItemTime))
        return MediaTime::zeroTime();

    return std::max(toMediaTime(playerItemTime), MediaTime::zeroTime());
}
1385
// Seeks the player item to |time| within the given tolerances and reports
// completion asynchronously via seekCompleted() on the main thread.
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // A seek invalidates any partially-delivered metadata cues.
    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = toCMTime(time);
    CMTime cmBefore = toCMTime(negativeTolerance);
    CMTime cmAfter = toCMTime(positiveTolerance);

    // [AVPlayerItem seekToTime] will throw an exception if toleranceBefore is negative.
    if (CMTimeCompare(cmBefore, kCMTimeZero) < 0)
        cmBefore = kCMTimeZero;

    auto weakThis = createWeakPtr();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::seekToTime(%p) - calling seekToTime", this);

    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        // The completion handler can run on any thread; hop to the main
        // thread and bail if the player has been destroyed meanwhile.
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1418
// Applies the requested volume to the AVPlayer. On iOS the request is
// deliberately ignored; UNUSED_PARAM silences the unused-parameter warning.
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS)
    UNUSED_PARAM(volume);
    return;
#else
    if (!m_avPlayer)
        return;

    [m_avPlayer.get() setVolume:volume];
#endif
}
1431
// Updates the muted state, skipping no-op changes. The state is remembered
// even before the AVPlayer exists: createAVPlayer() replays m_muted when the
// player is eventually created.
void MediaPlayerPrivateAVFoundationObjC::setMuted(bool muted)
{
    if (m_muted == muted)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setMuted(%p) - set to %s", this, boolString(muted));

    m_muted = muted;

    if (m_avPlayer)
        [m_avPlayer.get() setMuted:m_muted];
}
1446
// No-op beyond logging: caption visibility is not applied here. The
// parameter is only consumed by the LOG below, which can compile to nothing,
// so UNUSED_PARAM silences the resulting warning.
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - set to %s", this, boolString(closedCaptionsVisible));
}
1456
// Applies the given playback rate to the AVPlayer. The rate is cached first
// so rate() reflects the requested value immediately; callbacks are
// suppressed while the player's rate changes.
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    setDelayCallbacks(true);
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1464
// Returns the cached playback rate, which is only meaningful once metadata
// has been loaded.
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    return metaDataAvailable() ? m_cachedRate : 0;
}
1472
// Selects the audio time-pitch algorithm: spectral (pitch-preserving) when
// pitch should be preserved, varispeed otherwise. No-op without a player item.
void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
{
    if (m_avPlayerItem)
        [m_avPlayerItem setAudioTimePitchAlgorithm:(preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];
}
1478
// Converts the cached loaded (buffered) CMTimeRanges into the
// platform-independent PlatformTimeRanges representation, skipping invalid
// or empty ranges. Returns an empty set when no player item exists.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto timeRanges = std::make_unique<PlatformTimeRanges>();

    if (!m_avPlayerItem)
        return timeRanges;

    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;
        timeRanges->add(toMediaTime(range.start), toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return timeRanges;
}
1493
// Earliest start across all valid cached seekable ranges; zero when there is
// no valid range at all.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    bool foundValidRange = false;
    MediaTime earliest = MediaTime::positiveInfiniteTime();
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (CMTIMERANGE_IS_VALID(range) && !CMTIMERANGE_IS_EMPTY(range)) {
            foundValidRange = true;
            MediaTime rangeStart = toMediaTime(range.start);
            if (earliest > rangeStart)
                earliest = rangeStart;
        }
    }
    return foundValidRange ? earliest : MediaTime::zeroTime();
}
1513
// Latest end across all valid seekable ranges. Lazily (re)populates the
// cached ranges from the player item when the cache is empty.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    MediaTime latest;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (CMTIMERANGE_IS_VALID(range) && !CMTIMERANGE_IS_EMPTY(range)) {
            MediaTime rangeEnd = toMediaTime(CMTimeRangeGetEnd(range));
            if (latest < rangeEnd)
                latest = rangeEnd;
        }
    }
    return latest;
}
1531
// Latest end across all valid cached loaded (buffered) ranges; zero when no
// ranges have been cached yet.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    MediaTime latest;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (CMTIMERANGE_IS_VALID(range) && !CMTIMERANGE_IS_EMPTY(range)) {
            MediaTime rangeEnd = toMediaTime(CMTimeRangeGetEnd(range));
            if (latest < rangeEnd)
                latest = rangeEnd;
        }
    }
    return latest;
}
1550
// Sums the sample data lengths of all cached tracks; the result is memoized
// in m_cachedTotalBytes after the first computation.
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    if (!m_cachedTotalBytes) {
        for (AVPlayerItemTrack *track in m_cachedTracks.get())
            m_cachedTotalBytes += [[track assetTrack] totalSampleDataLength];
    }

    return m_cachedTotalBytes;
}
1564
// Adopts the given asset as the current AVAsset. The parameter is received
// by value, so move it into the member to avoid an extra retain/release pair.
void MediaPlayerPrivateAVFoundationObjC::setAsset(RetainPtr<id> asset)
{
    m_avAsset = WTFMove(asset);
}
1569
// Derives the asset's loading status by polling the key-value loading state
// of each metadata key we asked AVFoundation to load, then gates "playable"
// on every track meeting the client's hardware-decode requirements.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];
#if !LOG_DISABLED
        if (error)
            LOG(Media, "MediaPlayerPrivateAVFoundation::assetStatus - statusOfValueForKey failed for %s, error = %s", [keyName UTF8String], [[error localizedDescription] UTF8String]);
#endif

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading; // At least one key is not loaded yet.

        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    // Compute (once) whether every track satisfies the hardware decode
    // requirements; a single failing track makes the asset non-playable.
    if (!m_tracksArePlayable) {
        m_tracksArePlayable = true;
        for (AVAssetTrack *track in [m_avAsset tracks]) {
            if (!assetTrackMeetsHardwareDecodeRequirements(track, player()->mediaContentTypesRequiringHardwareSupport())) {
                m_tracksArePlayable = false;
                break;
            }
        }
    }

    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue] && m_tracksArePlayable.value())
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1608
// Error code AVFoundation reported while loading the "playable" key, or 0
// when there is no asset or no error was reported.
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    if (!m_avAsset)
        return 0;

    NSError *error = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&error];
    return error ? [error code] : 0;
}
1618
// Paints the current video frame into |context| at |rect|. Prefers the video
// output path when a frame is available there, otherwise falls back to the
// image generator. ObjC exceptions raised by AVFoundation are contained by
// the BEGIN/END_BLOCK_OBJC_EXCEPTIONS guard.
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Note: the else below binds to this if even across the #endif.
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    // Record that at least one frame has been painted.
    m_videoFrameHasDrawn = true;
}
1639
// Best-effort paint request: do nothing when painting is disabled, when we
// are already rendering to a layer, or when no context renderer (image
// generator or video output) has been created yet.
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    bool shouldPaint = metaDataAvailable()
        && !context.paintingDisabled()
        && currentRenderingMode() != MediaRenderingToLayer
        && hasContextRenderer();

    if (shouldPaint)
        paintCurrentFrameInContext(context, rect);
}
1655
// Snapshots the current frame via the image generator and draws it into the
// context, flipping vertically to match CoreGraphics' coordinate system.
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext& context, const FloatRect& rect)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(%p)", this);

    RetainPtr<CGImageRef> snapshot = createImageForTimeInRect(currentTime(), rect);
    if (!snapshot)
        return;

    GraphicsContextStateSaver stateSaver(context);
    context.translate(rect.x(), rect.y() + rect.height());
    context.scale(FloatSize(1.0f, -1.0f));
    context.setImageInterpolationQuality(InterpolationLow);
    IntRect destinationRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context.platformContext(), CGRectMake(0, 0, destinationRect.width(), destinationRect.height()), snapshot.get());
}
1670
// Synchronously captures the frame at |time| via AVAssetImageGenerator,
// bounded to |rect|'s size, and converts the result to the sRGB color space.
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    // Timescale 600 for the requested time; the actual capture time is not
    // needed (actualTime:nil) and capture errors yield a null image.
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), sRGBColorSpaceRef()));

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return image;
}
1692
// Populates |supportedTypes| with every MIME type AVFoundation reports as
// playable (from the shared MIME type cache).
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& supportedTypes)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::getSupportedTypes");
    supportedTypes = AVFoundationMIMETypeCache::singleton().types();
} // Closing brace was missing in the source as given; restored.

#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// FairPlay Streaming (including its 1.0 alias) and Clear Key are the only
// key systems this engine recognizes.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringASCIICase(keySystem, "com.apple.fps")
        || equalIgnoringASCIICase(keySystem, "com.apple.fps.1_0")
        || equalIgnoringASCIICase(keySystem, "org.w3c.clearkey");
}
#endif
1707
// Engine-registry support query for a content type. Media-source and
// media-stream loads are handled by other engines; anything not in the
// static or AVFoundation MIME lists is rejected; otherwise the codecs
// string is checked via AVURLAsset.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(MEDIA_SOURCE)
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif
#if ENABLE(MEDIA_STREAM)
    if (parameters.isMediaStream)
        return MediaPlayer::IsNotSupported;
#endif
    if (isUnsupportedMIMEType(parameters.type))
        return MediaPlayer::IsNotSupported;

    if (!staticMIMETypeList().contains(parameters.type) && !AVFoundationMIMETypeCache::singleton().types().contains(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type, (NSString *)parameters.codecs];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1732
// Returns whether this engine supports the given EME key system / MIME type
// combination. Always false when legacy encrypted media is compiled out.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (!keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        // (Previously returned MediaPlayer::IsNotSupported — an enum value —
        // from this bool function; same runtime value, now the correct type.)
        if (equalIgnoringASCIICase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringASCIICase(mimeType, "application/x-mpegurl"))
            return false;

        if (!keySystemIsSupported(keySystem))
            return false;

        if (!mimeType.isEmpty() && isUnsupportedMIMEType(mimeType))
            return false;

        if (!mimeType.isEmpty() && !staticMIMETypeList().contains(mimeType) && !AVFoundationMIMETypeCache::singleton().types().contains(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1758
1759 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1760 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// Satisfies an AVAssetResourceLoadingRequest with (a slice of) |keyData|,
// honoring the data request's current offset and requested length, then
// marks the request finished.
static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
{
    if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
        [infoRequest setContentLength:keyData->byteLength()];
        [infoRequest setByteRangeAccessSupported:YES];
    }

    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
        long long start = [dataRequest currentOffset];
        // Clamp the end of the requested window to the key data's length.
        long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);

        // Ranges falling outside the key data finish with an error.
        if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
            [request finishLoadingWithError:nil];
            return;
        }

        ASSERT(start <= std::numeric_limits<int>::max());
        ASSERT(end <= std::numeric_limits<int>::max());
        RefPtr<ArrayBuffer> requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
        RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
        [dataRequest respondWithData:nsData.get()];
    }

    [request finishLoading];
}
1786 #endif
1787
// AVAssetResourceLoader delegate hook. Returns true when WebKit will service
// the request asynchronously (the loader must wait), false when the request
// was handled — or rejected — synchronously.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    // "skd" URLs identify FairPlay key requests.
    if (scheme == "skd") {
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        unsigned byteLength = initDataBuffer->byteLength();
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer.copyRef(), 0, byteLength);
        initDataView->set<uint32_t>(0, keyURISize, true); // little-endian size prefix

        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer.copyRef(), 4, keyURI.length());
        // NOTE(review): sizeof(unsigned char) is 1, so this copies
        // keyURI.length() UChars; if truncation was intended the divisor
        // looks wrong — confirm against the layout described above.
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

        RefPtr<Uint8Array> initData = Uint8Array::create(WTFMove(initDataBuffer), 0, byteLength);
        if (!player()->keyNeeded(initData.get()))
            return false;

        // Remember the request so it can be fulfilled once a key arrives.
        m_keyURIToRequestMap.set(keyURI, avRequest);
        return true;
    }

    if (scheme == "clearkey") {
        String keyID = [[[avRequest request] URL] resourceSpecifier];
        StringView keyIDView(keyID);
        CString utf8EncodedKeyId = UTF8Encoding().encode(keyIDView, URLEncodedEntitiesForUnencodables);

        RefPtr<Uint8Array> initData = Uint8Array::create(utf8EncodedKeyId.length());
        initData->setRange((JSC::Uint8Adaptor::Type*)utf8EncodedKeyId.data(), utf8EncodedKeyId.length(), 0);

        // If the key is already cached, answer the request immediately.
        auto keyData = player()->cachedKeyForKeyId(keyID);
        if (keyData) {
            fulfillRequestWithKeyData(avRequest, keyData.get());
            return false;
        }

        if (!player()->keyNeeded(initData.get()))
            return false;

        m_keyURIToRequestMap.set(keyID, avRequest);
        return true;
    }
#endif

    // All other schemes are serviced by the generic resource loader.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1841
// Bridges an NSURLAuthenticationChallenge (via CFNetwork when built with
// CFURLCONNECTION) to WebCore's AuthenticationChallenge and defers the
// decision to the MediaPlayer client.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForResponseToAuthenticationChallenge(NSURLAuthenticationChallenge* nsChallenge)
{
#if USE(CFURLCONNECTION)
    RefPtr<WebCoreNSURLAuthenticationChallengeClient> client = WebCoreNSURLAuthenticationChallengeClient::create(nsChallenge);
    RetainPtr<CFURLAuthChallengeRef> cfChallenge = adoptCF([nsChallenge _createCFAuthChallenge]);
    AuthenticationChallenge challenge(cfChallenge.get(), client.get());
#else
    AuthenticationChallenge challenge(nsChallenge);
#endif

    return player()->shouldWaitForResponseToAuthenticationChallenge(challenge);
}
1854
// Stops the resource loader servicing a cancelled request, if one exists.
// (A local holding the request URL's scheme was computed here but never
// used; removed.)
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest))
        resourceLoader->stopLoading();
}
1864
// Drops the bookkeeping entry for a loading request that has finished.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
1869 #endif
1870
// This engine is available only when both the AVFoundation and CoreMedia
// frameworks can be loaded.
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    if (!AVFoundationLibrary())
        return false;
    return isCoreMediaFrameworkAvailable();
}
1875
// Intended to snap |timeValue| to the media's timescale; currently an
// identity mapping in all cases (see the FIXME below).
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1884
// Window (seconds) during which a cached media time may be reused: disabled
// (0) on iOS and macOS 10.10+, 5 seconds on older macOS.
// NOTE(review): the consumer of this value lives in the base class — confirm.
double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 1010
    return 0;
#else
    return 5;
#endif
}
1893
// Applies the aspect-ratio policy to the video layer's gravity, except while
// in full screen where setVideoFullscreenGravity() owns the gravity.
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayerManager->videoFullscreenLayer())
        return;
#endif

    // Disable implicit actions so the gravity change is not animated.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    NSString* gravity = shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize;
    [m_videoLayer.get() setVideoGravity:gravity];
    [CATransaction commit];
}
1912
// Returns the first track in |tracks| whose isEnabled flag is set, or nil
// when no track is enabled.
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    for (AVAssetTrack *track in tracks) {
        if ([track isEnabled])
            return track;
    }
    return nil;
}
1922
// Called whenever the asset's / player item's track collection changes.
// Recomputes and caches hasVideo / hasAudio / hasClosedCaptions, refreshes
// the audio/video track lists and presentation size, and re-points the
// web-audio provider at the first enabled audible track.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the previous primary audio language so we can fire
    // characteristicsChanged() if it changes below.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    // Batch characteristic-changed notifications until the end of this method.
    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif
        auto size = firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize();
        // For videos with rotation tag set, the transformation above might return a CGSize instance with negative width or height.
        // See https://bugs.webkit.org/show_bug.cgi?id=172648.
        if (size.width() < 0)
            size.setWidth(-size.width());
        if (size.height() < 0)
            size.setHeight(-size.height());
        presentationSizeDidChange(size);
    } else {
        // A player item exists: classify its enabled tracks by media type.
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // Media selection groups can carry audio/video not exposed as tracks.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
#endif
#endif

        // Always says we have video if the AVPlayerLayer is ready for display to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Prefer the legible media selection group for caption discovery.
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    LOG(Media, "MediaPlayerPrivateAVFoundation:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s",
        this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    if (m_provider)
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
#endif

    setDelayCharacteristicsChangedNotification(false);
}
2034
2035 #if ENABLE(VIDEO_TRACK)
2036
// Diffs the AVPlayerItemTracks of type |trackType| in |tracks| against the
// existing wrapper items in |oldItems|: wraps newly-appeared tracks with
// |itemFactory|, rewrites |oldItems| in place, and notifies |player| of
// removals and additions via the given member-function pointers.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Current tracks of the requested media type.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Partition the old items into removed and surviving ones.
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    // Wrap each newly-appeared track.
    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the player only after oldItems reflects the new state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(*removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(*addedItem);
}
2080
2081 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2082
// Media-selection-group overload of the track-diffing helper: updates
// |group|'s options for |characteristics|, diffs the group's options against
// |oldItems|, wraps added options with |itemFactory|, rewrites |oldItems| in
// place, and notifies |player| of removals and additions.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, const Vector<String>& characteristics, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions(characteristics);

    // Collect the group's options that have a backing AVMediaSelectionOption.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOptionType* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        newSelectionOptions.add(option);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Items with no option, or whose option disappeared, are removed.
    for (auto& oldItem : oldItems) {
        if (!oldItem->mediaSelectionOption())
            removedItems.append(oldItem);
        else if (removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);
    
    // Notify the player only after oldItems reflects the new state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(*removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(*addedItem);
}
2142
2143 #endif
2144
// Rebuilds m_audioTracks — preferring the audible media-selection group when
// available, falling back to the cached AVPlayerItemTracks — then refreshes
// each track's cached properties.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    Vector<String> characteristics = player()->preferredAudioCharacteristics();
    // Lazily create the audible media-selection group.
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, characteristics);
    }

    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, characteristics, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
    else
#endif
        determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateAudioTracks(%p) - audio track count was %lu, is %lu", this, count, m_audioTracks.size());
#endif
}
2171
// Rebuilds m_videoTracks from the cached AVPlayerItemTracks and, when
// available, the visual media-selection group, then refreshes each video
// track's cached properties.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create the visual media-selection group.
    if (!m_visualGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, Vector<String>());
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, Vector<String>(), &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
#endif

    // Fix: this loop previously iterated m_audioTracks (a copy/paste slip
    // from updateAudioTracks()); it is the *video* tracks whose cached
    // properties must be refreshed here.
    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateVideoTracks(%p) - video track count was %lu, is %lu", this, count, m_videoTracks.size());
#endif
}
2197
// A native text track representation is required only while a fullscreen
// layer is active.
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    if (m_videoFullscreenLayerManager->videoFullscreenLayer())
        return true;
#endif
    return false;
}
2206
// Keeps the text track representation layer's frame aligned with the video
// content inside the fullscreen layer.
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    if (!m_videoFullscreenLayerManager->videoFullscreenLayer() || !m_textTrackRepresentationLayer)
        return;

    FloatRect videoFullscreenFrame = m_videoFullscreenLayerManager->videoFullscreenFrame();
    // Prefer the actual video rect; fall back to the whole fullscreen frame.
    CGRect textFrame = m_videoLayer ? [m_videoLayer videoRect] : CGRectMake(0, 0, videoFullscreenFrame.width(), videoFullscreenFrame.height());
    [m_textTrackRepresentationLayer setFrame:textFrame];
#endif
}
2218
// Installs (or removes, when |representation| is null) the platform layer
// used to render text track cues during fullscreen playback, keeping its
// bounds in sync with the video.
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    PlatformLayer* representationLayer = representation ? representation->platformLayer() : nil;
    // Same layer as before: just resync its bounds.
    if (representationLayer == m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        return;
    }

    if (m_textTrackRepresentationLayer)
        [m_textTrackRepresentationLayer removeFromSuperlayer];

    m_textTrackRepresentationLayer = representationLayer;

    // Attach the new layer only while a fullscreen layer exists to host it.
    if (m_videoFullscreenLayerManager->videoFullscreenLayer() && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayerManager->videoFullscreenLayer() addSublayer:m_textTrackRepresentationLayer.get()];
    }

#else
    UNUSED_PARAM(representation);
#endif
}
2242
2243 #endif // ENABLE(VIDEO_TRACK)
2244
2245 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
2246
// Lazily create the Web Audio source provider for this player item, wiring it
// to the first enabled audible track. Returns the cached provider thereafter.
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    if (!m_provider) {
        m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }
    return m_provider.get();
}
2255
2256 #endif
2257
// Propagate the cached presentation size as the natural size. Without an asset
// there is nothing meaningful to report, so do nothing.
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (m_avAsset)
        setNaturalSize(m_cachedPresentationSize);
}
2265     
// Returns true when the URL the media actually resolved to shares a
// scheme/host/port with the URL that was requested (i.e. no cross-origin
// redirect occurred). Conservatively false until "resolvedURL" has loaded.
bool MediaPlayerPrivateAVFoundationObjC::hasSingleSecurityOrigin() const 
{
    if (!m_avAsset || [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] != AVKeyValueStatusLoaded)
        return false;
    
    Ref<SecurityOrigin> resolvedOrigin(SecurityOrigin::create(resolvedURL()));
    Ref<SecurityOrigin> requestedOrigin(SecurityOrigin::createFromString(assetURL()));
    return resolvedOrigin->isSameSchemeHostPort(requestedOrigin.get());
}
2275
// Asks the WebCoreNSURLSession backing the resource loader whether all loads
// passed CORS checks. Returns false when the NSURLSession loading path is
// disabled or unavailable on this OS version.
bool MediaPlayerPrivateAVFoundationObjC::didPassCORSAccessCheck() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED > 101100
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    if (!Settings::isAVFoundationNSURLSessionEnabled()
        || ![resourceLoader respondsToSelector:@selector(URLSession)])
        return false;

    // Only our own WebCoreNSURLSession subclass can answer this question.
    WebCoreNSURLSession *session = (WebCoreNSURLSession *)resourceLoader.URLSession;
    if ([session isKindOfClass:[WebCoreNSURLSession class]])
        return session.didPassCORSAccessChecks;
#endif
    return false;
}
2290
2291 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
2292
// Create and attach the AVPlayerItemVideoOutput used to pull pixel buffers for
// painting. No-op without a player item or when an output already exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // With VideoToolbox available, let AVFoundation pick the native format;
    // conversion happens later in the PixelBufferConformer.
    NSDictionary* attributes = nil;
#else
    NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey, nil];
#endif
    m_videoOutput = adoptNS([[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p) - returning %p", this, m_videoOutput.get());
}
2314
// Detach and release the AVPlayerItemVideoOutput created by createVideoOutput().
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    // The player item may already have been torn down; only detach if it exists.
    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput(%p) - destroying  %p", this, m_videoOutput.get());

    m_videoOutput = 0;
}
2326
// Pull a pixel buffer for the item's current time from the video output,
// creating the output on demand. Returns null when no new buffer is available
// (e.g. the buffer for this time was already retrieved).
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::createPixelBuffer()
{
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return 0;

    return adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
}
2340
// Returns true when a frame can be painted right now: either a frame was
// already captured (m_lastImage) or the video output has a new pixel buffer
// for the current time. Creates the output lazily.
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;

    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    return [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]];
}
2354
// Refresh m_lastImage from the video output: grab the newest pixel buffer and
// convert it to a native (CG) image via the lazily-created pixel buffer conformer.
void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
{
    RetainPtr<CVPixelBufferRef> pixelBuffer = createPixelBuffer();

    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
    // for the requested time has already been retrieved. In this case, the last valid image (if any)
    // should be displayed.
    if (!pixelBuffer)
        return;

    if (!m_pixelBufferConformer) {
#if USE(VIDEOTOOLBOX)
        // Conform whatever native format the output produced to 32BGRA for drawing.
        NSDictionary *attributes = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#else
        NSDictionary *attributes = nil;
#endif
        m_pixelBufferConformer = std::make_unique<PixelBufferConformerCV>((CFDictionaryRef)attributes);
    }

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    m_lastImage = m_pixelBufferConformer->createImageFromPixelBuffer(pixelBuffer.get());

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateLastImage(%p) - creating buffer took %.4f", this, narrowPrecisionToFloat(duration));
#endif
}
2385
// Paint the current video frame into `context` at `outputRect`, applying the
// video track's preferred transform (e.g. rotation). Blocks briefly waiting for
// a frame if the output has none yet.
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext& context, const FloatRect& outputRect)
{
    if (m_videoOutput && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();

    updateLastImage();

    if (!m_lastImage)
        return;

    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(%p)", this);

    GraphicsContextStateSaver stateSaver(context);
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
    // Draw in track space: concat the preferred transform onto the context, and
    // map the destination rect back through its inverse (identity if singular).
    FloatRect transformedOutputRect = videoTransform.inverse().value_or(AffineTransform()).mapRect(outputRect);

    context.concatCTM(videoTransform);
    context.drawNativeImage(m_lastImage.get(), imageRect.size(), transformedOutputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();

}
2416
// Create and attach a second AVPlayerItemVideoOutput whose buffers are
// IOSurface-backed and FBO-compatible, for uploading frames to OpenGL textures.
void MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_openGLVideoOutput)
        return;

#if PLATFORM(IOS)
    NSDictionary* attributes = @{(NSString *)kCVPixelBufferIOSurfaceOpenGLESFBOCompatibilityKey: @YES};
#else
    NSDictionary* attributes = @{(NSString *)kCVPixelBufferIOSurfaceOpenGLFBOCompatibilityKey: @YES};
#endif
    m_openGLVideoOutput = adoptNS([[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:attributes]);
    ASSERT(m_openGLVideoOutput);

    [m_avPlayerItem.get() addOutput:m_openGLVideoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput(%p) - returning %p", this, m_openGLVideoOutput.get());
}
2436
// Detach and release the OpenGL-oriented video output created by
// createOpenGLVideoOutput().
void MediaPlayerPrivateAVFoundationObjC::destroyOpenGLVideoOutput()
{
    if (!m_openGLVideoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_openGLVideoOutput.get()];
    // Log the output being destroyed; the original logged m_videoOutput here by
    // copy/paste mistake, which reported the wrong object.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyOpenGLVideoOutput(%p) - destroying  %p", this, m_openGLVideoOutput.get());

    m_openGLVideoOutput = nil;
}
2448
// Refresh m_lastOpenGLImage with the newest pixel buffer from the OpenGL video
// output, keyed to the host display time. Leaves the previous image in place
// when no newer buffer is available.
void MediaPlayerPrivateAVFoundationObjC::updateLastOpenGLImage()
{
    if (!m_openGLVideoOutput)
        return;

    CMTime currentTime = [m_openGLVideoOutput itemTimeForHostTime:CACurrentMediaTime()];
    if (![m_openGLVideoOutput hasNewPixelBufferForItemTime:currentTime])
        return;

    m_lastOpenGLImage = adoptCF([m_openGLVideoOutput copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
}
2460
// Copy the latest video frame into the caller-provided GL texture. Returns
// false when the request cannot be satisfied (unsupported flags, no frame yet,
// or the texture cache cannot be created). flipY/premultiplyAlpha are not
// supported by this path and are rejected up front.
bool MediaPlayerPrivateAVFoundationObjC::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    if (flipY || premultiplyAlpha)
        return false;

    ASSERT(context);

    if (!m_openGLVideoOutput)
        createOpenGLVideoOutput();

    updateLastOpenGLImage();

    if (!m_lastOpenGLImage)
        return false;

    size_t width = CVPixelBufferGetWidth(m_lastOpenGLImage.get());
    size_t height = CVPixelBufferGetHeight(m_lastOpenGLImage.get());

    // Lazily create the CV texture cache bound to this GL context.
    if (!m_textureCache) {
        m_textureCache = TextureCacheCV::create(*context);
        if (!m_textureCache)
            return false;
    }

    RetainPtr<CVOpenGLTextureRef> videoTexture = m_textureCache->textureFromImage(m_lastOpenGLImage.get(), outputTarget, level, internalFormat, format, type);

    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierCV>(*context);

    return m_videoTextureCopier->copyVideoTextureToPlatformTexture(videoTexture.get(), width, height, outputTexture, outputTarget, level, internalFormat, format, type, premultiplyAlpha, flipY);
}
2492
// Refresh the cached frame image and return it (may be null if no frame has
// ever been produced).
NativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage;
}
2498
// Block the current thread (up to 1 second) until the video output signals that
// new media data is available. outputMediaDataWillChange() signals the semaphore
// from the pull-delegate queue.
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for 1 second.
    long result = dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(0, 1 * NSEC_PER_SEC));

    // Non-zero result means the wait timed out rather than being signaled.
    if (result)
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange(%p) timed out", this);
}
2512
// Delegate callback from the video output: wake any thread blocked in
// waitForVideoOutputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutputType *)
{
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
2517
2518 #endif
2519
2520 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
2521
// Remove and return the pending resource-loading request (if any) that was
// registered for the given key URI.
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
2526
// Called when a decryption key becomes available: fulfill every pending loading
// request whose key is now cached by the player, then drop those entries from
// the pending-request map.
void MediaPlayerPrivateAVFoundationObjC::keyAdded()
{
    Vector<String> fulfilledKeyIds;

    for (auto& pair : m_keyURIToRequestMap) {
        const String& keyId = pair.key;
        const RetainPtr<AVAssetResourceLoadingRequest>& request = pair.value;

        auto keyData = player()->cachedKeyForKeyId(keyId);
        if (!keyData)
            continue;

        fulfillRequestWithKeyData(request.get(), keyData.get());
        fulfilledKeyIds.append(keyId);
    }

    // Remove after iteration: mutating the map while iterating it is not safe.
    for (auto& keyId : fulfilledKeyIds)
        m_keyURIToRequestMap.remove(keyId);
}
2546
// Detach the CDM session; asserts the session being removed is the one we hold.
void MediaPlayerPrivateAVFoundationObjC::removeSession(CDMSession& session)
{
    ASSERT_UNUSED(session, &session == m_session);
    m_session = nullptr;
}
2552
// Create a CDM session for the given key system, or null when the key system is
// unsupported. A weak pointer to the session is retained for error reporting.
std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem, CDMSessionClient* client)
{
    if (!keySystemIsSupported(keySystem))
        return nullptr;
    auto session = std::make_unique<CDMSessionAVFoundationObjC>(this, client);
    m_session = session->createWeakPtr();
    return WTFMove(session);
}
2561
// Forward an output-protection (HDCP) failure to the CDM session as an error.
// The 'HDCP' four-char code is used as the error code.
void MediaPlayerPrivateAVFoundationObjC::outputObscuredDueToInsufficientExternalProtectionChanged(bool newValue)
{
    if (m_session && newValue)
        m_session->playerDidReceiveError([NSError errorWithDomain:@"com.apple.WebKit" code:'HDCP' userInfo:nil]);
}
2567
2568 #endif
2569
2570 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
2571
// Reconcile our list of legacy closed-caption text tracks with the player
// item's current tracks: keep wrappers whose AVPlayerItemTrack still exists,
// create wrappers for new CC tracks, and retire the rest.
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    // Assume every existing track was removed; any track matched against a
    // current AVPlayerItemTrack below is taken back off this list.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            // Index into removedTextTracks, not m_textTracks: once entries have
            // been removed from removedTextTracks (or appended to m_textTracks),
            // indices into the two vectors no longer correspond, so the original
            // m_textTracks[i - 1] could inspect the wrong track. This also matches
            // processMediaSelectionOptions() below.
            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(removedTextTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;
        
        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2606
2607 #endif
2608
// Return the asset's audible tracks, but only once the asynchronous "tracks"
// key has finished loading; otherwise return nil rather than touching
// not-yet-loaded asset state.
NSArray* MediaPlayerPrivateAVFoundationObjC::safeAVAssetTracksForAudibleMedia()
{
    if (!m_avAsset || [m_avAsset.get() statusOfValueForKey:@"tracks" error:NULL] != AVKeyValueStatusLoaded)
        return nil;

    return [m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible];
}
2619
2620 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2621
// True once the asset's "availableMediaCharacteristicsWithMediaSelectionOptions"
// key has finished loading asynchronously; media selection groups must not be
// queried before then.
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    return m_avAsset && [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2632
// The asset's legible (caption/subtitle) selection group, or nil until media
// selection groups have loaded.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible] : nil;
}
2640
// The asset's audible selection group, or nil until media selection groups
// have loaded.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible] : nil;
}
2648
// The asset's visual selection group, or nil until media selection groups
// have loaded.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual] : nil;
}
2656
// Reconcile our text track list with the legible media-selection options the
// asset currently offers: keep tracks whose option still exists, create tracks
// for new options (in-band or out-of-band), and retire the rest.
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Assume all current tracks were removed; matched tracks are taken back off
    // this list below.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy CC tracks are handled elsewhere and never match a selection option.
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2712
// Lazily create the single in-band metadata text track (HLS timed metadata)
// and register it with the player. Idempotent.
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    if (m_metadataTrack)
        return;

    m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
    m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
    player()->addTextTrack(*m_metadataTrack);
}
2722
// Forward cue payloads (attributed strings and/or native samples) delivered by
// the legible output to the currently selected text track.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    ASSERT(time >= MediaTime::zeroTime());

    // Cues arriving while no track is selected are dropped.
    if (!m_currentTextTrack)
        return;

    m_currentTextTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), reinterpret_cast<CFArrayRef>(nativeSamples), time);
}
2732
// Discard any partially-accumulated cue state on the current text track
// (e.g. after a seek).
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::flushCues(%p)", this);

    if (!m_currentTextTrack)
        return;
    
    m_currentTextTrack->resetCueValues();
}
2742
2743 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2744
// Select `track` (or deselect all, when null) as the active text track,
// routing through the appropriate AVFoundation mechanism for its category:
// legacy CC display, out-of-band selection option, or in-band selection option.
// NOTE(review): the if/else chain below is interleaved with #pragma/#if blocks;
// the `else` branches intentionally pair with the `if` statements around the
// pragmas — keep the statement structure intact when editing.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");
        
    m_currentTextTrack = track;

    if (track) {
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#pragma clang diagnostic pop
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
        // Deselecting: clear both the media selection and legacy CC display.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
#pragma clang diagnostic pop
    }

}
2779
// Determine (and cache) the language of the primary audio track: prefer the
// currently selected audible media-selection option; otherwise, if the asset
// has exactly one audio track, use that track's language; otherwise empty.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    // Cached result; cleared elsewhere when tracks change.
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
#pragma clang diagnostic pop
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        m_languageOfPrimaryAudioTrack = emptyString();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - %lu audio tracks, returning emptyString()", this, static_cast<unsigned long>(tracks ? [tracks count] : 0));
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

#if !LOG_DISABLED
    if (m_languageOfPrimaryAudioTrack == emptyString())
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
    else
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
#endif

    return m_languageOfPrimaryAudioTrack;
}
2824
2825 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// Report whether playback is currently routed to a wireless target. On macOS
// this consults the explicit playback target (AVFoundation route or mock);
// on iOS it reflects the AVPlayer's externalPlaybackActive state directly.
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    bool wirelessTarget = false;

#if !PLATFORM(IOS)
    if (m_playbackTarget) {
        if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation)
            wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
        else
            // Mock target: active only while we both opted in and a route exists.
            wirelessTarget = m_shouldPlayToPlaybackTarget && m_playbackTarget->hasActiveRoute();
    }
#else
    wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
#endif

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless(%p) - returning %s", this, boolString(wirelessTarget));

    return wirelessTarget;
}
2845
// Map the platform's external-device type to MediaPlayer's target-type enum.
// On macOS the only wireless target type is AirPlay.
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

#if PLATFORM(IOS)
    switch (wkExernalDeviceTypeForPlayer(m_avPlayer.get())) {
    case wkExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case wkExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case wkExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;

#else
    return MediaPlayer::TargetTypeAirPlay;
#endif
}
2868
// Human-readable name of the wireless playback device: the explicit target's
// device name on macOS, the external device display name on iOS. Empty when
// there is no player.
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();

    String wirelessTargetName;
#if !PLATFORM(IOS)
    if (m_playbackTarget)
        wirelessTargetName = m_playbackTarget->deviceName();
#else
    wirelessTargetName = wkExernalDeviceDisplayNameForPlayer(m_avPlayer.get());
#endif
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName(%p) - returning %s", this, wirelessTargetName.utf8().data());

    return wirelessTargetName;
}
2885
// Wireless video playback is "disabled" when external playback is not allowed.
// Refreshes the cached flag from the AVPlayer when one exists; otherwise
// answers from the cache.
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;

    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled(%p) - returning %s", this, boolString(!m_allowsWirelessVideoPlayback));

    return !m_allowsWirelessVideoPlayback;
}
2896
// Enable/disable external (wireless) playback on the AVPlayer, caching the
// value so it can be applied when a player is created later.
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(%p) - %s", this, boolString(disabled));
    m_allowsWirelessVideoPlayback = !disabled;
    if (!m_avPlayer)
        return;

    // Delay KVO callbacks while mutating the player so we don't re-enter.
    setDelayCallbacks(true);
    [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
    setDelayCallbacks(false);
}
2908
2909 #if !PLATFORM(IOS)
2910
// Adopt a new wireless playback target. An AVFoundation target supplies an
// AVOutputContext; other target types (e.g. mock) clear it. If the new target
// has no active route, stop playing to it.
void MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
{
    m_playbackTarget = WTFMove(target);

    m_outputContext = m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation ? toMediaPlaybackTargetMac(m_playbackTarget.get())->outputContext() : nullptr;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(%p) - target = %p, device name = %s", this, m_outputContext.get(), m_playbackTarget->deviceName().utf8().data());

    if (!m_playbackTarget->hasActiveRoute())
        setShouldPlayToPlaybackTarget(false);
}
2922
// Start or stop routing playback to the current wireless target. For an
// AVFoundation target this installs/clears the AVPlayer's outputContext; for
// the mock target it just notifies that the wireless state changed.
void MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(bool shouldPlay)
{
    if (m_shouldPlayToPlaybackTarget == shouldPlay)
        return;

    m_shouldPlayToPlaybackTarget = shouldPlay;

    if (!m_playbackTarget)
        return;

    if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation) {
        AVOutputContext *newContext = shouldPlay ? m_outputContext.get() : nil;

        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(%p) - target = %p, shouldPlay = %s", this, newContext, boolString(shouldPlay));

        if (!m_avPlayer)
            return;

        // Avoid redundant (and potentially disruptive) context swaps.
        RetainPtr<AVOutputContext> currentContext = m_avPlayer.get().outputContext;
        if ((!newContext && !currentContext.get()) || [currentContext.get() isEqual:newContext])
            return;

        setDelayCallbacks(true);
        m_avPlayer.get().outputContext = newContext;
        setDelayCallbacks(false);

        return;
    }

    ASSERT(m_playbackTarget->targetType() == MediaPlaybackTarget::Mock);

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(%p) - target = {Mock}, shouldPlay = %s", this, boolString(shouldPlay));

    // Mock target: asynchronously notify on the main thread; the weak pointer
    // protects against this object being destroyed before the notification runs.
    setDelayCallbacks(true);
    auto weakThis = createWeakPtr();
    scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis] {
        if (!weakThis)
            return;
        weakThis->playbackTargetIsWirelessDidChange();
    }));
    setDelayCallbacks(false);
}
2965
2966 #endif // !PLATFORM(IOS)
2967
// iOS only: mirror to an external screen only while in standard fullscreen mode.
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
#if PLATFORM(IOS)
    if (!m_avPlayer)
        return;

    [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:player()->fullscreenMode() & MediaPlayer::VideoFullscreenModeStandard];
#endif
}
2977
2978 #endif
2979
// KVO notification: cache the AVPlayerItem status and recompute player state.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;

    updateStates();
}
2986
// KVO prior-notification: a likelyToKeepUp change is in flight; defer state
// updates until the matching DidChange balances this counter.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    m_pendingStatusChanges++;
}
2991
// KVO notification: cache the new likelyToKeepUp value; recompute state only
// once all outstanding WillChange notifications have been balanced.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
3000
// KVO prior-notification: a bufferEmpty change is in flight (see DidChange).
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    m_pendingStatusChanges++;
}
3005
// KVO notification: cache the new bufferEmpty value; recompute state only once
// all outstanding WillChange notifications have been balanced.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
3014
// Prior-notification half of the "playbackBufferFull" KVO pair; defers
// state updates until the corresponding DidChange fires.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    m_pendingStatusChanges++;
}
3019
// Completes the WillChange/DidChange pair for "playbackBufferFull".
// Caches the value; updateStates() runs only once every pending status
// change has been delivered.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
3028
// KVO relay for the AVPlayerItem's "seekableTimeRanges" key. Caches the new
// range array, notifies listeners, then re-runs the state machine.
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray> seekableRanges)
{
    m_cachedSeekableRanges = seekableRanges;

    seekableTimeRangesChanged();
    updateStates();
}
3036
// KVO relay for the AVPlayerItem's "loadedTimeRanges" key. Caches the new
// range array, notifies listeners, then re-runs the state machine.
void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray> loadedRanges)
{
    m_cachedLoadedRanges = loadedRanges;

    loadedTimeRangesChanged();
    updateStates();
}
3044
// KVO relay for the AVPlayerLayer's "readyForDisplay" key. If the first
// frame became displayable before any video track was reported, re-run track
// processing so the presence of video is re-evaluated — presumably covering
// streams whose tracks arrive late (TODO: confirm against callers).
void MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange(bool isReady)
{
    m_cachedIsReadyForDisplay = isReady;
    if (!hasVideo() && isReady)
        tracksChanged();
    updateStates();
}
3052
// KVO relay for an AVPlayerItemTrack's "enabled" key. The new value itself
// is unused; any toggle triggers full track re-processing.
void MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange(bool)
{
    tracksChanged();
    updateStates();
}
3058
3059 void MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(bool shouldBuffer)
3060 {
3061     LOG(Media, "MediaPlayerPrivateAVFoundationObjC::shouldBufferData(%p) - %s", this, boolString(shouldBuffer));
3062     if (m_shouldBufferData == shouldBuffer)
3063         return;
3064
3065     m_shouldBufferData = shouldBuffer;
3066     
3067     if (!m_avPlayer)
3068         return;
3069
3070     setAVPlayerItem(shouldBuffer ? m_avPlayerItem.get() : nil);
3071 }
3072
3073 #if ENABLE(DATACUE_VALUE)
3074
// Maps an AVFoundation metadata key space to the AtomicString cue type used
// for data cues. Unrecognized key spaces yield the empty atom; a nil key
// space also falls through safely because every isEqualToString: message to
// nil returns NO.
static const AtomicString& metadataType(NSString *avMetadataKeySpace)
{
    static NeverDestroyed<const AtomicString> quickTimeUserDataType("com.apple.quicktime.udta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> isoUserDataType("org.mp4ra", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> quickTimeMetadataType("com.apple.quicktime.mdta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> iTunesMetadataType("com.apple.itunes", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> id3MetadataType("org.id3", AtomicString::ConstructFromLiteral);

    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeUserData])
        return quickTimeUserDataType;
    // AVMetadataKeySpaceISOUserData may be null at runtime (presumably a
    // soft-linked constant — TODO confirm), so test it before comparing.
    if (AVMetadataKeySpaceISOUserData && [avMetadataKeySpace isEqualToString:AVMetadataKeySpaceISOUserData])
        return isoUserDataType;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeMetadata])
        return quickTimeMetadataType;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceiTunes])
        return iTunesMetadataType;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceID3])
        return id3MetadataType;

    return emptyAtom;
}
3096
3097 #endif
3098
// KVO relay for the AVPlayerItem's "timedMetadata" key. Caches the incoming
// AVMetadataItem array (normalizing NSNull to nil) and, when DATACUE_VALUE is
// enabled, converts the items into data cues on the metadata text track.
// mediaTime is the item's currentTime at the moment the metadata arrived.
void MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(RetainPtr<NSArray> metadata, const MediaTime& mediaTime)
{
    m_currentMetaData = metadata && ![metadata isKindOfClass:[NSNull class]] ? metadata : nil;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(%p) - adding %i cues at time %s", this, m_currentMetaData ? static_cast<int>([m_currentMetaData.get() count]) : 0, toString(mediaTime).utf8().data());

#if ENABLE(DATACUE_VALUE)
    // Ignore metadata that arrives mid-seek; cues would carry stale times.
    if (seeking())
        return;

    // Lazily create the metadata track the cues are attached to.
    if (!m_metadataTrack)
        processMetadataTrack();

    // An empty/NSNull payload means "no current metadata": close out any
    // still-open cues at the current time and stop.
    if (!metadata || [metadata isKindOfClass:[NSNull class]]) {
        m_metadataTrack->updatePendingCueEndTimes(mediaTime);
        return;
    }

    // Set the duration of all incomplete cues before adding new ones.
    // First pass: find the earliest (clamped-to-zero) start time among the
    // new items; open cues end where the new batch begins.
    MediaTime earliestStartTime = MediaTime::positiveInfiniteTime();
    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = std::max(toMediaTime(item.time), MediaTime::zeroTime());
        if (start < earliestStartTime)
            earliestStartTime = start;
    }
    m_metadataTrack->updatePendingCueEndTimes(earliestStartTime);

    // Second pass: add a data cue per item. Items without a valid duration
    // get an open-ended (infinite) end time, to be closed by a later batch.
    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = std::max(toMediaTime(item.time), MediaTime::zeroTime());
        MediaTime end = MediaTime::positiveInfiniteTime();
        if (CMTIME_IS_VALID(item.duration))
            end = start + toMediaTime(item.duration);

        // The cue type is derived from the item's key space (see metadataType()).
        AtomicString type = nullAtom;
        if (item.keySpace)
            type = metadataType(item.keySpace);

        m_metadataTrack->addDataCue(start, end, SerializedPlatformRepresentationMac::create(item), type);
    }
#endif
}
3140
// KVO relay for the AVPlayerItem's "tracks" key. Rebuilds m_cachedTracks,
// filtering out streaming tracks that are represented by a media selection
// group (those are exposed through the selection-group path instead), and
// keeps per-track "enabled" KVO registrations in sync with the new set.
void MediaPlayerPrivateAVFoundationObjC::tracksDidChange(RetainPtr<NSArray> tracks)
{
    // Unregister from the outgoing track set before replacing it, so no
    // observation is leaked or duplicated.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];

    NSArray *assetTracks = [m_avAsset tracks];

    m_cachedTracks = [tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id obj, NSUInteger, BOOL*) {
        AVAssetTrack* assetTrack = [obj assetTrack];

        // Tracks backed by the asset itself are always kept.
        if ([assetTracks containsObject:assetTrack])
            return YES;

        // Track is a streaming track. Omit if it belongs to a valid AVMediaSelectionGroup.
        if (!hasLoadedMediaSelectionGroups())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicAudible] && safeMediaSelectionGroupForAudibleMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicVisual] && safeMediaSelectionGroupForVisualMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible] && safeMediaSelectionGroupForLegibleMedia())
            return NO;

        return YES;
    }]];

    // Observe "enabled" on each retained track; changes arrive via
    // trackEnabledDidChange() through the player-item-track KVO context.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track addObserver:m_objcObserver.get() forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayerItemTrack];

    // The track set changed, so any cached byte-size total is stale.
    m_cachedTotalBytes = 0;

    tracksChanged();
    updateStates();
}
3178
// KVO relay for the AVPlayerItem's "hasEnabledAudio" key. Caches the value,
// then re-runs track processing and the state machine.
void MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange(bool hasEnabledAudio)
{
    m_cachedHasEnabledAudio = hasEnabledAudio;

    tracksChanged();
    updateStates();
}
3186
// KVO relay for the AVPlayerItem's "presentationSize" key. Caches the size,
// notifies listeners, then re-runs the state machine.
void MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange(FloatSize size)
{
    m_cachedPresentationSize = size;

    sizeChanged();
    updateStates();
}
3194
// KVO relay for the AVPlayerItem's "duration" key. Caches the new duration
// and invalidates the base class's cached value so the next query recomputes.
void MediaPlayerPrivateAVFoundationObjC::durationDidChange(const MediaTime& duration)
{
    m_cachedDuration = duration;

    invalidateCachedDuration();
}
3201
// KVO relay for the AVPlayer's "rate" key. Caches the rate, re-runs the
// state machine, then notifies listeners of the rate change.
void MediaPlayerPrivateAVFoundationObjC::rateDidChange(double rate)
{
    m_cachedRate = rate;

    updateStates();
    rateChanged();
}
3209
3210 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
3211
// KVO relay for the AVPlayer's "externalPlaybackActive" /
// "allowsExternalPlayback" keys; forwards to the generic notification.
void MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange()
{
    playbackTargetIsWirelessChanged();
}
3216
3217 #endif
3218
// KVO relay for the AVPlayerItem's "canPlayFastForward" key; cache only, no
// state-machine update.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange(bool newValue)
{
    m_cachedCanPlayFastForward = newValue;
}
3223
// KVO relay for the AVPlayerItem's "canPlayFastReverse" key; cache only, no
// state-machine update.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange(bool newValue)
{
    m_cachedCanPlayFastReverse = newValue;
}
3228
// Returns the asset's resolved URL once AVFoundation has finished loading
// the "resolvedURL" key; until then (or without an asset) defers to the
// base class's value.
URL MediaPlayerPrivateAVFoundationObjC::resolvedURL() const
{
    if (m_avAsset && [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] == AVKeyValueStatusLoaded)
        return URL([m_avAsset resolvedURL]);

    return MediaPlayerPrivateAVFoundation::resolvedURL();
}
3236
// Toggles the AVPlayer SPI that keeps the device awake during video
// playback. iOS-device-only (the underscore-prefixed SPI is absent in the
// simulator build); elsewhere the flag is ignored.
void MediaPlayerPrivateAVFoundationObjC::setShouldDisableSleep(bool flag)
{
#if PLATFORM(IOS) && !PLATFORM(IOS_SIMULATOR)
    [m_avPlayer _setPreventsSleepDuringVideoPlayback:flag];
#else
    UNUSED_PARAM(flag);
#endif
}
3245
// Keys loaded asynchronously on the AVAsset before it is usable. The array
// is created once and intentionally never released.
NSArray* assetMetadataKeyNames()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:
        @"duration",
        @"naturalSize",
        @"preferredTransform",
        @"preferredVolume",
        @"preferredRate",
        @"playable",
        @"resolvedURL",
        @"tracks",
        @"availableMediaCharacteristicsWithMediaSelectionOptions",
    nil];
    return keys;
}
3261
// Key paths observed via KVO on the AVPlayerItem; each has a matching
// *DidChange (and sometimes *WillChange) handler dispatched from
// -observeValueForKeyPath:. The array is created once and never released.
NSArray* itemKVOProperties()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:
        @"presentationSize",
        @"status",
        @"asset",
        @"tracks",
        @"seekableTimeRanges",
        @"loadedTimeRanges",
        @"playbackLikelyToKeepUp",
        @"playbackBufferFull",
        @"playbackBufferEmpty",
        @"duration",
        @"hasEnabledAudio",
        @"timedMetadata",
        @"canPlayFastForward",
        @"canPlayFastReverse",
    nil];
    return keys;
}
3282
// Keys loaded asynchronously on each AVAssetTrack. Listed one per line to
// match the sibling key-list functions; the array is created once and
// intentionally never released.
NSArray* assetTrackMetadataKeyNames()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:
        @"totalSampleDataLength",
        @"mediaType",
        @"enabled",
        @"preferredTransform",
        @"naturalSize",
    nil];
    return keys;
}
3288
// Key paths observed via KVO on the AVPlayer itself (as opposed to the
// item); conditional entries mirror the feature flags their handlers are
// compiled under. The array is created once and never released.
NSArray* playerKVOProperties()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:
        @"rate",
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
        @"externalPlaybackActive",
        @"allowsExternalPlayback",
#endif
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
        @"outputObscuredDueToInsufficientExternalProtection",
#endif
    nil];
    return keys;
}
3303 } // namespace WebCore
3304
@implementation WebCoreAVFMovieObserver

// Creates an observer that relays AVFoundation KVO changes, notifications,
// and legible-output callbacks to the given C++ player. The callback is
// stored as a raw pointer; -disconnect must be sent before it is destroyed.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

// Severs the link to the C++ player and cancels any pending
// performSelector requests targeting this observer.
- (void)disconnect
{
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    m_callback = nil;
}

// Forwarded when the asset's metadata keys finish loading.
- (void)metadataLoaded
{
    if (!m_callback)
        return;
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
}

// Handler for the item-did-play-to-end-time notification.
- (void)didEnd:(NSNotification *)unusedNotification
{
    UNUSED_PARAM(unusedNotification);
    if (!m_callback)
        return;
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
}

// Central KVO dispatch. Maps each (context, keyPath, prior-vs-did) tuple to
// the matching MediaPlayerPrivateAVFoundationObjC handler, then bounces the
// call to the main thread guarded by a WeakPtr to the player.
// NOTE: keyPath was previously declared without a type (implicit id); it is
// now typed NSString * to match NSObject's KVO override signature and the
// isEqualToString:/UTF8String sends below.
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context
{
    UNUSED_PARAM(object);
    id newValue = [change valueForKey:NSKeyValueChangeNewKey];

    if (!m_callback)
        return;

    // True for the "prior" notification delivered before the value changes;
    // the *WillChange handlers below fire only in that phase.
    bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];

#if !LOG_DISABLED
    if (willChange)
        LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - will change, keyPath = %s", self, [keyPath UTF8String]);
    else {
        RetainPtr<NSString> valueString = adoptNS([[NSString alloc] initWithFormat:@"%@", newValue]);
        LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - did change, keyPath = %s, value = %s", self, [keyPath UTF8String], [valueString.get() UTF8String]);
    }
#endif

    std::function<void ()> function;

    if (context == MediaPlayerAVFoundationObservationContextAVPlayerLayer) {
        if ([keyPath isEqualToString:@"readyForDisplay"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange, m_callback, [newValue boolValue]);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItemTrack) {
        if ([keyPath isEqualToString:@"enabled"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange, m_callback, [newValue boolValue]);
    }

    // Prior notifications for the paired status keys bump the player's
    // pending-change counter so updates coalesce.
    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && willChange) {
        if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange, m_callback);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && !willChange) {
        // A value changed for an AVPlayerItem
        if ([keyPath isEqualToString:@"status"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange, m_callback, [newValue intValue]);
        else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"asset"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::setAsset, m_callback, RetainPtr<id>(newValue));
        else if ([keyPath isEqualToString:@"loadedTimeRanges"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"seekableTimeRanges"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"tracks"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::tracksDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"hasEnabledAudio"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"presentationSize"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange, m_callback, FloatSize([newValue sizeValue]));
        else if ([keyPath isEqualToString:@"duration"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::durationDidChange, m_callback, toMediaTime([newValue CMTimeValue]));
        else if ([keyPath isEqualToString:@"timedMetadata"] && newValue) {
            // Capture the item's current time (clamped to zero when numeric)
            // so the metadata handler can timestamp its cues.
            MediaTime now;
            CMTime itemTime = [(AVPlayerItemType *)object currentTime];
            if (CMTIME_IS_NUMERIC(itemTime))
                now = std::max(toMediaTime(itemTime), MediaTime::zeroTime());
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::metadataDidArrive, m_callback, RetainPtr<NSArray>(newValue), now);
        } else if ([keyPath isEqualToString:@"canPlayFastReverse"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"canPlayFastForward"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange, m_callback, [newValue boolValue]);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayer && !willChange) {
        // A value changed for an AVPlayer.
        if ([keyPath isEqualToString:@"rate"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::rateDidChange, m_callback, [newValue doubleValue]);
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
        else if ([keyPath isEqualToString:@"externalPlaybackActive"] || [keyPath isEqualToString:@"allowsExternalPlayback"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange, m_callback);
#endif
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
        else if ([keyPath isEqualToString:@"outputObscuredDueToInsufficientExternalProtection"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::outputObscuredDueToInsufficientExternalProtectionChanged, m_callback, [newValue boolValue]);
#endif
    }

    if (!function)
        return;

    auto weakThis = m_callback->createWeakPtr();
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis, function]{
        // weakThis and function both refer to the same MediaPlayerPrivateAVFoundationObjC instance. If the WeakPtr has
        // been cleared, the underlying object has been destroyed, and it is unsafe to call function().
        if (!weakThis)
            return;
        function();
    }));
}

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

// AVPlayerItemLegibleOutput callback delivering in-band cue strings. The
// observer and the arrays are retained across the main-thread hop; the
// m_callback pointer is re-checked there in case -disconnect ran meanwhile.
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime
{
    UNUSED_PARAM(output);
    UNUSED_PARAM(nativeSamples);

    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFMovieObserver> protectedSelf = self;
    RetainPtr<NSArray> protectedStrings = strings;
    RetainPtr<NSArray> protectedNativeSamples = nativeSamples;
    callOnMainThread([protectedSelf = WTFMove(protectedSelf), protectedStrings = WTFMove(protectedStrings), protectedNativeSamples = WTFMove(protectedNativeSamples), itemTime] {
        MediaPlayerPrivateAVFoundationObjC* callback = protectedSelf->m_callback;
        if (!callback)
            return;
        MediaTime time = std::max(toMediaTime(itemTime), MediaTime::zeroTime());
        callback->processCue(protectedStrings.get(), protectedNativeSamples.get(), time);
    });
}

// Legible-output flush callback (e.g. after a seek); drops queued cues on
// the main thread if the player is still alive.
- (void)outputSequenceWasFlushed:(id)output
{
    UNUSED_PARAM(output);

    if (!m_callback)
        return;

    callOnMainThread([protectedSelf = RetainPtr<WebCoreAVFMovieObserver>(self)] {
        if (MediaPlayerPrivateAVFoundationObjC* callback = protectedSelf->m_callback)
            callback->flushCues();
    });
}

#endif

@end
3478
3479 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)