When clearing cache, also clear AVFoundation cache.
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27
28 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
29 #import "MediaPlayerPrivateAVFoundationObjC.h"
30
31 #import "AVFoundationSPI.h"
32 #import "AVTrackPrivateAVFObjCImpl.h"
33 #import "AudioSourceProviderAVFObjC.h"
34 #import "AudioTrackPrivateAVFObjC.h"
35 #import "AuthenticationChallenge.h"
36 #import "BlockExceptions.h"
37 #import "CDMSessionAVFoundationObjC.h"
38 #import "Cookie.h"
39 #import "ExceptionCodePlaceholder.h"
40 #import "Extensions3D.h"
41 #import "FloatConversion.h"
42 #import "FloatConversion.h"
43 #import "GraphicsContext.h"
44 #import "GraphicsContext3D.h"
45 #import "GraphicsContextCG.h"
46 #import "InbandMetadataTextTrackPrivateAVF.h"
47 #import "InbandTextTrackPrivateAVFObjC.h"
48 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
49 #import "OutOfBandTextTrackPrivateAVF.h"
50 #import "URL.h"
51 #import "Logging.h"
52 #import "MediaPlaybackTargetMac.h"
53 #import "MediaPlaybackTargetMock.h"
54 #import "MediaSelectionGroupAVFObjC.h"
55 #import "MediaTimeAVFoundation.h"
56 #import "PixelBufferConformerCV.h"
57 #import "PlatformTimeRanges.h"
58 #import "QuartzCoreSPI.h"
59 #import "SecurityOrigin.h"
60 #import "SerializedPlatformRepresentationMac.h"
61 #import "Settings.h"
62 #import "TextEncoding.h"
63 #import "TextTrackRepresentation.h"
64 #import "TextureCacheCV.h"
65 #import "UUID.h"
66 #import "VideoTextureCopierCV.h"
67 #import "VideoTrackPrivateAVFObjC.h"
68 #import "WebCoreAVFResourceLoader.h"
69 #import "WebCoreCALayerExtras.h"
70 #import "WebCoreNSURLSession.h"
71 #import "WebCoreSystemInterface.h"
72 #import <functional>
73 #import <map>
74 #import <objc/runtime.h>
75 #import <runtime/DataView.h>
76 #import <runtime/JSCInlines.h>
77 #import <runtime/TypedArrayInlines.h>
78 #import <runtime/Uint16Array.h>
79 #import <runtime/Uint32Array.h>
80 #import <runtime/Uint8Array.h>
81 #import <wtf/CurrentTime.h>
82 #import <wtf/ListHashSet.h>
83 #import <wtf/NeverDestroyed.h>
84 #import <wtf/OSObjectPtr.h>
85 #import <wtf/text/CString.h>
86 #import <wtf/text/StringBuilder.h>
87
88 #if ENABLE(AVF_CAPTIONS)
89 #include "TextTrack.h"
90 #endif
91
92 #import <AVFoundation/AVFoundation.h>
93
94 #if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
95 #import "VideoFullscreenLayerManager.h"
96 #endif
97
98 #if PLATFORM(IOS)
99 #import "WAKAppKitStubs.h"
100 #import <CoreImage/CoreImage.h>
101 #import <mach/mach_port.h>
102 #else
103 #import <Foundation/NSGeometry.h>
104 #import <QuartzCore/CoreImage.h>
105 #endif
106
107 #if USE(VIDEOTOOLBOX)
108 #import <CoreVideo/CoreVideo.h>
109 #import <VideoToolbox/VideoToolbox.h>
110 #endif
111
112 #if USE(CFNETWORK)
113 #include "CFNSURLConnectionSPI.h"
114 #endif
115
116 #import "CoreVideoSoftLink.h"
117
// Specialize std::iterator_traits for WTF's HashSet iterator so standard algorithms can
// deduce a value_type. NOTE(review): presumably needed by std algorithm calls elsewhere
// in this file -- confirm which caller relies on it before removing.
namespace std {
template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
    typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
};
}
123
#if ENABLE(AVF_CAPTIONS)
// Note: This must be defined before our SOFT_LINK macros:
// Declares the out-of-band track properties on AVMediaSelectionOption so this file can
// read them without a public SDK header for them.
@class AVMediaSelectionOption;
@interface AVMediaSelectionOption (OutOfBandExtensions)
@property (nonatomic, readonly) NSString* outOfBandSource;
@property (nonatomic, readonly) NSString* outOfBandIdentifier;
@end
#endif
132
// Declares AVURLAsset's resolvedURL property so it can be referenced from this file.
@interface AVURLAsset (WebKitExtensions)
@property (nonatomic, readonly) NSURL *resolvedURL;
@end
136
137 typedef AVPlayer AVPlayerType;
138 typedef AVPlayerItem AVPlayerItemType;
139 typedef AVPlayerItemLegibleOutput AVPlayerItemLegibleOutputType;
140 typedef AVPlayerItemVideoOutput AVPlayerItemVideoOutputType;
141 typedef AVMetadataItem AVMetadataItemType;
142 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
143 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
144
145 #pragma mark - Soft Linking
146
147 // Soft-linking headers must be included last since they #define functions, constants, etc.
148 #import "CoreMediaSoftLink.h"
149
150 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
151
152 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
153
154 SOFT_LINK_CLASS(AVFoundation, AVPlayer)
155 SOFT_LINK_CLASS(AVFoundation, AVPlayerItem)
156 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemVideoOutput)
157 SOFT_LINK_CLASS(AVFoundation, AVPlayerLayer)
158 SOFT_LINK_CLASS(AVFoundation, AVURLAsset)
159 SOFT_LINK_CLASS(AVFoundation, AVAssetImageGenerator)
160 SOFT_LINK_CLASS(AVFoundation, AVMetadataItem)
161 SOFT_LINK_CLASS(AVFoundation, AVAssetCache)
162
163 SOFT_LINK_CLASS(CoreImage, CIContext)
164 SOFT_LINK_CLASS(CoreImage, CIImage)
165
166 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString*)
167 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString*)
168 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
169 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
170 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
171 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
172 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
173 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeMetadata, NSString *)
174 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
175 SOFT_LINK_POINTER(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
176 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
177 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
178 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
179 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
180 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
181
182 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
183
184 #define AVPlayer getAVPlayerClass()
185 #define AVPlayerItem getAVPlayerItemClass()
186 #define AVPlayerLayer getAVPlayerLayerClass()
187 #define AVURLAsset getAVURLAssetClass()
188 #define AVAssetImageGenerator getAVAssetImageGeneratorClass()
189 #define AVMetadataItem getAVMetadataItemClass()
190
191 #define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
192 #define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
193 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
194 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
195 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
196 #define AVMediaTypeVideo getAVMediaTypeVideo()
197 #define AVMediaTypeAudio getAVMediaTypeAudio()
198 #define AVMediaTypeMetadata getAVMediaTypeMetadata()
199 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
200 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
201 #define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
202 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
203 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
204 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
205 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
206 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
207
208 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
209 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
210 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
211
212 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
213 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
214 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
215
216 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
217 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
218 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
219 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
220
221 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
222 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
223 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
224 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
225 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
226 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
227 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
228 #endif
229
230 #if ENABLE(AVF_CAPTIONS)
231 SOFT_LINK_POINTER(AVFoundation, AVURLAssetCacheKey, NSString*)
232 SOFT_LINK_POINTER(AVFoundation, AVURLAssetHTTPCookiesKey, NSString*)
233 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
234 SOFT_LINK_POINTER(AVFoundation, AVURLAssetUsesNoPersistentCacheKey, NSString*)
235 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
236 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
237 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
238 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
239 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
240 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
241 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
242 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
243 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicIsAuxiliaryContent, NSString*)
244
245 #define AVURLAssetHTTPCookiesKey getAVURLAssetHTTPCookiesKey()
246 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
247 #define AVURLAssetCacheKey getAVURLAssetCacheKey()
248 #define AVURLAssetUsesNoPersistentCacheKey getAVURLAssetUsesNoPersistentCacheKey()
249 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
250 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
251 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
252 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
253 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
254 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
255 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
256 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
257 #define AVMediaCharacteristicIsAuxiliaryContent getAVMediaCharacteristicIsAuxiliaryContent()
258 #endif
259
260 #if ENABLE(DATACUE_VALUE)
261 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString*)
262 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMetadataKeySpaceISOUserData, NSString*)
263 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString*)
264 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceiTunes, NSString*)
265 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceID3, NSString*)
266
267 #define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
268 #define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
269 #define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
270 #define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
271 #define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
272 #endif
273
274 #if PLATFORM(IOS)
275 SOFT_LINK_POINTER(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
276 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
277 #endif
278
279 SOFT_LINK_FRAMEWORK(MediaToolbox)
280 SOFT_LINK_OPTIONAL(MediaToolbox, MTEnableCaption2015Behavior, Boolean, (), ())
281
282 using namespace WebCore;
283
// Context values passed as the KVO `context` pointer when observers are registered (see
// the addObserver:... calls below), so the callback can tell which kind of object --
// player item, item track, player, or player layer -- produced the change.
enum MediaPlayerAVFoundationObservationContext {
    MediaPlayerAVFoundationObservationContextPlayerItem,
    MediaPlayerAVFoundationObservationContextPlayerItemTrack,
    MediaPlayerAVFoundationObservationContextPlayer,
    MediaPlayerAVFoundationObservationContextAVPlayerLayer,
};
290
// Observer object that receives AVFoundation KVO change notifications and (when legible
// output is supported) caption cues, forwarding them to the owning C++ player object.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
#else
@interface WebCoreAVFMovieObserver : NSObject
#endif
{
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Raw back-pointer; cleared via -disconnect during teardown.
    int m_delayCallbacks;
}
-(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
-(void)disconnect;
-(void)metadataLoaded;
-(void)didEnd:(NSNotification *)notification;
// NOTE(review): keyPath is declared untyped (implicit id) and context uses the enum type
// instead of void* as in NSObject's KVO signature -- confirm against the implementation.
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;
#endif
@end
310
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// AVAssetResourceLoader delegate that forwards resource-loading requests to the owning
// C++ player object.
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Raw back-pointer; reset with -setCallback: in the player's destructor.
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
@end
#endif
320
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// AVPlayerItemVideoOutput pull delegate; notified when new video frames become available
// (or the output sequence is flushed) and forwards to the owning C++ player object.
@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    MediaPlayerPrivateAVFoundationObjC *m_callback; // Raw back-pointer; reset with -setCallback: during teardown.
    dispatch_semaphore_t m_semaphore; // NOTE(review): usage is in the implementation, not visible in this chunk.
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
@end
#endif
332
333 namespace WebCore {
334
335 static NSArray *assetMetadataKeyNames();
336 static NSArray *itemKVOProperties();
337 static NSArray *assetTrackMetadataKeyNames();
338 static NSArray *playerKVOProperties();
339 static AVAssetTrack* firstEnabledTrack(NSArray* tracks);
340
341 #if !LOG_DISABLED
// Renders a bool as a C string for LOG() formatting.
static const char *boolString(bool val)
{
    if (val)
        return "true";
    return "false";
}
346 #endif
347
348 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Returns the process-wide serial queue on which resource loader delegate callbacks run.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t loaderQueue;
    static dispatch_once_t createQueueOnce;
    dispatch_once(&createQueueOnce, ^{
        loaderQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return loaderQueue;
}
358 #endif
359
360 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Returns the process-wide serial queue on which video output pull delegate callbacks run.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_queue_t pullDelegateQueue;
    static dispatch_once_t createQueueOnce;
    dispatch_once(&createQueueOnce, ^{
        pullDelegateQueue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return pullDelegateQueue;
}
370 #endif
371
372 #if USE(CFNETWORK)
// Adapts an NSURLAuthenticationChallenge to WebCore's AuthenticationClient interface:
// each AuthenticationClient response is forwarded to the challenge's -sender.
class WebCoreNSURLAuthenticationChallengeClient : public RefCounted<WebCoreNSURLAuthenticationChallengeClient>, public AuthenticationClient {
public:
    static RefPtr<WebCoreNSURLAuthenticationChallengeClient> create(NSURLAuthenticationChallenge *challenge)
    {
        return adoptRef(new WebCoreNSURLAuthenticationChallengeClient(challenge));
    }

    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::ref;
    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::deref;

private:
    WebCoreNSURLAuthenticationChallengeClient(NSURLAuthenticationChallenge *challenge)
        : m_challenge(challenge)
    {
        ASSERT(m_challenge);
    }

    // AuthenticationClient lifetime piggybacks on this object's refcount.
    void refAuthenticationClient() override { ref(); }
    void derefAuthenticationClient() override { deref(); }

    void receivedCredential(const AuthenticationChallenge&, const Credential& credential) override
    {
        [[m_challenge sender] useCredential:credential.nsCredential() forAuthenticationChallenge:m_challenge.get()];
    }

    void receivedRequestToContinueWithoutCredential(const AuthenticationChallenge&) override
    {
        [[m_challenge sender] continueWithoutCredentialForAuthenticationChallenge:m_challenge.get()];
    }

    void receivedCancellation(const AuthenticationChallenge&) override
    {
        [[m_challenge sender] cancelAuthenticationChallenge:m_challenge.get()];
    }

    // The following two sender methods are optional protocol members, so probe with
    // respondsToSelector: before messaging.
    void receivedRequestToPerformDefaultHandling(const AuthenticationChallenge&) override
    {
        if ([[m_challenge sender] respondsToSelector:@selector(performDefaultHandlingForAuthenticationChallenge:)])
            [[m_challenge sender] performDefaultHandlingForAuthenticationChallenge:m_challenge.get()];
    }

    void receivedChallengeRejection(const AuthenticationChallenge&) override
    {
        if ([[m_challenge sender] respondsToSelector:@selector(rejectProtectionSpaceAndContinueWithChallenge:)])
            [[m_challenge sender] rejectProtectionSpaceAndContinueWithChallenge:m_challenge.get()];
    }

    RetainPtr<NSURLAuthenticationChallenge> m_challenge;
};
422 #endif
423
// Registers this engine's factory and static capability/cache callbacks with MediaPlayer,
// but only when the AVFoundation framework can actually be soft-linked at runtime.
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (isAvailable())
        registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player); },
            getSupportedTypes, supportsType, originsInMediaCache, clearMediaCache, clearMediaCacheForOrigins, supportsKeySystem);
}
430
// Returns the AVAssetCache rooted at |path|, falling back to a "MediaCache" directory
// inside the temporary directory when no path is given.
static AVAssetCache *assetCacheForPath(const String& path)
{
    NSURL *cacheDirectoryURL = path.isEmpty()
        ? [[NSURL fileURLWithPath:NSTemporaryDirectory()] URLByAppendingPathComponent:@"MediaCache" isDirectory:YES]
        : [NSURL fileURLWithPath:path isDirectory:YES];

    return [getAVAssetCacheClass() assetCacheWithURL:cacheDirectoryURL];
}
442
// Collects the security origins of all entries currently in the media cache at |path|.
// Cache keys that do not parse as valid URLs are skipped.
HashSet<RefPtr<SecurityOrigin>> MediaPlayerPrivateAVFoundationObjC::originsInMediaCache(const String& path)
{
    HashSet<RefPtr<SecurityOrigin>> cachedOrigins;
    for (NSString *cacheKey in [assetCacheForPath(path) allKeys]) {
        URL cacheKeyURL = URL(URL(), cacheKey);
        if (!cacheKeyURL.isValid())
            continue;
        cachedOrigins.add(SecurityOrigin::create(cacheKeyURL));
    }
    return cachedOrigins;
}
453
// Converts an NSDate into a std::chrono::system_clock::time_point (both measure time
// since the Unix epoch).
static std::chrono::system_clock::time_point toSystemClockTime(NSDate *date)
{
    ASSERT(date);
    using namespace std::chrono;

    duration<double> secondsSinceEpoch(date.timeIntervalSince1970);
    return system_clock::time_point(duration_cast<system_clock::duration>(secondsSinceEpoch));
}
461
// Removes media cache entries (and their backing "CachedMedia-" files) modified after
// |modifiedSince|. An epoch-or-earlier |modifiedSince| wipes the entire cache directory.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCache(const String& path, std::chrono::system_clock::time_point modifiedSince)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::clearMediaCache()");
    
    AVAssetCache* assetCache = assetCacheForPath(path);
    
    // Phase 1: evict logical entries newer than the cutoff through the AVAssetCache API.
    for (NSString *key in [assetCache allKeys]) {
        if (toSystemClockTime([assetCache lastModifiedDateOfEntryForKey:key]) > modifiedSince)
            [assetCache removeEntryForKey:key];
    }

    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSURL *baseURL = [assetCache URL];

    // "Clear everything" request: delete the whole cache directory and stop.
    if (modifiedSince <= std::chrono::system_clock::time_point { }) {
        [fileManager removeItemAtURL:baseURL error:nil];
        return;
    }
    
    // Phase 2: scan the cache directory (top level only) for "CachedMedia-" regular files
    // newer than the cutoff.
    NSArray *propertyKeys = @[NSURLNameKey, NSURLContentModificationDateKey, NSURLIsRegularFileKey];
    NSDirectoryEnumerator *enumerator = [fileManager enumeratorAtURL:baseURL includingPropertiesForKeys:
        propertyKeys options:NSDirectoryEnumerationSkipsSubdirectoryDescendants
        errorHandler:nil];
    
    // Collect first, delete afterwards: deleting while the enumerator is walking the
    // directory would mutate what it is iterating.
    RetainPtr<NSMutableArray<NSURL *>> urlsToDelete = adoptNS([[NSMutableArray alloc] init]);
    for (NSURL *fileURL : enumerator) {
        NSDictionary<NSString *, id> *fileAttributes = [fileURL resourceValuesForKeys:propertyKeys error:nil];
    
        if (![fileAttributes[NSURLNameKey] hasPrefix:@"CachedMedia-"])
            continue;
        
        if (![fileAttributes[NSURLIsRegularFileKey] boolValue])
            continue;
        
        if (toSystemClockTime(fileAttributes[NSURLContentModificationDateKey]) <= modifiedSince)
            continue;
        
        [urlsToDelete addObject:fileURL];
    }
    
    for (NSURL *fileURL in urlsToDelete.get())
        [fileManager removeItemAtURL:fileURL error:nil];
}
505
// Evicts every media cache entry whose key parses to a URL belonging to one of |origins|.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCacheForOrigins(const String& path, const HashSet<RefPtr<SecurityOrigin>>& origins)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::clearMediaCacheForOrigins()");
    AVAssetCache* assetCache = assetCacheForPath(path);
    for (NSString *key in [assetCache allKeys]) {
        URL keyAsURL = URL(URL(), key);
        if (keyAsURL.isValid() && origins.contains(SecurityOrigin::create(keyAsURL)))
            [assetCache removeEntryForKey:key];
    }
}
518
// Constructs the engine in an unloaded state. The AVPlayer, AVPlayerItem, layers and
// outputs are created lazily once media is loaded; only the Objective-C observer /
// delegate helper objects are allocated here.
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_weakPtrFactory(this)
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    , m_videoFullscreenLayerManager(VideoFullscreenLayerManager::create())
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
#endif
    // Helper objects that forward AVFoundation callbacks back into this object.
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
    , m_videoOutputSemaphore(nullptr)
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
#endif
    , m_currentTextTrack(0)
    , m_cachedRate(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_shouldBufferData(true)
    , m_cachedIsReadyForDisplay(false)
    , m_haveBeenAskedToCreateLayer(false)
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
}
553
// Detaches all delegates before destroying state so no AVFoundation callback can reach a
// half-destructed object; then destroys the layer and cancels any in-flight load.
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Silence the resource loader delegate first, then invalidate outstanding loaders.
    [m_loaderDelegate.get() setCallback:0];
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];

    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    [m_videoOutputDelegate setCallback:0];
    [m_videoOutput setDelegate:nil queue:0];
    // The semaphore was created with dispatch_semaphore_create; balance it explicitly.
    if (m_videoOutputSemaphore)
        dispatch_release(m_videoOutputSemaphore);
#endif

    if (m_videoLayer)
        destroyVideoLayer();

    cancelLoad();
}
575
// Aborts any in-progress load and releases all AVFoundation objects: the asset, player
// item, player, outputs, observers and cached KVO state. Removal order matters -- every
// KVO registration made elsewhere in this file is unregistered here before its object is
// released.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::cancelLoad(%p)", this);
    tearDownVideoRendering();

    // Stop all notification and KVO delivery to the observer before tearing things down.
    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Detach the legible (caption) output from the item before dropping it.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    // Unregister every KVO key we observe on the item, then on the player.
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        
        m_avPlayerItem = nil;
    }
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;

        for (NSString *keyName in playerKVOProperties())
            [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = MediaTime::zeroTime();

    // Each cached track carries an "enabled" observation that must be removed first.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    if (m_provider) {
        m_provider->setPlayerItem(nullptr);
        m_provider->setAudioTrack(nullptr);
    }
#endif

    setIgnoreLoadStateChanges(false);
}
639
// True once layer creation has been requested, i.e. rendering goes through an AVPlayerLayer.
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_haveBeenAskedToCreateLayer;
}
644
// True when frames can be painted into a graphics context: either a video output
// (when the build supports one) or an image generator exists.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (m_videoOutput)
        return true;
#endif
    return m_imageGenerator;
}
653
// Creates whichever context-painting path this build supports: an AVPlayerItemVideoOutput
// when available, otherwise an AVAssetImageGenerator.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
662
// Lazily creates an AVAssetImageGenerator for the current asset; no-op when there is no
// asset yet or a generator already exists.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p)", this);

    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    // Zero tolerance in both directions: snapshots must come from exactly the requested time.
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
}
679
// Destroys every context-painting path; the video/OpenGL outputs only exist in builds
// with HAVE(AVFOUNDATION_VIDEO_OUTPUT).
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
    destroyOpenGLVideoOutput();
#endif
    destroyImageGenerator();
}
688
// Releases the AVAssetImageGenerator created by createImageGenerator(), if any.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (!m_imageGenerator)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator(%p) - destroying  %p", this, m_imageGenerator.get());

    // Use nil (not 0) for Objective-C object pointers, matching the rest of this file
    // (e.g. m_videoLayer = nil in destroyVideoLayer()).
    m_imageGenerator = nil;
}
698
// Schedules creation of the AVPlayerLayer on the main thread. The flag is set inside the
// main-thread block, so the early-out here plus the re-check inside the block coalesce
// multiple requests into a single layer creation.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
        return;

    auto weakThis = createWeakPtr();
    callOnMainThread([this, weakThis] {
        if (!weakThis)
            return;

        // Re-check: the player may be gone, or an earlier queued request may have run already.
        if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
            return;
        m_haveBeenAskedToCreateLayer = true;

        if (!m_videoLayer)
            createAVPlayerLayer();

#if USE(VIDEOTOOLBOX)
        if (!m_videoOutput)
            createVideoOutput();
#endif

        player()->client().mediaPlayerRenderingModeChanged(player());
    });
}
724
// Creates the AVPlayerLayer, starts observing its readyForDisplay state, and hands it to
// the fullscreen layer manager (or sizes it directly on platforms without one).
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
{
    if (!m_avPlayer)
        return;

    m_videoLayer = adoptNS([allocAVPlayerLayerInstance() init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];
    [m_videoLayer setBackgroundColor:cachedCGColor(Color::black)];
#ifndef NDEBUG
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
    // readyForDisplay tells us when the layer has its first displayable frame.
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
    IntSize defaultSize = snappedIntRect(player()->client().mediaPlayerContentBoxRect()).size();
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoLayer(%p) - returning %p", this, m_videoLayer.get());

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    m_videoFullscreenLayerManager->setVideoLayer(m_videoLayer.get(), defaultSize);

#if PLATFORM(IOS)
    // setPIPModeEnabled: is not present on all OS versions; probe before calling.
    if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(player()->fullscreenMode() & MediaPlayer::VideoFullscreenModePictureInPicture)];
#endif
#else
    [m_videoLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
#endif
}
753
// Tears down the AVPlayerLayer: unregisters the readyForDisplay observer,
// detaches the layer from the player, and informs the fullscreen manager
// before releasing the layer.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer(%p) - destroying %p", this, m_videoLayer.get());

    // Remove KVO before releasing; must balance the add in createAVPlayerLayer().
    [m_videoLayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer.get() setPlayer:nil];

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    m_videoFullscreenLayerManager->didDestroyVideoLayer();
#endif

    m_videoLayer = nil;
}
770
// Computes the wall-clock date (milliseconds since the epoch) corresponding
// to media time zero, by subtracting the current playback offset from the
// item's current date.
MediaTime MediaPlayerPrivateAVFoundationObjC::getStartDate() const
{
    // Date changes as the track's playback position changes. Must subtract currentTime (offset in seconds) from date offset to get date beginning
    double dateInMilliseconds = [[m_avPlayerItem currentDate] timeIntervalSince1970] * 1000;

    // No live streams were made during the epoch (1970). AVFoundation returns 0 if the media file doesn't have a start date
    if (!dateInMilliseconds)
        return MediaTime::invalidTime();

    double playbackOffsetInMilliseconds = CMTimeGetSeconds([m_avPlayerItem currentTime]) * 1000;

    // Rounding due to second offset error when subtracting.
    return MediaTime::createWithDouble(round(dateInMilliseconds - playbackOffsetInMilliseconds));
}
785
// True when a frame is ready: layer readiness (readyForDisplay KVO) when
// rendering to a layer, otherwise whether a frame has been painted.
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    bool renderingToLayer = currentRenderingMode() == MediaRenderingToLayer;
    return renderingToLayer ? m_cachedIsReadyForDisplay : m_videoFrameHasDrawn;
}
793
794 #if ENABLE(AVF_CAPTIONS)
// Maps a WebCore text-track kind to the AVFoundation media-characteristic
// array used when registering out-of-band tracks. Under the 2015 manual
// caption selection behavior, everything is auxiliary content.
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    static bool manualSelectionMode = MTEnableCaption2015BehaviorPtr() && MTEnableCaption2015BehaviorPtr()();
    if (manualSelectionMode)
        return @[ AVMediaCharacteristicIsAuxiliaryContent ];

    // FIXME: Match these to correct types:
    switch (kind) {
    case PlatformTextTrack::Description:
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility ];
    case PlatformTextTrack::Forced:
        return @[ AVMediaCharacteristicContainsOnlyForcedSubtitles ];
    case PlatformTextTrack::Caption:
    case PlatformTextTrack::Subtitle:
    default:
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];
    }
}
816     
// Forwards out-of-band track mode changes to the shared trackModeChanged() path.
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
821     
// Pushes the client's out-of-band track modes onto our InbandTextTrackPrivate
// wrappers, matching each wrapper to its source by the unique-ID string stored
// as the AVMediaSelectionOption's outOfBandIdentifier.
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    
    for (auto& textTrack : m_textTracks) {
        // Only out-of-band tracks are synchronized here; in-band tracks are managed elsewhere.
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;
        
        RefPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> currentOption = trackPrivate->mediaSelectionOption();
        
        for (auto& track : outOfBandTrackSources) {
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();
            
            // Match the wrapper to its source by unique ID; skip non-matching sources.
            if (![[currentOption.get() outOfBandIdentifier] isEqual: reinterpret_cast<const NSString*>(uniqueID.get())])
                continue;
            
            // Translate the platform mode enum; unknown values fall back to Hidden.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;
            
            textTrack->setMode(mode);
            break;
        }
    }
}
852 #endif
853
854
// Converts a WebCore URL string into the canonical NSURL that the URL loading
// system would use, falling back to the plain conversion when canonicalization
// is not possible (empty URL, request creation failure, or no canonical form).
static NSURL *canonicalURL(const String& url)
{
    NSURL *plainURL = URL(ParsedURLString, url);
    if (url.isEmpty())
        return plainURL;

    RetainPtr<NSURLRequest> urlRequest = adoptNS([[NSURLRequest alloc] initWithURL:plainURL]);
    if (!urlRequest)
        return plainURL;

    if (NSURLRequest *canonicalRequest = [NSURLProtocol canonicalRequestForRequest:urlRequest.get()])
        return [canonicalRequest URL];

    return plainURL;
}
871
872 #if PLATFORM(IOS)
// Converts a WebCore Cookie into an NSHTTPCookie for AVURLAssetHTTPCookiesKey.
// Cookie.expires is in milliseconds; NSDate wants seconds. Secure/session
// flags are only added when set, matching NSHTTPCookie property conventions.
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    RetainPtr<NSMutableDictionary> properties = adoptNS([@{
        NSHTTPCookieName: cookie.name,
        NSHTTPCookieValue: cookie.value,
        NSHTTPCookieDomain: cookie.domain,
        NSHTTPCookiePath: cookie.path,
        NSHTTPCookieExpires: [NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)],
    } mutableCopy]);
    if (cookie.secure)
        [properties setObject:@YES forKey:NSHTTPCookieSecure];
    if (cookie.session)
        [properties setObject:@YES forKey:NSHTTPCookieDiscard];

    return [NSHTTPCookie cookieWithProperties:properties.get()];
}
890 #endif
891
// Creates the AVURLAsset for |url|, building its options dictionary first:
// reference restrictions, Referer/User-Agent headers, out-of-band text tracks,
// cookies and network interface (iOS), and the persistent-cache policy. Also
// installs the resource-loader delegate and, where supported, a WebCore-backed
// NSURLSession so media loads go through WebKit's loader.
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
{
    if (m_avAsset)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(%p) - url = %s", this, url.utf8().data());

    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);    

    // Forbid cross-boundary local<->remote references inside the media resource.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject: [NSNumber numberWithBool: TRUE] forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

#if PLATFORM(IOS)
    // FIXME: rdar://problem/20354688
    String identifier = player()->sourceApplicationIdentifier();
    // AVURLAssetClientBundleIdentifierKey is SPI and may be unavailable at runtime.
    if (!identifier.isEmpty() && AVURLAssetClientBundleIdentifierKey)
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];
#endif

#if ENABLE(AVF_CAPTIONS)
    // Register the client's out-of-band text tracks with the asset so
    // AVFoundation's media selection can include them.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: reinterpret_cast<const NSString*>(label.get()),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: reinterpret_cast<const NSString*>(language.get()),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: reinterpret_cast<const NSString*>(uniqueID.get()),
                AVOutOfBandAlternateTrackSourceKey: reinterpret_cast<const NSString*>(url.get()),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS)
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS)
    // Pass the page's cookies along so media requests are authenticated the
    // same way as document loads.
    Vector<Cookie> cookies;
    if (player()->getRawCookies(URL(ParsedURLString, url), cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    // Honor the client's persistent-cache policy; when persistent caching is
    // allowed, point the asset at WebKit's media cache directory.
    bool usePersistentCache = player()->client().mediaPlayerShouldUsePersistentCache();
    [options setObject:@(!usePersistentCache) forKey:AVURLAssetUsesNoPersistentCacheKey];
    
    if (usePersistentCache)
        [options setObject:assetCacheForPath(player()->client().mediaPlayerMediaCacheDirectory()) forKey:AVURLAssetCacheKey];

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([allocAVURLAssetInstance() initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    [resourceLoader setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];

#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED > 101100
    // When available, route asset loading through a WebCore-backed NSURLSession
    // so requests use WebKit's resource loader. All three SPI selectors must
    // exist before wiring this up.
    if (Settings::isAVFoundationNSURLSessionEnabled()
        && [resourceLoader respondsToSelector:@selector(setURLSession:)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegate)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegateQueue)]) {
        RefPtr<PlatformMediaResourceLoader> mediaResourceLoader = player()->createResourceLoader();
        if (mediaResourceLoader)
            resourceLoader.URLSession = (NSURLSession *)[[[WebCoreNSURLSession alloc] initWithResourceLoader:*mediaResourceLoader delegate:resourceLoader.URLSessionDataDelegate delegateQueue:resourceLoader.URLSessionDataDelegateQueue] autorelease];
    }
#endif

#endif

    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
998
// Installs |item| as the AVPlayer's current item. AVPlayer item replacement
// must happen on the main thread; off the main thread we retain both objects
// and dispatch the replacement asynchronously.
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItemType *item)
{
    if (!m_avPlayer)
        return;

    if (pthread_main_np()) {
        [m_avPlayer replaceCurrentItemWithPlayerItem:item];
        return;
    }

    // Keep both the player and item alive until the dispatched block runs.
    RetainPtr<AVPlayerType> strongPlayer = m_avPlayer.get();
    RetainPtr<AVPlayerItemType> strongItem = item;
    dispatch_async(dispatch_get_main_queue(), [strongPlayer, strongItem] {
        [strongPlayer replaceCurrentItemWithPlayerItem:strongItem.get()];
    });
}
1015
// Creates the AVPlayer, registers KVO for all observed player properties,
// configures media selection / external playback, and attaches any existing
// player item and video layer. Idempotent.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayer(%p)", this);

    setDelayCallbacks(true);

    m_avPlayer = adoptNS([allocAVPlayerInstance() init]);
    // Observe every player property we cache; balanced by removal at teardown.
    for (NSString *keyName in playerKVOProperties())
        [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // WebCore drives track selection itself; disable AVFoundation's automatic selection.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET) && !PLATFORM(IOS)
    // Re-apply a playback target chosen before the player existed.
    if (m_shouldPlayToPlaybackTarget)
        setShouldPlayToPlaybackTarget(true);
#endif

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
1051
// Creates the AVPlayerItem from the current asset, registers end-of-playback
// and KVO notifications, configures pitch preservation, attaches the item to
// the player, and sets up legible (caption) output and the Web Audio provider.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem(%p)", this);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([allocAVPlayerItemInstance() initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // NSKeyValueObservingOptionPrior lets the observer see will-change notifications too.
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Deliver WebVTT cues to us (on the main queue) instead of letting the
    // player render them; WebCore draws captions itself.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([allocAVPlayerItemLegibleOutputInstance() initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio source provider pointed at the new item/track.
    if (m_provider) {
        m_provider->setPlayerItem(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }
#endif

    setDelayCallbacks(false);
}
1097
// Kicks off asynchronous loading of the asset's "playable" key, once per
// asset, and schedules an AssetPlayabilityKnown notification on completion.
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::checkPlayability(%p)", this);
    auto weakThis = createWeakPtr();

    // Completion handler runs on an arbitrary queue; hop to the main thread
    // and bail if the player private is already gone.
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"playable"] completionHandler:^{
        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
1114
// Asynchronously loads the asset-level metadata keys, then per-track metadata
// for each track. A dispatch group joins all the nested loads so metadataLoaded
// is delivered exactly once, after everything has finished, on the main thread.
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);

    OSObjectPtr<dispatch_group_t> metadataLoadingGroup = adoptOSObject(dispatch_group_create());
    // Enter once for the asset-level load; left at the end of the main-thread block.
    dispatch_group_enter(metadataLoadingGroup.get());
    auto weakThis = createWeakPtr();
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                // One enter/leave pair per track keeps the group open until
                // every track's metadata load completes.
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup.get());
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup.get());
                    }];
                }
            }
            dispatch_group_leave(metadataLoadingGroup.get());
        });
    }];

    // Fires once all enters have been balanced; forward to the observer on the main thread.
    dispatch_group_notify(metadataLoadingGroup.get(), dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });
    });
}
1144
// Maps the cached AVPlayerItem state to WebCore's item status. The order of
// the checks establishes precedence: failure states first, then the buffering
// states from best (likely to keep up) to worst (buffer empty), and finally
// the plain ready-to-play default.
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    if (m_cachedItemStatus == AVPlayerItemStatusUnknown)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    if (m_cachedItemStatus == AVPlayerItemStatusFailed)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1163
// Exposes the underlying AVPlayer to clients via the PlatformMedia wrapper.
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformMedia(%p)", this);
    PlatformMedia media;
    media.type = PlatformMedia::AVFoundationMediaPlayerType;
    media.media.avfMediaPlayer = m_avPlayer.get();
    return media;
}
1172
// Returns the layer to composite inline, or null before layer creation has
// been requested. With the fullscreen layer manager, the inline layer is the
// manager's container rather than the AVPlayerLayer itself.
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    return m_haveBeenAskedToCreateLayer ? m_videoFullscreenLayerManager->videoInlineLayer() : nullptr;
#else
    return m_haveBeenAskedToCreateLayer ? m_videoLayer.get() : nullptr;
#endif
}
1181
1182 #if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
// Moves the video into (or out of) the given fullscreen host layer and
// re-parents the text-track representation layer so captions follow the video.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer)
{
    if (m_videoFullscreenLayerManager->videoFullscreenLayer() == videoFullscreenLayer)
        return;

    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer);

    // Captions must be re-hosted and re-sized inside the new fullscreen layer.
    if (m_videoFullscreenLayerManager->videoFullscreenLayer() && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayerManager->videoFullscreenLayer() addSublayer:m_textTrackRepresentationLayer.get()];
    }

    // External playback availability depends on the fullscreen state.
    updateDisableExternalPlayback();
}
1197
// Updates the fullscreen video frame and keeps the caption layer in sync.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
    syncTextTrackBounds();
}
1203
// Applies the requested fullscreen video gravity to the AVPlayerLayer,
// remembering it in m_videoFullscreenGravity even when no layer exists yet.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;
    if (!m_videoLayer)
        return;

    NSString *layerGravity = AVLayerVideoGravityResizeAspect;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        layerGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        layerGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        layerGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
        break;
    }

    // The AVLayerVideoGravity* values are constant NSStrings, so pointer
    // comparison suffices here.
    if ([m_videoLayer videoGravity] == layerGravity)
        return;

    [m_videoLayer setVideoGravity:layerGravity];
    syncTextTrackBounds();
}
1226
// Reflects the fullscreen mode on the layer (iOS PiP flag, via SPI) and
// refreshes external-playback state. No-op on other platforms.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenMode(MediaPlayer::VideoFullscreenMode mode)
{
#if PLATFORM(IOS)
    // setPIPModeEnabled: is SPI; probe before calling.
    if (m_videoLayer && [m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(mode & MediaPlayer::VideoFullscreenModePictureInPicture)];
    updateDisableExternalPlayback();
#else
    UNUSED_PARAM(mode);
#endif
}
1237
1238 #endif // PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
1239
1240 #if PLATFORM(IOS)
// Returns the most recently delivered timed metadata, or nil when none has
// arrived yet (RetainPtr::get() already yields nil for an empty pointer).
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    return m_currentMetaData.get();
}
1247
// Returns the player item's access log as a string, or the empty string when
// no item exists. Messaging a nil log yields a nil NSString, which converts
// to a null WTF::String.
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemAccessLog *accessLog = [m_avPlayerItem.get() accessLog];
    RetainPtr<NSString> text = adoptNS([[NSString alloc] initWithData:[accessLog extendedLogData] encoding:[accessLog extendedLogDataStringEncoding]]);
    return text.get();
}
1258
// Returns the player item's error log as a string, or the empty string when
// no item exists. Mirrors accessLog().
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *errorLog = [m_avPlayerItem.get() errorLog];
    RetainPtr<NSString> text = adoptNS([[NSString alloc] initWithData:[errorLog extendedLogData] encoding:[errorLog extendedLogDataStringEncoding]]);
    return text.get();
}
1269 #endif
1270
// Toggles the video layer's visibility inside a transaction with implicit
// animations disabled, so the change takes effect immediately.
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    if (m_videoLayer)
        [m_videoLayer setHidden:!isVisible];
    [CATransaction commit];
}
1279     
// Starts playback by applying the requested rate to the AVPlayer. Callbacks
// are delayed around the rate change because it triggers KVO notifications.
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPlay(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    double rate = requestedRate();
    m_cachedRate = rate;
    [m_avPlayer setRate:rate];
    setDelayCallbacks(false);
}
1291
// Pauses playback by setting the AVPlayer's rate to zero. Callbacks are
// delayed around the rate change because it triggers KVO notifications.
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPause(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = 0;
    [m_avPlayer setRate:0];
    setDelayCallbacks(false);
}
1303
// Returns the media duration, preferring the player item's value (once it is
// ready) over the asset's, since some assets never report a duration. An
// indefinite CMTime (live streams) maps to positive infinity.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will fetch the
    // answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();
    
    CMTime cmDuration;
    
    // Check the AVItem if we have one and it has loaded duration, some assets never report duration.
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        cmDuration = [m_avPlayerItem.get() duration];
    else
        cmDuration = [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return toMediaTime(cmDuration);

    // Indefinite duration typically means a live stream.
    if (CMTIME_IS_INDEFINITE(cmDuration))
        return MediaTime::positiveInfiniteTime();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %s", this, toString(MediaTime::invalidTime()).utf8().data());
    return MediaTime::invalidTime();
}
1328
// Returns the current playback position, clamped so it is never negative and
// zero whenever the item or its metadata is not yet available.
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime playerTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(playerTime))
        return MediaTime::zeroTime();

    // Clamp to zero in case the reported time is slightly negative.
    return std::max(toMediaTime(playerTime), MediaTime::zeroTime());
}
1340
// Seeks the player item to |time| within the given tolerances. The completion
// handler hops back to the main thread and reports completion through
// seekCompleted(), guarded by a weak pointer in case we were destroyed.
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // Drop any partially-built metadata cues; they belong to the old position.
    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = toCMTime(time);
    CMTime cmBefore = toCMTime(negativeTolerance);
    CMTime cmAfter = toCMTime(positiveTolerance);

    auto weakThis = createWeakPtr();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::seekToTime(%p) - calling seekToTime", this);

    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1369
// Applies the volume to the AVPlayer. On iOS, volume is controlled by the
// system (hardware buttons), so this is intentionally a no-op there.
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS)
    UNUSED_PARAM(volume);
    return;
#else
    if (!metaDataAvailable())
        return;

    [m_avPlayer.get() setVolume:volume];
#endif
}
1382
// Caption visibility is handled by WebCore's own caption rendering, so this
// override only logs the request; it intentionally changes no player state.
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - set to %s", this, boolString(closedCaptionsVisible));
}
1392
// Applies a new playback rate, caching it locally. Callbacks are delayed
// because the rate change fires KVO notifications.
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    setDelayCallbacks(true);
    m_cachedRate = rate;
    [m_avPlayer setRate:rate];
    setDelayCallbacks(false);
}
1400
// Returns the last rate we set on the player, or 0 before metadata is available.
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    return metaDataAvailable() ? m_cachedRate : 0;
}
1408
// Selects the item's time-pitch algorithm: Spectral preserves pitch across
// rate changes, Varispeed does not.
void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
{
    if (!m_avPlayerItem)
        return;

    NSString *algorithm = preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed;
    [m_avPlayerItem setAudioTimePitchAlgorithm:algorithm];
}
1414
// Converts the cached loadedTimeRanges into PlatformTimeRanges, skipping any
// invalid or empty CMTimeRanges.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto bufferedRanges = std::make_unique<PlatformTimeRanges>();

    if (!m_avPlayerItem)
        return bufferedRanges;

    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        bufferedRanges->add(toMediaTime(range.start), toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return bufferedRanges;
}
1429
// Returns the earliest start among the cached seekable ranges, or zero when
// no valid range exists.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    MediaTime earliestStart = MediaTime::positiveInfiniteTime();
    bool foundValidRange = false;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundValidRange = true;
        earliestStart = std::min(earliestStart, toMediaTime(range.start));
    }
    return foundValidRange ? earliestStart : MediaTime::zeroTime();
}
1449
// Returns the latest end among the seekable ranges, refreshing the cached
// ranges from the player item when they have been invalidated.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    MediaTime latestEnd; // default-constructed: zero
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latestEnd = std::max(latestEnd, toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return latestEnd;
}
1467
// Returns the latest end among the cached loaded (buffered) ranges, or zero
// when no ranges have been cached yet.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    MediaTime latestEnd; // default-constructed: zero
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latestEnd = std::max(latestEnd, toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return latestEnd;
}
1486
// Sums the sample data length of every cached track, memoizing the result in
// the mutable m_cachedTotalBytes (a zero total is recomputed on each call).
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    if (!m_cachedTotalBytes) {
        for (AVPlayerItemTrack *itemTrack in m_cachedTracks.get())
            m_cachedTotalBytes += [[itemTrack assetTrack] totalSampleDataLength];
    }

    return m_cachedTotalBytes;
}
1500
// Stores the asset backing this player. Typed as id because the caller may
// hand us an AVURLAsset or another AVAsset-like object.
void MediaPlayerPrivateAVFoundationObjC::setAsset(RetainPtr<id> asset)
{
    m_avAsset = asset;
}
1505
// Maps the loading status of the asset's metadata keys to the engine's
// AssetStatus. The order of the checks matters: we return on the first key
// that is still loading, failed, or cancelled.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];
#if !LOG_DISABLED
        if (error)
            LOG(Media, "MediaPlayerPrivateAVFoundation::assetStatus - statusOfValueForKey failed for %s, error = %s", [keyName UTF8String], [[error localizedDescription] UTF8String]);
#endif

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading;// At least one key is not loaded yet.
        
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    // All keys loaded: distinguish "playable" from merely "loaded".
    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue])
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1534
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    // Without an asset there is no error to report.
    if (!m_avAsset)
        return 0;

    NSError *playableError = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&playableError];
    // Messaging nil returns 0, so a missing error yields a zero code.
    return [playableError code];
}
1544
// Paints the current video frame into |context| at |rect|, preferring the
// AVPlayerItemVideoOutput path when a decoded frame is available and falling
// back to AVAssetImageGenerator otherwise. Records that a frame was drawn.
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    // Delay AVFoundation callbacks for the duration of the paint; restored below.
    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    m_videoFrameHasDrawn = true;
}
1565
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    bool canPaint = metaDataAvailable() && !context.paintingDisabled();

    // We can ignore the request when already rendering to a layer, and paint()
    // is best effort, so we only paint when an image generator or video output
    // already exists.
    if (!canPaint || currentRenderingMode() == MediaRenderingToLayer || !hasContextRenderer())
        return;

    paintCurrentFrameInContext(context, rect);
}
1581
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext& context, const FloatRect& rect)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(%p)", this);

    RetainPtr<CGImageRef> image = createImageForTimeInRect(currentTime(), rect);
    if (!image)
        return;

    GraphicsContextStateSaver stateSaver(context);
    // Flip vertically: CGImage uses a bottom-left origin while the graphics
    // context uses top-left.
    context.translate(rect.x(), rect.y() + rect.height());
    context.scale(FloatSize(1.0f, -1.0f));
    context.setImageInterpolationQuality(InterpolationLow);
    IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context.platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
}
1596
// Returns the set of MIME types AVFoundation reports as playable, computed
// once and cached for the lifetime of the process.
static const HashSet<String, ASCIICaseInsensitiveHash>& avfMIMETypes()
{
    static NeverDestroyed<HashSet<String, ASCIICaseInsensitiveHash>> mimeTypes = [] {
        HashSet<String, ASCIICaseInsensitiveHash> set;
        for (NSString *mimeType in [AVURLAsset audiovisualMIMETypes])
            set.add(mimeType);
        return set;
    }();

    return mimeTypes;
}
1608
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
{
    // Create the image generator on demand; it is reused across paints.
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    // Ask the generator for a frame no larger than the paint rect, then copy
    // the result into the sRGB color space.
    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    RetainPtr<CGImageRef> generatedImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    RetainPtr<CGImageRef> convertedImage = adoptCF(CGImageCreateCopyWithColorSpace(generatedImage.get(), sRGBColorSpaceRef()));

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return convertedImage;
}
1630
// Reports every MIME type AVFoundation can play so the media engine selector
// can route supported content to this player.
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& supportedTypes)
{
    supportedTypes = avfMIMETypes();
}

#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// Returns true for the key systems this engine implements: FairPlay Streaming
// ("com.apple.fps", "com.apple.fps.1_0") and W3C Clear Key.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringASCIICase(keySystem, "com.apple.fps")
        || equalIgnoringASCIICase(keySystem, "com.apple.fps.1_0")
        || equalIgnoringASCIICase(keySystem, "org.w3c.clearkey");
}
#endif
1644
// Engine-selection entry point: decides whether this engine can (maybe) play
// content described by |parameters| (MIME type, codecs, and optional key system).
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(ENCRYPTED_MEDIA)
    // From: <http://dvcs.w3.org/hg/html-media/raw-file/eme-v0.1b/encrypted-media/encrypted-media.html#dom-canplaytype>
    // In addition to the steps in the current specification, this method must run the following steps:

    // 1. Check whether the Key System is supported with the specified container and codec type(s) by following the steps for the first matching condition from the following list:
    //    If keySystem is null, continue to the next step.
    if (!parameters.keySystem.isNull() && !parameters.keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        if (equalIgnoringASCIICase(parameters.keySystem, "org.w3c.clearkey") && !parameters.type.isEmpty() && !equalIgnoringASCIICase(parameters.type, "application/x-mpegurl"))
            return MediaPlayer::IsNotSupported;

        // If keySystem contains an unrecognized or unsupported Key System, return the empty string
        if (!keySystemIsSupported(parameters.keySystem))
            return MediaPlayer::IsNotSupported;

        // If the Key System specified by keySystem does not support decrypting the container and/or codec specified in the rest of the type string.
        // (AVFoundation does not provide an API which would allow us to determine this, so this is a no-op)
    }

    // 2. Return "maybe" or "probably" as appropriate per the existing specification of canPlayType().
#endif

    // MSE and MediaStream content is handled by dedicated engines, not this one.
#if ENABLE(MEDIA_SOURCE)
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif
#if ENABLE(MEDIA_STREAM)
    if (parameters.isMediaStream)
        return MediaPlayer::IsNotSupported;
#endif
    if (isUnsupportedMIMEType(parameters.type))
        return MediaPlayer::IsNotSupported;

    if (!staticMIMETypeList().contains(parameters.type) && !avfMIMETypes().contains(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    // With explicit codecs, ask AVFoundation for a definitive answer.
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type, (NSString *)parameters.codecs];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1691
// Returns whether this engine supports |keySystem| for content of |mimeType|
// (an empty mimeType means "any type"). Mirrors the key-system checks in
// supportsType().
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (!keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        // (Was "return MediaPlayer::IsNotSupported;" — the wrong type for a
        // bool function; it only worked because that enum value is 0.)
        if (equalIgnoringASCIICase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringASCIICase(mimeType, "application/x-mpegurl"))
            return false;

        if (!keySystemIsSupported(keySystem))
            return false;

        if (!mimeType.isEmpty() && isUnsupportedMIMEType(mimeType))
            return false;

        // The type must be one AVFoundation (or the static list) can play.
        if (!mimeType.isEmpty() && !staticMIMETypeList().contains(mimeType) && !avfMIMETypes().contains(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1717
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
#if ENABLE(ENCRYPTED_MEDIA_V2)
// Answers an AVAssetResourceLoadingRequest from in-memory key data: fills in
// the content-information request, serves the requested byte range out of
// |keyData|, and marks the request finished.
static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
{
    if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
        [infoRequest setContentLength:keyData->byteLength()];
        [infoRequest setByteRangeAccessSupported:YES];
    }

    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
        long long start = [dataRequest currentOffset];
        // Clamp the end of the requested range to the available key data.
        long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);

        // A range lying outside the key data cannot be satisfied.
        if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
            [request finishLoadingWithError:nil];
            return;
        }

        ASSERT(start <= std::numeric_limits<int>::max());
        ASSERT(end <= std::numeric_limits<int>::max());
        RefPtr<ArrayBuffer> requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
        RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
        [dataRequest respondWithData:nsData.get()];
    }

    [request finishLoading];
}
#endif
1746
// AVAssetResourceLoader delegate entry point: decides whether WebKit will
// service this loading request. Key-request schemes ("skd" for FairPlay,
// "clearkey") are routed through the EME keyNeeded path; everything else goes
// through WebCore's own resource loader. Returns true when the request will
// be fulfilled asynchronously.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (scheme == "skd") {
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
        initDataView->set<uint32_t>(0, keyURISize, true);

        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, 4, keyURI.length());
        // NOTE(review): "/ sizeof(unsigned char)" divides by 1, so this copies
        // keyURI.length() UChars — presumably intentional; confirm.
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

#if ENABLE(ENCRYPTED_MEDIA)
        if (!player()->keyNeeded("com.apple.lskd", emptyString(), static_cast<const unsigned char*>(initDataBuffer->data()), initDataBuffer->byteLength()))
#elif ENABLE(ENCRYPTED_MEDIA_V2)
        RefPtr<Uint8Array> initData = Uint8Array::create(initDataBuffer, 0, initDataBuffer->byteLength());
        if (!player()->keyNeeded(initData.get()))
#endif
            return false;

        // Remember the request by key URI so it can be fulfilled later.
        m_keyURIToRequestMap.set(keyURI, avRequest);
        return true;
#if ENABLE(ENCRYPTED_MEDIA_V2)
    } else if (scheme == "clearkey") {
        String keyID = [[[avRequest request] URL] resourceSpecifier];
        StringView keyIDView(keyID);
        CString utf8EncodedKeyId = UTF8Encoding().encode(keyIDView, URLEncodedEntitiesForUnencodables);

        RefPtr<Uint8Array> initData = Uint8Array::create(utf8EncodedKeyId.length());
        initData->setRange((JSC::Uint8Adaptor::Type*)utf8EncodedKeyId.data(), utf8EncodedKeyId.length(), 0);

        // If the key is already cached, answer the request immediately.
        auto keyData = player()->cachedKeyForKeyId(keyID);
        if (keyData) {
            fulfillRequestWithKeyData(avRequest, keyData.get());
            return false;
        }

        if (!player()->keyNeeded(initData.get()))
            return false;

        m_keyURIToRequestMap.set(keyID, avRequest);
        return true;
#endif
    }
#endif

    // Everything else is loaded through WebCore's resource loading machinery.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1803
// Forwards an NSURLAuthenticationChallenge from AVFoundation's loader to the
// MediaPlayer client, converting it to a WebCore AuthenticationChallenge for
// the active network stack (CFNetwork or NSURLConnection-based).
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForResponseToAuthenticationChallenge(NSURLAuthenticationChallenge* nsChallenge)
{
#if USE(CFNETWORK)
    RefPtr<WebCoreNSURLAuthenticationChallengeClient> client = WebCoreNSURLAuthenticationChallengeClient::create(nsChallenge);
    RetainPtr<CFURLAuthChallengeRef> cfChallenge = adoptCF([nsChallenge _createCFAuthChallenge]);
    AuthenticationChallenge challenge(cfChallenge.get(), client.get());
#else
    AuthenticationChallenge challenge(nsChallenge);
#endif

    return player()->shouldWaitForResponseToAuthenticationChallenge(challenge);
}
1816
// AVFoundation cancelled an in-flight loading request: stop the matching
// WebCore resource loader if one is still registered. (The previous version
// also computed the request URL's scheme into a local that was never used.)
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest))
        resourceLoader->stopLoading();
}
1826
// The loader is done with this request (success, failure, or cancellation);
// drop our bookkeeping entry for it.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
#endif
1832
// The engine is usable only when both the AVFoundation and CoreMedia
// frameworks are available at runtime.
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    return AVFoundationLibrary() && isCoreMediaFrameworkAvailable();
}
1837
// Intended to snap |timeValue| to an exact media sample time; currently an
// identity mapping (see the FIXME below).
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1846
// How long (in seconds) the base class may cache currentTime() before
// re-querying the player. Zero on iOS and OS X 10.10+ — presumably because
// querying the player time is cheap there; confirm before relying on it.
double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 1010
    return 0;
#else
    return 5;
#endif
}
1855
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayerManager->videoFullscreenLayer())
        return;
#endif

    NSString *newGravity;
    if (shouldMaintainAspectRatio())
        newGravity = AVLayerVideoGravityResizeAspect;
    else
        newGravity = AVLayerVideoGravityResize;

    // Apply the gravity change without implicit CA animations.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    [m_videoLayer.get() setVideoGravity:newGravity];
    [CATransaction commit];
}
1874
// Returns the first track in |tracks| whose isEnabled flag is set, or nil if
// none is enabled.
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    for (AVAssetTrack *track in tracks) {
        if ([track isEnabled])
            return track;
    }
    return nil;
}
1884
// Recomputes the cached hasVideo/hasAudio/hasClosedCaptions state, refreshes
// the WebCore track lists, and fires characteristic-change notifications as
// needed. Called whenever AVFoundation's track collection changes.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the previous primary audio language so we can detect a change below.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    // Batch characteristic-change notifications until we are done mutating state.
    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif

        // Natural size accounts for the track's preferred transform (e.g. rotation).
        presentationSizeDidChange(firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize());
    } else {
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // A selected media-selection option counts as a track even when no
        // AVPlayerItemTrack is exposed for it.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
#endif
#endif

        // Always say we have video if the AVPlayerLayer is ready for display to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    LOG(Media, "MediaPlayerPrivateAVFoundation:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s",
        this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    if (m_provider)
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
#endif

    // Flush any characteristic-change notification that accumulated above.
    setDelayCharacteristicsChangedNotification(false);
}
1990
#if ENABLE(VIDEO_TRACK)
// Diffs the AVPlayerItemTracks of type |trackType| against |oldItems| (the
// WebCore-side track objects), creating items for newly appeared tracks via
// |itemFactory| and notifying |player| of every removal and addition.
// |oldItems| is updated in place to the post-diff set.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Current AVPlayerItemTracks of the requested media type.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Partition the old items into removed and surviving.
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the MediaPlayer only after oldItems reflects the new state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
2035
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Media-selection-group flavor of the diff above: reconciles |oldItems| with
// the group's current options, creating new items via |itemFactory| and
// notifying |player| of every removal and addition. |oldItems| is updated in
// place to the post-diff set.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, const Vector<String>& characteristics, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions(characteristics);

    // The group's current options that still have a backing AVMediaSelectionOption.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOptionType* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        newSelectionOptions.add(option);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Items whose option disappeared (or was never set) are removed; the rest survive.
    for (auto& oldItem : oldItems) {
        if (!oldItem->mediaSelectionOption())
            removedItems.append(oldItem);
        else if (removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);
    
    // Notify the MediaPlayer only after oldItems reflects the new state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);
    
    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
#endif
2097
// Reconciles WebCore's audio track list with the current AVFoundation state.
// Prefers the media-selection-group API when available; otherwise diffs the
// cached AVPlayerItemTracks directly.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    Vector<String> characteristics = player()->preferredAudioCharacteristics();
    // Create the audible selection group lazily, the first time one is available.
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, characteristics);
    }

    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, characteristics, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
    else
#endif
        determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

    // Refresh cached properties (label, language, etc.) on the surviving tracks.
    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateAudioTracks(%p) - audio track count was %lu, is %lu", this, count, m_audioTracks.size());
#endif
}
2124
// Reconciles WebCore's video track list with the current AVFoundation state,
// diffing the cached AVPlayerItemTracks and, when available, the visual
// media-selection group.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Create the visual selection group lazily, the first time one is available.
    if (!m_visualGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, Vector<String>());
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, Vector<String>(), &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
#endif

    // Refresh cached properties on the *video* tracks. (This previously
    // iterated m_audioTracks — a copy/paste bug that left video track
    // properties stale and redundantly reset the audio tracks.)
    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateVideoTracks(%p) - video track count was %lu, is %lu", this, count, m_videoTracks.size());
#endif
}
2150
// A platform text-track representation (pre-rendered caption layer) is only
// needed while a fullscreen video layer is installed.
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    if (m_videoFullscreenLayerManager->videoFullscreenLayer())
        return true;
#endif
    return false;
}
2159
// Keeps the caption layer's frame in sync with the video: match the video
// layer's displayed rect when we have one, otherwise fill the fullscreen frame.
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    if (!m_videoFullscreenLayerManager->videoFullscreenLayer() || !m_textTrackRepresentationLayer)
        return;

    FloatRect videoFullscreenFrame = m_videoFullscreenLayerManager->videoFullscreenFrame();
    CGRect textFrame = m_videoLayer ? [m_videoLayer videoRect] : CGRectMake(0, 0, videoFullscreenFrame.width(), videoFullscreenFrame.height());
    [m_textTrackRepresentationLayer setFrame:textFrame];
#endif
}
2171
// Installs |representation|'s platform layer as the caption layer inside the
// fullscreen video layer, or removes the previous one when |representation|
// is null.
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    PlatformLayer* representationLayer = representation ? representation->platformLayer() : nil;
    // Same layer as before: just make sure its bounds are current.
    if (representationLayer == m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        return;
    }

    if (m_textTrackRepresentationLayer)
        [m_textTrackRepresentationLayer removeFromSuperlayer];

    m_textTrackRepresentationLayer = representationLayer;

    if (m_videoFullscreenLayerManager->videoFullscreenLayer() && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayerManager->videoFullscreenLayer() addSublayer:m_textTrackRepresentationLayer.get()];
    }

#else
    UNUSED_PARAM(representation);
#endif
}
2195 #endif // ENABLE(VIDEO_TRACK)
2196
#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
// Lazily creates the Web Audio source provider for the current player item,
// seeding it with the first enabled audible track.
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    if (m_provider)
        return m_provider.get();

    m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
    m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    return m_provider.get();
}
#endif
2208
// Pushes the cached presentation size through to the engine as the natural size.
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (!m_avAsset)
        return;

    setNaturalSize(m_cachedPresentationSize);
}
2216     
// The media has a single security origin only if the URL the asset finally
// resolved to shares scheme/host/port with the URL originally requested
// (i.e. any redirect stayed same-origin).
bool MediaPlayerPrivateAVFoundationObjC::hasSingleSecurityOrigin() const 
{
    // Until "resolvedURL" has loaded we cannot know, so answer conservatively.
    if (!m_avAsset || [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] != AVKeyValueStatusLoaded)
        return false;
    
    Ref<SecurityOrigin> resolvedOrigin(SecurityOrigin::create(resolvedURL()));
    Ref<SecurityOrigin> requestedOrigin(SecurityOrigin::createFromString(assetURL()));
    return resolvedOrigin.get().isSameSchemeHostPort(&requestedOrigin.get());
}
2226
// Reports whether media loading passed CORS checks. This is only knowable when
// loading went through our custom NSURLSession; otherwise answer false.
bool MediaPlayerPrivateAVFoundationObjC::didPassCORSAccessCheck() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED > 101100
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    // respondsToSelector: guards against OS versions lacking the URLSession SPI.
    if (!Settings::isAVFoundationNSURLSessionEnabled()
        || ![resourceLoader respondsToSelector:@selector(URLSession)])
        return false;

    WebCoreNSURLSession *session = (WebCoreNSURLSession *)resourceLoader.URLSession;
    if ([session respondsToSelector:@selector(didPassCORSAccessChecks)])
        return session.didPassCORSAccessChecks;
#endif
    return false;
}
2241
2242 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Creates an AVPlayerItemVideoOutput for pulling decoded frames and attaches it
// to the current player item. No-op when there is no item or an output already exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // With VideoToolbox a conformer converts pixel formats later, so accept any format here.
    NSDictionary* attributes = nil;
#else
    // Without VideoToolbox, request 32BGRA directly so buffers are drawable as-is.
    NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                nil];
#endif
    m_videoOutput = adoptNS([allocAVPlayerItemVideoOutputInstance() initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    // The delegate is notified on a shared serial queue when new media data becomes available.
    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p) - returning %p", this, m_videoOutput.get());
}
2265
// Detaches and releases the pixel-buffer video output created by createVideoOutput().
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput(%p) - destroying  %p", this, m_videoOutput.get());

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];

    m_videoOutput = 0;
}
2277
// Copies the pixel buffer for the item's current time from the video output,
// creating the output on demand. Returns null when no new frame is available.
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::createPixelBuffer()
{
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    // hasNewPixelBufferForItemTime: answers NO once the frame for this time has
    // been copied, so a null result here is an expected, recoverable state.
    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return 0;

    return adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
}
2291
// Answers whether a frame can be painted right now: either a decoded image is
// already cached, or the video output has a new pixel buffer for the current time.
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;

    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    CMTime itemTime = [m_avPlayerItem currentTime];
    return [m_videoOutput hasNewPixelBufferForItemTime:itemTime];
}
2305
// Refreshes m_lastImage from the newest available pixel buffer, converting it
// through a (lazily created) pixel-buffer conformer.
void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
{
    RetainPtr<CVPixelBufferRef> pixelBuffer = createPixelBuffer();

    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
    // for the requested time has already been retrieved. In this case, the last valid image (if any)
    // should be displayed.
    if (!pixelBuffer)
        return;

    if (!m_pixelBufferConformer) {
#if USE(VIDEOTOOLBOX)
        // The output delivered arbitrary formats (see createVideoOutput), so conform to 32BGRA here.
        NSDictionary *attributes = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#else
        // Buffers are already 32BGRA; no conversion attributes needed.
        NSDictionary *attributes = nil;
#endif
        m_pixelBufferConformer = std::make_unique<PixelBufferConformerCV>((CFDictionaryRef)attributes);
    }

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    m_lastImage = m_pixelBufferConformer->createImageFromPixelBuffer(pixelBuffer.get());

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateLastImage(%p) - creating buffer took %.4f", this, narrowPrecisionToFloat(duration));
#endif
}
2336
// Paints the current video frame into the graphics context, applying the video
// track's preferred transform (e.g. rotation) to the destination rect.
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext& context, const FloatRect& outputRect)
{
    // Block (up to ~1s) for the first frame so the initial paint is not blank.
    if (m_videoOutput && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();

    updateLastImage();

    if (!m_lastImage)
        return;

    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(%p)", this);

    GraphicsContextStateSaver stateSaver(context);
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
    // Map the destination through the inverse transform so that concatenating the
    // transform onto the CTM below lands the image in outputRect.
    FloatRect transformedOutputRect = videoTransform.inverse().valueOr(AffineTransform()).mapRect(outputRect);

    context.concatCTM(videoTransform);
    context.drawNativeImage(m_lastImage.get(), imageRect.size(), transformedOutputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();

}
2367
// Creates a second AVPlayerItemVideoOutput whose buffers are IOSurface-backed and
// FBO-compatible, for the WebGL texture upload path. No-op if one already exists.
void MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_openGLVideoOutput)
        return;

#if PLATFORM(IOS)
    NSDictionary* attributes = @{(NSString *)kCVPixelBufferIOSurfaceOpenGLESFBOCompatibilityKey: @YES};
#else
    NSDictionary* attributes = @{(NSString *)kCVPixelBufferIOSurfaceOpenGLFBOCompatibilityKey: @YES};
#endif
    m_openGLVideoOutput = adoptNS([allocAVPlayerItemVideoOutputInstance() initWithPixelBufferAttributes:attributes]);
    ASSERT(m_openGLVideoOutput);

    [m_avPlayerItem.get() addOutput:m_openGLVideoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput(%p) - returning %p", this, m_openGLVideoOutput.get());
}
2387
// Detaches and releases the OpenGL-compatible video output created by
// createOpenGLVideoOutput().
void MediaPlayerPrivateAVFoundationObjC::destroyOpenGLVideoOutput()
{
    if (!m_openGLVideoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_openGLVideoOutput.get()];
    // Log the OpenGL output being destroyed (previously logged m_videoOutput by mistake,
    // a copy/paste slip from destroyVideoOutput()).
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyOpenGLVideoOutput(%p) - destroying  %p", this, m_openGLVideoOutput.get());

    m_openGLVideoOutput = 0;
}
2399
// Refreshes m_lastOpenGLImage with the newest pixel buffer from the OpenGL
// output, keyed to the current host time; keeps the old buffer if nothing new.
void MediaPlayerPrivateAVFoundationObjC::updateLastOpenGLImage()
{
    if (!m_openGLVideoOutput)
        return;

    // Use host time (not item time) so we get the frame appropriate for display "now".
    CMTime currentTime = [m_openGLVideoOutput itemTimeForHostTime:CACurrentMediaTime()];
    if (![m_openGLVideoOutput hasNewPixelBufferForItemTime:currentTime])
        return;

    m_lastOpenGLImage = adoptCF([m_openGLVideoOutput copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
}
2411
// Copies the current video frame into the caller's GL texture for WebGL
// (texImage2D with a video element). Returns false when the copy cannot be done.
// flipY/premultiplyAlpha are not supported by this path.
bool MediaPlayerPrivateAVFoundationObjC::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    if (flipY || premultiplyAlpha)
        return false;

    ASSERT(context);

    if (!m_openGLVideoOutput)
        createOpenGLVideoOutput();

    updateLastOpenGLImage();

    if (!m_lastOpenGLImage)
        return false;

    size_t width = CVPixelBufferGetWidth(m_lastOpenGLImage.get());
    size_t height = CVPixelBufferGetHeight(m_lastOpenGLImage.get());

    // Texture cache is created lazily and tied to this GL context.
    if (!m_textureCache) {
        m_textureCache = TextureCacheCV::create(*context);
        if (!m_textureCache)
            return false;
    }

    RetainPtr<CVOpenGLTextureRef> videoTexture = m_textureCache->textureFromImage(m_lastOpenGLImage.get(), outputTarget, level, internalFormat, format, type);

    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierCV>(*context);

    return m_videoTextureCopier->copyVideoTextureToPlatformTexture(videoTexture.get(), width, height, outputTexture, outputTarget, level, internalFormat, format, type, premultiplyAlpha, flipY);
}
2443
// Returns the decoded image for the current playback time, refreshing the
// cached frame first. May return null if no frame has ever been produced.
NativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage;
}
2449
// Blocks the calling thread until the video output reports new media data
// (signalled via outputMediaDataWillChange), or until a 1-second timeout.
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    // Advance interval 0: notify as soon as any new media data is available.
    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for 1 second.
    long result = dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(0, 1 * NSEC_PER_SEC));

    // Non-zero result means the wait timed out rather than being signalled.
    if (result)
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange(%p) timed out", this);
}
2463
// Video-output delegate callback (invoked on the pull-delegate queue): wakes any
// thread blocked in waitForVideoOutputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutput*)
{
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
2468 #endif
2469
2470 #if ENABLE(ENCRYPTED_MEDIA)
// EME v1 entry point: parses initData, produces a streaming content key request
// via the pending AVAssetResourceLoadingRequest, and reports it to the player.
// Errors from AVFoundation are surfaced through keyError() rather than a thrown
// exception, hence NoError is still returned in that case.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::generateKeyRequest(const String& keySystem, const unsigned char* initDataPtr, unsigned initDataLength)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    RefPtr<Uint8Array> initData = Uint8Array::create(initDataPtr, initDataLength);
    String keyURI;
    String keyID;
    RefPtr<Uint8Array> certificate;
    if (!extractKeyURIKeyIDAndCertificateFromInitData(initData.get(), keyURI, keyID, certificate))
        return MediaPlayer::InvalidPlayerState;

    // A resource-loading request for this key URI must already be pending.
    if (!m_keyURIToRequestMap.contains(keyURI))
        return MediaPlayer::InvalidPlayerState;

    String sessionID = createCanonicalUUIDString();

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_keyURIToRequestMap.get(keyURI);

    RetainPtr<NSData> certificateData = adoptNS([[NSData alloc] initWithBytes:certificate->baseAddress() length:certificate->byteLength()]);
    NSString* assetStr = keyID;
    RetainPtr<NSData> assetID = [NSData dataWithBytes: [assetStr cStringUsingEncoding:NSUTF8StringEncoding] length:[assetStr lengthOfBytesUsingEncoding:NSUTF8StringEncoding]];
    NSError* error = 0;
    RetainPtr<NSData> keyRequest = [avRequest.get() streamingContentKeyRequestDataForApp:certificateData.get() contentIdentifier:assetID.get() options:nil error:&error];

    if (!keyRequest) {
        // Report the underlying (typically FairPlay) error code to the page.
        NSError* underlyingError = [[error userInfo] objectForKey:NSUnderlyingErrorKey];
        player()->keyError(keySystem, sessionID, MediaPlayerClient::DomainError, [underlyingError code]);
        return MediaPlayer::NoError;
    }

    RefPtr<ArrayBuffer> keyRequestBuffer = ArrayBuffer::create([keyRequest.get() bytes], [keyRequest.get() length]);
    RefPtr<Uint8Array> keyRequestArray = Uint8Array::create(keyRequestBuffer, 0, keyRequestBuffer->byteLength());
    player()->keyMessage(keySystem, sessionID, keyRequestArray->data(), keyRequestArray->byteLength(), URL());

    // Move ownership of the AVAssetResourceLoadingRequest from the keyIDToRequestMap to the sessionIDToRequestMap:
    m_sessionIDToRequestMap.set(sessionID, avRequest);
    m_keyURIToRequestMap.remove(keyURI);

    return MediaPlayer::NoError;
}
2512
// EME v1: delivers the license/key bytes to the pending resource-loading request
// for the given session, completing the load started in generateKeyRequest().
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::addKey(const String& keySystem, const unsigned char* keyPtr, unsigned keyLength, const unsigned char* initDataPtr, unsigned initDataLength, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_sessionIDToRequestMap.get(sessionID);
    RetainPtr<NSData> keyData = adoptNS([[NSData alloc] initWithBytes:keyPtr length:keyLength]);
    [[avRequest.get() dataRequest] respondWithData:keyData.get()];
    [avRequest.get() finishLoading];
    // The request is satisfied; drop our reference so the session can't be reused.
    m_sessionIDToRequestMap.remove(sessionID);

    player()->keyAdded(keySystem, sessionID);

    UNUSED_PARAM(initDataPtr);
    UNUSED_PARAM(initDataLength);
    return MediaPlayer::NoError;
}
2533
// EME v1: abandons a pending key session by dropping its stored loading request.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::cancelKeyRequest(const String& keySystem, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    if (m_sessionIDToRequestMap.contains(sessionID)) {
        m_sessionIDToRequestMap.remove(sessionID);
        return MediaPlayer::NoError;
    }

    return MediaPlayer::InvalidPlayerState;
}
2545 #endif
2546
2547 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Removes and returns the pending resource-loading request for keyURI, or an
// empty RetainPtr when none is pending.
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
2552
// Attempts to satisfy every pending key request with keys the player has
// already cached, then discards each request that was fulfilled.
void MediaPlayerPrivateAVFoundationObjC::keyAdded()
{
    Vector<String> satisfiedKeyIds;

    for (auto& entry : m_keyURIToRequestMap) {
        auto keyData = player()->cachedKeyForKeyId(entry.key);
        if (!keyData)
            continue;

        fulfillRequestWithKeyData(entry.value.get(), keyData.get());
        satisfiedKeyIds.append(entry.key);
    }

    // Remove fulfilled entries only after iteration; mutating the map mid-loop is unsafe.
    for (auto& keyId : satisfiedKeyIds)
        m_keyURIToRequestMap.remove(keyId);
}
2572
// Creates a CDM session for a supported key system; returns null for
// unsupported systems.
std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem, CDMSessionClient* client)
{
    if (keySystemIsSupported(keySystem))
        return std::make_unique<CDMSessionAVFoundationObjC>(this, client);

    return nullptr;
}
2580 #endif
2581
2582 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
// Fallback (no legible-output support): diffs the player item's closed-caption
// tracks against our existing text tracks, creating tracks for new captions and
// collecting the rest as removed.
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Deselect any legible option; caption display is driven by our own tracks.
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    // Start by assuming every current track was removed; survivors are pruned below.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        // Iterate backwards so remove(i - 1) does not disturb unvisited indices.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(m_textTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                // Track still present: keep it (drop from the removed list).
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;
        
        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2617 #endif
2618
// Returns the asset's audible tracks, or nil when the asset is missing or its
// "tracks" key has not finished asynchronous loading (querying earlier can block).
NSArray* MediaPlayerPrivateAVFoundationObjC::safeAVAssetTracksForAudibleMedia()
{
    if (!m_avAsset || [m_avAsset.get() statusOfValueForKey:@"tracks" error:NULL] != AVKeyValueStatusLoaded)
        return nil;

    return [m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible];
}
2629
2630 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// True once the asset's media-selection metadata has finished loading; the
// safeMediaSelectionGroupFor* accessors below rely on this guard.
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    if (!m_avAsset)
        return false;

    return [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2641
// Returns the legible (caption/subtitle) selection group once metadata is loaded.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible] : nil;
}
2649
// Returns the audible selection group once metadata is loaded.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible] : nil;
}
2657
// Returns the visual selection group once metadata is loaded.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual] : nil;
}
2665
// Diffs the asset's legible media-selection options against our text tracks:
// creates tracks for newly appeared options, and reports vanished ones removed.
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Assume all existing tracks are removed; survivors are pruned in the loop.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        // Iterate backwards so remove(i - 1) does not disturb unvisited indices.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy closed-caption tracks are handled elsewhere; skip them here.
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            if ([currentOption.get() isEqual:option]) {
                // Option still present: keep the track (drop it from the removed list).
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2721
// Lazily creates the single in-band metadata text track (for timed metadata in
// HLS streams) and registers it with the media player.
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    if (m_metadataTrack)
        return;

    m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
    m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
    player()->addTextTrack(m_metadataTrack);
}
2731
// Forwards legible-output cues (attributed strings and/or native samples) for
// the given media time to the currently selected text track, if any.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    ASSERT(time >= MediaTime::zeroTime());

    if (!m_currentTextTrack)
        return;

    // NSArray is toll-free bridged to CFArrayRef, so the casts are safe.
    m_currentTextTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), reinterpret_cast<CFArrayRef>(nativeSamples), time);
}
2741
// Discards all accumulated cue state on the current text track (e.g. after a seek).
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::flushCues(%p)", this);

    if (!m_currentTextTrack)
        return;
    
    m_currentTextTrack->resetCueValues();
}
2751 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2752
// Selects the given text track in AVFoundation: legacy closed captions use the
// player's caption display flag, while in-band/out-of-band tracks select the
// corresponding option in the legible media-selection group. Passing null
// deselects everything.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");
        
    m_currentTextTrack = track;

    if (track) {
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
        // Deselect both mechanisms: the legible option and legacy caption display.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
    }

}
2781
// Determines (and caches) the language of the primary audio track: prefer the
// currently selected audible media-selection option; otherwise, if the asset has
// exactly one audio track, use that track's language; otherwise empty.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    // Cached result; cleared elsewhere when tracks change (not visible in this chunk).
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        m_languageOfPrimaryAudioTrack = emptyString();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - %lu audio tracks, returning emptyString()", this, static_cast<unsigned long>(tracks ? [tracks count] : 0));
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

#if !LOG_DISABLED
    if (m_languageOfPrimaryAudioTrack == emptyString())
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
    else
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
#endif

    return m_languageOfPrimaryAudioTrack;
}
2823
2824 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// Reports whether playback is currently routed to a wireless target (AirPlay
// etc.). On Mac the answer depends on the chosen target type; on iOS it is
// simply the player's externalPlaybackActive state.
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    bool wirelessTarget = false;

#if !PLATFORM(IOS)
    if (m_playbackTarget) {
        if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation)
            wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
        else
            // Mock targets: wireless when we were asked to play to them and a route is active.
            wirelessTarget = m_shouldPlayToPlaybackTarget && m_playbackTarget->hasActiveRoute();
    }
#else
    wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
#endif

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless(%p) - returning %s", this, boolString(wirelessTarget));

    return wirelessTarget;
}
2844
// Maps the platform's external-playback device type to the MediaPlayer enum.
// On Mac the only wireless target type we can have is AirPlay.
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

#if PLATFORM(IOS)
    switch (wkExernalDeviceTypeForPlayer(m_avPlayer.get())) {
    case wkExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case wkExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case wkExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;

#else
    return MediaPlayer::TargetTypeAirPlay;
#endif
}
2867
// Returns the display name of the wireless playback device: on Mac from our
// playback-target object, on iOS from the WebKitSystemInterface helper.
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();

    String wirelessTargetName;
#if !PLATFORM(IOS)
    if (m_playbackTarget)
        wirelessTargetName = m_playbackTarget->deviceName();
#else
    wirelessTargetName = wkExernalDeviceDisplayNameForPlayer(m_avPlayer.get());
#endif
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName(%p) - returning %s", this, wirelessTargetName.utf8().data());

    return wirelessTargetName;
}
2884
// Returns whether external (wireless) video playback is disabled. When a player
// exists, the cached flag is refreshed from AVPlayer's allowsExternalPlayback
// (note the return value is the negation of "allows").
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;

    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled(%p) - returning %s", this, boolString(!m_allowsWirelessVideoPlayback));

    return !m_allowsWirelessVideoPlayback;
}
2895
// Records the wireless-playback preference and, if a player exists, pushes it to
// AVPlayer. Callbacks are delayed around the AVFoundation call so any resulting
// KVO notifications are delivered after our state is consistent.
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(%p) - %s", this, boolString(disabled));
    m_allowsWirelessVideoPlayback = !disabled;
    if (!m_avPlayer)
        return;

    setDelayCallbacks(true);
    [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
    setDelayCallbacks(false);
}
2907
2908 #if !PLATFORM(IOS)
// Adopts a new wireless playback target (Mac only). For AVFoundation targets we
// also capture the AVOutputContext used to route playback; other (mock) targets
// have no context.
void MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
{
    m_playbackTarget = WTFMove(target);

    m_outputContext = m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation ? toMediaPlaybackTargetMac(m_playbackTarget.get())->outputContext() : nullptr;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(%p) - target = %p, device name = %s", this, m_outputContext.get(), m_playbackTarget->deviceName().utf8().data());

    // A target with no active route cannot be played to; reset the play-to flag.
    if (!m_playbackTarget->hasActiveRoute())
        setShouldPlayToPlaybackTarget(false);
}
2920
// Starts or stops routing playback to the current wireless target. For real
// AVFoundation targets this sets/clears the player's outputContext; for mock
// targets it just schedules the "wireless state changed" notification.
void MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(bool shouldPlay)
{
    if (m_shouldPlayToPlaybackTarget == shouldPlay)
        return;

    m_shouldPlayToPlaybackTarget = shouldPlay;

    if (!m_playbackTarget)
        return;

    if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation) {
        AVOutputContext *newContext = shouldPlay ? m_outputContext.get() : nil;

        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(%p) - target = %p, shouldPlay = %s", this, newContext, boolString(shouldPlay));

        if (!m_avPlayer)
            return;

        // Avoid churn: skip the assignment when the context is already in effect.
        RetainPtr<AVOutputContext> currentContext = m_avPlayer.get().outputContext;
        if ((!newContext && !currentContext.get()) || [currentContext.get() isEqual:newContext])
            return;

        // Delay callbacks so KVO fired by the context switch arrives after our state settles.
        setDelayCallbacks(true);
        m_avPlayer.get().outputContext = newContext;
        setDelayCallbacks(false);

        return;
    }

    ASSERT(m_playbackTarget->targetType() == MediaPlaybackTarget::Mock);

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(%p) - target = {Mock}, shouldPlay = %s", this, boolString(shouldPlay));

    setDelayCallbacks(true);
    auto weakThis = createWeakPtr();
    // Notify asynchronously on the main thread; the weak pointer guards against
    // this object being destroyed before the notification runs.
    scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis] {
        if (!weakThis)
            return;
        weakThis->playbackTargetIsWirelessDidChange();
    }));
    setDelayCallbacks(false);
}
2963 #endif // !PLATFORM(IOS)
2964
// On iOS, allows AVPlayer external playback only while the element is in
// standard video fullscreen. No-op elsewhere or before the player exists.
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
    if (!m_avPlayer)
        return;

#if PLATFORM(IOS)
    // Note: '&' is a deliberate bitmask test of the fullscreen mode flags.
    [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:player()->fullscreenMode() & MediaPlayer::VideoFullscreenModeStandard];
#endif
}
2974 #endif
2975
// KVO: the AVPlayerItem's "status" changed; cache it and re-evaluate state.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;

    updateStates();
}
2982
// Called on the KVO "prior" notification for "playbackLikelyToKeepUp"; the
// matching ...DidChange handler decrements the counter again.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    m_pendingStatusChanges++;
}
2987
// KVO: "playbackLikelyToKeepUp" changed. States are only recomputed once all
// outstanding will/did change pairs have completed.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2996
// Called on the KVO "prior" notification for "playbackBufferEmpty"; paired
// with playbackBufferEmptyDidChange() below.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    m_pendingStatusChanges++;
}
3001
// KVO: "playbackBufferEmpty" changed. States are only recomputed once all
// outstanding will/did change pairs have completed.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
3010
// Called on the KVO "prior" notification for "playbackBufferFull"; paired
// with playbackBufferFullDidChange() below.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    m_pendingStatusChanges++;
}
3015
// KVO: "playbackBufferFull" changed. States are only recomputed once all
// outstanding will/did change pairs have completed.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
3024
// KVO: the item's seekable time ranges changed; cache the new array and
// propagate the change to the cross-platform base class.
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray> seekableRanges)
{
    m_cachedSeekableRanges = seekableRanges;

    seekableTimeRangesChanged();
    updateStates();
}
3032
// KVO: the item's loaded (buffered) time ranges changed; cache the new array
// and propagate the change to the cross-platform base class.
void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray> loadedRanges)
{
    m_cachedLoadedRanges = loadedRanges;

    loadedTimeRangesChanged();
    updateStates();
}
3040
// KVO: the player layer's "readyForDisplay" changed. If no video has been
// detected yet but a frame is now displayable, re-run track detection.
void MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange(bool isReady)
{
    m_cachedIsReadyForDisplay = isReady;
    if (!hasVideo() && isReady)
        tracksChanged();
    updateStates();
}
3048
// KVO: an AVPlayerItemTrack's "enabled" property changed (the new value itself
// is unused; the full track list is re-examined instead).
void MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange(bool)
{
    tracksChanged();
    updateStates();
}
3054
// Controls whether the player should buffer media data: attaches the current
// AVPlayerItem to the player when buffering should occur, detaches it (passes
// nil to setAVPlayerItem()) when it should not. No-op before a player exists.
void MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(bool shouldBuffer)
{
    // Log tag corrected to match the method name (was "shouldBufferData").
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(%p) - %s", this, boolString(shouldBuffer));
    if (m_shouldBufferData == shouldBuffer)
        return;

    m_shouldBufferData = shouldBuffer;

    if (!m_avPlayer)
        return;

    setAVPlayerItem(shouldBuffer ? m_avPlayerItem.get() : nil);
}
3068
3069 #if ENABLE(DATACUE_VALUE)
// Maps an AVFoundation metadata key space to the AtomicString type attached to
// data cues (see metadataDidArrive()). Unrecognized key spaces map to the
// empty atom.
static const AtomicString& metadataType(NSString *avMetadataKeySpace)
{
    static NeverDestroyed<const AtomicString> quickTimeUserDataType("com.apple.quicktime.udta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> isoUserDataType("org.mp4ra", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> quickTimeMetadataType("com.apple.quicktime.mdta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> iTunesMetadataType("com.apple.itunes", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> id3MetadataType("org.id3", AtomicString::ConstructFromLiteral);

    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeUserData])
        return quickTimeUserDataType;
    // The ISO user-data key space constant may be unavailable at runtime
    // (hence the null check before comparing against it).
    if (AVMetadataKeySpaceISOUserData && [avMetadataKeySpace isEqualToString:AVMetadataKeySpaceISOUserData])
        return isoUserDataType;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeMetadata])
        return quickTimeMetadataType;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceiTunes])
        return iTunesMetadataType;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceID3])
        return id3MetadataType;

    return emptyAtom;
}
3091 #endif
3092
// Timed metadata arrived for the current item (KVO on "timedMetadata").
// Caches the raw AVMetadataItems and, when DATACUE_VALUE is enabled, converts
// them into data cues on the metadata track.
void MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(RetainPtr<NSArray> metadata, const MediaTime& mediaTime)
{
    // KVO can deliver NSNull instead of an array; normalize that to nil.
    m_currentMetaData = metadata && ![metadata isKindOfClass:[NSNull class]] ? metadata : nil;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(%p) - adding %i cues at time %s", this, m_currentMetaData ? static_cast<int>([m_currentMetaData.get() count]) : 0, toString(mediaTime).utf8().data());

#if ENABLE(DATACUE_VALUE)
    if (seeking())
        return;

    if (!m_metadataTrack)
        processMetadataTrack();

    // A nil/NSNull payload marks the end of the current metadata group: close
    // out any still-open cues at the current media time and stop.
    if (!metadata || [metadata isKindOfClass:[NSNull class]]) {
        m_metadataTrack->updatePendingCueEndTimes(mediaTime);
        return;
    }

    // Set the duration of all incomplete cues before adding new ones.
    MediaTime earliestStartTime = MediaTime::positiveInfiniteTime();
    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = std::max(toMediaTime(item.time), MediaTime::zeroTime());
        if (start < earliestStartTime)
            earliestStartTime = start;
    }
    m_metadataTrack->updatePendingCueEndTimes(earliestStartTime);

    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = std::max(toMediaTime(item.time), MediaTime::zeroTime());
        // Items without a valid duration become open-ended cues; their end
        // times are patched later by updatePendingCueEndTimes().
        MediaTime end = MediaTime::positiveInfiniteTime();
        if (CMTIME_IS_VALID(item.duration))
            end = start + toMediaTime(item.duration);

        AtomicString type = nullAtom;
        if (item.keySpace)
            type = metadataType(item.keySpace);

        m_metadataTrack->addDataCue(start, end, SerializedPlatformRepresentationMac::create(item), type);
    }
#endif
}
3134
// KVO: the AVPlayerItem's "tracks" array changed. Re-filters the track list,
// moves the per-track "enabled" observers to the new set, and notifies the
// base class.
void MediaPlayerPrivateAVFoundationObjC::tracksDidChange(RetainPtr<NSArray> tracks)
{
    // Stop observing "enabled" on the tracks we are about to drop.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];

    NSArray *assetTracks = [m_avAsset tracks];

    // Keep tracks backed by the asset itself; for streaming tracks, keep only
    // those that do not belong to a valid AVMediaSelectionGroup (grouped
    // tracks are surfaced through the media-selection code paths instead).
    m_cachedTracks = [tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id obj, NSUInteger, BOOL*) {
        AVAssetTrack* assetTrack = [obj assetTrack];

        if ([assetTracks containsObject:assetTrack])
            return YES;

        // Track is a streaming track. Omit if it belongs to a valid AVMediaSelectionGroup.
        if (!hasLoadedMediaSelectionGroups())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicAudible] && safeMediaSelectionGroupForAudibleMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicVisual] && safeMediaSelectionGroupForVisualMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible] && safeMediaSelectionGroupForLegibleMedia())
            return NO;

        return YES;
    }]];

    // Re-register for "enabled" changes on the retained tracks.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track addObserver:m_objcObserver.get() forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayerItemTrack];

    // The track list changed, so any cached byte-size total is stale.
    m_cachedTotalBytes = 0;

    tracksChanged();
    updateStates();
}
3172
// KVO: the item's "hasEnabledAudio" changed; cache it and re-run track logic.
void MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange(bool hasEnabledAudio)
{
    m_cachedHasEnabledAudio = hasEnabledAudio;

    tracksChanged();
    updateStates();
}
3180
// KVO: the item's "presentationSize" changed; cache it and notify the base
// class that the natural size may have changed.
void MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange(FloatSize size)
{
    m_cachedPresentationSize = size;

    sizeChanged();
    updateStates();
}
3188
// KVO: the item's "duration" changed; cache the new value and invalidate the
// base class's cached duration so it is recomputed on demand.
void MediaPlayerPrivateAVFoundationObjC::durationDidChange(const MediaTime& duration)
{
    m_cachedDuration = duration;

    invalidateCachedDuration();
}
3195
// KVO: the AVPlayer's "rate" changed; cache it and notify.
void MediaPlayerPrivateAVFoundationObjC::rateDidChange(double rate)
{
    m_cachedRate = rate;

    updateStates();
    rateChanged();
}
3203     
3204 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// Forwards external-playback KVO changes ("externalPlaybackActive",
// "allowsExternalPlayback") to the cross-platform notification.
void MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange()
{
    playbackTargetIsWirelessChanged();
}
3209 #endif
3210
// KVO: the item's "canPlayFastForward" changed; cache only, no notification.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange(bool newValue)
{
    m_cachedCanPlayFastForward = newValue;
}
3215
// KVO: the item's "canPlayFastReverse" changed; cache only, no notification.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange(bool newValue)
{
    m_cachedCanPlayFastReverse = newValue;
}
3220
// Returns the asset's resolved URL once AVFoundation has finished loading that
// key; until then (or when there is no asset) defers to the base class.
URL MediaPlayerPrivateAVFoundationObjC::resolvedURL() const
{
    if (!m_avAsset || [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] != AVKeyValueStatusLoaded)
        return MediaPlayerPrivateAVFoundation::resolvedURL();

    return URL([m_avAsset resolvedURL]);
}
3228
// Asset keys this player loads/queries on its AVAsset (e.g. "resolvedURL" is
// checked via -statusOfValueForKey: in resolvedURL() above).
NSArray* assetMetadataKeyNames()
{
    // Built exactly once; direct static initialization matches the style of
    // assetTrackMetadataKeyNames() and playerKVOProperties() in this file and
    // replaces the previous checked lazy-init.
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"duration",
        @"naturalSize",
        @"preferredTransform",
        @"preferredVolume",
        @"preferredRate",
        @"playable",
        @"resolvedURL",
        @"tracks",
        @"availableMediaCharacteristicsWithMediaSelectionOptions",
        nil];
    return keys;
}
3246
// KVO key paths observed on each AVPlayerItem; each is dispatched to a handler
// in -[WebCoreAVFMovieObserver observeValueForKeyPath:ofObject:change:context:].
NSArray* itemKVOProperties()
{
    // Built exactly once; direct static initialization matches the style of
    // assetTrackMetadataKeyNames() and playerKVOProperties() in this file and
    // replaces the previous checked lazy-init.
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"presentationSize",
        @"status",
        @"asset",
        @"tracks",
        @"seekableTimeRanges",
        @"loadedTimeRanges",
        @"playbackLikelyToKeepUp",
        @"playbackBufferFull",
        @"playbackBufferEmpty",
        @"duration",
        @"hasEnabledAudio",
        @"timedMetadata",
        @"canPlayFastForward",
        @"canPlayFastReverse",
        nil];
    return keys;
}
3269
// Keys loaded/queried for each AVAssetTrack.
NSArray* assetTrackMetadataKeyNames()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"totalSampleDataLength",
        @"mediaType",
        @"enabled",
        @"preferredTransform",
        @"naturalSize",
        nil];
    return keys;
}
3275
// KVO key paths observed on the AVPlayer itself (the per-item keys live in
// itemKVOProperties() above); contents depend on compile-time feature flags.
NSArray* playerKVOProperties()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"rate",
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
                            @"externalPlaybackActive", @"allowsExternalPlayback",
#endif
                            nil];
    return keys;
}
3285 } // namespace WebCore
3286
3287 @implementation WebCoreAVFMovieObserver
3288
// Designated initializer. Stores the owning C++ player as a raw pointer; the
// link is severed later via -disconnect, which nulls m_callback.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    if (!(self = [super init]))
        return nil;

    m_callback = callback;
    return self;
}
3297
// Severs the link back to the C++ player and cancels pending performSelector
// requests targeting this observer; afterwards every callback method
// early-returns on the null m_callback.
- (void)disconnect
{
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    m_callback = 0;
}
3303
// Asset metadata finished loading; forward as a main-thread notification.
- (void)metadataLoaded
{
    if (!m_callback)
        return;

    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
}
3311
// Playback reached the end of the item; forward as a main-thread notification.
- (void)didEnd:(NSNotification *)unusedNotification
{
    UNUSED_PARAM(unusedNotification);
    if (!m_callback)
        return;
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
}
3319
// Central KVO dispatch: maps each observed key path (registered from
// itemKVOProperties(), playerKVOProperties(), etc.) to the corresponding
// MediaPlayerPrivateAVFoundationObjC member function and schedules that call
// on the main thread, guarded by a WeakPtr to the player.
- (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context
{
    UNUSED_PARAM(object);
    id newValue = [change valueForKey:NSKeyValueChangeNewKey];

    if (!m_callback)
        return;

    // "Prior" notifications are delivered before the value actually changes;
    // they drive the *WillChange handlers below.
    bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];

#if !LOG_DISABLED
    if (willChange)
        LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - will change, keyPath = %s", self, [keyPath UTF8String]);
    else {
        RetainPtr<NSString> valueString = adoptNS([[NSString alloc] initWithFormat:@"%@", newValue]);
        LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - did change, keyPath = %s, value = %s", self, [keyPath UTF8String], [valueString.get() UTF8String]);
    }
#endif

    std::function<void ()> function;

    // A value changed on the AVPlayerLayer.
    if (context == MediaPlayerAVFoundationObservationContextAVPlayerLayer) {
        if ([keyPath isEqualToString:@"readyForDisplay"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange, m_callback, [newValue boolValue]);
    }

    // A value changed on an AVPlayerItemTrack.
    if (context == MediaPlayerAVFoundationObservationContextPlayerItemTrack) {
        if ([keyPath isEqualToString:@"enabled"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange, m_callback, [newValue boolValue]);
    }

    // A value is ABOUT to change on the AVPlayerItem.
    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && willChange) {
        if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange, m_callback);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && !willChange) {
        // A value changed for an AVPlayerItem
        if ([keyPath isEqualToString:@"status"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange, m_callback, [newValue intValue]);
        else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"asset"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::setAsset, m_callback, RetainPtr<id>(newValue));
        else if ([keyPath isEqualToString:@"loadedTimeRanges"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"seekableTimeRanges"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"tracks"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::tracksDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"hasEnabledAudio"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"presentationSize"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange, m_callback, FloatSize([newValue sizeValue]));
        else if ([keyPath isEqualToString:@"duration"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::durationDidChange, m_callback, toMediaTime([newValue CMTimeValue]));
        else if ([keyPath isEqualToString:@"timedMetadata"] && newValue) {
            // Metadata is timestamped with the item's current time when the
            // time is numeric; otherwise zero is used.
            MediaTime now;
            CMTime itemTime = [(AVPlayerItemType *)object currentTime];
            if (CMTIME_IS_NUMERIC(itemTime))
                now = std::max(toMediaTime(itemTime), MediaTime::zeroTime());
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::metadataDidArrive, m_callback, RetainPtr<NSArray>(newValue), now);
        } else if ([keyPath isEqualToString:@"canPlayFastReverse"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"canPlayFastForward"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange, m_callback, [newValue boolValue]);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayer && !willChange) {
        // A value changed for an AVPlayer.
        if ([keyPath isEqualToString:@"rate"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::rateDidChange, m_callback, [newValue doubleValue]);
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
        else if ([keyPath isEqualToString:@"externalPlaybackActive"] || [keyPath isEqualToString:@"allowsExternalPlayback"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange, m_callback);
#endif
    }

    if (!function)
        return;

    auto weakThis = m_callback->createWeakPtr();
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis, function]{
        // weakThis and function both refer to the same MediaPlayerPrivateAVFoundationObjC instance. If the WeakPtr has
        // been cleared, the underlying object has been destroyed, and it is unsafe to call function().
        if (!weakThis)
            return;
        function();
    }));
}
3418
3419 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// AVPlayerItemLegibleOutput callback: new attributed strings (caption cues)
// are available for itemTime. Hops to the main thread, keeping self and the
// arrays alive for the hop; m_callback is re-checked there because
// -disconnect may have run in the meantime.
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime
{
    UNUSED_PARAM(output);
    UNUSED_PARAM(nativeSamples);

    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFMovieObserver> strongSelf = self;
    RetainPtr<NSArray> strongStrings = strings;
    RetainPtr<NSArray> strongSamples = nativeSamples;
    callOnMainThread([strongSelf, strongStrings, strongSamples, itemTime] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (!callback)
            return;
        // Negative item times are clamped to zero before processing the cue.
        MediaTime time = std::max(toMediaTime(itemTime), MediaTime::zeroTime());
        callback->processCue(strongStrings.get(), strongSamples.get(), time);
    });
}
3439
// AVPlayerItemLegibleOutput callback: the output's sequence was flushed, so
// all previously delivered cues are stale. Flushes them on the main thread,
// re-checking m_callback there in case -disconnect ran in the meantime.
- (void)outputSequenceWasFlushed:(id)output
{
    UNUSED_PARAM(output);

    if (!m_callback)
        return;
    
    RetainPtr<WebCoreAVFMovieObserver> strongSelf = self;
    callOnMainThread([strongSelf] {
        if (MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback)
            callback->flushCues();
    });
}
3453 #endif
3454
3455 @end
3456
3457 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
3458 @implementation WebCoreAVFLoaderDelegate
3459
3460 - (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
3461 {
3462     self = [super init];
3463     if (!self)
3464         return nil;
3465     m_callback = callback;
3466     return self;