[WebGL] accelerated texImage2D for video doesn't respect flipY
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27 #import "MediaPlayerPrivateAVFoundationObjC.h"
28
29 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
30
31 #import "AVAssetTrackUtilities.h"
32 #import "AVFoundationMIMETypeCache.h"
33 #import "AVTrackPrivateAVFObjCImpl.h"
34 #import "AudioSourceProviderAVFObjC.h"
35 #import "AudioTrackPrivateAVFObjC.h"
36 #import "AuthenticationChallenge.h"
37 #import "CDMSessionAVFoundationObjC.h"
38 #import "Cookie.h"
39 #import "Extensions3D.h"
40 #import "FloatConversion.h"
41 #import "GraphicsContext.h"
42 #import "GraphicsContext3D.h"
43 #import "GraphicsContextCG.h"
44 #import "InbandMetadataTextTrackPrivateAVF.h"
45 #import "InbandTextTrackPrivateAVFObjC.h"
46 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
47 #import "Logging.h"
48 #import "MediaPlaybackTargetMac.h"
49 #import "MediaPlaybackTargetMock.h"
50 #import "MediaSelectionGroupAVFObjC.h"
51 #import "MediaTimeAVFoundation.h"
52 #import "OutOfBandTextTrackPrivateAVF.h"
53 #import "PixelBufferConformerCV.h"
54 #import "PlatformTimeRanges.h"
55 #import "SecurityOrigin.h"
56 #import "SerializedPlatformRepresentationMac.h"
57 #import "Settings.h"
58 #import "TextEncoding.h"
59 #import "TextTrackRepresentation.h"
60 #import "TextureCacheCV.h"
61 #import "URL.h"
62 #import "VideoTextureCopierCV.h"
63 #import "VideoTrackPrivateAVFObjC.h"
64 #import "WebCoreAVFResourceLoader.h"
65 #import "WebCoreCALayerExtras.h"
66 #import "WebCoreNSURLSession.h"
67 #import "WebCoreSystemInterface.h"
68 #import <functional>
69 #import <map>
70 #import <objc/runtime.h>
71 #import <pal/spi/cocoa/QuartzCoreSPI.h>
72 #import <pal/spi/mac/AVFoundationSPI.h>
73 #import <runtime/DataView.h>
74 #import <runtime/JSCInlines.h>
75 #import <runtime/TypedArrayInlines.h>
76 #import <runtime/Uint16Array.h>
77 #import <runtime/Uint32Array.h>
78 #import <runtime/Uint8Array.h>
79 #import <wtf/BlockObjCExceptions.h>
80 #import <wtf/CurrentTime.h>
81 #import <wtf/ListHashSet.h>
82 #import <wtf/NeverDestroyed.h>
83 #import <wtf/OSObjectPtr.h>
84 #import <wtf/text/CString.h>
85
86 #if ENABLE(AVF_CAPTIONS)
87 #include "TextTrack.h"
88 #endif
89
90 #import <AVFoundation/AVAssetImageGenerator.h>
91 #import <AVFoundation/AVAssetTrack.h>
92 #import <AVFoundation/AVMediaSelectionGroup.h>
93 #import <AVFoundation/AVMetadataItem.h>
94 #import <AVFoundation/AVPlayer.h>
95 #import <AVFoundation/AVPlayerItem.h>
96 #import <AVFoundation/AVPlayerItemOutput.h>
97 #import <AVFoundation/AVPlayerItemTrack.h>
98 #import <AVFoundation/AVPlayerLayer.h>
99 #import <AVFoundation/AVTime.h>
100
101 #if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
102 #import "VideoFullscreenLayerManager.h"
103 #endif
104
105 #if PLATFORM(IOS)
106 #import "WAKAppKitStubs.h"
107 #import <CoreImage/CoreImage.h>
108 #import <mach/mach_port.h>
109 #else
110 #import <Foundation/NSGeometry.h>
111 #import <QuartzCore/CoreImage.h>
112 #endif
113
114 #if USE(VIDEOTOOLBOX)
115 #import <CoreVideo/CoreVideo.h>
116 #import <VideoToolbox/VideoToolbox.h>
117 #endif
118
119 #if USE(CFURLCONNECTION)
120 #include <pal/spi/cocoa/CFNSURLConnectionSPI.h>
121 #endif
122
123 #import "CoreVideoSoftLink.h"
124
// WTF::HashSet's iterator does not declare the iterator_traits typedefs that
// std algorithms require; provide the minimal value_type specialization for
// the MediaSelectionOptionAVFObjC set iterator. (Presumably needed by a std
// algorithm elsewhere in this file — confirm before removing.)
namespace std {
template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
    typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
};
}
130
#if ENABLE(AVF_CAPTIONS)
// Note: This must be defined before our SOFT_LINK macros:
// SPI category exposing out-of-band caption track metadata on AVMediaSelectionOption.
@class AVMediaSelectionOption;
@interface AVMediaSelectionOption (OutOfBandExtensions)
@property (nonatomic, readonly) NSString* outOfBandSource;
@property (nonatomic, readonly) NSString* outOfBandIdentifier;
@end
#endif

// SPI category: the URL the asset was ultimately resolved to.
@interface AVURLAsset (WebKitExtensions)
@property (nonatomic, readonly) NSURL *resolvedURL;
@end

// Aliases for the soft-linked AVFoundation classes. Code below must use these
// ...Type names in declarations, because the bare class names are #defined to
// soft-link getter functions later in this file.
typedef AVPlayer AVPlayerType;
typedef AVPlayerItem AVPlayerItemType;
typedef AVPlayerItemLegibleOutput AVPlayerItemLegibleOutputType;
typedef AVPlayerItemVideoOutput AVPlayerItemVideoOutputType;
typedef AVMetadataItem AVMetadataItemType;
typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
typedef AVMediaSelectionOption AVMediaSelectionOptionType;
typedef AVAssetCache AVAssetCacheType;
152
153 #pragma mark - Soft Linking
154
155 // Soft-linking headers must be included last since they #define functions, constants, etc.
156 #import "CoreMediaSoftLink.h"
157
158 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
159
160 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
161
162 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayer)
163 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerItem)
164 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerItemVideoOutput)
165 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerLayer)
166 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVURLAsset)
167 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVAssetImageGenerator)
168 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVMetadataItem)
169 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVAssetCache)
170
171 SOFT_LINK_CLASS(CoreImage, CIContext)
172 SOFT_LINK_CLASS(CoreImage, CIImage)
173
174 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString*)
175 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString*)
176 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
177 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
178 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
179 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
180 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
181 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeMetadata, NSString *)
182 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
183 SOFT_LINK_POINTER(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
184 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
185 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
186 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
187 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
188 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
189
190 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
191 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetRequiresCustomURLLoadingKey, NSString *)
192 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetOutOfBandMIMETypeKey, NSString *)
193
194 #define AVPlayer initAVPlayer()
195 #define AVPlayerItem initAVPlayerItem()
196 #define AVPlayerLayer initAVPlayerLayer()
197 #define AVURLAsset initAVURLAsset()
198 #define AVAssetImageGenerator initAVAssetImageGenerator()
199 #define AVPlayerItemVideoOutput initAVPlayerItemVideoOutput()
200 #define AVMetadataItem initAVMetadataItem()
201 #define AVAssetCache initAVAssetCache()
202
203 #define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
204 #define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
205 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
206 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
207 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
208 #define AVMediaTypeVideo getAVMediaTypeVideo()
209 #define AVMediaTypeAudio getAVMediaTypeAudio()
210 #define AVMediaTypeMetadata getAVMediaTypeMetadata()
211 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
212 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
213 #define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
214 #define AVURLAssetRequiresCustomURLLoadingKey getAVURLAssetRequiresCustomURLLoadingKey()
215 #define AVURLAssetOutOfBandMIMETypeKey getAVURLAssetOutOfBandMIMETypeKey()
216 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
217 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
218 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
219 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
220 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
221
222 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
223 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
224 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
225
226 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
227 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
228 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
229
230 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
231 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
232 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
233 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
234
235 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
236 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
237 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
238 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
239 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
240 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
241 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
242 #endif
243
244 #if ENABLE(AVF_CAPTIONS)
245 SOFT_LINK_POINTER(AVFoundation, AVURLAssetCacheKey, NSString*)
246 SOFT_LINK_POINTER(AVFoundation, AVURLAssetHTTPCookiesKey, NSString*)
247 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
248 SOFT_LINK_POINTER(AVFoundation, AVURLAssetUsesNoPersistentCacheKey, NSString*)
249 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
250 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
251 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
252 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
253 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
254 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
255 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
256 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
257 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicIsAuxiliaryContent, NSString*)
258
259 #define AVURLAssetHTTPCookiesKey getAVURLAssetHTTPCookiesKey()
260 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
261 #define AVURLAssetCacheKey getAVURLAssetCacheKey()
262 #define AVURLAssetUsesNoPersistentCacheKey getAVURLAssetUsesNoPersistentCacheKey()
263 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
264 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
265 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
266 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
267 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
268 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
269 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
270 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
271 #define AVMediaCharacteristicIsAuxiliaryContent getAVMediaCharacteristicIsAuxiliaryContent()
272 #endif
273
274 #if ENABLE(DATACUE_VALUE)
275 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString*)
276 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMetadataKeySpaceISOUserData, NSString*)
277 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString*)
278 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceiTunes, NSString*)
279 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceID3, NSString*)
280
281 #define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
282 #define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
283 #define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
284 #define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
285 #define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
286 #endif
287
288 #if PLATFORM(IOS)
289 SOFT_LINK_POINTER(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
290 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
291 #endif
292
293 SOFT_LINK_FRAMEWORK(MediaToolbox)
294 SOFT_LINK_OPTIONAL(MediaToolbox, MTEnableCaption2015Behavior, Boolean, (), ())
295
296 using namespace WebCore;
297
// KVO context values passed when registering observers, so the observer's
// observeValueForKeyPath: can tell which kind of object a change came from
// (player item, item track, player, or AVPlayerLayer).
enum MediaPlayerAVFoundationObservationContext {
    MediaPlayerAVFoundationObservationContextPlayerItem,
    MediaPlayerAVFoundationObservationContextPlayerItemTrack,
    MediaPlayerAVFoundationObservationContextPlayer,
    MediaPlayerAVFoundationObservationContextAVPlayerLayer,
};
304
// Objective-C helper that receives AVFoundation notifications and KVO changes
// on behalf of a MediaPlayerPrivateAVFoundationObjC. The raw m_callback
// back-pointer is severed via -disconnect during teardown (see cancelLoad()).
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
#else
@interface WebCoreAVFMovieObserver : NSObject
#endif
{
    MediaPlayerPrivateAVFoundationObjC* m_callback;
    int m_delayCallbacks;
}
-(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
-(void)disconnect;
-(void)metadataLoaded;
-(void)didEnd:(NSNotification *)notification;
// NOTE(review): keyPath has no explicit type (implicitly id), and the context
// parameter is typed as the enum rather than NSObject's (void *) — appears
// intentional given the (void *) casts at registration sites, but confirm.
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;
#endif
@end
324
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Resource-loader delegate that forwards AVAssetResourceLoader requests to the
// owning MediaPlayerPrivateAVFoundationObjC. The raw m_callback pointer is
// cleared with -setCallback:0 in the player's destructor.
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    MediaPlayerPrivateAVFoundationObjC* m_callback;
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
@end
#endif
334
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Pull delegate for AVPlayerItemVideoOutput; relays new-frame availability to
// the owning player. m_callback is cleared with -setCallback:0 on teardown;
// the semaphore presumably signals frame arrival to a waiting thread — see
// the corresponding video-output code elsewhere in this file.
@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    MediaPlayerPrivateAVFoundationObjC *m_callback;
    dispatch_semaphore_t m_semaphore;
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
@end
#endif
346
347 namespace WebCore {
348
349 static NSArray *assetMetadataKeyNames();
350 static NSArray *itemKVOProperties();
351 static NSArray *assetTrackMetadataKeyNames();
352 static NSArray *playerKVOProperties();
353 static AVAssetTrack* firstEnabledTrack(NSArray* tracks);
354
355 #if !LOG_DISABLED
// Maps a bool to a printable C string for LOG() output.
static const char *boolString(bool val)
{
    if (val)
        return "true";
    return "false";
}
360 #endif
361
362 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Returns the serial dispatch queue shared by all WebCoreAVFLoaderDelegate
// instances, creating it exactly once in a thread-safe manner.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t loaderDelegateQueue;
    static dispatch_once_t createQueueOnce;
    dispatch_once(&createQueueOnce, ^{
        loaderDelegateQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return loaderDelegateQueue;
}
372 #endif
373
374 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Returns the serial dispatch queue on which WebCoreAVFPullDelegate callbacks
// are delivered; lazily created once via dispatch_once.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_queue_t pullDelegateQueue;
    static dispatch_once_t createQueueOnce;
    dispatch_once(&createQueueOnce, ^{
        pullDelegateQueue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return pullDelegateQueue;
}
384 #endif
385
386 #if USE(CFURLCONNECTION)
// Adapts WebCore's AuthenticationClient interface onto an
// NSURLAuthenticationChallenge: each AuthenticationClient callback is relayed
// to the challenge's sender. Used only with the CFURLConnection networking
// backend.
class WebCoreNSURLAuthenticationChallengeClient : public RefCounted<WebCoreNSURLAuthenticationChallengeClient>, public AuthenticationClient {
public:
    static RefPtr<WebCoreNSURLAuthenticationChallengeClient> create(NSURLAuthenticationChallenge *challenge)
    {
        return adoptRef(new WebCoreNSURLAuthenticationChallengeClient(challenge));
    }

    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::ref;
    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::deref;

private:
    WebCoreNSURLAuthenticationChallengeClient(NSURLAuthenticationChallenge *challenge)
        : m_challenge(challenge)
    {
        ASSERT(m_challenge);
    }

    // AuthenticationClient lifetime is tied to this object's refcount.
    void refAuthenticationClient() override { ref(); }
    void derefAuthenticationClient() override { deref(); }

    void receivedCredential(const AuthenticationChallenge&, const Credential& credential) override
    {
        [[m_challenge sender] useCredential:credential.nsCredential() forAuthenticationChallenge:m_challenge.get()];
    }

    void receivedRequestToContinueWithoutCredential(const AuthenticationChallenge&) override
    {
        [[m_challenge sender] continueWithoutCredentialForAuthenticationChallenge:m_challenge.get()];
    }

    void receivedCancellation(const AuthenticationChallenge&) override
    {
        [[m_challenge sender] cancelAuthenticationChallenge:m_challenge.get()];
    }

    // The following two sender methods are optional protocol members, so check
    // respondsToSelector: before messaging.
    void receivedRequestToPerformDefaultHandling(const AuthenticationChallenge&) override
    {
        if ([[m_challenge sender] respondsToSelector:@selector(performDefaultHandlingForAuthenticationChallenge:)])
            [[m_challenge sender] performDefaultHandlingForAuthenticationChallenge:m_challenge.get()];
    }

    void receivedChallengeRejection(const AuthenticationChallenge&) override
    {
        if ([[m_challenge sender] respondsToSelector:@selector(rejectProtectionSpaceAndContinueWithChallenge:)])
            [[m_challenge sender] rejectProtectionSpaceAndContinueWithChallenge:m_challenge.get()];
    }

    RetainPtr<NSURLAuthenticationChallenge> m_challenge;
};
436 #endif
437
// Registers this media engine with the MediaPlayer factory. Does nothing when
// AVFoundation is unavailable (e.g. the framework failed to soft-link).
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player); },
            getSupportedTypes, supportsType, originsInMediaCache, clearMediaCache, clearMediaCacheForOrigins, supportsKeySystem);
    // Kick off loading of the AVFoundation-supported MIME types.
    AVFoundationMIMETypeCache::singleton().loadTypes();
}
447
// Returns the AVAssetCache rooted at |path|; an empty path falls back to a
// "MediaCache" directory under the system temporary directory.
static AVAssetCacheType *assetCacheForPath(const String& path)
{
    NSURL *cacheDirectoryURL = path.isEmpty()
        ? [[NSURL fileURLWithPath:NSTemporaryDirectory()] URLByAppendingPathComponent:@"MediaCache" isDirectory:YES]
        : [NSURL fileURLWithPath:path isDirectory:YES];

    return [initAVAssetCache() assetCacheWithURL:cacheDirectoryURL];
}
459
// Collects the security origins that have entries in the media cache at
// |path|. Cache keys appear to be URL strings; keys that don't parse as valid
// URLs are skipped.
HashSet<RefPtr<SecurityOrigin>> MediaPlayerPrivateAVFoundationObjC::originsInMediaCache(const String& path)
{
    HashSet<RefPtr<SecurityOrigin>> origins;
    for (NSString *key in [assetCacheForPath(path) allKeys]) {
        URL keyAsURL = URL(URL(), key);
        if (keyAsURL.isValid())
            origins.add(SecurityOrigin::create(keyAsURL));
    }
    return origins;
}
470
// Converts an NSDate (seconds since the Unix epoch, as a double) into a
// std::chrono::system_clock::time_point. |date| must be non-null.
static std::chrono::system_clock::time_point toSystemClockTime(NSDate *date)
{
    ASSERT(date);
    using namespace std::chrono;

    auto secondsSinceEpoch = duration<double>(date.timeIntervalSince1970);
    return system_clock::time_point(duration_cast<system_clock::duration>(secondsSinceEpoch));
}
478
// Removes media-cache entries modified more recently than |modifiedSince|.
// Clears both the AVAssetCache's own entries and loose "CachedMedia-" files
// found in the cache directory. A zero/epoch |modifiedSince| wipes the whole
// cache directory.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCache(const String& path, std::chrono::system_clock::time_point modifiedSince)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::clearMediaCache()");
    
    AVAssetCacheType* assetCache = assetCacheForPath(path);
    
    // First pass: drop AVAssetCache entries newer than the cutoff.
    for (NSString *key in [assetCache allKeys]) {
        if (toSystemClockTime([assetCache lastModifiedDateOfEntryForKey:key]) > modifiedSince)
            [assetCache removeEntryForKey:key];
    }

    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSURL *baseURL = [assetCache URL];

    // "Clear everything": remove the entire cache directory and stop.
    if (modifiedSince <= std::chrono::system_clock::time_point { }) {
        [fileManager removeItemAtURL:baseURL error:nil];
        return;
    }
    
    NSArray *propertyKeys = @[NSURLNameKey, NSURLContentModificationDateKey, NSURLIsRegularFileKey];
    NSDirectoryEnumerator *enumerator = [fileManager enumeratorAtURL:baseURL includingPropertiesForKeys:
        propertyKeys options:NSDirectoryEnumerationSkipsSubdirectoryDescendants
        errorHandler:nil];
    
    // Second pass: collect matching regular "CachedMedia-" files newer than
    // the cutoff. Collect first, delete after, so we don't mutate the
    // directory while the enumerator is walking it.
    RetainPtr<NSMutableArray> urlsToDelete = adoptNS([[NSMutableArray alloc] init]);
    for (NSURL *fileURL : enumerator) {
        NSDictionary *fileAttributes = [fileURL resourceValuesForKeys:propertyKeys error:nil];
    
        if (![fileAttributes[NSURLNameKey] hasPrefix:@"CachedMedia-"])
            continue;
        
        if (![fileAttributes[NSURLIsRegularFileKey] boolValue])
            continue;
        
        // Keep files modified at or before the cutoff.
        if (toSystemClockTime(fileAttributes[NSURLContentModificationDateKey]) <= modifiedSince)
            continue;
        
        [urlsToDelete addObject:fileURL];
    }
    
    for (NSURL *fileURL in urlsToDelete.get())
        [fileManager removeItemAtURL:fileURL error:nil];
}
522
// Removes every media-cache entry at |path| whose key parses to a URL whose
// security origin is in |origins|.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCacheForOrigins(const String& path, const HashSet<RefPtr<SecurityOrigin>>& origins)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::clearMediaCacheForOrigins()");
    AVAssetCacheType* assetCache = assetCacheForPath(path);
    for (NSString *key in [assetCache allKeys]) {
        URL keyAsURL = URL(URL(), key);
        if (!keyAsURL.isValid())
            continue;
        if (origins.contains(SecurityOrigin::create(keyAsURL)))
            [assetCache removeEntryForKey:key];
    }
}
535
// Constructor: initializes cached state and creates the Objective-C helper
// objects (observer and delegates) that hold a raw back-pointer to |this|;
// those back-pointers are severed in cancelLoad()/the destructor.
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_weakPtrFactory(this)
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    , m_videoFullscreenLayerManager(VideoFullscreenLayerManager::create())
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
#endif
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
    , m_videoOutputSemaphore(nullptr)
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
#endif
    , m_currentTextTrack(0)
    , m_cachedRate(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_shouldBufferData(true)
    , m_cachedIsReadyForDisplay(false)
    , m_haveBeenAskedToCreateLayer(false)
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
}
570
// Destructor: sever every raw back-pointer held by the Objective-C helper
// objects before this object goes away, then tear down rendering and cancel
// any in-flight loading.
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    [m_loaderDelegate.get() setCallback:0];
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];

    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    [m_videoOutputDelegate setCallback:0];
    [m_videoOutput setDelegate:nil queue:0];
    // m_videoOutputSemaphore is a manually managed dispatch object.
    if (m_videoOutputSemaphore)
        dispatch_release(m_videoOutputSemaphore);
#endif

    if (m_videoLayer)
        destroyVideoLayer();

    cancelLoad();
}
592
// Cancels any in-progress load and tears down all AVFoundation objects:
// removes KVO/notification observers, releases the asset, player item, and
// player, then resets every cached property to its default. Safe to call
// repeatedly; each teardown step is guarded.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::cancelLoad(%p)", this);
    tearDownVideoRendering();

    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    // Unregister every KVO key we observe on the item (see itemKVOProperties())
    // before dropping it.
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        
        m_avPlayerItem = nil;
    }
    // Likewise for the player itself, including its periodic time observer.
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;

        for (NSString *keyName in playerKVOProperties())
            [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];

        [m_avPlayer replaceCurrentItemWithPlayerItem:nil];
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = MediaTime::zeroTime();

    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    if (m_provider) {
        m_provider->setPlayerItem(nullptr);
        m_provider->setAudioTrack(nullptr);
    }
#endif

    setIgnoreLoadStateChanges(false);
}
658
// True once createVideoLayer() has committed to creating a layer (the flag is
// set on the main thread, possibly before the AVPlayerLayer itself exists).
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_haveBeenAskedToCreateLayer;
}
663
// True when any non-layer (context) renderer exists: a video output when that
// support is compiled in, or the image-generator fallback.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    return m_videoOutput || m_imageGenerator;
#else
    return m_imageGenerator;
#endif
}
672
// Creates whichever context (non-layer) renderer this build supports: an
// AVPlayerItemVideoOutput when available, otherwise an AVAssetImageGenerator.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
681
// Lazily creates the AVAssetImageGenerator used as a fallback frame source.
// No-op until an asset exists or if a generator was already created.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p)", this);

    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    // Crop to the clean aperture, honor the track's preferred transform, and
    // allow no time tolerance so frames are captured at the exact requested time.
    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
}
698
// Tears down every context renderer: the pixel-buffer and OpenGL video
// outputs when compiled in, plus the image-generator fallback.
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
    destroyOpenGLVideoOutput();
#endif
    destroyImageGenerator();
}
707
// Releases the AVAssetImageGenerator fallback renderer, if any.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (!m_imageGenerator)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator(%p) - destroying  %p", this, m_imageGenerator.get());

    // Clear with nullptr (not 0) for consistency with how the other RetainPtr
    // members are reset (e.g. m_cachedTracks in cancelLoad()).
    m_imageGenerator = nullptr;
}
717
// Schedules creation of the AVPlayerLayer. The work runs on the main thread;
// a weak pointer guards against this object being destroyed before the
// scheduled task executes.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
        return;

    callOnMainThread([this, weakThis = createWeakPtr()] {
        if (!weakThis)
            return;

        // Re-check: state may have changed between scheduling and execution.
        if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
            return;
        m_haveBeenAskedToCreateLayer = true;

        if (!m_videoLayer)
            createAVPlayerLayer();

#if USE(VIDEOTOOLBOX)
        if (!m_videoOutput)
            createVideoOutput();
#endif

        // Let the client know frames will now be rendered via the layer.
        player()->client().mediaPlayerRenderingModeChanged(player());
    });
}
742
// Create the AVPlayerLayer, attach it to the player, and hand it to the
// fullscreen layer manager (or size it directly on platforms without one).
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
{
    if (!m_avPlayer)
        return;

    m_videoLayer = adoptNS([[AVPlayerLayer alloc] init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];

#ifndef NDEBUG
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
    // Observe readyForDisplay so the layer's first-frame readiness can be cached.
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
    IntSize defaultSize = snappedIntRect(player()->client().mediaPlayerContentBoxRect()).size();
    // Fixed: this log previously misreported its origin as createVideoLayer().
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer(%p) - returning %p", this, m_videoLayer.get());

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    m_videoFullscreenLayerManager->setVideoLayer(m_videoLayer.get(), defaultSize);

#if PLATFORM(IOS)
    [m_videoLayer setPIPModeEnabled:(player()->fullscreenMode() & MediaPlayer::VideoFullscreenModePictureInPicture)];
#endif
#else
    [m_videoLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
#endif
}
770
// Tear down the AVPlayerLayer. The KVO observer must be removed before the
// layer is released, and the layer detached from the player.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer(%p) - destroying %p", this, m_videoLayer.get());

    [m_videoLayer removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer setPlayer:nil];

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    m_videoFullscreenLayerManager->didDestroyVideoLayer();
#endif

    m_videoLayer = nil;
}
787
// Compute the stream's start date (ms since the epoch) as a MediaTime.
// The item's currentDate advances with playback, so the current playback
// offset is subtracted back out.
MediaTime MediaPlayerPrivateAVFoundationObjC::getStartDate() const
{
    double dateMilliseconds = [[m_avPlayerItem currentDate] timeIntervalSince1970] * 1000;

    // AVFoundation reports 0 when the media file has no start date; no live
    // stream was made during the epoch (1970), so 0 means "no date".
    if (!dateMilliseconds)
        return MediaTime::invalidTime();

    double playbackOffsetMilliseconds = CMTimeGetSeconds([m_avPlayerItem currentTime]) * 1000;

    // Round to absorb sub-second error introduced by the subtraction.
    return MediaTime::createWithDouble(round(dateMilliseconds - playbackOffsetMilliseconds));
}
802
// Whether a video frame is ready: when rendering through the layer, use the
// cached readyForDisplay state; otherwise whether a frame has been painted.
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    bool renderingToLayer = currentRenderingMode() == MediaRenderingToLayer;
    return renderingToLayer ? m_cachedIsReadyForDisplay : m_videoFrameHasDrawn;
}
810
811 #if ENABLE(AVF_CAPTIONS)
// Map a platform text-track kind to the AVFoundation media-characteristic
// array used when registering out-of-band tracks. Uses modern array literals
// throughout, matching the manual-selection branch.
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    static bool manualSelectionMode = MTEnableCaption2015BehaviorPtr() && MTEnableCaption2015BehaviorPtr()();
    if (manualSelectionMode)
        return @[ AVMediaCharacteristicIsAuxiliaryContent ];

    // FIXME: Match these to correct types:
    if (kind == PlatformTextTrack::Caption)
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];

    if (kind == PlatformTextTrack::Subtitle)
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];

    if (kind == PlatformTextTrack::Description)
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility ];

    if (kind == PlatformTextTrack::Forced)
        return @[ AVMediaCharacteristicContainsOnlyForcedSubtitles ];

    return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];
}
833     
// Forward out-of-band track mode changes to the shared handler.
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
838     
// Push the modes of the player's out-of-band track sources onto the matching
// InbandTextTrackPrivate objects. Tracks are matched by the unique ID stored
// in the AVMediaSelectionOption's outOfBandIdentifier.
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    
    for (auto& textTrack : m_textTracks) {
        // Only out-of-band tracks are driven by outOfBandTrackSources.
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;
        
        RefPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> currentOption = trackPrivate->mediaSelectionOption();
        
        for (auto& track : outOfBandTrackSources) {
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();
            
            if (![[currentOption.get() outOfBandIdentifier] isEqual: reinterpret_cast<const NSString*>(uniqueID.get())])
                continue;
            
            // Translate the platform mode; unknown values fall back to Hidden.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;
            
            textTrack->setMode(mode);
            break;
        }
    }
}
869 #endif
870
871
// Return the canonical form of |url| as computed by NSURLProtocol, so the
// asset URL matches what the URL loading system will ultimately request.
// Falls back to the unmodified URL whenever canonicalization is unavailable.
static NSURL *canonicalURL(const String& url)
{
    NSURL *originalURL = URL(ParsedURLString, url);
    if (url.isEmpty())
        return originalURL;

    auto request = adoptNS([[NSURLRequest alloc] initWithURL:originalURL]);
    if (!request)
        return originalURL;

    NSURLRequest *canonicalRequest = [NSURLProtocol canonicalRequestForRequest:request.get()];
    if (!canonicalRequest)
        return originalURL;

    return [canonicalRequest URL];
}
888
889 #if PLATFORM(IOS)
// Convert a WebCore Cookie into an NSHTTPCookie.
// cookie.expires is in milliseconds since the epoch; NSDate takes seconds.
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    // Build the properties dictionary directly from a literal instead of
    // populating an empty mutable dictionary via -setDictionary:.
    RetainPtr<NSMutableDictionary> properties = adoptNS([@{
        NSHTTPCookieName: cookie.name,
        NSHTTPCookieValue: cookie.value,
        NSHTTPCookieDomain: cookie.domain,
        NSHTTPCookiePath: cookie.path,
        NSHTTPCookieExpires: [NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)],
    } mutableCopy]);
    if (cookie.secure)
        [properties setObject:@YES forKey:NSHTTPCookieSecure];
    if (cookie.session)
        [properties setObject:@YES forKey:NSHTTPCookieDiscard];

    return [NSHTTPCookie cookieWithProperties:properties.get()];
}
907 #endif
908
// Build the AVURLAsset for |url|, assembling the creation-options dictionary
// (reference restrictions, HTTP headers, MIME hint, out-of-band tracks,
// cookies, caching policy) and wiring up the resource loader delegate.
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
{
    if (m_avAsset)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(%p) - url = %s", this, url.utf8().data());

    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);    

    // Forbid cross-origin local/remote references within the asset.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    // Pass Referer/User-Agent through to AVFoundation's network requests.
    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject:@YES forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

#if PLATFORM(IOS)
    // FIXME: rdar://problem/20354688
    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty() && AVURLAssetClientBundleIdentifierKey)
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];

    if (AVURLAssetRequiresCustomURLLoadingKey)
        [options setObject:@YES forKey:AVURLAssetRequiresCustomURLLoadingKey];
#endif

    // Only pass a MIME hint when we actually know the type (not inferred
    // from the file extension); include codecs when available.
    auto type = player()->contentMIMEType();
    if (AVURLAssetOutOfBandMIMETypeKey && !type.isEmpty() && !player()->contentMIMETypeWasInferredFromExtension()) {
        auto codecs = player()->contentTypeCodecs();
        if (!codecs.isEmpty()) {
            NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)type, (NSString *)codecs];
            [options setObject:typeString forKey:AVURLAssetOutOfBandMIMETypeKey];
        } else
            [options setObject:(NSString *)type forKey:AVURLAssetOutOfBandMIMETypeKey];
    }

#if ENABLE(AVF_CAPTIONS)
    // Register out-of-band text tracks with the asset so AVFoundation exposes
    // them as media selection options.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: reinterpret_cast<const NSString*>(label.get()),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: reinterpret_cast<const NSString*>(language.get()),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: reinterpret_cast<const NSString*>(uniqueID.get()),
                AVOutOfBandAlternateTrackSourceKey: reinterpret_cast<const NSString*>(url.get()),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS)
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS)
    // Hand the document's cookies for this URL to AVFoundation.
    Vector<Cookie> cookies;
    if (player()->getRawCookies(URL(ParsedURLString, url), cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    bool usePersistentCache = player()->client().mediaPlayerShouldUsePersistentCache();
    [options setObject:@(!usePersistentCache) forKey:AVURLAssetUsesNoPersistentCacheKey];
    
    if (usePersistentCache)
        [options setObject:assetCacheForPath(player()->client().mediaPlayerMediaCacheDirectory()) forKey:AVURLAssetCacheKey];

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([[AVURLAsset alloc] initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    [resourceLoader setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];

#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED > 101100
    // When available, route the resource loader's traffic through WebKit's
    // own NSURLSession so loads go through the page's resource loader.
    if (Settings::isAVFoundationNSURLSessionEnabled()
        && [resourceLoader respondsToSelector:@selector(setURLSession:)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegate)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegateQueue)]) {
        RefPtr<PlatformMediaResourceLoader> mediaResourceLoader = player()->createResourceLoader();
        if (mediaResourceLoader)
            resourceLoader.URLSession = (NSURLSession *)[[[WebCoreNSURLSession alloc] initWithResourceLoader:*mediaResourceLoader delegate:resourceLoader.URLSessionDataDelegate delegateQueue:resourceLoader.URLSessionDataDelegateQueue] autorelease];
    }
#endif

#endif

    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
1028
// Install |item| as the player's current item, hopping to the main queue when
// called from another thread.
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItemType *item)
{
    if (!m_avPlayer)
        return;

    if (pthread_main_np()) {
        [m_avPlayer replaceCurrentItemWithPlayerItem:item];
        return;
    }

    // Retain both objects so they stay alive until the async replacement runs.
    RetainPtr<AVPlayerType> protectedPlayer = m_avPlayer.get();
    RetainPtr<AVPlayerItemType> protectedItem = item;
    dispatch_async(dispatch_get_main_queue(), [protectedPlayer, protectedItem] {
        [protectedPlayer replaceCurrentItemWithPlayerItem:protectedItem.get()];
    });
}
1045
// Create the AVPlayer, register KVO observers, and re-apply any state
// (external playback target, mute) that was requested before the player
// existed. Attaches the current player item if one was already created.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayer(%p)", this);

    setDelayCallbacks(true);

    m_avPlayer = adoptNS([[AVPlayer alloc] init]);
    for (NSString *keyName in playerKVOProperties())
        [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Media selection (e.g. captions) is driven by WebKit, not AVFoundation.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET) && !PLATFORM(IOS)
    if (m_shouldPlayToPlaybackTarget) {
        // Clear m_shouldPlayToPlaybackTarget so setShouldPlayToPlaybackTarget()
        // doesn't return without doing anything.
        m_shouldPlayToPlaybackTarget = false;
        setShouldPlayToPlaybackTarget(true);
    }
#endif

#if PLATFORM(IOS) && !PLATFORM(IOS_SIMULATOR)
    setShouldDisableSleep(player()->shouldDisableSleep());
#endif

    if (m_muted) {
        // Clear m_muted so setMuted doesn't return without doing anything.
        m_muted = false;
        [m_avPlayer.get() setMuted:m_muted];
    }

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
1094
// Create the AVPlayerItem for the current asset, register for its
// end-of-playback notification and KVO properties, and attach the legible
// (caption) output and Web Audio provider where supported.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem(%p)", this);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([[AVPlayerItem alloc] initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Receive WebVTT cues via the legible output; WebKit renders them itself,
    // so player-side rendering is suppressed.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([[AVPlayerItemLegibleOutput alloc] initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    if (m_provider) {
        m_provider->setPlayerItem(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }
#endif

    setDelayCallbacks(false);
}
1140
// Kick off an asynchronous load of the asset's "playable" and "tracks" keys;
// posts AssetPlayabilityKnown on the main thread when done. Runs at most once.
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::checkPlayability(%p)", this);
    auto weakThis = createWeakPtr();

    // weakThis guards against this object being destroyed before the
    // completion handler fires.
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObjects:@"playable", @"tracks", nil] completionHandler:^{
        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
1157
// Asynchronously load the asset's metadata keys, then each track's metadata
// keys. A dispatch group joins all the per-track loads; once everything has
// completed, metadataLoaded is delivered on the main thread.
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);

    OSObjectPtr<dispatch_group_t> metadataLoadingGroup = adoptOSObject(dispatch_group_create());
    // Enter once for the asset-level load; each track load enters again below.
    dispatch_group_enter(metadataLoadingGroup.get());
    auto weakThis = createWeakPtr();
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup.get());
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup.get());
                    }];
                }
            }
            // Balances the initial enter above.
            dispatch_group_leave(metadataLoadingGroup.get());
        });
    }];

    // Fires once every enter has been balanced by a leave.
    dispatch_group_notify(metadataLoadingGroup.get(), dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });
    });
}
1187
// Translate the cached AVPlayerItem state into the cross-platform ItemStatus,
// checking the most specific conditions first.
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    switch (m_cachedItemStatus) {
    case AVPlayerItemStatusUnknown:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    case AVPlayerItemStatusFailed:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    default:
        break;
    }

    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1206
// Expose the underlying AVPlayer to clients via the PlatformMedia wrapper.
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformMedia(%p)", this);
    PlatformMedia media;
    media.type = PlatformMedia::AVFoundationMediaPlayerType;
    media.media.avfMediaPlayer = m_avPlayer.get();
    return media;
}
1215
// Return the layer used for inline rendering, or null until createVideoLayer()
// has been asked to create one.
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    return m_haveBeenAskedToCreateLayer ? m_videoFullscreenLayerManager->videoInlineLayer() : nullptr;
#else
    return m_haveBeenAskedToCreateLayer ? m_videoLayer.get() : nullptr;
#endif
}
1224
1225 #if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
// Hand the fullscreen layer to the layer manager, reparent the text-track
// representation layer into it, and refresh external-playback state.
// The completion handler always runs, even when the layer is unchanged.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer, WTF::Function<void()>&& completionHandler)
{
    if (m_videoFullscreenLayerManager->videoFullscreenLayer() == videoFullscreenLayer) {
        completionHandler();
        return;
    }

    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler));

    if (m_videoFullscreenLayerManager->videoFullscreenLayer() && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayerManager->videoFullscreenLayer() addSublayer:m_textTrackRepresentationLayer.get()];
    }

    updateDisableExternalPlayback();
}
1242
// Propagate the new fullscreen frame and keep the caption layer in sync.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
    syncTextTrackBounds();
}
1248
// Record the requested gravity and, when a layer exists, translate it into
// the corresponding AVPlayerLayer gravity constant.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;

    if (!m_videoLayer)
        return;

    NSString *layerGravity = AVLayerVideoGravityResizeAspect;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        layerGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        layerGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        layerGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
        break;
    }

    // The AV gravity strings are constants, so pointer equality suffices.
    if ([m_videoLayer videoGravity] == layerGravity)
        return;

    [m_videoLayer setVideoGravity:layerGravity];
    syncTextTrackBounds();
}
1272
// On iOS, reflect picture-in-picture mode onto the layer and refresh
// external-playback state; other platforms ignore the mode here.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenMode(MediaPlayer::VideoFullscreenMode mode)
{
#if PLATFORM(IOS)
    [m_videoLayer setPIPModeEnabled:(mode & MediaPlayer::VideoFullscreenModePictureInPicture)];
    updateDisableExternalPlayback();
#else
    UNUSED_PARAM(mode);
#endif
}
1282
1283 #endif // PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
1284
1285 #if PLATFORM(IOS)
// Return the cached timed metadata; RetainPtr::get() yields nil when empty,
// which matches the previous explicit null check.
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    return m_currentMetaData.get();
}
1292
// Serialize the player item's access log using the encoding the log reports.
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemAccessLog *itemAccessLog = [m_avPlayerItem.get() accessLog];
    auto logText = adoptNS([[NSString alloc] initWithData:[itemAccessLog extendedLogData] encoding:[itemAccessLog extendedLogDataStringEncoding]]);

    return logText.get();
}
1303
// Serialize the player item's error log using the encoding the log reports.
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *itemErrorLog = [m_avPlayerItem.get() errorLog];
    auto logText = adoptNS([[NSString alloc] initWithData:[itemErrorLog extendedLogData] encoding:[itemErrorLog extendedLogDataStringEncoding]]);

    return logText.get();
}
1314 #endif
1315
// Show or hide the video layer without implicit CA animations.
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    if (m_videoLayer)
        [m_videoLayer.get() setHidden:!isVisible];
    [CATransaction commit];
}
1324     
// Start playback at the rate the page requested.
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPlay(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    // Query the requested rate once so the cached value and the value handed
    // to AVFoundation cannot diverge between the two calls.
    double rate = requestedRate();
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1336
// Pause playback by driving the player's rate to zero; the cached rate is
// updated in lockstep. Callbacks are delayed around the resulting KVO churn.
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPause(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = 0;
    [m_avPlayer setRate:0];
    setDelayCallbacks(false);
}
1348
// Report the media duration as a MediaTime.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will
    // fetch the answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();

    // Prefer the player item's duration once it is ready to play; some assets
    // never report a duration themselves.
    CMTime rawDuration;
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        rawDuration = [m_avPlayerItem duration];
    else
        rawDuration = [m_avAsset duration];

    if (CMTIME_IS_NUMERIC(rawDuration))
        return toMediaTime(rawDuration);

    // Map an indefinite duration (e.g. a live stream) to positive infinity.
    if (CMTIME_IS_INDEFINITE(rawDuration))
        return MediaTime::positiveInfiniteTime();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %s", this, toString(MediaTime::invalidTime()).utf8().data());
    return MediaTime::invalidTime();
}
1373
// Current playback position; zero when unavailable, never negative.
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime playerItemTime = [m_avPlayerItem currentTime];
    if (!CMTIME_IS_NUMERIC(playerItemTime))
        return MediaTime::zeroTime();

    // Clamp negative values reported by the item to zero.
    return std::max(toMediaTime(playerItemTime), MediaTime::zeroTime());
}
1385
// Seek the player item to |time| within the given tolerances, notifying
// seekCompleted() on the main thread when AVFoundation finishes.
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // Partial metadata cues would straddle the seek; flush them first.
    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = toCMTime(time);
    CMTime cmBefore = toCMTime(negativeTolerance);
    CMTime cmAfter = toCMTime(positiveTolerance);

    // [AVPlayerItem seekToTime] will throw an exception if toleranceBefore is negative.
    if (CMTimeCompare(cmBefore, kCMTimeZero) < 0)
        cmBefore = kCMTimeZero;
    
    auto weakThis = createWeakPtr();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::seekToTime(%p) - calling seekToTime", this);

    // weakThis guards against this object being destroyed before the
    // completion handler runs.
    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1418
// Apply the requested volume to the player. No-op on iOS.
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS)
    UNUSED_PARAM(volume);
    return;
#else
    if (!m_avPlayer)
        return;

    [m_avPlayer.get() setVolume:volume];
#endif
}
1431
// Record and apply the muted state. The flag is stored even when no player
// exists yet; createAVPlayer() re-applies it once the player is created.
void MediaPlayerPrivateAVFoundationObjC::setMuted(bool muted)
{
    if (m_muted == muted)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setMuted(%p) - set to %s", this, boolString(muted));

    m_muted = muted;

    if (!m_avPlayer)
        return;

    [m_avPlayer.get() setMuted:m_muted];
}
1446
// Caption visibility is handled elsewhere in this implementation; this
// override only logs the request.
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - set to %s", this, boolString(closedCaptionsVisible));
}
1456
// Apply a new playback rate, caching it so rate() reflects the request
// immediately. Callbacks are delayed around the KVO churn setRate: triggers.
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    setDelayCallbacks(true);
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1464
// Current playback rate, or 0 until metadata is available.
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    return metaDataAvailable() ? m_cachedRate : 0;
}
1472
// Last-modified time of the seekable ranges; only available on macOS 10.13+
// and iOS 11+ SDKs, otherwise reported as 0.
double MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesLastModifiedTime() const
{
#if (PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101300) || (PLATFORM(IOS) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000)
    return [m_avPlayerItem seekableTimeRangesLastModifiedTime];
#else
    return 0;
#endif
}
1481
// Live-stream update interval; only available on macOS 10.13+ and iOS 11+
// SDKs, otherwise reported as 0.
double MediaPlayerPrivateAVFoundationObjC::liveUpdateInterval() const
{
#if (PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101300) || (PLATFORM(IOS) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000)
    return [m_avPlayerItem liveUpdateInterval];
#else
    return 0;
#endif
}
1490
void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
{
    // Nothing to configure until a player item exists.
    if (!m_avPlayerItem)
        return;

    // Spectral keeps pitch constant across rate changes; Varispeed lets it vary.
    NSString *algorithm = preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed;
    [m_avPlayerItem setAudioTimePitchAlgorithm:algorithm];
}
1496
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    // Translate the cached AVPlayerItem loaded ranges into WebCore time ranges.
    auto bufferedRanges = std::make_unique<PlatformTimeRanges>();

    // Without a player item nothing has been buffered yet; return the empty set.
    if (!m_avPlayerItem)
        return bufferedRanges;

    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;
        bufferedRanges->add(toMediaTime(range.start), toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return bufferedRanges;
}
1511
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    // With no cached seekable ranges the earliest seekable time defaults to zero.
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    // Scan for the smallest start among the valid, non-empty ranges.
    MediaTime earliestStart = MediaTime::positiveInfiniteTime();
    bool foundValidRange = false;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundValidRange = true;
        MediaTime rangeStart = toMediaTime(range.start);
        if (rangeStart < earliestStart)
            earliestStart = rangeStart;
    }
    // If every range was invalid or empty, fall back to zero.
    return foundValidRange ? earliestStart : MediaTime::zeroTime();
}
1531
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    // Lazily refresh the cache if it has been invalidated.
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    // The answer is the largest valid range end; defaults to zero when nothing
    // is seekable.
    MediaTime latestEnd;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        MediaTime rangeEnd = toMediaTime(CMTimeRangeGetEnd(range));
        if (latestEnd < rangeEnd)
            latestEnd = rangeEnd;
    }
    return latestEnd;
}
1549
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    // No cached loaded ranges means nothing has been buffered yet.
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    // The answer is the largest valid range end among the loaded ranges.
    MediaTime latestLoaded;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        MediaTime rangeEnd = toMediaTime(CMTimeRangeGetEnd(range));
        if (latestLoaded < rangeEnd)
            latestLoaded = rangeEnd;
    }

    return latestLoaded;
}
1568
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    // Sum the per-track sample data lengths once; the result is cached in the
    // mutable member and reused on subsequent calls.
    if (!m_cachedTotalBytes) {
        for (AVPlayerItemTrack *track in m_cachedTracks.get())
            m_cachedTotalBytes += [[track assetTrack] totalSampleDataLength];
    }

    return m_cachedTotalBytes;
}
1582
// Stores the platform asset (an AVAsset/AVURLAsset, held as id) for later
// inspection by assetStatus() and friends.
void MediaPlayerPrivateAVFoundationObjC::setAsset(RetainPtr<id> asset)
{
    m_avAsset = asset;
}
1587
// Computes the aggregate load status of the asset by inspecting each metadata
// key we asked AVFoundation to load, then folding in hardware-decode
// playability of the tracks. Returns the most conservative applicable status.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    // Any single key that is still loading / failed / cancelled determines the
    // overall status.
    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];
#if !LOG_DISABLED
        if (error)
            LOG(Media, "MediaPlayerPrivateAVFoundation::assetStatus - statusOfValueForKey failed for %s, error = %s", [keyName UTF8String], [[error localizedDescription] UTF8String]);
#endif

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading; // At least one key is not loaded yet.
        
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    // Skip the hardware-decode check entirely when the client does not require it.
    if (!player()->shouldCheckHardwareSupport())
        m_tracksArePlayable = true;

    // Evaluate once whether every track meets the hardware decode
    // requirements; the answer is cached in m_tracksArePlayable (mutable).
    if (!m_tracksArePlayable) {
        m_tracksArePlayable = true;
        for (AVAssetTrack *track in [m_avAsset tracks]) {
            if (!assetTrackMeetsHardwareDecodeRequirements(track, player()->mediaContentTypesRequiringHardwareSupport())) {
                m_tracksArePlayable = false;
                break;
            }
        }
    }

    // Fully loaded; "playable" only if AVFoundation agrees and the tracks pass
    // the hardware check.
    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue] && m_tracksArePlayable.value())
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1629
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    if (!m_avAsset)
        return 0;

    // Re-query the "playable" key solely to retrieve any load error that was
    // recorded for it. Messaging a nil error object returns 0 below.
    NSError *error = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&error];
    return [error code];
}
1639
// Paints the current video frame into |context| at |rect|, preferring the
// AVPlayerItemVideoOutput path when a frame is available and falling back to
// the AVAssetImageGenerator path otherwise.
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    // Delay callbacks so notifications fired while painting cannot re-enter us;
    // the exception block guards against ObjC exceptions from AVFoundation.
    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    m_videoFrameHasDrawn = true;
}
1660
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    // Already compositing through a platform layer; software painting would be
    // redundant, so ignore the request.
    if (currentRenderingMode() == MediaRenderingToLayer)
        return;

    // paint() is best effort, so only paint if we already have an image generator or video output available.
    if (!hasContextRenderer())
        return;

    paintCurrentFrameInContext(context, rect);
}
1676
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext& context, const FloatRect& rect)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(%p)", this);

    RetainPtr<CGImageRef> frameImage = createImageForTimeInRect(currentTime(), rect);
    if (!frameImage)
        return;

    GraphicsContextStateSaver stateSaver(context);
    // CGContextDrawImage draws with a bottom-left origin, so move to the
    // bottom of the destination rect and flip the y-axis before drawing.
    context.translate(rect.x(), rect.y() + rect.height());
    context.scale(FloatSize(1.0f, -1.0f));
    context.setImageInterpolationQuality(InterpolationLow);
    IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context.platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), frameImage.get());
}
1691
// Snapshots the frame at |time| using AVAssetImageGenerator, constrained to
// |rect|'s size, and converts the result to sRGB. May return null if the
// generator cannot produce an image (errors are deliberately ignored).
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    // Cap the generated image at the paint rect's size to avoid decoding at
    // full asset resolution. Timescale 600 is the conventional media timescale.
    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), sRGBColorSpaceRef()));

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return image;
}
1713
// Reports the set of MIME types AVFoundation can play, as cached by the
// singleton MIME-type cache.
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& supportedTypes)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::getSupportedTypes");
    supportedTypes = AVFoundationMIMETypeCache::singleton().types();
}
// NOTE(review): the original body was missing its closing brace, which would
// fold the following #if block into this function; restored above.
1720 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// True for the FairPlay Streaming key systems and for W3C Clear Key.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringASCIICase(keySystem, "com.apple.fps")
        || equalIgnoringASCIICase(keySystem, "com.apple.fps.1_0")
        || equalIgnoringASCIICase(keySystem, "org.w3c.clearkey");
}
1727 #endif
1728
// Decides whether this engine can play content described by |parameters|.
// MSE and MediaStream are handled by other engines; otherwise the container
// type must be in the AVFoundation MIME cache, and the full type string
// (with codecs) is checked against AVURLAsset.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(MEDIA_SOURCE)
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif
#if ENABLE(MEDIA_STREAM)
    if (parameters.isMediaStream)
        return MediaPlayer::IsNotSupported;
#endif

    auto containerType = parameters.type.containerType();
    if (isUnsupportedMIMEType(containerType))
        return MediaPlayer::IsNotSupported;

    if (!staticMIMETypeList().contains(containerType) && !AVFoundationMIMETypeCache::singleton().types().contains(containerType))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.type.codecs().isEmpty())
        return MediaPlayer::MayBeSupported;

    if (!contentTypeMeetsHardwareDecodeRequirements(parameters.type, parameters.contentTypesRequiringHardwareSupport))
        return MediaPlayer::IsNotSupported;

    // Ask AVFoundation about the fully qualified type string, codecs included.
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)containerType, (NSString *)parameters.type.parameter(ContentType::codecsParameter())];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1758
// Returns whether this engine supports the given encryption key system,
// optionally constrained to |mimeType| (empty means "any container").
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (!keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        // (Fix: the original returned MediaPlayer::IsNotSupported — an enum —
        // from this bool function; it only worked because that enumerator is 0.)
        if (equalIgnoringASCIICase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringASCIICase(mimeType, "application/x-mpegurl"))
            return false;

        if (!keySystemIsSupported(keySystem))
            return false;

        if (!mimeType.isEmpty() && isUnsupportedMIMEType(mimeType))
            return false;

        // A non-empty MIME type must be one this engine can actually play.
        if (!mimeType.isEmpty() && !staticMIMETypeList().contains(mimeType) && !AVFoundationMIMETypeCache::singleton().types().contains(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1784
1785 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1786 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// Completes an AVAssetResourceLoadingRequest using the bytes of a decryption
// key: fills in the content-information request (total length, range support),
// answers the byte range asked for by the data request, then finishes loading.
static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
{
    if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
        [infoRequest setContentLength:keyData->byteLength()];
        [infoRequest setByteRangeAccessSupported:YES];
    }

    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
        long long start = [dataRequest currentOffset];
        // Clamp the requested end to the available key data.
        long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);

        // Reject ranges that fall outside the key data entirely.
        if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
            [request finishLoadingWithError:nil];
            return;
        }

        ASSERT(start <= std::numeric_limits<int>::max());
        ASSERT(end <= std::numeric_limits<int>::max());
        RefPtr<ArrayBuffer> requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
        RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
        [dataRequest respondWithData:nsData.get()];
    }

    [request finishLoading];
}
1812 #endif
1813
// AVAssetResourceLoaderDelegate entry point. Handles key requests for the
// legacy EME schemes ("skd" for FairPlay, "clearkey"), otherwise hands the
// request to a WebCoreAVFResourceLoader. Returns true when we will service the
// request asynchronously, false when AVFoundation should not wait.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (scheme == "skd") {
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        unsigned byteLength = initDataBuffer->byteLength();
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer.copyRef(), 0, byteLength);
        initDataView->set<uint32_t>(0, keyURISize, true);

        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer.copyRef(), 4, keyURI.length());
        // NOTE(review): sizeof(unsigned char) is 1, so this length is just
        // keyURI.length() (UTF-16 code units) — confirm this matches the
        // intended layout rather than keyURISize.
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

        RefPtr<Uint8Array> initData = Uint8Array::create(WTFMove(initDataBuffer), 0, byteLength);
        // If no client wants the key, let AVFoundation fail the request.
        if (!player()->keyNeeded(initData.get()))
            return false;

        m_keyURIToRequestMap.set(keyURI, avRequest);
        return true;
    }

    if (scheme == "clearkey") {
        String keyID = [[[avRequest request] URL] resourceSpecifier];
        StringView keyIDView(keyID);
        CString utf8EncodedKeyId = UTF8Encoding().encode(keyIDView, URLEncodedEntitiesForUnencodables);

        RefPtr<Uint8Array> initData = Uint8Array::create(utf8EncodedKeyId.length());
        initData->setRange(reinterpret_cast<const JSC::Uint8Adaptor::Type*>(utf8EncodedKeyId.data()), utf8EncodedKeyId.length(), 0);

        // Serve the key synchronously when it is already cached.
        auto keyData = player()->cachedKeyForKeyId(keyID);
        if (keyData) {
            fulfillRequestWithKeyData(avRequest, keyData.get());
            return false;
        }

        if (!player()->keyNeeded(initData.get()))
            return false;

        m_keyURIToRequestMap.set(keyID, avRequest);
        return true;
    }
#endif

    // All other schemes are loaded through WebCore's resource loading machinery.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1867
// Wraps the platform NSURLAuthenticationChallenge in a WebCore
// AuthenticationChallenge (going through CFNetwork when USE(CFURLCONNECTION))
// and asks the MediaPlayer client whether to wait for a response.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForResponseToAuthenticationChallenge(NSURLAuthenticationChallenge* nsChallenge)
{
#if USE(CFURLCONNECTION)
    RefPtr<WebCoreNSURLAuthenticationChallengeClient> client = WebCoreNSURLAuthenticationChallengeClient::create(nsChallenge);
    RetainPtr<CFURLAuthChallengeRef> cfChallenge = adoptCF([nsChallenge _createCFAuthChallenge]);
    AuthenticationChallenge challenge(cfChallenge.get(), client.get());
#else
    AuthenticationChallenge challenge(nsChallenge);
#endif

    return player()->shouldWaitForResponseToAuthenticationChallenge(challenge);
}
1880
// AVFoundation cancelled a resource loading request: stop the WebCore-side
// loader servicing it, if any. (The map entry itself is removed separately in
// didStopLoadingRequest().)
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    // Fix: dropped the unused local 'scheme' the original computed and ignored.
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest))
        resourceLoader->stopLoading();
}
1890
// AVFoundation is done with this loading request; drop our bookkeeping entry
// (and with it the reference to the WebCoreAVFResourceLoader).
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
1895 #endif
1896
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    // Both soft-linked frameworks must be present for this engine to work.
    if (!AVFoundationLibrary())
        return false;
    return isCoreMediaFrameworkAvailable();
}
1901
// Intended to map a wall-clock-ish time value onto the media timeline; both
// branches currently return the input unchanged (see FIXME below), but the
// structure is kept for when the radar is resolved.
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1910
// How long (in seconds) a cached currentTime() may be reused before being
// re-queried. 0 disables caching on iOS and macOS 10.10+ — presumably because
// querying the player is cheap there; confirm before changing.
double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 1010
    return 0;
#else
    return 5;
#endif
}
1919
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayerManager->videoFullscreenLayer())
        return;
#endif

    NSString* videoGravity = shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize;

    // Apply the change inside a transaction with implicit actions disabled so
    // the gravity switch is not animated.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    [m_videoLayer.get() setVideoGravity:videoGravity];
    [CATransaction commit];
}
1938
// Returns the first track in |tracks| whose isEnabled flag is set, or nil.
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    NSUInteger firstEnabledIndex = [tracks indexOfObjectPassingTest:^(id obj, NSUInteger, BOOL *) {
        return [static_cast<AVAssetTrack*>(obj) isEnabled];
    }];
    return firstEnabledIndex == NSNotFound ? nil : [tracks objectAtIndex:firstEnabledIndex];
}
1948
// Recomputes cached media characteristics (hasVideo/hasAudio/captions,
// presentation size, track lists) whenever the platform track collection
// changes, then fires the appropriate change notifications.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the previous primary audio language so we can detect a change at the end.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    // Batch characteristic-change notifications until the end of this method.
    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif
        auto size = firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize();
        // For videos with rotation tag set, the transformation above might return a CGSize instance with negative width or height.
        // See https://bugs.webkit.org/show_bug.cgi?id=172648.
        if (size.width() < 0)
            size.setWidth(-size.width());
        if (size.height() < 0)
            size.setHeight(-size.height());
        presentationSizeDidChange(size);
    } else {
        // With a player item, derive the characteristics from its enabled tracks.
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // Media selection options can contribute audio/video even when no
        // AVPlayerItemTrack reports the corresponding media type.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
#endif
#endif

        // Always says we have video if the AVPlayerLayer is ready for display to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Prefer the media-selection view of captions when it is available.
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

    // Fall back to legacy CEA-608/708 processing where legible output is unavailable.
#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    LOG(Media, "MediaPlayerPrivateAVFoundation:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s",
        this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio source provider pointed at the current audible track.
    if (m_provider)
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
#endif

    setDelayCharacteristicsChangedNotification(false);
}
2060
2061 #if ENABLE(VIDEO_TRACK)
2062
// Diffs the AVPlayerItemTracks of a given media type against the existing
// wrapper items in |oldItems|: creates wrappers for newly appeared platform
// tracks, drops wrappers whose platform track disappeared, updates |oldItems|
// in place, and notifies the MediaPlayer of each removal and addition.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // The current platform tracks of the requested media type.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    // The platform tracks our existing wrappers refer to.
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    // Partition the wrappers into surviving and removed, then append fresh
    // wrappers for the added platform tracks.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify after |oldItems| is consistent, removals before additions.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(*removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(*addedItem);
}
2106
2107 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2108
// Media-selection-group counterpart of the overload above: refreshes the
// group's options for the given characteristics, diffs them against the
// wrapper items in |oldItems|, updates |oldItems| in place, and notifies the
// MediaPlayer of each removal and addition.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, const Vector<String>& characteristics, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions(characteristics);

    // Collect the group's current options, skipping ones with no backing
    // AVMediaSelectionOption.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOptionType* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        newSelectionOptions.add(option);
    }

    // The options our existing wrapper items refer to.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    // Partition the wrappers into surviving and removed; an item with no
    // option at all is treated as removed.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (!oldItem->mediaSelectionOption())
            removedItems.append(oldItem);
        else if (removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify after |oldItems| is consistent, removals before additions.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(*removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(*addedItem);
}
2168
2169 #endif
2170
// Synchronizes m_audioTracks with the platform's current audio tracks,
// preferring the audible media selection group when one exists and falling
// back to diffing AVPlayerItemTracks otherwise.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    Vector<String> characteristics = player()->preferredAudioCharacteristics();
    // Lazily create the audible selection group the first time it is available.
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, characteristics);
    }

    // Note: the 'else' below pairs with this 'if' across the #endif, selecting
    // the AVPlayerItemTrack-based fallback when no group exists.
    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, characteristics, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
    else
#endif
        determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

    // Refresh cached per-track properties after the diff.
    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateAudioTracks(%p) - audio track count was %lu, is %lu", this, count, m_audioTracks.size());
#endif
}
2197
// Synchronizes m_videoTracks with the platform's current video tracks:
// first by diffing AVPlayerItemTracks, then folding in the visual media
// selection group's options when available.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create the visual selection group the first time it is available.
    if (!m_visualGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, Vector<String>());
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, Vector<String>(), &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
#endif

    // Refresh cached per-track properties after the diff.
    // Fix: the original iterated m_audioTracks here — a copy/paste error from
    // updateAudioTracks() that left video track properties stale.
    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateVideoTracks(%p) - video track count was %lu, is %lu", this, count, m_videoTracks.size());
#endif
}
2223
// A platform-drawn text track representation is only needed while video is
// hosted in a fullscreen layer; otherwise captions render in the page.
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    if (m_videoFullscreenLayerManager->videoFullscreenLayer())
        return true;
#endif
    return false;
}
2232
// Keeps the text track representation layer's frame in sync with the video
// while in fullscreen. No-op when there is no fullscreen layer or no
// representation layer to position.
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    if (!m_videoFullscreenLayerManager->videoFullscreenLayer() || !m_textTrackRepresentationLayer)
        return;

    FloatRect videoFullscreenFrame = m_videoFullscreenLayerManager->videoFullscreenFrame();
    // Prefer the AVPlayerLayer's actual video rect; fall back to the whole
    // fullscreen frame when no video layer exists.
    CGRect textFrame = m_videoLayer ? [m_videoLayer videoRect] : CGRectMake(0, 0, videoFullscreenFrame.width(), videoFullscreenFrame.height());
    [m_textTrackRepresentationLayer setFrame:textFrame];
#endif
}
2244
// Installs (or removes, when representation is null) the platform layer used
// to render text tracks inside the video fullscreen layer.
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    PlatformLayer* representationLayer = representation ? representation->platformLayer() : nil;
    // Same layer as before: just make sure its bounds are up to date.
    if (representationLayer == m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        return;
    }

    if (m_textTrackRepresentationLayer)
        [m_textTrackRepresentationLayer removeFromSuperlayer];

    m_textTrackRepresentationLayer = representationLayer;

    // Only attach when both the fullscreen layer and a new representation exist.
    if (m_videoFullscreenLayerManager->videoFullscreenLayer() && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayerManager->videoFullscreenLayer() addSublayer:m_textTrackRepresentationLayer.get()];
    }

#else
    UNUSED_PARAM(representation);
#endif
}
2268
2269 #endif // ENABLE(VIDEO_TRACK)
2270
2271 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
2272
// Lazily creates the Web Audio source provider for the current player item,
// wired to the first enabled audible asset track.
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    if (!m_provider) {
        m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }
    return m_provider.get();
}
2281
2282 #endif
2283
// Propagates the KVO-cached presentation size to the natural size, once an
// asset exists.
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (m_avAsset)
        setNaturalSize(m_cachedPresentationSize);
}
2291     
// True when the asset's resolved URL shares scheme/host/port with the URL the
// page requested. Answers false until the resolved URL has actually loaded,
// since we cannot prove anything about the origin before then.
bool MediaPlayerPrivateAVFoundationObjC::hasSingleSecurityOrigin() const
{
    if (!m_avAsset || [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] != AVKeyValueStatusLoaded)
        return false;

    auto requestedOrigin = SecurityOrigin::createFromString(assetURL());
    auto resolvedOrigin = SecurityOrigin::create(resolvedURL());
    return resolvedOrigin->isSameSchemeHostPort(requestedOrigin.get());
}
2301
// Reports whether all media loads passed CORS checks. This is only knowable
// when loading goes through our WebCoreNSURLSession; in every other case we
// must answer false conservatively.
bool MediaPlayerPrivateAVFoundationObjC::didPassCORSAccessCheck() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED > 101100
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    if (!Settings::isAVFoundationNSURLSessionEnabled()
        || ![resourceLoader respondsToSelector:@selector(URLSession)])
        return false;

    // The loader's URLSession is only trustworthy if it is actually our session subclass.
    WebCoreNSURLSession *session = (WebCoreNSURLSession *)resourceLoader.URLSession;
    if ([session isKindOfClass:[WebCoreNSURLSession class]])
        return session.didPassCORSAccessChecks;
#endif
    return false;
}
2316
2317 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
2318
// Creates the AVPlayerItemVideoOutput used for software painting and attaches
// it to the current player item. No-op when there is no item or the output
// already exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // With VideoToolbox, pixel format conversion is handled later by the conformer.
    NSDictionary* attributes = nil;
#else
    // Modern literal syntax; matches the attribute dictionaries built in
    // updateLastImage() and createOpenGLVideoOutput().
    NSDictionary* attributes = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#endif
    m_videoOutput = adoptNS([[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p) - returning %p", this, m_videoOutput.get());
}
2340
// Detaches the software-painting video output from the player item (if any)
// and releases it.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput(%p) - destroying  %p", this, m_videoOutput.get());

    // Use nullptr rather than 0 for pointer-clearing, consistent with the rest of the file.
    m_videoOutput = nullptr;
}
2352
// Pulls the pixel buffer for the item's current time from the video output,
// creating the output on demand. Returns null when no new buffer is available
// for that time.
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::createPixelBuffer()
{
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return nullptr; // nullptr rather than 0, consistent with the rest of the file.

    return adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
}
2366
// True when a video frame can be painted right now: either a frame was already
// captured (m_lastImage), or the video output reports a new pixel buffer for
// the item's current time. Creates the output lazily.
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;

    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    return [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]];
}
2380
// Refreshes m_lastImage from the newest available pixel buffer, converting it
// with a lazily created PixelBufferConformerCV. Keeps the previous image when
// no new buffer exists.
void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
{
    RetainPtr<CVPixelBufferRef> pixelBuffer = createPixelBuffer();

    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
    // for the requested time has already been retrieved. In this case, the last valid image (if any)
    // should be displayed.
    if (!pixelBuffer)
        return;

    if (!m_pixelBufferConformer) {
#if USE(VIDEOTOOLBOX)
        // Conform to 32BGRA here since the output was created without a pixel format.
        NSDictionary *attributes = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#else
        NSDictionary *attributes = nil;
#endif
        m_pixelBufferConformer = std::make_unique<PixelBufferConformerCV>((CFDictionaryRef)attributes);
    }

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    m_lastImage = m_pixelBufferConformer->createImageFromPixelBuffer(pixelBuffer.get());

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateLastImage(%p) - creating buffer took %.4f", this, narrowPrecisionToFloat(duration));
#endif
}
2411
// Paints the current video frame into the graphics context, honoring the
// track's preferred transform (e.g. rotation). Blocks briefly for the first
// frame if the output exists but has not produced one yet.
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext& context, const FloatRect& outputRect)
{
    if (m_videoOutput && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();

    updateLastImage();

    if (!m_lastImage)
        return;

    // The preferred transform lives on the track, so painting needs at least one enabled visual track.
    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(%p)", this);

    GraphicsContextStateSaver stateSaver(context);
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
    // Map the destination rect through the inverse transform so that, after the
    // CTM below applies the transform, the image lands in outputRect.
    FloatRect transformedOutputRect = videoTransform.inverse().value_or(AffineTransform()).mapRect(outputRect);

    context.concatCTM(videoTransform);
    context.drawNativeImage(m_lastImage.get(), imageRect.size(), transformedOutputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();

}
2442
// Creates a second AVPlayerItemVideoOutput whose buffers are IOSurface-backed
// and FBO-compatible, used for GPU texture upload (WebGL). No-op when there is
// no item or the output already exists.
void MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_openGLVideoOutput)
        return;

#if PLATFORM(IOS)
    NSDictionary* attributes = @{(NSString *)kCVPixelBufferIOSurfaceOpenGLESFBOCompatibilityKey: @YES};
#else
    NSDictionary* attributes = @{(NSString *)kCVPixelBufferIOSurfaceOpenGLFBOCompatibilityKey: @YES};
#endif
    m_openGLVideoOutput = adoptNS([[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:attributes]);
    ASSERT(m_openGLVideoOutput);

    [m_avPlayerItem.get() addOutput:m_openGLVideoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput(%p) - returning %p", this, m_openGLVideoOutput.get());
}
2462
// Detaches the OpenGL-compatible video output from the player item (if any)
// and releases it.
void MediaPlayerPrivateAVFoundationObjC::destroyOpenGLVideoOutput()
{
    if (!m_openGLVideoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_openGLVideoOutput.get()];
    // Fix: log the output actually being destroyed; this previously logged
    // m_videoOutput (copy/paste from destroyVideoOutput()).
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyOpenGLVideoOutput(%p) - destroying  %p", this, m_openGLVideoOutput.get());

    m_openGLVideoOutput = nullptr;
}
2474
// Refreshes m_lastOpenGLImage from the OpenGL output if a new pixel buffer is
// available for the current host time; otherwise keeps the previous buffer.
void MediaPlayerPrivateAVFoundationObjC::updateLastOpenGLImage()
{
    if (!m_openGLVideoOutput)
        return;

    CMTime currentTime = [m_openGLVideoOutput itemTimeForHostTime:CACurrentMediaTime()];
    if (![m_openGLVideoOutput hasNewPixelBufferForItemTime:currentTime])
        return;

    m_lastOpenGLImage = adoptCF([m_openGLVideoOutput copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
}
2486
// Copies the current video frame into the caller's GL texture (WebGL
// texImage2D fast path). Converts the latest CVPixelBuffer to a GL texture via
// the texture cache, then hands off to VideoTextureCopierCV, forwarding the
// premultiplyAlpha and flipY flags so the copier can honor them.
// Returns false when no frame or texture could be produced.
bool MediaPlayerPrivateAVFoundationObjC::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    ASSERT(context);

    if (!m_openGLVideoOutput)
        createOpenGLVideoOutput();

    updateLastOpenGLImage();

    if (!m_lastOpenGLImage)
        return false;

    size_t width = CVPixelBufferGetWidth(m_lastOpenGLImage.get());
    size_t height = CVPixelBufferGetHeight(m_lastOpenGLImage.get());

    // The texture cache is tied to the GL context; created lazily on first use.
    if (!m_textureCache) {
        m_textureCache = TextureCacheCV::create(*context);
        if (!m_textureCache)
            return false;
    }

    RetainPtr<CVOpenGLTextureRef> videoTexture = m_textureCache->textureFromImage(m_lastOpenGLImage.get(), outputTarget, level, internalFormat, format, type);

    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierCV>(*context);

    return m_videoTextureCopier->copyVideoTextureToPlatformTexture(videoTexture.get(), width, height, outputTexture, outputTarget, level, internalFormat, format, type, premultiplyAlpha, flipY);
}
2515
// Returns the most recent frame as a native image, refreshing it first.
// May be null if no frame has ever been produced.
NativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage;
}
2521
// Blocks the calling thread (up to 1 second) until the video output signals,
// via outputMediaDataWillChange(), that new media data is available.
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for 1 second.
    long result = dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(0, 1 * NSEC_PER_SEC));

    // dispatch_semaphore_wait returns non-zero on timeout.
    if (result)
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange(%p) timed out", this);
}
2535
// Delegate callback (on the pull-delegate queue): wakes any thread blocked in
// waitForVideoOutputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutputType *)
{
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
2540
2541 #endif
2542
2543 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
2544
// Removes and returns the pending loading request for the given key URI;
// returns a null RetainPtr when no request is pending for it.
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
2549
// Fulfills every outstanding resource-loading request for which the player now
// has cached key data, then drops those requests from the pending map.
void MediaPlayerPrivateAVFoundationObjC::keyAdded()
{
    // Collect fulfilled key IDs first; removing while iterating the map is unsafe.
    Vector<String> keysToRemove;

    for (auto& pair : m_keyURIToRequestMap) {
        auto keyData = player()->cachedKeyForKeyId(pair.key);
        if (!keyData)
            continue;

        fulfillRequestWithKeyData(pair.value.get(), keyData.get());
        keysToRemove.append(pair.key);
    }

    for (auto& keyId : keysToRemove)
        m_keyURIToRequestMap.remove(keyId);
}
2569
// Clears the weak reference to the CDM session; the caller must pass the
// session this player currently holds.
void MediaPlayerPrivateAVFoundationObjC::removeSession(CDMSession& session)
{
    ASSERT_UNUSED(session, &session == m_session);
    m_session = nullptr;
}
2575
// Creates a legacy-EME CDM session for a supported key system, keeping a weak
// reference so the player can talk to it later. Returns null for unsupported
// key systems.
std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem, CDMSessionClient* client)
{
    if (!keySystemIsSupported(keySystem))
        return nullptr;
    auto session = std::make_unique<CDMSessionAVFoundationObjC>(this, client);
    m_session = session->createWeakPtr();
    return WTFMove(session);
}
2584
// Forwards an HDCP (output protection) failure to the CDM session as an error.
void MediaPlayerPrivateAVFoundationObjC::outputObscuredDueToInsufficientExternalProtectionChanged(bool newValue)
{
    if (m_session && newValue)
        m_session->playerDidReceiveError([NSError errorWithDomain:@"com.apple.WebKit" code:'HDCP' userInfo:nil]);
}
2590
2591 #endif
2592
2593 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
2594
// Reconciles legacy (pre-media-selection) closed caption tracks against the
// cached player item tracks: keeps tracks that still exist, creates new ones,
// and reports the rest as removed.
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    // Start by assuming every existing text track was removed; matched tracks
    // are taken back out of this list below.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        // Iterate backwards so remove() does not disturb un-visited indices.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(m_textTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;
        
        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2629
2630 #endif
2631
// Returns the asset's audible tracks, or nil until the asset's "tracks" key
// has finished loading (querying tracks earlier can block).
NSArray* MediaPlayerPrivateAVFoundationObjC::safeAVAssetTracksForAudibleMedia()
{
    if (!m_avAsset || [m_avAsset.get() statusOfValueForKey:@"tracks" error:NULL] != AVKeyValueStatusLoaded)
        return nil;

    return [m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible];
}
2642
2643 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2644
// True once the asset's media-selection metadata has loaded; querying
// selection groups before then is unsafe.
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    return m_avAsset
        && [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2655
// The legible (caption/subtitle) selection group, or nil before the asset's
// selection metadata has loaded.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    return hasLoadedMediaSelectionGroups()
        ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible]
        : nil;
}
2663
// The audible selection group, or nil before the asset's selection metadata
// has loaded.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    return hasLoadedMediaSelectionGroups()
        ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible]
        : nil;
}
2671
// The visual selection group, or nil before the asset's selection metadata
// has loaded.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    return hasLoadedMediaSelectionGroups()
        ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual]
        : nil;
}
2679
// Reconciles the legible media-selection options against m_textTracks,
// creating tracks for new options (in-band and, when enabled, out-of-band)
// and reporting disappeared ones as removed.
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Assume every existing track was removed; matched tracks are taken back out below.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        // Iterate backwards so remove() does not disturb un-visited indices.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy closed-caption tracks are matched elsewhere, not by selection option.
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2735
// Lazily creates the single in-band metadata text track (HLS timed metadata)
// and registers it with the MediaPlayer.
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    if (m_metadataTrack)
        return;

    m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
    m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
    player()->addTextTrack(*m_metadataTrack);
}
2745
// Forwards a batch of cue data from the legible output to the currently
// selected text track. Dropped when no track is selected.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    ASSERT(time >= MediaTime::zeroTime());

    if (!m_currentTextTrack)
        return;

    m_currentTextTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), reinterpret_cast<CFArrayRef>(nativeSamples), time);
}
2755
// Discards accumulated cue state on the current text track (e.g. after a seek).
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::flushCues(%p)", this);

    if (!m_currentTextTrack)
        return;
    
    m_currentTextTrack->resetCueValues();
}
2765
2766 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2767
// Selects the given text track in AVFoundation: legacy closed captions go
// through the deprecated closed-caption toggle, everything else through the
// legible media-selection group. Passing null deselects all text tracks.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");
        
    m_currentTextTrack = track;

    if (track) {
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
// setClosedCaptionDisplayEnabled: is deprecated but is the only way to drive legacy CC tracks.
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#pragma clang diagnostic pop
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
        // Deselect: clear the legible selection and turn off legacy closed captions.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
#pragma clang diagnostic pop
    }

}
2802
// Determines (and caches) the language of the primary audio track: first from
// the currently selected audible media-selection option, then falling back to
// a sole ungrouped audio track. Returns the empty string when ambiguous.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    // Cached after the first successful computation.
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
#pragma clang diagnostic pop
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        m_languageOfPrimaryAudioTrack = emptyString();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - %lu audio tracks, returning emptyString()", this, static_cast<unsigned long>(tracks ? [tracks count] : 0));
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

#if !LOG_DISABLED
    if (m_languageOfPrimaryAudioTrack == emptyString())
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
    else
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
#endif

    return m_languageOfPrimaryAudioTrack;
}
2847
2848 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// Whether playback is currently routed to a wireless target. On Mac this
// depends on the playback target kind (AVFoundation external playback vs. an
// explicitly routed target); on iOS it is the player's externalPlaybackActive.
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    bool wirelessTarget = false;

#if !PLATFORM(IOS)
    if (m_playbackTarget) {
        if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation)
            wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
        else
            wirelessTarget = m_shouldPlayToPlaybackTarget && m_playbackTarget->hasActiveRoute();
    }
#else
    wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
#endif

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless(%p) - returning %s", this, boolString(wirelessTarget));

    return wirelessTarget;
}
2868
// Maps the platform's external-device type to MediaPlayer's target type.
// Mac has no device-type query here, so it always reports AirPlay.
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

#if PLATFORM(IOS)
    switch (wkExernalDeviceTypeForPlayer(m_avPlayer.get())) {
    case wkExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case wkExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case wkExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;

#else
    return MediaPlayer::TargetTypeAirPlay;
#endif
}
2891
// Human-readable name of the wireless target: the playback target's device
// name on Mac, the external device display name on iOS.
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();

    String wirelessTargetName;
#if !PLATFORM(IOS)
    if (m_playbackTarget)
        wirelessTargetName = m_playbackTarget->deviceName();
#else
    wirelessTargetName = wkExernalDeviceDisplayNameForPlayer(m_avPlayer.get());
#endif
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName(%p) - returning %s", this, wirelessTargetName.utf8().data());

    return wirelessTargetName;
}
2908
// Whether external (wireless) video playback is disabled. When a player
// exists, refreshes the cached flag from allowsExternalPlayback first;
// note the return value is the negation of "allows".
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;

    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled(%p) - returning %s", this, boolString(!m_allowsWirelessVideoPlayback));

    return !m_allowsWirelessVideoPlayback;
}
2919
// Enables/disables external (wireless) video playback, updating the cached
// flag even when no player exists yet so the state sticks.
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(%p) - %s", this, boolString(disabled));
    m_allowsWirelessVideoPlayback = !disabled;
    if (!m_avPlayer)
        return;

    // Defer KVO-driven callbacks while mutating player state.
    setDelayCallbacks(true);
    [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
    setDelayCallbacks(false);
}
2931
2932 #if !PLATFORM(IOS)
2933
// Adopts a new wireless playback target. Only AVFoundation targets carry an
// AVOutputContext; other target kinds clear it. A target with no active route
// also cancels any pending play-to-target state.
void MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
{
    m_playbackTarget = WTFMove(target);

    m_outputContext = m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation ? toMediaPlaybackTargetMac(m_playbackTarget.get())->outputContext() : nullptr;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(%p) - target = %p, device name = %s", this, m_outputContext.get(), m_playbackTarget->deviceName().utf8().data());

    if (!m_playbackTarget->hasActiveRoute())
        setShouldPlayToPlaybackTarget(false);
}
2945
// Starts or stops routing playback to the current target. For AVFoundation
// targets this sets/clears the player's outputContext; for mock targets it
// just notifies that the wireless state changed (used by tests).
void MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(bool shouldPlay)
{
    if (m_shouldPlayToPlaybackTarget == shouldPlay)
        return;

    m_shouldPlayToPlaybackTarget = shouldPlay;

    if (!m_playbackTarget)
        return;

    if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation) {
        AVOutputContext *newContext = shouldPlay ? m_outputContext.get() : nil;

        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(%p) - target = %p, shouldPlay = %s", this, newContext, boolString(shouldPlay));

        if (!m_avPlayer)
            return;

        RetainPtr<AVOutputContext> currentContext = m_avPlayer.get().outputContext;
        // Avoid touching the player when the context would not actually change.
        if ((!newContext && !currentContext.get()) || [currentContext.get() isEqual:newContext])
            return;

        setDelayCallbacks(true);
        m_avPlayer.get().outputContext = newContext;
        setDelayCallbacks(false);

        return;
    }

    ASSERT(m_playbackTarget->targetType() == MediaPlaybackTarget::Mock);

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(%p) - target = {Mock}, shouldPlay = %s", this, boolString(shouldPlay));

    // Mock target: asynchronously notify on the main thread, guarding against
    // this object being destroyed before the notification runs.
    setDelayCallbacks(true);
    auto weakThis = createWeakPtr();
    scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis] {
        if (!weakThis)
            return;
        weakThis->playbackTargetIsWirelessDidChange();
    }));
    setDelayCallbacks(false);
}
2988
2989 #endif // !PLATFORM(IOS)
2990
// iOS only: external playback while an external screen is active is used
// only in standard (element) fullscreen mode.
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
#if PLATFORM(IOS)
    if (!m_avPlayer)
        return;

    [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:player()->fullscreenMode() & MediaPlayer::VideoFullscreenModeStandard];
#endif
}
3000
3001 #endif
3002
// KVO handler for AVPlayerItem "status": cache the new value and re-derive
// the player's ready/network states.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;

    updateStates();
}
3009
// Prior-notification for "playbackLikelyToKeepUp": count the pending change so
// updateStates() is deferred until the matching DidChange arrives.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    m_pendingStatusChanges++;
}
3014
// Completion of the WillChange above: cache the value, and only update states
// once all outstanding will/did pairs have balanced out.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
3023
// Prior-notification for "playbackBufferEmpty": defer updateStates() until the
// matching DidChange arrives.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    m_pendingStatusChanges++;
}
3028
// Completion for "playbackBufferEmpty": cache, then update states once all
// outstanding will/did pairs have balanced out.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
3037
// Prior-notification for "playbackBufferFull": defer updateStates() until the
// matching DidChange arrives.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    m_pendingStatusChanges++;
}
3042
// Completion for "playbackBufferFull": cache, then update states once all
// outstanding will/did pairs have balanced out.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
3051
// KVO handler for AVPlayerItem "seekableTimeRanges": cache the NSArray of
// CMTimeRange values and propagate the change.
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray> seekableRanges)
{
    m_cachedSeekableRanges = seekableRanges;

    seekableTimeRangesChanged();
    updateStates();
}
3059
// KVO handler for AVPlayerItem "loadedTimeRanges": cache and propagate.
void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray> loadedRanges)
{
    m_cachedLoadedRanges = loadedRanges;

    loadedTimeRangesChanged();
    updateStates();
}
3067
// KVO handler for the player layer's "readyForDisplay". Becoming displayable
// before any video track was reported implies the track set changed.
void MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange(bool isReady)
{
    m_cachedIsReadyForDisplay = isReady;
    if (!hasVideo() && isReady)
        tracksChanged();
    updateStates();
}
3075
// KVO handler for an AVPlayerItemTrack's "enabled" flag; the new value itself
// is unused because tracksChanged() re-examines all tracks.
void MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange(bool)
{
    tracksChanged();
    updateStates();
}
3081
// Controls whether the media engine should continue buffering data. Buffering
// is stopped by detaching the AVPlayerItem from the player, and resumed by
// re-attaching the cached item.
void MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(bool shouldBuffer)
{
    // Fixed: the LOG previously said "shouldBufferData"; use the actual method
    // name, consistent with the other LOG statements in this file.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(%p) - %s", this, boolString(shouldBuffer));
    if (m_shouldBufferData == shouldBuffer)
        return;

    m_shouldBufferData = shouldBuffer;

    if (!m_avPlayer)
        return;

    // Attaching a nil item stops AVFoundation from loading further media data.
    setAVPlayerItem(shouldBuffer ? m_avPlayerItem.get() : nil);
}
3095
3096 #if ENABLE(DATACUE_VALUE)
3097
// Maps an AVFoundation metadata key space to the WebVTT-style metadata cue
// type string exposed to the page. Returns the empty atom for unknown spaces.
static const AtomicString& metadataType(NSString *avMetadataKeySpace)
{
    static NeverDestroyed<const AtomicString> quickTimeUserData("com.apple.quicktime.udta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> isoUserData("org.mp4ra", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> quickTimeMetadata("com.apple.quicktime.mdta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> iTunesMetadata("com.apple.itunes", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> id3Metadata("org.id3", AtomicString::ConstructFromLiteral);

    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeUserData])
        return quickTimeUserData;
    // AVMetadataKeySpaceISOUserData may be null on older SDKs (weak-linked constant);
    // guard before comparing.
    if (AVMetadataKeySpaceISOUserData && [avMetadataKeySpace isEqualToString:AVMetadataKeySpaceISOUserData])
        return isoUserData;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeMetadata])
        return quickTimeMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceiTunes])
        return iTunesMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceID3])
        return id3Metadata;

    return emptyAtom();
}
3119
3120 #endif
3121
// KVO handler for AVPlayerItem "timedMetadata". Converts each AVMetadataItem
// into a data cue on the in-band metadata track, closing out any still-open
// cues first. mediaTime is the item time at which the metadata was observed.
void MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(RetainPtr<NSArray> metadata, const MediaTime& mediaTime)
{
    // KVO can deliver NSNull for a cleared value; normalize that to nil.
    m_currentMetaData = metadata && ![metadata isKindOfClass:[NSNull class]] ? metadata : nil;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(%p) - adding %i cues at time %s", this, m_currentMetaData ? static_cast<int>([m_currentMetaData.get() count]) : 0, toString(mediaTime).utf8().data());

#if ENABLE(DATACUE_VALUE)
    // Cues arriving mid-seek would carry stale times; drop them.
    if (seeking())
        return;

    if (!m_metadataTrack)
        processMetadataTrack();

    // An empty/cleared batch means all previously-open cues end now.
    if (!metadata || [metadata isKindOfClass:[NSNull class]]) {
        m_metadataTrack->updatePendingCueEndTimes(mediaTime);
        return;
    }

    // Set the duration of all incomplete cues before adding new ones.
    MediaTime earliestStartTime = MediaTime::positiveInfiniteTime();
    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = std::max(toMediaTime(item.time), MediaTime::zeroTime());
        if (start < earliestStartTime)
            earliestStartTime = start;
    }
    m_metadataTrack->updatePendingCueEndTimes(earliestStartTime);

    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        // Clamp negative item times to zero; end is open-ended unless the item
        // carries a valid duration.
        MediaTime start = std::max(toMediaTime(item.time), MediaTime::zeroTime());
        MediaTime end = MediaTime::positiveInfiniteTime();
        if (CMTIME_IS_VALID(item.duration))
            end = start + toMediaTime(item.duration);

        AtomicString type = nullAtom();
        if (item.keySpace)
            type = metadataType(item.keySpace);

        m_metadataTrack->addDataCue(start, end, SerializedPlatformRepresentationMac::create(item), type);
    }
#endif
}
3163
// KVO handler for AVPlayerItem "tracks". Rebuilds m_cachedTracks, filtering out
// streaming tracks that are represented by a media selection group (those are
// surfaced through the selection-group path instead), and re-registers the
// "enabled" observer on each retained track.
void MediaPlayerPrivateAVFoundationObjC::tracksDidChange(RetainPtr<NSArray> tracks)
{
    // Unregister observers from the previous track set before replacing it.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];

    NSArray *assetTracks = [m_avAsset tracks];

    m_cachedTracks = [tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id obj, NSUInteger, BOOL*) {
        AVAssetTrack* assetTrack = [obj assetTrack];

        // Tracks backed by the asset itself are always kept.
        if ([assetTracks containsObject:assetTrack])
            return YES;

        // Track is a streaming track. Omit if it belongs to a valid AVMediaSelectionGroup.
        if (!hasLoadedMediaSelectionGroups())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicAudible] && safeMediaSelectionGroupForAudibleMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicVisual] && safeMediaSelectionGroupForVisualMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible] && safeMediaSelectionGroupForLegibleMedia())
            return NO;

        return YES;
    }]];

    // Re-register the "enabled" observer on the new track set.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track addObserver:m_objcObserver.get() forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayerItemTrack];

    // Total byte size depends on the track set; invalidate the cached value.
    m_cachedTotalBytes = 0;

    tracksChanged();
    updateStates();
}
3201
// KVO handler for AVPlayerItem "hasEnabledAudio": cache and propagate.
void MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange(bool hasEnabledAudio)
{
    m_cachedHasEnabledAudio = hasEnabledAudio;

    tracksChanged();
    updateStates();
}
3209
// KVO handler for AVPlayerItem "presentationSize": cache and propagate.
void MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange(FloatSize size)
{
    m_cachedPresentationSize = size;

    sizeChanged();
    updateStates();
}
3217
// KVO handler for AVPlayerItem "duration": cache the new duration and drop the
// previously computed value so it is recalculated on next query.
void MediaPlayerPrivateAVFoundationObjC::durationDidChange(const MediaTime& duration)
{
    m_cachedDuration = duration;

    invalidateCachedDuration();
}
3224
// KVO handler for AVPlayer "rate": cache and propagate.
void MediaPlayerPrivateAVFoundationObjC::rateDidChange(double rate)
{
    m_cachedRate = rate;

    updateStates();
    rateChanged();
}
3232
3233 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
3234
// Forwards wireless-target state changes (externalPlaybackActive /
// allowsExternalPlayback KVO) to the shared base-class notification.
void MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange()
{
    playbackTargetIsWirelessChanged();
}
3239
3240 #endif
3241
// KVO handler for AVPlayerItem "canPlayFastForward": cache only; queried lazily.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange(bool newValue)
{
    m_cachedCanPlayFastForward = newValue;
}
3246
// KVO handler for AVPlayerItem "canPlayFastReverse": cache only; queried lazily.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange(bool newValue)
{
    m_cachedCanPlayFastReverse = newValue;
}
3251
// Returns the asset's resolved URL (post-redirect) once AVFoundation has loaded
// it; falls back to the base-class URL while the key is still loading or absent.
URL MediaPlayerPrivateAVFoundationObjC::resolvedURL() const
{
    if (!m_avAsset || [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] != AVKeyValueStatusLoaded)
        return MediaPlayerPrivateAVFoundation::resolvedURL();

    return URL([m_avAsset resolvedURL]);
}
3259
// iOS device only: toggles AVPlayer SPI that prevents the device from sleeping
// during video playback. Not available in the simulator or on other platforms.
void MediaPlayerPrivateAVFoundationObjC::setShouldDisableSleep(bool flag)
{
#if PLATFORM(IOS) && !PLATFORM(IOS_SIMULATOR)
    [m_avPlayer _setPreventsSleepDuringVideoPlayback:flag];
#else
    UNUSED_PARAM(flag);
#endif
}
3268
// AVAsset keys loaded asynchronously before the asset is considered ready.
// The array is allocated once and intentionally never released (process-lifetime
// singleton).
NSArray* assetMetadataKeyNames()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:
        @"duration",
        @"naturalSize",
        @"preferredTransform",
        @"preferredVolume",
        @"preferredRate",
        @"playable",
        @"resolvedURL",
        @"tracks",
        @"availableMediaCharacteristicsWithMediaSelectionOptions",
    nil];
    return keys;
}
3284
// AVPlayerItem key paths observed via KVO; each has a matching handler in
// observeValueForKeyPath:. Allocated once, intentionally never released.
NSArray* itemKVOProperties()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:
        @"presentationSize",
        @"status",
        @"asset",
        @"tracks",
        @"seekableTimeRanges",
        @"loadedTimeRanges",
        @"playbackLikelyToKeepUp",
        @"playbackBufferFull",
        @"playbackBufferEmpty",
        @"duration",
        @"hasEnabledAudio",
        @"timedMetadata",
        @"canPlayFastForward",
        @"canPlayFastReverse",
    nil];
    return keys;
}
3305
// AVAssetTrack keys loaded asynchronously per track. Allocated once,
// intentionally never released.
NSArray* assetTrackMetadataKeyNames()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"totalSampleDataLength", @"mediaType", @"enabled", @"preferredTransform", @"naturalSize", nil];
    return keys;
}
3311
// AVPlayer key paths observed via KVO; feature-conditional entries match the
// ENABLE() guards in observeValueForKeyPath:. Allocated once, never released.
NSArray* playerKVOProperties()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:
        @"rate",
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
        @"externalPlaybackActive",
        @"allowsExternalPlayback",
#endif
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
        @"outputObscuredDueToInsufficientExternalProtection",
#endif
    nil];
    return keys;
}
3326 } // namespace WebCore
3327
3328 @implementation WebCoreAVFMovieObserver
3329
// Designated initializer: stores a raw back-pointer to the owning player.
// The pointer is cleared via -disconnect before the player is destroyed.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    if (!(self = [super init]))
        return nil;

    m_callback = callback;
    return self;
}
3338
// Severs the link to the owning player: cancels any pending performSelector
// requests targeting this observer and nulls the back-pointer so late
// notifications become no-ops.
- (void)disconnect
{
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    m_callback = nil;
}
3344
// Forwards asset-metadata-loaded to the player on the main thread, unless the
// observer has already been disconnected.
- (void)metadataLoaded
{
    if (m_callback)
        m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
}
3351
// AVPlayerItemDidPlayToEndTime notification handler; forwards to the player on
// the main thread, unless the observer has already been disconnected.
- (void)didEnd:(NSNotification *)unusedNotification
{
    UNUSED_PARAM(unusedNotification);
    if (m_callback)
        m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
}
3359
// Central KVO dispatcher. Maps each (context, keyPath, will/did-change) triple
// to the matching MediaPlayerPrivateAVFoundationObjC handler, then marshals the
// call to the main thread guarded by a WeakPtr to the player.
- (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context
{
    UNUSED_PARAM(object);
    id newValue = [change valueForKey:NSKeyValueChangeNewKey];

    // Observer already disconnected; drop the notification.
    if (!m_callback)
        return;

    // Prior notifications (NSKeyValueObservingOptionPrior) fire before the value
    // changes; used to balance the will/did pending-status-change counters.
    bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];

#if !LOG_DISABLED
    if (willChange)
        LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - will change, keyPath = %s", self, [keyPath UTF8String]);
    else {
        RetainPtr<NSString> valueString = adoptNS([[NSString alloc] initWithFormat:@"%@", newValue]);
        LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - did change, keyPath = %s, value = %s", self, [keyPath UTF8String], [valueString.get() UTF8String]);
    }
#endif

    WTF::Function<void ()> function;

    if (context == MediaPlayerAVFoundationObservationContextAVPlayerLayer) {
        if ([keyPath isEqualToString:@"readyForDisplay"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange, m_callback, [newValue boolValue]);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItemTrack) {
        if ([keyPath isEqualToString:@"enabled"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange, m_callback, [newValue boolValue]);
    }

    // Prior notifications for the buffering-related item properties.
    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && willChange) {
        if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange, m_callback);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && !willChange) {
        // A value changed for an AVPlayerItem
        if ([keyPath isEqualToString:@"status"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange, m_callback, [newValue intValue]);
        else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"asset"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::setAsset, m_callback, RetainPtr<id>(newValue));
        else if ([keyPath isEqualToString:@"loadedTimeRanges"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"seekableTimeRanges"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"tracks"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::tracksDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"hasEnabledAudio"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"presentationSize"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange, m_callback, FloatSize([newValue sizeValue]));
        else if ([keyPath isEqualToString:@"duration"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::durationDidChange, m_callback, toMediaTime([newValue CMTimeValue]));
        else if ([keyPath isEqualToString:@"timedMetadata"] && newValue) {
            // Capture the item's current time alongside the metadata; clamp
            // non-numeric/negative times to zero.
            MediaTime now;
            CMTime itemTime = [(AVPlayerItemType *)object currentTime];
            if (CMTIME_IS_NUMERIC(itemTime))
                now = std::max(toMediaTime(itemTime), MediaTime::zeroTime());
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::metadataDidArrive, m_callback, RetainPtr<NSArray>(newValue), now);
        } else if ([keyPath isEqualToString:@"canPlayFastReverse"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"canPlayFastForward"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange, m_callback, [newValue boolValue]);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayer && !willChange) {
        // A value changed for an AVPlayer.
        if ([keyPath isEqualToString:@"rate"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::rateDidChange, m_callback, [newValue doubleValue]);
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
        else if ([keyPath isEqualToString:@"externalPlaybackActive"] || [keyPath isEqualToString:@"allowsExternalPlayback"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange, m_callback);
#endif
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
        else if ([keyPath isEqualToString:@"outputObscuredDueToInsufficientExternalProtection"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::outputObscuredDueToInsufficientExternalProtectionChanged, m_callback, [newValue boolValue]);
#endif
    }

    if (!function)
        return;

    auto weakThis = m_callback->createWeakPtr();
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis, function = WTFMove(function)]{
        // weakThis and function both refer to the same MediaPlayerPrivateAVFoundationObjC instance. If the WeakPtr has
        // been cleared, the underlying object has been destroyed, and it is unsafe to call function().
        if (!weakThis)
            return;
        function();
    }));
}
3462
3463 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
3464
3465 - (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime
3466 {
3467     UNUSED_PARAM(output);
3468     UNUSED_PARAM(nativeSamples);
3469
3470     if (!m_callback)
3471         return;
3472
3473     RetainPtr<WebCoreAVFMovieObserver> protectedSelf = self;
3474     RetainPtr<NSArray> protectedStrings = strings;
3475     RetainPtr<NSArray> protectedNativeSamples = nativeSamples;
3476     callOnMainThread([protectedSelf = WTFMove(protectedSelf), protectedStrings = WTFMove(protectedStrings), protectedNativeSamples = WTFMove(protectedNativeSamples), itemTime] {
3477         MediaPlayerPrivateAVFoundationObjC* callback = protectedSelf->m_callback;
3478         if (!callback)