Rename ENABLE_ENCRYPTED_MEDIA_V2 to ENABLE_LEGACY_ENCRYPTED_MEDIA
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27
28 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
29 #import "MediaPlayerPrivateAVFoundationObjC.h"
30
31 #import "AVFoundationMIMETypeCache.h"
32 #import "AVFoundationSPI.h"
33 #import "AVTrackPrivateAVFObjCImpl.h"
34 #import "AudioSourceProviderAVFObjC.h"
35 #import "AudioTrackPrivateAVFObjC.h"
36 #import "AuthenticationChallenge.h"
37 #import "CDMSessionAVFoundationObjC.h"
38 #import "Cookie.h"
39 #import "ExceptionCodePlaceholder.h"
40 #import "Extensions3D.h"
41 #import "FloatConversion.h"
42 #import "FloatConversion.h"
43 #import "GraphicsContext.h"
44 #import "GraphicsContext3D.h"
45 #import "GraphicsContextCG.h"
46 #import "InbandMetadataTextTrackPrivateAVF.h"
47 #import "InbandTextTrackPrivateAVFObjC.h"
48 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
49 #import "Logging.h"
50 #import "MediaPlaybackTargetMac.h"
51 #import "MediaPlaybackTargetMock.h"
52 #import "MediaSelectionGroupAVFObjC.h"
53 #import "MediaTimeAVFoundation.h"
54 #import "OutOfBandTextTrackPrivateAVF.h"
55 #import "PixelBufferConformerCV.h"
56 #import "PlatformTimeRanges.h"
57 #import "QuartzCoreSPI.h"
58 #import "SecurityOrigin.h"
59 #import "SerializedPlatformRepresentationMac.h"
60 #import "Settings.h"
61 #import "TextEncoding.h"
62 #import "TextTrackRepresentation.h"
63 #import "TextureCacheCV.h"
64 #import "URL.h"
65 #import "UUID.h"
66 #import "VideoTextureCopierCV.h"
67 #import "VideoTrackPrivateAVFObjC.h"
68 #import "WebCoreAVFResourceLoader.h"
69 #import "WebCoreCALayerExtras.h"
70 #import "WebCoreNSURLSession.h"
71 #import "WebCoreSystemInterface.h"
72 #import <functional>
73 #import <map>
74 #import <objc/runtime.h>
75 #import <runtime/DataView.h>
76 #import <runtime/JSCInlines.h>
77 #import <runtime/TypedArrayInlines.h>
78 #import <runtime/Uint16Array.h>
79 #import <runtime/Uint32Array.h>
80 #import <runtime/Uint8Array.h>
81 #import <wtf/BlockObjCExceptions.h>
82 #import <wtf/CurrentTime.h>
83 #import <wtf/ListHashSet.h>
84 #import <wtf/NeverDestroyed.h>
85 #import <wtf/OSObjectPtr.h>
86 #import <wtf/text/CString.h>
87
88 #if ENABLE(AVF_CAPTIONS)
89 #include "TextTrack.h"
90 #endif
91
92 #import <AVFoundation/AVFoundation.h>
93
94 #if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
95 #import "VideoFullscreenLayerManager.h"
96 #endif
97
98 #if PLATFORM(IOS)
99 #import "WAKAppKitStubs.h"
100 #import <CoreImage/CoreImage.h>
101 #import <mach/mach_port.h>
102 #else
103 #import <Foundation/NSGeometry.h>
104 #import <QuartzCore/CoreImage.h>
105 #endif
106
107 #if USE(VIDEOTOOLBOX)
108 #import <CoreVideo/CoreVideo.h>
109 #import <VideoToolbox/VideoToolbox.h>
110 #endif
111
112 #if USE(CFNETWORK)
113 #include "CFNSURLConnectionSPI.h"
114 #endif
115
116 #import "CoreVideoSoftLink.h"
117
118 namespace std {
// HashSet's iterator does not declare the standard iterator typedefs, so code
// that consults std::iterator_traits for it would fail to compile. This
// explicit specialization supplies value_type for the MediaSelectionOptionAVFObjC
// HashSet iterator used in this file.
// NOTE(review): only value_type is provided; anything needing difference_type
// or iterator_category will still not compile — presumably nothing here does.
119 template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
120     typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
121 };
122 }
123
124 #if ENABLE(AVF_CAPTIONS)
125 // Note: This must be defined before our SOFT_LINK macros:
// Category exposing SPI properties on AVMediaSelectionOption that identify
// out-of-band (externally supplied) text tracks. Read by the caption code in
// this file when AVF_CAPTIONS is enabled.
// NOTE(review): these are non-public AVFoundation properties — availability
// depends on the OS version; confirm before relying on them elsewhere.
126 @class AVMediaSelectionOption;
127 @interface AVMediaSelectionOption (OutOfBandExtensions)
128 @property (nonatomic, readonly) NSString* outOfBandSource;
129 @property (nonatomic, readonly) NSString* outOfBandIdentifier;
130 @end
131 #endif
132
// Category exposing the SPI -resolvedURL property on AVURLAsset so this file
// can read the asset's post-redirect URL.
// NOTE(review): non-public AVFoundation property; verify availability on the
// minimum supported OS before use.
133 @interface AVURLAsset (WebKitExtensions)
134 @property (nonatomic, readonly) NSURL *resolvedURL;
135 @end
136
137 typedef AVPlayer AVPlayerType;
138 typedef AVPlayerItem AVPlayerItemType;
139 typedef AVPlayerItemLegibleOutput AVPlayerItemLegibleOutputType;
140 typedef AVPlayerItemVideoOutput AVPlayerItemVideoOutputType;
141 typedef AVMetadataItem AVMetadataItemType;
142 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
143 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
144 typedef AVAssetCache AVAssetCacheType;
145
146 #pragma mark - Soft Linking
147
148 // Soft-linking headers must be included last since they #define functions, constants, etc.
149 #import "CoreMediaSoftLink.h"
150
151 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
152
153 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
154
155 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayer)
156 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerItem)
157 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerItemVideoOutput)
158 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerLayer)
159 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVURLAsset)
160 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVAssetImageGenerator)
161 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVMetadataItem)
162 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVAssetCache)
163
164 SOFT_LINK_CLASS(CoreImage, CIContext)
165 SOFT_LINK_CLASS(CoreImage, CIImage)
166
167 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString*)
168 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString*)
169 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
170 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
171 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
172 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
173 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
174 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeMetadata, NSString *)
175 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
176 SOFT_LINK_POINTER(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
177 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
178 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
179 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
180 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
181 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
182
183 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
184
185 #define AVPlayer initAVPlayer()
186 #define AVPlayerItem initAVPlayerItem()
187 #define AVPlayerLayer initAVPlayerLayer()
188 #define AVURLAsset initAVURLAsset()
189 #define AVAssetImageGenerator initAVAssetImageGenerator()
190 #define AVPlayerItemVideoOutput initAVPlayerItemVideoOutput()
191 #define AVMetadataItem initAVMetadataItem()
192 #define AVAssetCache initAVAssetCache()
193
194 #define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
195 #define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
196 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
197 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
198 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
199 #define AVMediaTypeVideo getAVMediaTypeVideo()
200 #define AVMediaTypeAudio getAVMediaTypeAudio()
201 #define AVMediaTypeMetadata getAVMediaTypeMetadata()
202 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
203 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
204 #define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
205 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
206 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
207 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
208 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
209 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
210
211 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
212 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
213 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
214
215 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
216 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
217 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
218
219 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
220 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
221 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
222 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
223
224 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
225 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
226 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
227 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
228 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
229 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
230 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
231 #endif
232
233 #if ENABLE(AVF_CAPTIONS)
234 SOFT_LINK_POINTER(AVFoundation, AVURLAssetCacheKey, NSString*)
235 SOFT_LINK_POINTER(AVFoundation, AVURLAssetHTTPCookiesKey, NSString*)
236 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
237 SOFT_LINK_POINTER(AVFoundation, AVURLAssetUsesNoPersistentCacheKey, NSString*)
238 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
239 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
240 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
241 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
242 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
243 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
244 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
245 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
246 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicIsAuxiliaryContent, NSString*)
247
248 #define AVURLAssetHTTPCookiesKey getAVURLAssetHTTPCookiesKey()
249 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
250 #define AVURLAssetCacheKey getAVURLAssetCacheKey()
251 #define AVURLAssetUsesNoPersistentCacheKey getAVURLAssetUsesNoPersistentCacheKey()
252 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
253 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
254 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
255 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
256 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
257 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
258 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
259 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
260 #define AVMediaCharacteristicIsAuxiliaryContent getAVMediaCharacteristicIsAuxiliaryContent()
261 #endif
262
263 #if ENABLE(DATACUE_VALUE)
264 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString*)
265 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMetadataKeySpaceISOUserData, NSString*)
266 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString*)
267 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceiTunes, NSString*)
268 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceID3, NSString*)
269
270 #define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
271 #define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
272 #define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
273 #define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
274 #define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
275 #endif
276
277 #if PLATFORM(IOS)
278 SOFT_LINK_POINTER(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
279 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
280 #endif
281
282 SOFT_LINK_FRAMEWORK(MediaToolbox)
283 SOFT_LINK_OPTIONAL(MediaToolbox, MTEnableCaption2015Behavior, Boolean, (), ())
284
285 using namespace WebCore;
286
// Values passed (cast to void*) as the KVO "context" argument when observers
// are registered — see createAVPlayerLayer(), which registers with
// MediaPlayerAVFoundationObservationContextAVPlayerLayer. The observer uses
// the context to tell which kind of object a KVO change came from.
287 enum MediaPlayerAVFoundationObservationContext {
288     MediaPlayerAVFoundationObservationContextPlayerItem,
289     MediaPlayerAVFoundationObservationContextPlayerItemTrack,
290     MediaPlayerAVFoundationObservationContextPlayer,
291     MediaPlayerAVFoundationObservationContextAVPlayerLayer,
292 };
293
// Objective-C observer that receives KVO changes, end-of-playback
// notifications, and (when legible output is supported) attributed-string
// caption output, and forwards them to the owning C++
// MediaPlayerPrivateAVFoundationObjC via m_callback. -disconnect clears the
// callback so late notifications are ignored during teardown (see cancelLoad).
294 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
295 @interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
296 #else
297 @interface WebCoreAVFMovieObserver : NSObject
298 #endif
299 {
300     MediaPlayerPrivateAVFoundationObjC* m_callback;
301     int m_delayCallbacks;
302 }
303 -(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
304 -(void)disconnect;
305 -(void)metadataLoaded;
306 -(void)didEnd:(NSNotification *)notification;
// NOTE(review): keyPath is declared without a type (implicitly id), and
// context is declared as the enum rather than void* as in NSObject's standard
// -observeValueForKeyPath:ofObject:change:context: signature. The
// implementation (not visible in this chunk) presumably matches — confirm
// before changing either declaration.
307 -(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
308 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
309 - (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
310 - (void)outputSequenceWasFlushed:(id)output;
311 #endif
312 @end
313
// AVAssetResourceLoader delegate that routes resource-loading requests for
// custom/encrypted media back to the owning C++ player object. The callback
// is cleared (setCallback:0) in the player's destructor before the delegate
// can be released, so in-flight loader callbacks become no-ops.
314 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
315 @interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
316     MediaPlayerPrivateAVFoundationObjC* m_callback;
317 }
318 - (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
319 - (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
320 - (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
321 @end
322 #endif
323
// AVPlayerItemOutput pull delegate: notified when new video frames become
// available from the AVPlayerItemVideoOutput, forwarding to the C++ player.
// NOTE(review): m_semaphore is presumably used to block a waiter until
// -outputMediaDataWillChange: fires — the signaling code is outside this
// chunk; confirm against the implementation.
324 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
325 @interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
326     MediaPlayerPrivateAVFoundationObjC *m_callback;
327     dispatch_semaphore_t m_semaphore;
328 }
329 - (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
330 - (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
331 - (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
332 - (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
333 @end
334 #endif
335
336 namespace WebCore {
337
338 static NSArray *assetMetadataKeyNames();
339 static NSArray *itemKVOProperties();
340 static NSArray *assetTrackMetadataKeyNames();
341 static NSArray *playerKVOProperties();
342 static AVAssetTrack* firstEnabledTrack(NSArray* tracks);
343
344 #if !LOG_DISABLED
// Render a bool as a C string ("true"/"false") for LOG() messages.
static const char *boolString(bool value)
{
    if (value)
        return "true";
    return "false";
}
349 #endif
350
351 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Returns the process-wide serial queue on which all WebCoreAVFLoaderDelegate
// callbacks are delivered. Created exactly once, thread-safely, on first use;
// the queue is intentionally never destroyed.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t loaderQueue;
    static dispatch_once_t createOnce;
    dispatch_once(&createOnce, ^{
        loaderQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return loaderQueue;
}
361 #endif
362
363 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Returns the process-wide serial queue used for WebCoreAVFPullDelegate
// (video output) callbacks. Lazily created once in a thread-safe manner and
// kept alive for the lifetime of the process.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_queue_t pullQueue;
    static dispatch_once_t createOnce;
    dispatch_once(&createOnce, ^{
        pullQueue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return pullQueue;
}
373 #endif
374
375 #if USE(CFNETWORK)
// Adapter used only on the CFNetwork path: it implements WebCore's
// AuthenticationClient interface and forwards each decision (use credential,
// continue without, cancel, etc.) to the retained NSURLAuthenticationChallenge's
// sender. Ref-counted so the challenge outlives the WebCore-side decision.
376 class WebCoreNSURLAuthenticationChallengeClient : public RefCounted<WebCoreNSURLAuthenticationChallengeClient>, public AuthenticationClient {
377 public:
378     static RefPtr<WebCoreNSURLAuthenticationChallengeClient> create(NSURLAuthenticationChallenge *challenge)
379     {
380         return adoptRef(new WebCoreNSURLAuthenticationChallengeClient(challenge));
381     }
382
// AuthenticationClient's ref/deref are satisfied by RefCounted.
383     using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::ref;
384     using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::deref;
385
386 private:
387     WebCoreNSURLAuthenticationChallengeClient(NSURLAuthenticationChallenge *challenge)
388         : m_challenge(challenge)
389     {
390         ASSERT(m_challenge);
391     }
392
393     void refAuthenticationClient() override { ref(); }
394     void derefAuthenticationClient() override { deref(); }
395
// WebCore supplied a credential; hand it to the challenge's sender.
396     void receivedCredential(const AuthenticationChallenge&, const Credential& credential) override
397     {
398         [[m_challenge sender] useCredential:credential.nsCredential() forAuthenticationChallenge:m_challenge.get()];
399     }
400
401     void receivedRequestToContinueWithoutCredential(const AuthenticationChallenge&) override
402     {
403         [[m_challenge sender] continueWithoutCredentialForAuthenticationChallenge:m_challenge.get()];
404     }
405
406     void receivedCancellation(const AuthenticationChallenge&) override
407     {
408         [[m_challenge sender] cancelAuthenticationChallenge:m_challenge.get()];
409     }
410
// The next two sender methods are optional protocol members, so guard each
// call with respondsToSelector: before messaging.
411     void receivedRequestToPerformDefaultHandling(const AuthenticationChallenge&) override
412     {
413         if ([[m_challenge sender] respondsToSelector:@selector(performDefaultHandlingForAuthenticationChallenge:)])
414             [[m_challenge sender] performDefaultHandlingForAuthenticationChallenge:m_challenge.get()];
415     }
416
417     void receivedChallengeRejection(const AuthenticationChallenge&) override
418     {
419         if ([[m_challenge sender] respondsToSelector:@selector(rejectProtectionSpaceAndContinueWithChallenge:)])
420             [[m_challenge sender] rejectProtectionSpaceAndContinueWithChallenge:m_challenge.get()];
421     }
422
423     RetainPtr<NSURLAuthenticationChallenge> m_challenge;
424 };
425 #endif
426
// Registers this engine with the MediaPlayer factory, wiring up the
// constructor plus the type-support and media-cache callbacks. Does nothing
// when AVFoundation could not be soft-linked. Finally primes the AVFoundation
// MIME type cache so later supportsType() queries have data available.
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    auto createPlayer = [](MediaPlayer* player) {
        return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player);
    };
    registrar(createPlayer, getSupportedTypes, supportsType, originsInMediaCache, clearMediaCache, clearMediaCacheForOrigins, supportsKeySystem);

    AVFoundationMIMETypeCache::singleton().loadTypes();
}
436
// Returns the AVAssetCache backing the media cache rooted at |path|. An empty
// path falls back to a "MediaCache" directory inside the temporary directory.
static AVAssetCacheType *assetCacheForPath(const String& path)
{
    NSURL *assetCacheURL = path.isEmpty()
        ? [[NSURL fileURLWithPath:NSTemporaryDirectory()] URLByAppendingPathComponent:@"MediaCache" isDirectory:YES]
        : [NSURL fileURLWithPath:path isDirectory:YES];

    return [initAVAssetCache() assetCacheWithURL:assetCacheURL];
}
448
// Collects the security origin of every entry in the media cache at |path|.
// Cache keys are parsed as URLs; keys that do not parse are skipped.
HashSet<RefPtr<SecurityOrigin>> MediaPlayerPrivateAVFoundationObjC::originsInMediaCache(const String& path)
{
    HashSet<RefPtr<SecurityOrigin>> origins;
    for (NSString *key in [assetCacheForPath(path) allKeys]) {
        URL keyURL = URL(URL(), key);
        if (!keyURL.isValid())
            continue;
        origins.add(SecurityOrigin::create(keyURL));
    }
    return origins;
}
459
// Converts an NSDate into a std::chrono::system_clock::time_point by way of
// its seconds-since-epoch value. |date| must be non-null.
static std::chrono::system_clock::time_point toSystemClockTime(NSDate *date)
{
    ASSERT(date);

    std::chrono::duration<double> secondsSinceEpoch(date.timeIntervalSince1970);
    return std::chrono::system_clock::time_point(
        std::chrono::duration_cast<std::chrono::system_clock::duration>(secondsSinceEpoch));
}
467
// Removes media-cache content at |path| that was modified after
// |modifiedSince|: first AVAssetCache entries, then loose "CachedMedia-" files
// in the cache directory. A modifiedSince at or before the epoch means
// "clear everything" and removes the whole cache directory.
468 void MediaPlayerPrivateAVFoundationObjC::clearMediaCache(const String& path, std::chrono::system_clock::time_point modifiedSince)
469 {
470     LOG(Media, "MediaPlayerPrivateAVFoundationObjC::clearMediaCache()");
471     
472     AVAssetCacheType* assetCache = assetCacheForPath(path);
473     
// Drop every AVAssetCache entry newer than the cutoff.
474     for (NSString *key in [assetCache allKeys]) {
475         if (toSystemClockTime([assetCache lastModifiedDateOfEntryForKey:key]) > modifiedSince)
476             [assetCache removeEntryForKey:key];
477     }
478
479     NSFileManager *fileManager = [NSFileManager defaultManager];
480     NSURL *baseURL = [assetCache URL];
481
// Epoch-or-earlier cutoff: wipe the entire cache directory and stop.
482     if (modifiedSince <= std::chrono::system_clock::time_point { }) {
483         [fileManager removeItemAtURL:baseURL error:nil];
484         return;
485     }
486     
// Otherwise scan the top level of the cache directory (no recursion) for
// regular files written by the cache ("CachedMedia-" prefix) newer than the
// cutoff, collecting them first so we do not mutate while enumerating.
487     NSArray *propertyKeys = @[NSURLNameKey, NSURLContentModificationDateKey, NSURLIsRegularFileKey];
488     NSDirectoryEnumerator *enumerator = [fileManager enumeratorAtURL:baseURL includingPropertiesForKeys:
489         propertyKeys options:NSDirectoryEnumerationSkipsSubdirectoryDescendants
490         errorHandler:nil];
491     
492     RetainPtr<NSMutableArray> urlsToDelete = adoptNS([[NSMutableArray alloc] init]);
493     for (NSURL *fileURL : enumerator) {
494         NSDictionary *fileAttributes = [fileURL resourceValuesForKeys:propertyKeys error:nil];
495     
496         if (![fileAttributes[NSURLNameKey] hasPrefix:@"CachedMedia-"])
497             continue;
498         
499         if (![fileAttributes[NSURLIsRegularFileKey] boolValue])
500             continue;
501         
// NOTE(review): if NSURLContentModificationDateKey is missing this passes nil
// to toSystemClockTime(), which ASSERTs the date is non-null — presumably the
// key is always present for regular files; confirm.
502         if (toSystemClockTime(fileAttributes[NSURLContentModificationDateKey]) <= modifiedSince)
503             continue;
504         
505         [urlsToDelete addObject:fileURL];
506     }
507     
// Deletion errors are deliberately ignored (best-effort cleanup).
508     for (NSURL *fileURL in urlsToDelete.get())
509         [fileManager removeItemAtURL:fileURL error:nil];
510 }
511
// Removes every media-cache entry at |path| whose key parses to a URL whose
// security origin appears in |origins|. Unparseable keys are left untouched.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCacheForOrigins(const String& path, const HashSet<RefPtr<SecurityOrigin>>& origins)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::clearMediaCacheForOrigins()");

    AVAssetCacheType* assetCache = assetCacheForPath(path);
    for (NSString *key in [assetCache allKeys]) {
        URL keyAsURL = URL(URL(), key);
        if (keyAsURL.isValid() && origins.contains(SecurityOrigin::create(keyAsURL)))
            [assetCache removeEntryForKey:key];
    }
}
524
// Constructs the AVFoundation-backed player. Creates the Objective-C helper
// objects (movie observer, and — where compiled in — the video output pull
// delegate and resource loader delegate), each pointing back at this object,
// and initializes the cached-state members to their "nothing loaded" values.
525 MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
526     : MediaPlayerPrivateAVFoundation(player)
527     , m_weakPtrFactory(this)
528 #if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
529     , m_videoFullscreenLayerManager(VideoFullscreenLayerManager::create())
530     , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
531 #endif
532     , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
533     , m_videoFrameHasDrawn(false)
534     , m_haveCheckedPlayability(false)
535 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
536     , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
537     , m_videoOutputSemaphore(nullptr)
538 #endif
539 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
540     , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
541 #endif
542     , m_currentTextTrack(0)
543     , m_cachedRate(0)
544     , m_cachedTotalBytes(0)
545     , m_pendingStatusChanges(0)
546     , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
547     , m_cachedLikelyToKeepUp(false)
548     , m_cachedBufferEmpty(false)
549     , m_cachedBufferFull(false)
550     , m_cachedHasEnabledAudio(false)
551     , m_shouldBufferData(true)
552     , m_cachedIsReadyForDisplay(false)
553     , m_haveBeenAskedToCreateLayer(false)
554 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
555     , m_allowsWirelessVideoPlayback(true)
556 #endif
557 {
558 }
559
// Tears down the player. Order matters: delegate callbacks are detached
// first so no Objective-C helper can call back into this object while it is
// being destroyed, then the video layer is destroyed, then cancelLoad()
// releases the AVPlayer/AVPlayerItem and removes all observers.
560 MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
561 {
562 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Detach the resource-loader delegate and invalidate any in-flight loaders.
563     [m_loaderDelegate.get() setCallback:0];
564     [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];
565
566     for (auto& pair : m_resourceLoaderMap)
567         pair.value->invalidate();
568 #endif
569 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Detach the pull delegate and release the (manually managed) semaphore.
570     [m_videoOutputDelegate setCallback:0];
571     [m_videoOutput setDelegate:nil queue:0];
572     if (m_videoOutputSemaphore)
573         dispatch_release(m_videoOutputSemaphore);
574 #endif
575
576     if (m_videoLayer)
577         destroyVideoLayer();
578
579     cancelLoad();
580 }
581
// Aborts any in-progress load and releases the AVPlayer/AVPlayerItem along
// with every KVO observation and output attached to them, then resets all
// cached playback state. Observer removal must happen before the observed
// objects are released, and load-state changes are suppressed for the
// duration so cancellation callbacks do not re-enter the loading machinery.
582 void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
583 {
584     LOG(Media, "MediaPlayerPrivateAVFoundationObjC::cancelLoad(%p)", this);
585     tearDownVideoRendering();
586
// Stop all notification delivery to, and disconnect, the ObjC observer.
587     [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
588     [m_objcObserver.get() disconnect];
589
590     // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
591     setIgnoreLoadStateChanges(true);
592     if (m_avAsset) {
593         [m_avAsset.get() cancelLoading];
594         m_avAsset = nil;
595     }
596
597     clearTextTracks();
598
599 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
// Remove the caption output from the item before dropping our reference.
600     if (m_legibleOutput) {
601         if (m_avPlayerItem)
602             [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
603         m_legibleOutput = nil;
604     }
605 #endif
606
// Unregister every KVO key path before releasing the player item…
607     if (m_avPlayerItem) {
608         for (NSString *keyName in itemKVOProperties())
609             [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
610         
611         m_avPlayerItem = nil;
612     }
// …and likewise the time observer and KVO key paths before the player.
613     if (m_avPlayer) {
614         if (m_timeObserver)
615             [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
616         m_timeObserver = nil;
617
618         for (NSString *keyName in playerKVOProperties())
619             [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
620         m_avPlayer = nil;
621     }
622
623     // Reset cached properties
624     m_pendingStatusChanges = 0;
625     m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
626     m_cachedSeekableRanges = nullptr;
627     m_cachedLoadedRanges = nullptr;
628     m_cachedHasEnabledAudio = false;
629     m_cachedPresentationSize = FloatSize();
630     m_cachedDuration = MediaTime::zeroTime();
631
// Each cached track was observed for "enabled" changes; unobserve them all.
632     for (AVPlayerItemTrack *track in m_cachedTracks.get())
633         [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
634     m_cachedTracks = nullptr;
635
636 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
// Detach the audio source provider from the now-released item/track.
637     if (m_provider) {
638         m_provider->setPlayerItem(nullptr);
639         m_provider->setAudioTrack(nullptr);
640     }
641 #endif
642
643     setIgnoreLoadStateChanges(false);
644 }
645
// True once createVideoLayer() has committed to creating an AVPlayerLayer.
// Note the flag is set before the layer itself exists, since layer creation
// completes asynchronously on the main thread (see createVideoLayer()).
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_haveBeenAskedToCreateLayer;
}
650
// A context renderer exists when either the AVPlayerItemVideoOutput (where
// compiled in) or the AVAssetImageGenerator has been created.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    return m_videoOutput || m_imageGenerator;
#else
    return m_imageGenerator;
#endif
}
659
// Creates whichever context renderer this build supports: the video output
// path when AVPlayerItemVideoOutput is available, otherwise the
// image-generator fallback.
660 void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
661 {
662 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
663     createVideoOutput();
664 #else
665     createImageGenerator();
666 #endif
667 }
668
// Lazily creates the AVAssetImageGenerator used to paint video frames when no
// video output is available. No-op until an asset exists, or if the generator
// has already been created.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p)", this);

    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    // `auto` here sidesteps the soft-link #define that shadows the class name.
    auto generator = m_imageGenerator.get();
    [generator setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [generator setAppliesPreferredTrackTransform:YES];
    // Zero tolerance in both directions: request the exact frame, not the
    // nearest keyframe.
    [generator setRequestedTimeToleranceBefore:kCMTimeZero];
    [generator setRequestedTimeToleranceAfter:kCMTimeZero];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
}
685
// Destroys every context renderer: the video and OpenGL outputs (where
// compiled in) and the image-generator fallback. Safe when none were created.
686 void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
687 {
688 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
689     destroyVideoOutput();
690     destroyOpenGLVideoOutput();
691 #endif
692     destroyImageGenerator();
693 }
694
// Releases the AVAssetImageGenerator created by createImageGenerator().
// Safe to call when no generator exists.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (!m_imageGenerator)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator(%p) - destroying  %p", this, m_imageGenerator.get());

    // Reset with nullptr rather than 0, matching how the other smart-pointer
    // members are cleared in this file (e.g. cancelLoad()).
    m_imageGenerator = nullptr;
}
704
// Asks for an AVPlayerLayer to be created. The real work is deferred to the
// main thread via callOnMainThread with a weak pointer, so the guard
// conditions are re-checked inside the lambda: this object may have been
// destroyed, the player released, or the layer already requested by the time
// the lambda runs.
705 void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
706 {
707     if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
708         return;
709
710     callOnMainThread([this, weakThis = createWeakPtr()] {
711         if (!weakThis)
712             return;
713
// Re-check: state may have changed between scheduling and execution.
714         if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
715             return;
716         m_haveBeenAskedToCreateLayer = true;
717
718         if (!m_videoLayer)
719             createAVPlayerLayer();
720
721 #if USE(VIDEOTOOLBOX)
722         if (!m_videoOutput)
723             createVideoOutput();
724 #endif
725
// Let the client know rendering switched from context to layer mode.
726         player()->client().mediaPlayerRenderingModeChanged(player());
727     });
728 }
729
// Creates and configures the AVPlayerLayer: attaches it to the player,
// registers KVO on "readyForDisplay" (tagged with the AVPlayerLayer
// observation context so the observer can route it), applies gravity and
// contents scale, and hands the layer to the fullscreen manager (or sizes it
// directly on platforms without one).
730 void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
731 {
732     if (!m_avPlayer)
733         return;
734
735     m_videoLayer = adoptNS([[AVPlayerLayer alloc] init]);
736     [m_videoLayer setPlayer:m_avPlayer.get()];
737     [m_videoLayer setBackgroundColor:cachedCGColor(Color::black)];
738
739 #ifndef NDEBUG
740     [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
741 #endif
// Balanced by the removeObserver: call in destroyVideoLayer().
742     [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
743     updateVideoLayerGravity();
744     [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
745     IntSize defaultSize = snappedIntRect(player()->client().mediaPlayerContentBoxRect()).size();
746     LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoLayer(%p) - returning %p", this, m_videoLayer.get());
747
748 #if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
749     m_videoFullscreenLayerManager->setVideoLayer(m_videoLayer.get(), defaultSize);
750
751 #if PLATFORM(IOS)
// setPIPModeEnabled: is SPI, so probe for it before messaging.
752     if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
753         [m_videoLayer setPIPModeEnabled:(player()->fullscreenMode() & MediaPlayer::VideoFullscreenModePictureInPicture)];
754 #endif
755 #else
756     [m_videoLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
757 #endif
758 }
759
// Detach and release the AVPlayerLayer. The KVO observer must be removed
// before the layer is disconnected from the player and released.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer(%p) - destroying %p", this, m_videoLayer.get());

    [m_videoLayer removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer setPlayer:nil];

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    // Let the fullscreen layer manager drop its reference to the layer too.
    m_videoFullscreenLayerManager->didDestroyVideoLayer();
#endif

    m_videoLayer = nil;
}
776
// Compute the media's start date (wall-clock epoch time, in milliseconds) by
// subtracting the current playback offset from the item's current date.
MediaTime MediaPlayerPrivateAVFoundationObjC::getStartDate() const
{
    // currentDate advances with the playback position, so the start date is
    // currentDate minus the playback offset. Work in milliseconds throughout.
    double dateInMilliseconds = [[m_avPlayerItem currentDate] timeIntervalSince1970] * 1000;

    // AVFoundation reports 0 when the media carries no start date; no real
    // stream dates from the 1970 epoch, so 0 means "no date available".
    if (!dateInMilliseconds)
        return MediaTime::invalidTime();

    double playbackOffsetInMilliseconds = CMTimeGetSeconds([m_avPlayerItem currentTime]) * 1000;

    // Round away the sub-second error introduced by the subtraction.
    return MediaTime::createWithDouble(round(dateInMilliseconds - playbackOffsetInMilliseconds));
}
791
// True once at least one video frame can be presented. Layer-backed rendering
// trusts the cached readyForDisplay KVO state; other modes track painting.
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    bool renderingToLayer = currentRenderingMode() == MediaRenderingToLayer;
    return renderingToLayer ? m_cachedIsReadyForDisplay : m_videoFrameHasDrawn;
}
799
800 #if ENABLE(AVF_CAPTIONS)
// Map a WebCore text track kind onto the AVFoundation media characteristics
// used to describe an out-of-band alternate track.
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    // Under the 2015 caption behavior every out-of-band track is auxiliary content.
    static bool manualSelectionMode = MTEnableCaption2015BehaviorPtr() && MTEnableCaption2015BehaviorPtr()();
    if (manualSelectionMode)
        return @[ AVMediaCharacteristicIsAuxiliaryContent ];

    // FIXME: Match these to correct types:
    switch (kind) {
    case PlatformTextTrack::Caption:
    case PlatformTextTrack::Subtitle:
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];
    case PlatformTextTrack::Description:
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility ];
    case PlatformTextTrack::Forced:
        return @[ AVMediaCharacteristicContainsOnlyForcedSubtitles ];
    default:
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];
    }
}
822     
// An out-of-band track's mode changed; forward to the shared AVF handling.
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
827     
// Push the page-level mode of each out-of-band track source down to the
// matching out-of-band track object held in m_textTracks.
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();

    for (auto& textTrack : m_textTracks) {
        // Only out-of-band tracks are driven by PlatformTextTrack sources.
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;

        RefPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> currentOption = trackPrivate->mediaSelectionOption();

        for (auto& track : outOfBandTrackSources) {
            // Tracks match when the source's unique id equals the identifier
            // stored in the selection option's outOfBandIdentifier.
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();

            if (![[currentOption.get() outOfBandIdentifier] isEqual: reinterpret_cast<const NSString*>(uniqueID.get())])
                continue;

            // Translate the platform mode; anything unrecognized stays Hidden.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;

            textTrack->setMode(mode);
            break;
        }
    }
}
858 #endif
859
860
// Run the URL through NSURLProtocol canonicalization so it matches what the
// URL loading system will actually request; fall back to the plain URL if any
// step fails or the input is empty.
static NSURL *canonicalURL(const String& url)
{
    NSURL *result = URL(ParsedURLString, url);
    if (url.isEmpty())
        return result;

    RetainPtr<NSURLRequest> wrappedRequest = adoptNS([[NSURLRequest alloc] initWithURL:result]);
    if (!wrappedRequest)
        return result;

    if (NSURLRequest *canonicalRequest = [NSURLProtocol canonicalRequestForRequest:wrappedRequest.get()])
        return [canonicalRequest URL];

    return result;
}
877
878 #if PLATFORM(IOS)
// Convert a WebCore Cookie into an NSHTTPCookie via its properties dictionary.
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    RetainPtr<NSMutableDictionary> cookieProperties = adoptNS([[NSMutableDictionary alloc] init]);
    // Cookie::expires is in milliseconds; NSDate expects seconds since 1970.
    [cookieProperties setDictionary:@{
        NSHTTPCookieName: cookie.name,
        NSHTTPCookieValue: cookie.value,
        NSHTTPCookieDomain: cookie.domain,
        NSHTTPCookiePath: cookie.path,
        NSHTTPCookieExpires: [NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)],
    }];
    // Secure and session flags are only added when set.
    if (cookie.secure)
        cookieProperties.get()[NSHTTPCookieSecure] = @YES;
    if (cookie.session)
        cookieProperties.get()[NSHTTPCookieDiscard] = @YES;

    return [NSHTTPCookie cookieWithProperties:cookieProperties.get()];
}
896 #endif
897
// Build the AVURLAsset for |url|, assembling the asset-creation options
// (reference restrictions, HTTP headers, out-of-band captions, cookies,
// caching) and wiring the resource-loader delegate.
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
{
    if (m_avAsset)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(%p) - url = %s", this, url.utf8().data());

    // Suppress notifications while configuring so observers see a consistent state.
    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);    

    // Disallow the asset from mixing local and remote sub-resources.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    // Forward the page's Referer and User-Agent headers with media requests.
    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    // Honor the media element's opt-in attribute for iTunes query inheritance.
    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject: [NSNumber numberWithBool: TRUE] forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

#if PLATFORM(IOS)
    // FIXME: rdar://problem/20354688
    // Attribute network traffic to the hosting application's bundle id.
    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty() && AVURLAssetClientBundleIdentifierKey)
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];
#endif

#if ENABLE(AVF_CAPTIONS)
    // Describe the page's out-of-band text tracks so AVFoundation exposes them
    // as alternate tracks; each is keyed by its WebCore unique id.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: reinterpret_cast<const NSString*>(label.get()),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: reinterpret_cast<const NSString*>(language.get()),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: reinterpret_cast<const NSString*>(uniqueID.get()),
                AVOutOfBandAlternateTrackSourceKey: reinterpret_cast<const NSString*>(url.get()),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS)
    // Pin media loading to a specific network interface when one is configured.
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS)
    // Hand the page's cookies for this URL to AVFoundation's loader.
    Vector<Cookie> cookies;
    if (player()->getRawCookies(URL(ParsedURLString, url), cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    // Persistent caching is opt-in from the client; otherwise disable it.
    bool usePersistentCache = player()->client().mediaPlayerShouldUsePersistentCache();
    [options setObject:@(!usePersistentCache) forKey:AVURLAssetUsesNoPersistentCacheKey];
    
    if (usePersistentCache)
        [options setObject:assetCacheForPath(player()->client().mediaPlayerMediaCacheDirectory()) forKey:AVURLAssetCacheKey];

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([[AVURLAsset alloc] initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Route key/resource loading requests through our delegate.
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    [resourceLoader setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];

#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED > 101100
    // When supported, give the resource loader a WebCore-backed NSURLSession so
    // media loads go through WebCore's resource loading machinery.
    if (Settings::isAVFoundationNSURLSessionEnabled()
        && [resourceLoader respondsToSelector:@selector(setURLSession:)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegate)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegateQueue)]) {
        RefPtr<PlatformMediaResourceLoader> mediaResourceLoader = player()->createResourceLoader();
        if (mediaResourceLoader)
            resourceLoader.URLSession = (NSURLSession *)[[[WebCoreNSURLSession alloc] initWithResourceLoader:*mediaResourceLoader delegate:resourceLoader.URLSessionDataDelegate delegateQueue:resourceLoader.URLSessionDataDelegateQueue] autorelease];
    }
#endif

#endif

    // A fresh asset needs its playability re-checked.
    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
1004
// Install |item| as the AVPlayer's current item. Item replacement is always
// performed on the main thread; off the main thread the work is dispatched
// there with the player and item retained until it runs.
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItemType *item)
{
    if (!m_avPlayer)
        return;

    if (pthread_main_np()) {
        [m_avPlayer replaceCurrentItemWithPlayerItem:item];
        return;
    }

    // Keep both objects alive across the async hop to the main queue.
    RetainPtr<AVPlayerType> strongPlayer = m_avPlayer.get();
    RetainPtr<AVPlayerItemType> strongItem = item;
    dispatch_async(dispatch_get_main_queue(), [strongPlayer, strongItem] {
        [strongPlayer replaceCurrentItemWithPlayerItem:strongItem.get()];
    });
}
1021
// Lazily create the AVPlayer, register KVO observers for the player-level
// properties, and re-apply any state (external playback target, muted state,
// player item) that was requested before the player existed.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayer(%p)", this);

    // Suppress notifications while configuring so observers see a consistent state.
    setDelayCallbacks(true);

    m_avPlayer = adoptNS([[AVPlayer alloc] init]);
    for (NSString *keyName in playerKVOProperties())
        [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // WebCore drives media selection itself; keep AVFoundation's automation off.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET) && !PLATFORM(IOS)
    if (m_shouldPlayToPlaybackTarget) {
        // Clear m_shouldPlayToPlaybackTarget so setShouldPlayToPlaybackTarget()
        // doesn't return without doing anything, then re-apply.
        m_shouldPlayToPlaybackTarget = false;
        setShouldPlayToPlaybackTarget(true);
    }
#endif

    if (m_muted) {
        // Clear m_muted so setMuted doesn't return without doing anything, then
        // re-apply. Fixed: this previously passed the freshly cleared m_muted
        // (false) to the player, losing the requested muted state.
        m_muted = false;
        setMuted(true);
    }

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
1066
// Lazily create the AVPlayerItem for the current asset, register its KVO and
// notification observers, and attach the legible output and audio provider.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem(%p)", this);

    // Suppress notifications while configuring so observers see a consistent state.
    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([[AVPlayerItem alloc] initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // Observe with the "prior" option so observers also see will-change events.
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Deliver WebVTT cues to us (not the player's own renderer) via a legible
    // output, with cues announced slightly ahead of their display time.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([[AVPlayerItemLegibleOutput alloc] initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Point an existing Web Audio provider at the new item and its audio track.
    if (m_provider) {
        m_provider->setPlayerItem(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }
#endif

    setDelayCallbacks(false);
}
1112
// Start an asynchronous load of the asset's "playable" key, at most once per
// asset. Completion is bounced to the main thread; a weak pointer drops the
// notification if this object was destroyed in the meantime.
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::checkPlayability(%p)", this);
    auto weakThis = createWeakPtr();

    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"playable"] completionHandler:^{
        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
1129
// Asynchronously load the asset-level metadata keys, then the metadata keys of
// each track. A dispatch group counts the outstanding loads so metadataLoaded
// is delivered (on the main thread) only after every load completes.
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);

    OSObjectPtr<dispatch_group_t> metadataLoadingGroup = adoptOSObject(dispatch_group_create());
    // Enter once for the asset-level load; the matching leave is below.
    dispatch_group_enter(metadataLoadingGroup.get());
    auto weakThis = createWeakPtr();
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            // Only fan out to per-track loads if tracks actually loaded.
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup.get());
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup.get());
                    }];
                }
            }
            // Balance the initial enter for the asset-level load.
            dispatch_group_leave(metadataLoadingGroup.get());
        });
    }];

    // Fires once the group is empty, i.e. all asset and track loads finished.
    dispatch_group_notify(metadataLoadingGroup.get(), dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });
    });
}
1159
// Map the cached AVPlayerItem status and buffering flags onto the shared
// ItemStatus enumeration. Checks are ordered: hard states (unknown/failed)
// first, then buffering states in decreasing priority, then ready-to-play.
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    if (m_cachedItemStatus == AVPlayerItemStatusUnknown)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    if (m_cachedItemStatus == AVPlayerItemStatusFailed)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1178
// Expose the underlying AVPlayer to clients as a tagged PlatformMedia union.
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformMedia(%p)", this);
    PlatformMedia pm;
    pm.type = PlatformMedia::AVFoundationMediaPlayerType;
    pm.media.avfMediaPlayer = m_avPlayer.get();
    return pm;
}
1187
// Return the layer the compositor should use for inline video, or null until
// layer creation has been requested.
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    if (!m_haveBeenAskedToCreateLayer)
        return nullptr;
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    // On these platforms the fullscreen layer manager owns the inline container.
    return m_videoFullscreenLayerManager->videoInlineLayer();
#else
    return m_videoLayer.get();
#endif
}
1196
1197 #if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
// Hand the video layer over to (or back from) a fullscreen container layer.
// The completion handler is always invoked, even when nothing changes.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer, std::function<void()> completionHandler)
{
    if (m_videoFullscreenLayerManager->videoFullscreenLayer() == videoFullscreenLayer) {
        completionHandler();
        return;
    }

    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, completionHandler);

    // Move the text track representation into the fullscreen layer so captions
    // render above fullscreen video.
    if (m_videoFullscreenLayerManager->videoFullscreenLayer() && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayerManager->videoFullscreenLayer() addSublayer:m_textTrackRepresentationLayer.get()];
    }

    updateDisableExternalPlayback();
}
1214
// Resize the fullscreen video frame and keep the caption layer bounds in sync.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
    syncTextTrackBounds();
}
1220
// Apply the requested fullscreen scaling mode to the AVPlayerLayer.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;

    if (!m_videoLayer)
        return;

    // Translate the WebCore gravity into the matching AVFoundation constant.
    NSString *videoGravity;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        videoGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        videoGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    }

    // Skip the layer update (and caption re-sync) when nothing changed.
    if ([m_videoLayer videoGravity] == videoGravity)
        return;

    [m_videoLayer setVideoGravity:videoGravity];
    syncTextTrackBounds();
}
1244
// React to fullscreen mode changes. Only iOS has per-mode work here: keep the
// layer's picture-in-picture flag and external playback state in sync.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenMode(MediaPlayer::VideoFullscreenMode mode)
{
#if PLATFORM(IOS)
    if (m_videoLayer && [m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(mode & MediaPlayer::VideoFullscreenModePictureInPicture)];
    updateDisableExternalPlayback();
#else
    UNUSED_PARAM(mode);
#endif
}
1255
1256 #endif // PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
1257
1258 #if PLATFORM(IOS)
// Return the most recently captured timed metadata, or nil when none exists.
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    return m_currentMetaData ? m_currentMetaData.get() : nil;
}
1265
// Return the player item's access log as a string, or the empty string when
// no item exists yet.
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemAccessLog *itemAccessLog = [m_avPlayerItem.get() accessLog];
    RetainPtr<NSString> text = adoptNS([[NSString alloc] initWithData:[itemAccessLog extendedLogData] encoding:[itemAccessLog extendedLogDataStringEncoding]]);
    return text.get();
}
1276
// Return the player item's error log as a string, or the empty string when
// no item exists yet.
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *itemErrorLog = [m_avPlayerItem.get() errorLog];
    RetainPtr<NSString> text = adoptNS([[NSString alloc] initWithData:[itemErrorLog extendedLogData] encoding:[itemErrorLog extendedLogDataStringEncoding]]);
    return text.get();
}
1287 #endif
1288
// Show or hide the video layer, wrapped in a CATransaction with actions
// disabled so toggling visibility does not trigger implicit animations.
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    if (m_videoLayer)
        [m_videoLayer.get() setHidden:!isVisible];
    [CATransaction commit];
}
1297     
// Begin playback by setting the player's rate to the requested rate. The rate
// is cached locally so rate() doesn't have to query the AVPlayer.
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPlay(%p)", this);
    if (!metaDataAvailable())
        return;

    // Delay callbacks so the rate-change KVO notifications arrive coalesced.
    setDelayCallbacks(true);
    m_cachedRate = requestedRate();
    [m_avPlayer.get() setRate:requestedRate()];
    setDelayCallbacks(false);
}
1309
// Pause playback by setting the player's rate to zero; mirror it in the cache.
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPause(%p)", this);
    if (!metaDataAvailable())
        return;

    // Delay callbacks so the rate-change KVO notifications arrive coalesced.
    setDelayCallbacks(true);
    m_cachedRate = 0;
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
1321
// Report the media duration, preferring the player item's value over the
// asset's. Indefinite durations (live streams) map to positive infinity.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will fetch the
    // answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();
    
    CMTime cmDuration;
    
    // Check the AVItem if we have one and it has loaded duration, some assets never report duration.
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        cmDuration = [m_avPlayerItem.get() duration];
    else
        cmDuration = [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return toMediaTime(cmDuration);

    if (CMTIME_IS_INDEFINITE(cmDuration))
        return MediaTime::positiveInfiniteTime();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %s", this, toString(MediaTime::invalidTime()).utf8().data());
    return MediaTime::invalidTime();
}
1346
// Report the current playback position, clamped to be non-negative. Returns
// zero before metadata is available or when the item's time is non-numeric.
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime itemTime = [m_avPlayerItem.get() currentTime];
    if (CMTIME_IS_NUMERIC(itemTime))
        // Clamp to zero (presumably guards against AVFoundation reporting
        // times slightly before the start — confirm).
        return std::max(toMediaTime(itemTime), MediaTime::zeroTime());

    return MediaTime::zeroTime();
}
1358
// Asynchronously seek the player item to |time| within the given tolerances;
// seekCompleted(finished) is delivered on the main thread when the seek ends.
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // Partially accumulated metadata cues would be wrong after a seek.
    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = toCMTime(time);
    CMTime cmBefore = toCMTime(negativeTolerance);
    CMTime cmAfter = toCMTime(positiveTolerance);

    // Weak pointer: drop the completion if this object is destroyed mid-seek.
    auto weakThis = createWeakPtr();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::seekToTime(%p) - calling seekToTime", this);

    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1387
// Set the player's volume. On iOS this is intentionally a no-op (volume is
// not applied to the AVPlayer here — presumably controlled by the system;
// confirm).
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS)
    UNUSED_PARAM(volume);
    return;
#else
    if (!m_avPlayer)
        return;

    [m_avPlayer.get() setVolume:volume];
#endif
}
1400
// Record and apply the muted state on the AVPlayer.
void MediaPlayerPrivateAVFoundationObjC::setMuted(bool muted)
{
    // Ignore no-op changes.
    if (m_muted == muted)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setMuted(%p) - set to %s", this, boolString(muted));

    m_muted = muted;

    // Without a player yet, just remember the state for later application.
    if (m_avPlayer)
        [m_avPlayer.get() setMuted:muted];
}
1415
// Beyond logging, nothing is done here — presumably caption visibility is
// applied through text track selection elsewhere; confirm.
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - set to %s", this, boolString(closedCaptionsVisible));
}
1425
// Apply a new playback rate to the AVPlayer and cache it for rate().
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    // Delay callbacks so the rate-change KVO notifications arrive coalesced.
    setDelayCallbacks(true);
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1433
// Report the last rate set on the player; 0 until metadata is available.
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    return metaDataAvailable() ? m_cachedRate : 0;
}
1441
// Choose the item's time-pitch algorithm: Spectral preserves pitch across
// rate changes, Varispeed does not.
void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
{
    if (!m_avPlayerItem)
        return;
    [m_avPlayerItem setAudioTimePitchAlgorithm:(preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];
}
1447
// Convert the cached loaded (buffered) CMTimeRanges into PlatformTimeRanges,
// skipping invalid or empty ranges. Empty result when there is no item.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto timeRanges = std::make_unique<PlatformTimeRanges>();

    if (!m_avPlayerItem)
        return timeRanges;

    for (NSValue *thisRangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (CMTIMERANGE_IS_VALID(timeRange) && !CMTIMERANGE_IS_EMPTY(timeRange))
            timeRanges->add(toMediaTime(timeRange.start), toMediaTime(CMTimeRangeGetEnd(timeRange)));
    }
    return timeRanges;
}
1462
// The earliest seekable time: the smallest start over all valid, non-empty
// cached seekable ranges; zero when there are none.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    bool foundValidRange = false;
    MediaTime earliest = MediaTime::positiveInfiniteTime();
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundValidRange = true;
        MediaTime rangeStart = toMediaTime(range.start);
        if (rangeStart < earliest)
            earliest = rangeStart;
    }
    return foundValidRange ? earliest : MediaTime::zeroTime();
}
1482
// The latest seekable time: the largest end over all valid, non-empty
// seekable ranges. Refreshes the cached ranges from the item when missing.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    MediaTime latest;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        MediaTime rangeEnd = toMediaTime(CMTimeRangeGetEnd(range));
        if (latest < rangeEnd)
            latest = rangeEnd;
    }
    return latest;
}
1500
// Returns the latest end time over all valid, non-empty loaded (buffered) ranges,
// or time zero when nothing has been loaded yet.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    MediaTime latestEnd;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        MediaTime rangeEnd = toMediaTime(CMTimeRangeGetEnd(range));
        if (rangeEnd > latestEnd)
            latestEnd = rangeEnd;
    }
    return latestEnd;
}
1519
// Sums totalSampleDataLength over all cached tracks; memoized in m_cachedTotalBytes.
// Returns 0 until metadata is available.
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    // Compute the sum only once; subsequent calls reuse the cached value.
    if (!m_cachedTotalBytes) {
        for (AVPlayerItemTrack *track in m_cachedTracks.get())
            m_cachedTotalBytes += [[track assetTrack] totalSampleDataLength];
    }

    return m_cachedTotalBytes;
}
1533
// Stores the AVAsset (or AVURLAsset) backing this player; called by the loading machinery.
void MediaPlayerPrivateAVFoundationObjC::setAsset(RetainPtr<id> asset)
{
    m_avAsset = asset;
}
1538
// Maps the AVAsset's asynchronous key-loading state onto the engine-neutral AssetStatus enum.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    // Inspect the loading status of every metadata key we asked the asset to load.
    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];
#if !LOG_DISABLED
        if (error)
            LOG(Media, "MediaPlayerPrivateAVFoundation::assetStatus - statusOfValueForKey failed for %s, error = %s", [keyName UTF8String], [[error localizedDescription] UTF8String]);
#endif

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading;// At least one key is not loaded yet.
        
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    // All keys loaded; distinguish a merely-loaded asset from one that is playable.
    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue])
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1567
// Returns the NSError code explaining why the asset's "playable" key failed to load,
// or 0 when there is no asset or no error.
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    if (!m_avAsset)
        return 0;

    NSError *error = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&error];
    // Messaging nil returns 0, so a nil error yields 0 here.
    return [error code];
}
1577
// Paints the current video frame into the given graphics context, preferring the
// AVPlayerItemVideoOutput path and falling back to the image-generator path.
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    // Delay observer callbacks so painting is not interrupted by state changes.
    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    m_videoFrameHasDrawn = true;
}
1598
// Best-effort paint entry point; draws only when not already rendering to a layer
// and a context renderer (image generator or video output) already exists.
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    // We can ignore the request if we are already rendering to a layer.
    if (currentRenderingMode() == MediaRenderingToLayer)
        return;

    // paint() is best effort, so only paint if we already have an image generator or video output available.
    if (!hasContextRenderer())
        return;

    paintCurrentFrameInContext(context, rect);
}
1614
// Draws the frame for the current time using the AVAssetImageGenerator snapshot path.
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext& context, const FloatRect& rect)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(%p)", this);

    RetainPtr<CGImageRef> image = createImageForTimeInRect(currentTime(), rect);
    if (image) {
        GraphicsContextStateSaver stateSaver(context);
        // Flip the context vertically: CGContextDrawImage draws with a bottom-left origin.
        context.translate(rect.x(), rect.y() + rect.height());
        context.scale(FloatSize(1.0f, -1.0f));
        context.setImageInterpolationQuality(InterpolationLow);
        IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
        CGContextDrawImage(context.platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
    }
}
1629
// Snapshots the frame at |time| via AVAssetImageGenerator and converts it to sRGB.
// May return null (copyCGImageAtTime errors are intentionally ignored).
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    // Cap the generated image to the paint rect so we never decode larger than needed.
    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    // Timescale 600 is the conventional CMTime timescale for video content.
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), sRGBColorSpaceRef()));

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return image;
}
1651
// Reports the set of MIME types AVFoundation can play, from the shared cache.
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& supportedTypes)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::getSupportedTypes");
    supportedTypes = AVFoundationMIMETypeCache::singleton().types();
} // Closing brace was missing in the original, leaving the function body unterminated.
1657
1658 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// FairPlay Streaming (both identifiers) and Clear Key are the only supported key systems.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringASCIICase(keySystem, "com.apple.fps")
        || equalIgnoringASCIICase(keySystem, "com.apple.fps.1_0")
        || equalIgnoringASCIICase(keySystem, "org.w3c.clearkey");
}
1665 #endif
1666
// Decides whether this engine can play the given type/codecs combination.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(MEDIA_SOURCE)
    // MSE and MediaStream content are handled by different engines.
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif
#if ENABLE(MEDIA_STREAM)
    if (parameters.isMediaStream)
        return MediaPlayer::IsNotSupported;
#endif
    if (isUnsupportedMIMEType(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The type must appear in the static list or in AVFoundation's own MIME-type cache.
    if (!staticMIMETypeList().contains(parameters.type) && !AVFoundationMIMETypeCache::singleton().types().contains(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    // With codecs present, ask AVFoundation about the full extended MIME type.
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type, (NSString *)parameters.codecs];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1691
// Returns true when the (legacy EME) key system / MIME type combination is supported.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (!keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        // (The original returned MediaPlayer::IsNotSupported — an enum, not a bool — which
        // only worked because that enumerator converts to false.)
        if (equalIgnoringASCIICase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringASCIICase(mimeType, "application/x-mpegurl"))
            return false;

        if (!keySystemIsSupported(keySystem))
            return false;

        if (!mimeType.isEmpty() && isUnsupportedMIMEType(mimeType))
            return false;

        // Types outside both the static list and the AVFoundation cache are unsupported.
        if (!mimeType.isEmpty() && !staticMIMETypeList().contains(mimeType) && !AVFoundationMIMETypeCache::singleton().types().contains(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1717
1718 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1719 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// Satisfies an AVAssetResourceLoadingRequest directly from in-memory key data,
// honoring the request's current offset and requested length.
static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
{
    if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
        [infoRequest setContentLength:keyData->byteLength()];
        [infoRequest setByteRangeAccessSupported:YES];
    }

    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
        long long start = [dataRequest currentOffset];
        // Clamp the end of the requested window to the available key data.
        long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);

        if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
            // Out-of-range request: fail it (no NSError supplied).
            [request finishLoadingWithError:nil];
            return;
        }

        ASSERT(start <= std::numeric_limits<int>::max());
        ASSERT(end <= std::numeric_limits<int>::max());
        RefPtr<ArrayBuffer> requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
        RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
        [dataRequest respondWithData:nsData.get()];
    }

    [request finishLoading];
}
1745 #endif
1746
// Decides whether WebKit will service an AVFoundation resource-loading request.
// Returns true when we take ownership of the request (key request or media load),
// false when the page declined a key request.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    // "skd" URLs are key requests; surface them to the page via keyNeeded().
    if (scheme == "skd") {
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
        // Little-endian 32-bit length prefix.
        initDataView->set<uint32_t>(0, keyURISize, true);

        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, 4, keyURI.length());
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

        RefPtr<Uint8Array> initData = Uint8Array::create(initDataBuffer, 0, initDataBuffer->byteLength());
        if (!player()->keyNeeded(initData.get()))
            return false;

        // Remember the request so later key delivery can complete it.
        m_keyURIToRequestMap.set(keyURI, avRequest);
        return true;
    } else if (scheme == "clearkey") {
        // Clear Key: the URL's resource specifier is the key ID; initData is its UTF-8 bytes.
        String keyID = [[[avRequest request] URL] resourceSpecifier];
        StringView keyIDView(keyID);
        CString utf8EncodedKeyId = UTF8Encoding().encode(keyIDView, URLEncodedEntitiesForUnencodables);

        RefPtr<Uint8Array> initData = Uint8Array::create(utf8EncodedKeyId.length());
        initData->setRange((JSC::Uint8Adaptor::Type*)utf8EncodedKeyId.data(), utf8EncodedKeyId.length(), 0);

        // Answer immediately from the key cache when possible.
        auto keyData = player()->cachedKeyForKeyId(keyID);
        if (keyData) {
            fulfillRequestWithKeyData(avRequest, keyData.get());
            return false;
        }

        if (!player()->keyNeeded(initData.get()))
            return false;

        m_keyURIToRequestMap.set(keyID, avRequest);
        return true;
    }
#endif

    // Everything else is loaded through WebKit's own resource loader.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1797
// Wraps the NSURLAuthenticationChallenge in WebCore's AuthenticationChallenge
// (via CFNetwork when that stack is in use) and lets the MediaPlayer client decide.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForResponseToAuthenticationChallenge(NSURLAuthenticationChallenge* nsChallenge)
{
#if USE(CFNETWORK)
    RefPtr<WebCoreNSURLAuthenticationChallengeClient> client = WebCoreNSURLAuthenticationChallengeClient::create(nsChallenge);
    RetainPtr<CFURLAuthChallengeRef> cfChallenge = adoptCF([nsChallenge _createCFAuthChallenge]);
    AuthenticationChallenge challenge(cfChallenge.get(), client.get());
#else
    AuthenticationChallenge challenge(nsChallenge);
#endif

    return player()->shouldWaitForResponseToAuthenticationChallenge(challenge);
}
1810
// AVFoundation cancelled this resource request; stop the loader servicing it, if any.
// (The original computed a `scheme` local from the request URL that was never used.)
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest);

    if (resourceLoader)
        resourceLoader->stopLoading();
}
1820
// AVFoundation is done with this resource request; drop our loader entry for it.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
1825 #endif
1826
// The AVFoundation engine is usable only when both frameworks load successfully.
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    return AVFoundationLibrary() && isCoreMediaFrameworkAvailable();
}
1831
// Would map a presentation time to a media-internal time; currently the identity.
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1840
// How long (seconds) a cached currentTime may be reused before re-querying AVFoundation.
double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 1010
    // iOS and OS X 10.10+: never reuse a cached time.
    return 0;
#else
    return 5;
#endif
}
1849
// Applies the aspect-ratio policy to the AVPlayerLayer's video gravity.
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayerManager->videoFullscreenLayer())
        return;
#endif

    // Wrap in a transaction with actions disabled so the change is not animated.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    NSString* gravity = shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize;
    [m_videoLayer.get() setVideoGravity:gravity];
    [CATransaction commit];
}
1868
// Returns the first track in the array whose -isEnabled is YES, or nil if none is.
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    for (AVAssetTrack *track in tracks) {
        if ([track isEnabled])
            return track;
    }
    return nil;
}
1878
// Re-derives hasVideo/hasAudio/hasClosedCaptions (and the track lists) whenever the
// asset's or player item's tracks collection changes, then fires characteristic
// change notifications as needed.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the previous primary-audio language so we can detect a change below.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    // Coalesce characteristic-change notifications until the end of this function.
    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif

        // Report the natural size with the track's preferred transform applied.
        presentationSizeDidChange(firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize());
    } else {
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        // Classify every enabled player-item track by its media type.
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // A selected option in a media-selection group also counts as audio/video.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
#endif
#endif

        // Always say we have video if the AVPlayerLayer is ready for display to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Prefer the legible media-selection group for caption discovery when available.
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Fall back to legacy closed-caption tracks when legible output is unavailable.
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    LOG(Media, "MediaPlayerPrivateAVFoundation:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s",
        this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));

    sizeChanged();

    // languageOfPrimaryAudioTrack() recomputes the language cleared above; notify on change.
    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    if (m_provider)
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
#endif

    setDelayCharacteristicsChangedNotification(false);
}
1984
1985 #if ENABLE(VIDEO_TRACK)
// Diffs the AVPlayerItemTracks of |trackType| against the previously-known items in
// |oldItems|: creates items for newly-appeared tracks via |itemFactory|, updates
// |oldItems| in place, and notifies |player| of each removal and addition.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Current tracks of the requested media type.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    // Tracks we already have wrapper items for.
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Partition the old items into removed and surviving ones.
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    // New item list = survivors + newly-created items.
    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Fire notifications only after oldItems reflects the final state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
2029
2030 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Media-selection-group variant: refreshes |group|'s options for |characteristics|,
// diffs them against |oldItems|, updates |oldItems| in place, and notifies |player|
// of each removed and added item.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, const Vector<String>& characteristics, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions(characteristics);

    // Collect the group's current options that are backed by an AVMediaSelectionOption.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOptionType* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        newSelectionOptions.add(option);
    }

    // Options we already have wrapper items for.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Items with no backing option, or whose option disappeared, are removed.
    for (auto& oldItem : oldItems) {
        if (!oldItem->mediaSelectionOption())
            removedItems.append(oldItem);
        else if (removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    // New item list = survivors + newly-created items.
    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);
    
    // Fire notifications only after oldItems reflects the final state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);
    
    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
2090 #endif
2091
// Recomputes m_audioTracks from the audible media-selection group when available,
// otherwise from the player item's audio tracks, notifying the MediaPlayer of changes.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    Vector<String> characteristics = player()->preferredAudioCharacteristics();
    // Lazily create the audible media-selection group wrapper.
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, characteristics);
    }

    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, characteristics, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
    else
#endif
        determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateAudioTracks(%p) - audio track count was %lu, is %lu", this, count, m_audioTracks.size());
#endif
}
2118
// Recomputes m_videoTracks from the player item's video tracks and, where available,
// the visual media-selection group, notifying the MediaPlayer of changes.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create the visual media-selection group wrapper.
    if (!m_visualGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, Vector<String>());
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, Vector<String>(), &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
#endif

    // Refresh the VIDEO tracks. The original iterated m_audioTracks here — a
    // copy/paste slip from updateAudioTracks() that left video track properties stale.
    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateVideoTracks(%p) - video track count was %lu, is %lu", this, count, m_videoTracks.size());
#endif
}
2144
// A text-track representation (rendered captions layer) is only needed in fullscreen.
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    if (m_videoFullscreenLayerManager->videoFullscreenLayer())
        return true;
#endif
    return false;
}
2153
// Keeps the caption layer's frame in sync with the fullscreen video rect.
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    if (!m_videoFullscreenLayerManager->videoFullscreenLayer() || !m_textTrackRepresentationLayer)
        return;

    // Prefer the video layer's actual video rect; fall back to the whole fullscreen frame.
    FloatRect videoFullscreenFrame = m_videoFullscreenLayerManager->videoFullscreenFrame();
    CGRect textFrame = m_videoLayer ? [m_videoLayer videoRect] : CGRectMake(0, 0, videoFullscreenFrame.width(), videoFullscreenFrame.height());
    [m_textTrackRepresentationLayer setFrame:textFrame];
#endif
}
2165
// Installs (or removes) the platform layer that renders text-track cues,
// reparenting it under the fullscreen video layer when one is present.
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    PlatformLayer* representationLayer = representation ? representation->platformLayer() : nil;
    if (representationLayer == m_textTrackRepresentationLayer) {
        // Same layer as before; just make sure its bounds are current.
        syncTextTrackBounds();
        return;
    }

    if (m_textTrackRepresentationLayer)
        [m_textTrackRepresentationLayer removeFromSuperlayer];

    m_textTrackRepresentationLayer = representationLayer;

    if (m_videoFullscreenLayerManager->videoFullscreenLayer() && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayerManager->videoFullscreenLayer() addSublayer:m_textTrackRepresentationLayer.get()];
    }

#else
    UNUSED_PARAM(representation);
#endif
}
2189 #endif // ENABLE(VIDEO_TRACK)
2190
#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
// Lazily creates the Web Audio source provider wired to the current player item,
// preselecting the first enabled audible asset track.
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    if (!m_provider) {
        m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }

    return m_provider.get();
}
#endif
2202
// Propagates the cached presentation size as the player's natural size.
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (!m_avAsset)
        return;

    setNaturalSize(m_cachedPresentationSize);
}
2210     
// True when the resolved media URL shares scheme/host/port with the requested URL,
// i.e. loading did not cross origins (for example via redirects).
bool MediaPlayerPrivateAVFoundationObjC::hasSingleSecurityOrigin() const 
{
    // Cannot answer until the asset's resolvedURL key has finished loading.
    if (!m_avAsset || [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] != AVKeyValueStatusLoaded)
        return false;
    
    Ref<SecurityOrigin> resolvedOrigin(SecurityOrigin::create(resolvedURL()));
    Ref<SecurityOrigin> requestedOrigin(SecurityOrigin::createFromString(assetURL()));
    return resolvedOrigin.get().isSameSchemeHostPort(&requestedOrigin.get());
}
2220
// Asks the WebCoreNSURLSession used for media loads whether every response passed
// CORS checks; only answerable when the NSURLSession-based loader is enabled.
bool MediaPlayerPrivateAVFoundationObjC::didPassCORSAccessCheck() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED > 101100
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    if (!Settings::isAVFoundationNSURLSessionEnabled()
        || ![resourceLoader respondsToSelector:@selector(URLSession)])
        return false;

    // respondsToSelector: guards against OS versions lacking these SPI methods.
    WebCoreNSURLSession *session = (WebCoreNSURLSession *)resourceLoader.URLSession;
    if ([session respondsToSelector:@selector(didPassCORSAccessChecks)])
        return session.didPassCORSAccessChecks;
#endif
    return false;
}
2235
2236 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Creates the AVPlayerItemVideoOutput used for pixel-buffer painting and attaches
// it to the current player item. No-op without an item or if an output already exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // VideoToolbox path: nil attributes let AVFoundation pick the pixel format.
    NSDictionary* attributes = nil;
#else
    NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                nil];
#endif
    m_videoOutput = adoptNS([[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    // New-frame notifications are delivered on the shared pull-delegate queue.
    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p) - returning %p", this, m_videoOutput.get());
}
2259
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput(%p) - destroying  %p", this, m_videoOutput.get());

    // Detach the output from the player item before dropping our reference.
    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];

    m_videoOutput = 0;
}
2271
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::createPixelBuffer()
{
    // Pulls the pixel buffer for the item's current time from the video output,
    // creating the output on demand. Returns null when no new frame is ready
    // (the buffer for this time may already have been retrieved; see updateLastImage).
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return 0;

    return adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
}
2285
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    // No item means no frames; a previously captured image also counts as an
    // available frame.
    if (!m_avPlayerItem)
        return false;
    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    CMTime itemTime = [m_avPlayerItem currentTime];
    return [m_videoOutput hasNewPixelBufferForItemTime:itemTime];
}
2299
void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
{
    // Pull the newest decoded frame and convert it to the CGImage cached in
    // m_lastImage for painting.
    RetainPtr<CVPixelBufferRef> pixelBuffer = createPixelBuffer();

    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
    // for the requested time has already been retrieved. In this case, the last valid image (if any)
    // should be displayed.
    if (!pixelBuffer)
        return;

    if (!m_pixelBufferConformer) {
#if USE(VIDEOTOOLBOX)
        // Conform incoming buffers to 32BGRA before image creation.
        NSDictionary *attributes = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#else
        NSDictionary *attributes = nil;
#endif
        m_pixelBufferConformer = std::make_unique<PixelBufferConformerCV>((CFDictionaryRef)attributes);
    }

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    m_lastImage = m_pixelBufferConformer->createImageFromPixelBuffer(pixelBuffer.get());

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateLastImage(%p) - creating buffer took %.4f", this, narrowPrecisionToFloat(duration));
#endif
}
2330
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext& context, const FloatRect& outputRect)
{
    // Block until a frame is available (bounded wait, see
    // waitForVideoOutputMediaDataWillChange) so the first paint is not blank.
    if (m_videoOutput && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();

    updateLastImage();

    if (!m_lastImage)
        return;

    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(%p)", this);

    GraphicsContextStateSaver stateSaver(context);
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    // Draw in the track's preferred-transform space (e.g. rotation): map the
    // destination rect through the inverse, then concatenate the transform.
    AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
    FloatRect transformedOutputRect = videoTransform.inverse().valueOr(AffineTransform()).mapRect(outputRect);

    context.concatCTM(videoTransform);
    context.drawNativeImage(m_lastImage.get(), imageRect.size(), transformedOutputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();

}
2361
void MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput(%p)", this);

    // Lazily create a second video output whose buffers are IOSurface-backed
    // and FBO-compatible, for the GL texture upload path.
    if (!m_avPlayerItem || m_openGLVideoOutput)
        return;

#if PLATFORM(IOS)
    NSDictionary* attributes = @{(NSString *)kCVPixelBufferIOSurfaceOpenGLESFBOCompatibilityKey: @YES};
#else
    NSDictionary* attributes = @{(NSString *)kCVPixelBufferIOSurfaceOpenGLFBOCompatibilityKey: @YES};
#endif
    m_openGLVideoOutput = adoptNS([[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:attributes]);
    ASSERT(m_openGLVideoOutput);

    [m_avPlayerItem.get() addOutput:m_openGLVideoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput(%p) - returning %p", this, m_openGLVideoOutput.get());
}
2381
void MediaPlayerPrivateAVFoundationObjC::destroyOpenGLVideoOutput()
{
    // Tear down the OpenGL-path video output, detaching it from the player
    // item first. Mirrors destroyVideoOutput().
    if (!m_openGLVideoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_openGLVideoOutput.get()];
    // Fixed: previously logged m_videoOutput instead of m_openGLVideoOutput.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyOpenGLVideoOutput(%p) - destroying  %p", this, m_openGLVideoOutput.get());

    m_openGLVideoOutput = 0;
}
2393
void MediaPlayerPrivateAVFoundationObjC::updateLastOpenGLImage()
{
    if (!m_openGLVideoOutput)
        return;

    // Map the host clock onto the item's timeline and capture the pixel
    // buffer only when a fresh frame is available for that time.
    CMTime outputItemTime = [m_openGLVideoOutput itemTimeForHostTime:CACurrentMediaTime()];
    if ([m_openGLVideoOutput hasNewPixelBufferForItemTime:outputItemTime])
        m_lastOpenGLImage = adoptCF([m_openGLVideoOutput copyPixelBufferForItemTime:outputItemTime itemTimeForDisplay:nil]);
}
2405
bool MediaPlayerPrivateAVFoundationObjC::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    // Copies the current video frame into the caller's GL texture. Only the
    // non-flipped, non-premultiplied path is supported.
    if (flipY || premultiplyAlpha)
        return false;

    ASSERT(context);

    if (!m_openGLVideoOutput)
        createOpenGLVideoOutput();

    updateLastOpenGLImage();

    if (!m_lastOpenGLImage)
        return false;

    size_t width = CVPixelBufferGetWidth(m_lastOpenGLImage.get());
    size_t height = CVPixelBufferGetHeight(m_lastOpenGLImage.get());

    // Lazily create the texture cache that turns pixel buffers into GL textures.
    if (!m_textureCache) {
        m_textureCache = TextureCacheCV::create(*context);
        if (!m_textureCache)
            return false;
    }

    RetainPtr<CVOpenGLTextureRef> videoTexture = m_textureCache->textureFromImage(m_lastOpenGLImage.get(), outputTarget, level, internalFormat, format, type);

    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierCV>(*context);

    return m_videoTextureCopier->copyVideoTextureToPlatformTexture(videoTexture.get(), width, height, outputTexture, outputTarget, level, internalFormat, format, type, premultiplyAlpha, flipY);
}
2437
NativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    // Refresh the cached frame (if a new one is available) before handing it out.
    updateLastImage();
    return m_lastImage;
}
2443
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    // Ask the video output to notify us when new media data arrives, then block
    // this thread until outputMediaDataWillChange() signals the semaphore.
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for 1 second.
    long result = dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(0, 1 * NSEC_PER_SEC));

    // Non-zero result means the wait timed out rather than being signaled.
    if (result)
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange(%p) timed out", this);
}
2457
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutputType *)
{
    // Delivered on the pull-delegate queue (see createVideoOutput); unblocks
    // waitForVideoOutputMediaDataWillChange().
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
2462 #endif
2463
2464 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    // Removes and returns the pending loading request for keyURI (null if none).
    return m_keyURIToRequestMap.take(keyURI);
}
2469
void MediaPlayerPrivateAVFoundationObjC::keyAdded()
{
    // A key became available: fulfill any pending resource-loading requests
    // whose key data is now cached, then drop those entries from the map.
    Vector<String> completedKeyIds;

    for (auto& entry : m_keyURIToRequestMap) {
        auto keyData = player()->cachedKeyForKeyId(entry.key);
        if (!keyData)
            continue;

        fulfillRequestWithKeyData(entry.value.get(), keyData.get());
        completedKeyIds.append(entry.key);
    }

    // Removal happens after iteration so the map is not mutated mid-loop.
    for (auto& keyId : completedKeyIds)
        m_keyURIToRequestMap.remove(keyId);
}
2489
void MediaPlayerPrivateAVFoundationObjC::removeSession(CDMSession& session)
{
    // Only the session we vended from createSession() may be removed; drop our
    // weak reference to it.
    ASSERT_UNUSED(session, &session == m_session);
    m_session = nullptr;
}
2495
std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem, CDMSessionClient* client)
{
    // Vends a CDM session for a supported key system. The caller owns the
    // session; we keep only a weak pointer (cleared via removeSession()).
    if (!keySystemIsSupported(keySystem))
        return nullptr;
    auto session = std::make_unique<CDMSessionAVFoundationObjC>(this, client);
    m_session = session->createWeakPtr();
    return WTFMove(session);
}
2504
void MediaPlayerPrivateAVFoundationObjC::outputObscuredDueToInsufficientExternalProtectionChanged(bool newValue)
{
    // Report an 'HDCP' error to the CDM session when output becomes obscured
    // because the attached display lacks sufficient content protection.
    if (m_session && newValue)
        m_session->playerDidReceiveError([NSError errorWithDomain:@"com.apple.WebKit" code:'HDCP' userInfo:nil]);
}
2510 #endif
2511
2512 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
    // Reconcile m_textTracks with the legacy (pre-media-selection) closed
    // caption tracks currently on the player item: existing tracks are kept,
    // new ones appended, and unmatched ones passed on for removal.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        // Iterate backwards so remove() does not disturb unvisited indices.
        bool newCCTrack = true;
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            // Fixed: index into removedTextTracks, not m_textTracks. The index
            // belongs to removedTextTracks, whose entries shift after remove();
            // the old code could compare against the wrong track. This matches
            // the pattern in processMediaSelectionOptions().
            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(removedTextTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;
        
        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    // Whatever remains in removedTextTracks had no matching player item track.
    processNewAndRemovedTextTracks(removedTextTracks);
}
2547 #endif
2548
NSArray* MediaPlayerPrivateAVFoundationObjC::safeAVAssetTracksForAudibleMedia()
{
    // Only query tracks once AVFoundation reports the "tracks" key as loaded.
    if (!m_avAsset || [m_avAsset.get() statusOfValueForKey:@"tracks" error:NULL] != AVKeyValueStatusLoaded)
        return nil;

    return [m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible];
}
2559
2560 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    // Selection groups are usable only after the asset has loaded its
    // availableMediaCharacteristicsWithMediaSelectionOptions key.
    return m_avAsset && [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2571
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    // The legible (caption/subtitle) group, or nil before selection metadata loads.
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible] : nil;
}
2579
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    // The audible group, or nil before selection metadata loads.
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible] : nil;
}
2587
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    // The visual group, or nil before selection metadata loads.
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual] : nil;
}
2595
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    // Reconcile m_textTracks with the legible media-selection options on the
    // asset: keep matched tracks, append new ones, and report the rest removed.
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        // Iterate backwards so remove() does not disturb unvisited indices.
        // Legacy closed-caption tracks are handled elsewhere and are skipped.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2651
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    // Create the in-band metadata text track on first use and register it with
    // the player; subsequent calls are no-ops.
    if (m_metadataTrack)
        return;

    m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
    m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
    player()->addTextTrack(m_metadataTrack);
}
2661
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    ASSERT(time >= MediaTime::zeroTime());

    // Forward the cue payload to the selected text track, if any.
    if (m_currentTextTrack)
        m_currentTextTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), reinterpret_cast<CFArrayRef>(nativeSamples), time);
}
2671
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::flushCues(%p)", this);

    // Reset cue state on the active text track, if any.
    if (m_currentTextTrack)
        m_currentTextTrack->resetCueValues();
}
2681 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2682
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    // Selects (or, with null, deselects) the text track AVFoundation renders,
    // routing through the appropriate mechanism for the track's category.
    if (m_currentTextTrack == track)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");
        
    m_currentTextTrack = track;

    if (track) {
        // Legacy closed captions use the player-level toggle; all other
        // categories select a media option in the legible group.
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
        // Deselect both mechanisms so nothing is displayed.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
    }

}
2711
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    // Returns the BCP-ish language of the primary audio track, caching the
    // result in m_languageOfPrimaryAudioTrack until it is invalidated elsewhere.
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        // Zero or multiple audio tracks: no single "primary" language exists.
        m_languageOfPrimaryAudioTrack = emptyString();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - %lu audio tracks, returning emptyString()", this, static_cast<unsigned long>(tracks ? [tracks count] : 0));
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

#if !LOG_DISABLED
    if (m_languageOfPrimaryAudioTrack == emptyString())
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
    else
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
#endif

    return m_languageOfPrimaryAudioTrack;
}
2753
2754 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    // True when playback is routed to an external/wireless target. On Mac this
    // depends on the target type; on iOS only on AVPlayer's external playback.
    bool wirelessTarget = false;

#if !PLATFORM(IOS)
    if (m_playbackTarget) {
        if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation)
            wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
        else
            wirelessTarget = m_shouldPlayToPlaybackTarget && m_playbackTarget->hasActiveRoute();
    }
#else
    wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
#endif

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless(%p) - returning %s", this, boolString(wirelessTarget));

    return wirelessTarget;
}
2774
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    // Classifies the current external playback route. On iOS the device type
    // comes from the platform wk* helper; on Mac only AirPlay is reported.
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

#if PLATFORM(IOS)
    switch (wkExernalDeviceTypeForPlayer(m_avPlayer.get())) {
    case wkExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case wkExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case wkExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;

#else
    return MediaPlayer::TargetTypeAirPlay;
#endif
}
2797
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    // Human-readable name of the external playback device: from the playback
    // target on Mac, from the platform wk* helper on iOS.
    if (!m_avPlayer)
        return emptyString();

    String wirelessTargetName;
#if !PLATFORM(IOS)
    if (m_playbackTarget)
        wirelessTargetName = m_playbackTarget->deviceName();
#else
    wirelessTargetName = wkExernalDeviceDisplayNameForPlayer(m_avPlayer.get());
#endif
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName(%p) - returning %s", this, wirelessTargetName.utf8().data());

    return wirelessTargetName;
}
2814
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    // Without a player, answer from the cached flag; otherwise refresh the
    // cache from AVPlayer's allowsExternalPlayback before answering.
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;

    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled(%p) - returning %s", this, boolString(!m_allowsWirelessVideoPlayback));

    return !m_allowsWirelessVideoPlayback;
}
2825
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(%p) - %s", this, boolString(disabled));
    // Record the setting even when no AVPlayer exists yet.
    m_allowsWirelessVideoPlayback = !disabled;
    if (!m_avPlayer)
        return;

    // Delay callbacks around the property change so resulting KVO
    // notifications are deferred.
    setDelayCallbacks(true);
    [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
    setDelayCallbacks(false);
}
2837
2838 #if !PLATFORM(IOS)
void MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
{
    // Adopt the new playback target. Only AVFoundation-backed targets carry an
    // AVOutputContext (see the Mock case in setShouldPlayToPlaybackTarget).
    m_playbackTarget = WTFMove(target);

    m_outputContext = m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation ? toMediaPlaybackTargetMac(m_playbackTarget.get())->outputContext() : nullptr;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(%p) - target = %p, device name = %s", this, m_outputContext.get(), m_playbackTarget->deviceName().utf8().data());

    // A target without an active route cannot be played to.
    if (!m_playbackTarget->hasActiveRoute())
        setShouldPlayToPlaybackTarget(false);
}
2850
void MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(bool shouldPlay)
{
    // Start or stop routing playback to the current target. For AVFoundation
    // targets this swaps the AVPlayer's output context; for Mock targets it
    // just schedules the wireless-change notification.
    if (m_shouldPlayToPlaybackTarget == shouldPlay)
        return;

    m_shouldPlayToPlaybackTarget = shouldPlay;

    if (!m_playbackTarget)
        return;

    if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation) {
        AVOutputContext *newContext = shouldPlay ? m_outputContext.get() : nil;

        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(%p) - target = %p, shouldPlay = %s", this, newContext, boolString(shouldPlay));

        if (!m_avPlayer)
            return;

        // Avoid churning AVPlayer when the context is already in effect.
        RetainPtr<AVOutputContext> currentContext = m_avPlayer.get().outputContext;
        if ((!newContext && !currentContext.get()) || [currentContext.get() isEqual:newContext])
            return;

        setDelayCallbacks(true);
        m_avPlayer.get().outputContext = newContext;
        setDelayCallbacks(false);

        return;
    }

    ASSERT(m_playbackTarget->targetType() == MediaPlaybackTarget::Mock);

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(%p) - target = {Mock}, shouldPlay = %s", this, boolString(shouldPlay));

    // Mock targets have no real route; just notify asynchronously on the main
    // thread, guarding against this object being destroyed in the interim.
    setDelayCallbacks(true);
    auto weakThis = createWeakPtr();
    scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis] {
        if (!weakThis)
            return;
        weakThis->playbackTargetIsWirelessDidChange();
    }));
    setDelayCallbacks(false);
}
2893 #endif // !PLATFORM(IOS)
2894
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
    if (!m_avPlayer)
        return;

#if PLATFORM(IOS)
    // Use external playback while an external screen is active only when the
    // element is in standard fullscreen mode.
    [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:player()->fullscreenMode() & MediaPlayer::VideoFullscreenModeStandard];
#endif
}
2904 #endif
2905
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    // Cache the AVPlayerItem status and re-evaluate ready/network state.
    m_cachedItemStatus = status;

    updateStates();
}
2912
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    // Balanced by playbackLikelyToKeepUpDidChange(); while the counter is
    // non-zero, state updates are deferred.
    m_pendingStatusChanges++;
}
2917
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    // Only recompute state once all in-flight will/did change pairs resolve.
    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2926
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    // Balanced by playbackBufferEmptyDidChange(); defers state updates.
    m_pendingStatusChanges++;
}
2931
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    // Only recompute state once all in-flight will/did change pairs resolve.
    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2940
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    // Balanced by playbackBufferFullDidChange(); defers state updates.
    m_pendingStatusChanges++;
}
2945
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    // Only recompute state once all in-flight will/did change pairs resolve.
    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2954
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray> seekableRanges)
{
    // Cache the KVO-delivered ranges, then propagate the change.
    m_cachedSeekableRanges = seekableRanges;

    seekableTimeRangesChanged();
    updateStates();
}
2962
void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray> loadedRanges)
{
    // Cache the KVO-delivered ranges, then propagate the change.
    m_cachedLoadedRanges = loadedRanges;

    loadedTimeRangesChanged();
    updateStates();
}
2970
void MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange(bool isReady)
{
    m_cachedIsReadyForDisplay = isReady;
    // If we did not think we had video, re-examine tracks now that a frame is
    // displayable.
    if (!hasVideo() && isReady)
        tracksChanged();
    updateStates();
}
2978
void MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange(bool)
{
    // A track's enabled state flipped; re-derive track lists and player state.
    tracksChanged();
    updateStates();
}
2984
void MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(bool shouldBuffer)
{
    // Enables/disables buffering by attaching or detaching the player item.
    // Fixed: LOG previously named the method "shouldBufferData".
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(%p) - %s", this, boolString(shouldBuffer));
    if (m_shouldBufferData == shouldBuffer)
        return;

    m_shouldBufferData = shouldBuffer;
    
    if (!m_avPlayer)
        return;

    // Detaching the item from the player stops AVFoundation from buffering;
    // reattaching resumes it.
    setAVPlayerItem(shouldBuffer ? m_avPlayerItem.get() : nil);
}
2998
2999 #if ENABLE(DATACUE_VALUE)
// Maps an AVFoundation metadata key space to the metadata-type identifier
// exposed on timed metadata cues. Returns the empty atom for unknown spaces.
static const AtomicString& metadataType(NSString *avMetadataKeySpace)
{
    static NeverDestroyed<const AtomicString> quickTimeUserData("com.apple.quicktime.udta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> isoUserData("org.mp4ra", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> quickTimeMetadata("com.apple.quicktime.mdta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> iTunesMetadata("com.apple.itunes", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> id3Metadata("org.id3", AtomicString::ConstructFromLiteral);

    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeUserData])
        return quickTimeUserData;
    // AVMetadataKeySpaceISOUserData is nil-checked first — presumably because the
    // constant may be unavailable on older systems (soft-linked); confirm.
    if (AVMetadataKeySpaceISOUserData && [avMetadataKeySpace isEqualToString:AVMetadataKeySpaceISOUserData])
        return isoUserData;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeMetadata])
        return quickTimeMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceiTunes])
        return iTunesMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceID3])
        return id3Metadata;

    return emptyAtom;
}
3021 #endif
3022
// Handles a KVO-delivered batch of timed metadata items: caches the batch,
// closes out any still-pending cues, and adds one DataCue per item.
void MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(RetainPtr<NSArray> metadata, const MediaTime& mediaTime)
{
    m_currentMetaData = metadata && ![metadata isKindOfClass:[NSNull class]] ? metadata : nil;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(%p) - adding %i cues at time %s", this, m_currentMetaData ? static_cast<int>([m_currentMetaData.get() count]) : 0, toString(mediaTime).utf8().data());

#if ENABLE(DATACUE_VALUE)
    if (seeking())
        return;

    if (!m_metadataTrack)
        processMetadataTrack();

    // An empty payload (nil or NSNull) means the previously started cues end now.
    if (!metadata || [metadata isKindOfClass:[NSNull class]]) {
        m_metadataTrack->updatePendingCueEndTimes(mediaTime);
        return;
    }

    // Close any incomplete cues at the start time of the earliest new item.
    // Negative item times are clamped to zero.
    MediaTime earliestStartTime = MediaTime::positiveInfiniteTime();
    for (AVMetadataItemType *metadataItem in m_currentMetaData.get())
        earliestStartTime = std::min(earliestStartTime, std::max(toMediaTime(metadataItem.time), MediaTime::zeroTime()));
    m_metadataTrack->updatePendingCueEndTimes(earliestStartTime);

    for (AVMetadataItemType *metadataItem in m_currentMetaData.get()) {
        MediaTime cueStart = std::max(toMediaTime(metadataItem.time), MediaTime::zeroTime());
        // Items without a valid duration stay open (infinite end) until a later
        // batch closes them via updatePendingCueEndTimes().
        MediaTime cueEnd = CMTIME_IS_VALID(metadataItem.duration) ? cueStart + toMediaTime(metadataItem.duration) : MediaTime::positiveInfiniteTime();

        AtomicString cueType = metadataItem.keySpace ? metadataType(metadataItem.keySpace) : nullAtom;

        m_metadataTrack->addDataCue(cueStart, cueEnd, SerializedPlatformRepresentationMac::create(metadataItem), cueType);
    }
#endif
}
3064
// KVO entry point for the AVPlayerItem's "tracks" property. Rebuilds
// m_cachedTracks, filtering out streaming tracks that are represented by a
// media-selection group, and moves the "enabled" observers from the old track
// set to the new one.
void MediaPlayerPrivateAVFoundationObjC::tracksDidChange(RetainPtr<NSArray> tracks)
{
    // Stop observing "enabled" on the tracks we were previously watching
    // before the cached list is replaced below.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];

    NSArray *assetTracks = [m_avAsset tracks];

    m_cachedTracks = [tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id obj, NSUInteger, BOOL*) {
        AVAssetTrack* assetTrack = [obj assetTrack];

        // Tracks that belong to the asset itself are always kept.
        if ([assetTracks containsObject:assetTrack])
            return YES;

        // Track is a streaming track. Omit if it belongs to a valid AVMediaSelectionGroup.
        if (!hasLoadedMediaSelectionGroups())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicAudible] && safeMediaSelectionGroupForAudibleMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicVisual] && safeMediaSelectionGroupForVisualMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible] && safeMediaSelectionGroupForLegibleMedia())
            return NO;

        return YES;
    }]];

    // Observe "enabled" on the new track set; changes are dispatched through
    // observeValueForKeyPath: with the PlayerItemTrack context.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track addObserver:m_objcObserver.get() forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayerItemTrack];

    // Reset the cached byte count — it is derived from the track list, so it
    // is presumably recomputed on demand elsewhere.
    m_cachedTotalBytes = 0;

    tracksChanged();
    updateStates();
}
3102
// KVO entry point for the item's "hasEnabledAudio" key path: cache the new
// value, then let the generic track/state machinery react.
void MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange(bool hasEnabledAudio)
{
    m_cachedHasEnabledAudio = hasEnabledAudio;
    tracksChanged();
    updateStates();
}
3110
// KVO entry point for the item's "presentationSize" key path: cache the
// KVO-delivered size, then propagate the size/state change.
void MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange(FloatSize size)
{
    m_cachedPresentationSize = size;
    sizeChanged();
    updateStates();
}
3118
// KVO entry point for the item's "duration" key path: record the new value
// and invalidate the previously cached duration.
void MediaPlayerPrivateAVFoundationObjC::durationDidChange(const MediaTime& duration)
{
    m_cachedDuration = duration;
    invalidateCachedDuration();
}
3125
// KVO entry point for the AVPlayer's "rate" key path: cache the rate, refresh
// derived state, then notify listeners of the rate change (in that order).
void MediaPlayerPrivateAVFoundationObjC::rateDidChange(double rate)
{
    m_cachedRate = rate;
    updateStates();
    rateChanged();
}
3133     
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
// KVO entry point for the AVPlayer's externalPlaybackActive /
// allowsExternalPlayback key paths; forwards to the generic
// wireless-target-changed handler.
void MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange()
{
    playbackTargetIsWirelessChanged();
}
#endif
3140
// Caches the KVO-delivered value of the item's "canPlayFastForward" key path.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange(bool canPlayFastForward)
{
    m_cachedCanPlayFastForward = canPlayFastForward;
}
3145
// Caches the KVO-delivered value of the item's "canPlayFastReverse" key path.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange(bool canPlayFastReverse)
{
    m_cachedCanPlayFastReverse = canPlayFastReverse;
}
3150
// Returns the asset's resolved URL, but only once AVFoundation has finished
// loading the "resolvedURL" property; otherwise defers to the base class.
URL MediaPlayerPrivateAVFoundationObjC::resolvedURL() const
{
    bool assetURLIsLoaded = m_avAsset && [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] == AVKeyValueStatusLoaded;
    if (!assetURLIsLoaded)
        return MediaPlayerPrivateAVFoundation::resolvedURL();

    return URL([m_avAsset resolvedURL]);
}
3158
// AVAsset key names this player loads/observes. Returns an intentionally
// leaked, process-lifetime array (this file is not ARC).
// Improvement: single-statement static initialization for consistency with
// assetTrackMetadataKeyNames() and playerKVOProperties(); unlike the previous
// unsynchronized check-then-assign lazy init, this is thread-safe under
// C++11 static local initialization rules.
NSArray* assetMetadataKeyNames()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"duration",
        @"naturalSize",
        @"preferredTransform",
        @"preferredVolume",
        @"preferredRate",
        @"playable",
        @"resolvedURL",
        @"tracks",
        @"availableMediaCharacteristicsWithMediaSelectionOptions",
        nil];
    return keys;
}
3176
// AVPlayerItem key paths observed via KVO; each is handled by
// WebCoreAVFMovieObserver's observeValueForKeyPath: under the player-item
// context. Returns an intentionally leaked, process-lifetime array.
// Improvement: single-statement static initialization for consistency with
// assetTrackMetadataKeyNames() and playerKVOProperties(); unlike the previous
// unsynchronized check-then-assign lazy init, this is thread-safe under
// C++11 static local initialization rules.
NSArray* itemKVOProperties()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"presentationSize",
        @"status",
        @"asset",
        @"tracks",
        @"seekableTimeRanges",
        @"loadedTimeRanges",
        @"playbackLikelyToKeepUp",
        @"playbackBufferFull",
        @"playbackBufferEmpty",
        @"duration",
        @"hasEnabledAudio",
        @"timedMetadata",
        @"canPlayFastForward",
        @"canPlayFastReverse",
        nil];
    return keys;
}
3199
// Key names of AVAssetTrack properties used by this player (callers are not
// visible in this chunk). Intentionally leaked, process-lifetime array.
NSArray* assetTrackMetadataKeyNames()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"totalSampleDataLength", @"mediaType", @"enabled", @"preferredTransform", @"naturalSize", nil];
    return keys;
}
3205
// AVPlayer key paths observed via KVO; each is handled by
// WebCoreAVFMovieObserver's observeValueForKeyPath: under the player context.
// The list varies with build-time feature flags. Intentionally leaked,
// process-lifetime array.
NSArray* playerKVOProperties()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"rate",
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
                            @"externalPlaybackActive", @"allowsExternalPlayback",
#endif
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
                            @"outputObscuredDueToInsufficientExternalProtection",
#endif
                            nil];
    return keys;
}
3218 } // namespace WebCore
3219
@implementation WebCoreAVFMovieObserver

// The observer holds a raw back-pointer to the player; the player is expected
// to call -disconnect before it is destroyed so no stale pointer is used.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

// Breaks the link to the player and cancels any pending perform-requests
// targeting this observer.
- (void)disconnect
{
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    m_callback = 0;
}

// Forwards "asset metadata finished loading" to the player on the main thread.
- (void)metadataLoaded
{
    if (!m_callback)
        return;

    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
}

// Notification handler for playback reaching the end of the item
// (the notification registration is not visible in this chunk).
- (void)didEnd:(NSNotification *)unusedNotification
{
    UNUSED_PARAM(unusedNotification);
    if (!m_callback)
        return;
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
}

// Central KVO dispatcher. `context` identifies the observed object kind
// (player layer, item track, player item, or player) and `keyPath` selects
// which MediaPlayerPrivateAVFoundationObjC member to invoke; the bound call
// is then hopped to the main thread, guarded by a WeakPtr to the player.
- (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context
{
    UNUSED_PARAM(object);
    id newValue = [change valueForKey:NSKeyValueChangeNewKey];

    if (!m_callback)
        return;

    // True for the "prior" notification delivered before the value actually
    // changes (NSKeyValueObservingOptionPrior).
    bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];

#if !LOG_DISABLED
    if (willChange)
        LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - will change, keyPath = %s", self, [keyPath UTF8String]);
    else {
        RetainPtr<NSString> valueString = adoptNS([[NSString alloc] initWithFormat:@"%@", newValue]);
        LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - did change, keyPath = %s, value = %s", self, [keyPath UTF8String], [valueString.get() UTF8String]);
    }
#endif

    std::function<void ()> function;

    if (context == MediaPlayerAVFoundationObservationContextAVPlayerLayer) {
        if ([keyPath isEqualToString:@"readyForDisplay"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange, m_callback, [newValue boolValue]);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItemTrack) {
        if ([keyPath isEqualToString:@"enabled"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange, m_callback, [newValue boolValue]);
    }

    // "Will change" notifications for the item's buffering-related properties.
    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && willChange) {
        if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange, m_callback);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && !willChange) {
        // A value changed for an AVPlayerItem
        if ([keyPath isEqualToString:@"status"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange, m_callback, [newValue intValue]);
        else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"asset"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::setAsset, m_callback, RetainPtr<id>(newValue));
        else if ([keyPath isEqualToString:@"loadedTimeRanges"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"seekableTimeRanges"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"tracks"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::tracksDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"hasEnabledAudio"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"presentationSize"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange, m_callback, FloatSize([newValue sizeValue]));
        else if ([keyPath isEqualToString:@"duration"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::durationDidChange, m_callback, toMediaTime([newValue CMTimeValue]));
        else if ([keyPath isEqualToString:@"timedMetadata"] && newValue) {
            // Metadata cues need the item's current time; non-numeric times
            // leave `now` at zero, and negative times are clamped to zero.
            MediaTime now;
            CMTime itemTime = [(AVPlayerItemType *)object currentTime];
            if (CMTIME_IS_NUMERIC(itemTime))
                now = std::max(toMediaTime(itemTime), MediaTime::zeroTime());
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::metadataDidArrive, m_callback, RetainPtr<NSArray>(newValue), now);
        } else if ([keyPath isEqualToString:@"canPlayFastReverse"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"canPlayFastForward"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange, m_callback, [newValue boolValue]);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayer && !willChange) {
        // A value changed for an AVPlayer.
        if ([keyPath isEqualToString:@"rate"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::rateDidChange, m_callback, [newValue doubleValue]);
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
        else if ([keyPath isEqualToString:@"externalPlaybackActive"] || [keyPath isEqualToString:@"allowsExternalPlayback"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange, m_callback);
#endif
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
        else if ([keyPath isEqualToString:@"outputObscuredDueToInsufficientExternalProtection"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::outputObscuredDueToInsufficientExternalProtectionChanged, m_callback, [newValue boolValue]);
#endif
    }
    
    if (!function)
        return;

    auto weakThis = m_callback->createWeakPtr();
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis, function]{
        // weakThis and function both refer to the same MediaPlayerPrivateAVFoundationObjC instance. If the WeakPtr has
        // been cleared, the underlying object has been destroyed, and it is unsafe to call function().
        if (!weakThis)
            return;
        function();
    }));
}

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// AVPlayerItemLegibleOutput delegate: delivers in-band legible (caption) cue
// data. The observer and the arrays are retained across the hop to the main
// thread, where m_callback is re-checked before use.
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime
{
    UNUSED_PARAM(output);
    UNUSED_PARAM(nativeSamples);

    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFMovieObserver> protectedSelf = self;
    RetainPtr<NSArray> protectedStrings = strings;
    RetainPtr<NSArray> protectedNativeSamples = nativeSamples;
    callOnMainThread([protectedSelf = WTFMove(protectedSelf), protectedStrings = WTFMove(protectedStrings), protectedNativeSamples = WTFMove(protectedNativeSamples), itemTime] {
        MediaPlayerPrivateAVFoundationObjC* callback = protectedSelf->m_callback;
        if (!callback)
            return;
        // Negative item times are clamped to zero.
        MediaTime time = std::max(toMediaTime(itemTime), MediaTime::zeroTime());
        callback->processCue(protectedStrings.get(), protectedNativeSamples.get(), time);
    });
}

// Tells the player (on the main thread) to flush queued cues after the
// legible output's sequence was flushed.
- (void)outputSequenceWasFlushed:(id)output
{
    UNUSED_PARAM(output);

    if (!m_callback)
        return;
    
    callOnMainThread([protectedSelf = RetainPtr<WebCoreAVFMovieObserver>(self)] {
        if (MediaPlayerPrivateAVFoundationObjC* callback = protectedSelf->m_callback)
            callback->flushCues();
    });
}
#endif

@end
3392
3393 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
@implementation WebCoreAVFLoaderDelegate

// Raw back-pointer to the player; updated (or cleared) via -setCallback:.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

// AVAssetResourceLoader delegate. Returning YES tells AVFoundation we will
// respond asynchronously; the actual decision is made on the main thread, and
// the request is failed there if the player is gone or declines to handle it.
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    RetainPtr<WebCoreAVFLoaderDelegate> protectedSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> protectedLoadingRequest = loadingRequest;
    callOnMainThread([protectedSelf = WTFMove(protectedSelf), protectedLoadingRequest = WTFMove(protectedLoadingRequest)] {
        MediaPlayerPrivateAVFoundationObjC* callback = protectedSelf->m_callback;
        if (!callback) {
            [protectedLoadingRequest finishLoadingWithError:nil];
            return;
        }

        if (!callback->shouldWaitForLoadingOfResource(protectedLoadingRequest.get()))
            [protectedLoadingRequest finishLoadingWithError:nil];
    });

    return YES;
}

// Same asynchronous pattern for authentication challenges. Server-trust
// challenges are declined synchronously (returns NO); other challenges are
// forwarded to the player on the main thread and cancelled if the player is
// gone or will not respond.
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForResponseToAuthenticationChallenge:(NSURLAuthenticationChallenge *)challenge
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    if ([[[challenge protectionSpace] authenticationMethod] isEqualToString:NSURLAuthenticationMethodServerTrust])
        return NO;

    RetainPtr<WebCoreAVFLoaderDelegate> protectedSelf = self;
    RetainPtr<NSURLAuthenticationChallenge> protectedChallenge = challenge;
    callOnMainThread([protectedSelf = WTFMove(protectedSelf), protectedChallenge = WTFMove(protectedChallenge)] {
        MediaPlayerPrivateAVFoundationObjC* callback = protectedSelf->m_callback;
        if (!callback) {
            [[protectedChallenge sender] cancelAuthenticationChallenge:protectedChallenge.get()];
            return;
        }

        if (!callback->shouldWaitForResponseToAuthenticationChallenge(protectedChallenge.get()))
            [[protectedChallenge sender] cancelAuthenticationChallenge:protectedChallenge.get()];
    });

    return YES;
}

// Lets the player clean up its state (on the main thread) for a loading
// request that AVFoundation cancelled.
- (void)resourceLoader:(AVAssetResourceLoader *)resourceLoader didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFLoaderDelegate> protectedSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> protectedLoadingRequest = loadingRequest;
    callOnMainThread([protectedSelf = WTFMove(protectedSelf), protectedLoadingRequest = WTFMove(protectedLoadingRequest)] {
        MediaPlayerPrivateAVFoundationObjC* callback = protectedSelf->m_callback;
        if (callback)
            callback->didCancelLoadingRequest(protectedLoadingRequest.get());
    });
}

// Re-points (or clears) the raw back-pointer to the player.
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    m_callback = callback;
}
@end