Change NativeImagePtr for CG to be RetainPtr<CGImageRef>
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27
28 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
29 #import "MediaPlayerPrivateAVFoundationObjC.h"
30
31 #import "AVFoundationSPI.h"
32 #import "AVTrackPrivateAVFObjCImpl.h"
33 #import "AudioSourceProviderAVFObjC.h"
34 #import "AudioTrackPrivateAVFObjC.h"
35 #import "AuthenticationChallenge.h"
36 #import "BlockExceptions.h"
37 #import "CDMSessionAVFoundationObjC.h"
38 #import "Cookie.h"
39 #import "ExceptionCodePlaceholder.h"
40 #import "Extensions3D.h"
41 #import "FloatConversion.h"
42 #import "FloatConversion.h"
43 #import "GraphicsContext.h"
44 #import "GraphicsContext3D.h"
45 #import "GraphicsContextCG.h"
46 #import "InbandMetadataTextTrackPrivateAVF.h"
47 #import "InbandTextTrackPrivateAVFObjC.h"
48 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
49 #import "OutOfBandTextTrackPrivateAVF.h"
50 #import "URL.h"
51 #import "Logging.h"
52 #import "MediaPlaybackTargetMac.h"
53 #import "MediaPlaybackTargetMock.h"
54 #import "MediaSelectionGroupAVFObjC.h"
55 #import "MediaTimeAVFoundation.h"
56 #import "PixelBufferConformerCV.h"
57 #import "PlatformTimeRanges.h"
58 #import "QuartzCoreSPI.h"
59 #import "SecurityOrigin.h"
60 #import "SerializedPlatformRepresentationMac.h"
61 #import "Settings.h"
62 #import "TextEncoding.h"
63 #import "TextTrackRepresentation.h"
64 #import "TextureCacheCV.h"
65 #import "UUID.h"
66 #import "VideoTextureCopierCV.h"
67 #import "VideoTrackPrivateAVFObjC.h"
68 #import "WebCoreAVFResourceLoader.h"
69 #import "WebCoreCALayerExtras.h"
70 #import "WebCoreNSURLSession.h"
71 #import "WebCoreSystemInterface.h"
72 #import <functional>
73 #import <map>
74 #import <objc/runtime.h>
75 #import <runtime/DataView.h>
76 #import <runtime/JSCInlines.h>
77 #import <runtime/TypedArrayInlines.h>
78 #import <runtime/Uint16Array.h>
79 #import <runtime/Uint32Array.h>
80 #import <runtime/Uint8Array.h>
81 #import <wtf/CurrentTime.h>
82 #import <wtf/ListHashSet.h>
83 #import <wtf/NeverDestroyed.h>
84 #import <wtf/OSObjectPtr.h>
85 #import <wtf/text/CString.h>
86 #import <wtf/text/StringBuilder.h>
87
88 #if ENABLE(AVF_CAPTIONS)
89 #include "TextTrack.h"
90 #endif
91
92 #import <AVFoundation/AVFoundation.h>
93
94 #if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
95 #import "VideoFullscreenLayerManager.h"
96 #endif
97
98 #if PLATFORM(IOS)
99 #import "WAKAppKitStubs.h"
100 #import <CoreImage/CoreImage.h>
101 #import <mach/mach_port.h>
102 #else
103 #import <Foundation/NSGeometry.h>
104 #import <QuartzCore/CoreImage.h>
105 #endif
106
107 #if USE(VIDEOTOOLBOX)
108 #import <CoreVideo/CoreVideo.h>
109 #import <VideoToolbox/VideoToolbox.h>
110 #endif
111
112 #if USE(CFNETWORK)
113 #include "CFNSURLConnectionSPI.h"
114 #endif
115
116 #import "CoreVideoSoftLink.h"
117
118 namespace std {
119 template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
120     typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
121 };
122 }
123
124 #if ENABLE(AVF_CAPTIONS)
125 // Note: This must be defined before our SOFT_LINK macros:
126 @class AVMediaSelectionOption;
127 @interface AVMediaSelectionOption (OutOfBandExtensions)
128 @property (nonatomic, readonly) NSString* outOfBandSource;
129 @property (nonatomic, readonly) NSString* outOfBandIdentifier;
130 @end
131 #endif
132
133 @interface AVURLAsset (WebKitExtensions)
134 @property (nonatomic, readonly) NSURL *resolvedURL;
135 @end
136
137 typedef AVPlayer AVPlayerType;
138 typedef AVPlayerItem AVPlayerItemType;
139 typedef AVPlayerItemLegibleOutput AVPlayerItemLegibleOutputType;
140 typedef AVPlayerItemVideoOutput AVPlayerItemVideoOutputType;
141 typedef AVMetadataItem AVMetadataItemType;
142 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
143 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
144
145 #pragma mark - Soft Linking
146
147 // Soft-linking headers must be included last since they #define functions, constants, etc.
148 #import "CoreMediaSoftLink.h"
149
150 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
151
152 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
153
154 SOFT_LINK_CLASS(AVFoundation, AVPlayer)
155 SOFT_LINK_CLASS(AVFoundation, AVPlayerItem)
156 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemVideoOutput)
157 SOFT_LINK_CLASS(AVFoundation, AVPlayerLayer)
158 SOFT_LINK_CLASS(AVFoundation, AVURLAsset)
159 SOFT_LINK_CLASS(AVFoundation, AVAssetImageGenerator)
160 SOFT_LINK_CLASS(AVFoundation, AVMetadataItem)
161
162 SOFT_LINK_CLASS(CoreImage, CIContext)
163 SOFT_LINK_CLASS(CoreImage, CIImage)
164
165 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString*)
166 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString*)
167 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
168 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
169 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
170 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
171 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
172 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeMetadata, NSString *)
173 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
174 SOFT_LINK_POINTER(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
175 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
176 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
177 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
178 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
179 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
180
181 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
182
183 #define AVPlayer getAVPlayerClass()
184 #define AVPlayerItem getAVPlayerItemClass()
185 #define AVPlayerLayer getAVPlayerLayerClass()
186 #define AVURLAsset getAVURLAssetClass()
187 #define AVAssetImageGenerator getAVAssetImageGeneratorClass()
188 #define AVMetadataItem getAVMetadataItemClass()
189
190 #define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
191 #define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
192 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
193 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
194 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
195 #define AVMediaTypeVideo getAVMediaTypeVideo()
196 #define AVMediaTypeAudio getAVMediaTypeAudio()
197 #define AVMediaTypeMetadata getAVMediaTypeMetadata()
198 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
199 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
200 #define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
201 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
202 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
203 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
204 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
205 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
206
207 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
208 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
209 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
210
211 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
212 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
213 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
214
215 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
216 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
217 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
218 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
219
220 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
221 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
222 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
223 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
224 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
225 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
226 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
227 #endif
228
229 #if ENABLE(AVF_CAPTIONS)
230 SOFT_LINK_POINTER(AVFoundation, AVURLAssetHTTPCookiesKey, NSString*)
231 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
232 SOFT_LINK_POINTER(AVFoundation, AVURLAssetUsesNoPersistentCacheKey, NSString*)
233 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
234 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
235 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
236 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
237 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
238 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
239 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
240 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
241 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicIsAuxiliaryContent, NSString*)
242
243 #define AVURLAssetHTTPCookiesKey getAVURLAssetHTTPCookiesKey()
244 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
245 #define AVURLAssetUsesNoPersistentCacheKey getAVURLAssetUsesNoPersistentCacheKey()
246 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
247 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
248 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
249 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
250 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
251 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
252 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
253 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
254 #define AVMediaCharacteristicIsAuxiliaryContent getAVMediaCharacteristicIsAuxiliaryContent()
255 #endif
256
257 #if ENABLE(DATACUE_VALUE)
258 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString*)
259 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMetadataKeySpaceISOUserData, NSString*)
260 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString*)
261 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceiTunes, NSString*)
262 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceID3, NSString*)
263
264 #define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
265 #define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
266 #define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
267 #define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
268 #define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
269 #endif
270
271 #if PLATFORM(IOS)
272 SOFT_LINK_POINTER(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
273 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
274 #endif
275
276 SOFT_LINK_FRAMEWORK(MediaToolbox)
277 SOFT_LINK_OPTIONAL(MediaToolbox, MTEnableCaption2015Behavior, Boolean, (), ())
278
279 using namespace WebCore;
280
281 enum MediaPlayerAVFoundationObservationContext {
282     MediaPlayerAVFoundationObservationContextPlayerItem,
283     MediaPlayerAVFoundationObservationContextPlayerItemTrack,
284     MediaPlayerAVFoundationObservationContextPlayer,
285     MediaPlayerAVFoundationObservationContextAVPlayerLayer,
286 };
287
288 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
289 @interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
290 #else
291 @interface WebCoreAVFMovieObserver : NSObject
292 #endif
293 {
294     MediaPlayerPrivateAVFoundationObjC* m_callback;
295     int m_delayCallbacks;
296 }
297 -(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
298 -(void)disconnect;
299 -(void)metadataLoaded;
300 -(void)didEnd:(NSNotification *)notification;
301 -(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
302 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
303 - (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
304 - (void)outputSequenceWasFlushed:(id)output;
305 #endif
306 @end
307
308 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
309 @interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
310     MediaPlayerPrivateAVFoundationObjC* m_callback;
311 }
312 - (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
313 - (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
314 - (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
315 @end
316 #endif
317
318 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
319 @interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
320     MediaPlayerPrivateAVFoundationObjC *m_callback;
321     dispatch_semaphore_t m_semaphore;
322 }
323 - (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
324 - (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
325 - (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
326 - (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
327 @end
328 #endif
329
330 namespace WebCore {
331
332 static NSArray *assetMetadataKeyNames();
333 static NSArray *itemKVOProperties();
334 static NSArray *assetTrackMetadataKeyNames();
335 static NSArray *playerKVOProperties();
336 static AVAssetTrack* firstEnabledTrack(NSArray* tracks);
337
338 #if !LOG_DISABLED
// Textual form of a bool, for LOG() messages only.
static const char *boolString(bool val)
{
    if (val)
        return "true";
    return "false";
}
343 #endif
344
345 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Lazily-created serial queue on which all WebCoreAVFLoaderDelegate
// resource-loading callbacks are delivered. Created exactly once.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_once_t once;
    static dispatch_queue_t queue;
    dispatch_once(&once, ^{
        queue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return queue;
}
355 #endif
356
357 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Lazily-created serial queue on which the WebCoreAVFPullDelegate receives
// AVPlayerItemOutput callbacks. Created exactly once.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_once_t once;
    static dispatch_queue_t queue;
    dispatch_once(&once, ^{
        queue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return queue;
}
367 #endif
368
369 #if USE(CFNETWORK)
// Adapts WebCore's AuthenticationClient interface to an
// NSURLAuthenticationChallenge: each decision (use credential, continue
// without, cancel, ...) is forwarded to the challenge's sender.
// Only compiled in the CFNetwork configuration.
class WebCoreNSURLAuthenticationChallengeClient : public RefCounted<WebCoreNSURLAuthenticationChallengeClient>, public AuthenticationClient {
public:
    static RefPtr<WebCoreNSURLAuthenticationChallengeClient> create(NSURLAuthenticationChallenge *challenge)
    {
        return adoptRef(new WebCoreNSURLAuthenticationChallengeClient(challenge));
    }

    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::ref;
    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::deref;

private:
    WebCoreNSURLAuthenticationChallengeClient(NSURLAuthenticationChallenge *challenge)
        : m_challenge(challenge)
    {
        ASSERT(m_challenge);
    }

    // Route AuthenticationClient's ref-counting through RefCounted.
    void refAuthenticationClient() override { ref(); }
    void derefAuthenticationClient() override { deref(); }

    void receivedCredential(const AuthenticationChallenge&, const Credential& credential) override
    {
        [[m_challenge sender] useCredential:credential.nsCredential() forAuthenticationChallenge:m_challenge.get()];
    }

    void receivedRequestToContinueWithoutCredential(const AuthenticationChallenge&) override
    {
        [[m_challenge sender] continueWithoutCredentialForAuthenticationChallenge:m_challenge.get()];
    }

    void receivedCancellation(const AuthenticationChallenge&) override
    {
        [[m_challenge sender] cancelAuthenticationChallenge:m_challenge.get()];
    }

    // The following two sender methods are optional protocol members;
    // check respondsToSelector: before messaging.
    void receivedRequestToPerformDefaultHandling(const AuthenticationChallenge&) override
    {
        if ([[m_challenge sender] respondsToSelector:@selector(performDefaultHandlingForAuthenticationChallenge:)])
            [[m_challenge sender] performDefaultHandlingForAuthenticationChallenge:m_challenge.get()];
    }

    void receivedChallengeRejection(const AuthenticationChallenge&) override
    {
        if ([[m_challenge sender] respondsToSelector:@selector(rejectProtectionSpaceAndContinueWithChallenge:)])
            [[m_challenge sender] rejectProtectionSpaceAndContinueWithChallenge:m_challenge.get()];
    }

    RetainPtr<NSURLAuthenticationChallenge> m_challenge;
};
419 #endif
420
// Registers this engine with the media player machinery. A no-op when
// AVFoundation cannot be soft-linked on this system.
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player); },
        getSupportedTypes, supportsType, 0, 0, 0, supportsKeySystem);
}
427
// Creates the delegate/observer helper objects and zero-initializes the
// cached state that is later mirrored from AVFoundation via KVO. No
// AVFoundation player objects are created here.
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_weakPtrFactory(this)
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    , m_videoFullscreenLayerManager(VideoFullscreenLayerManager::create())
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
#endif
    // KVO/notification observer; disconnected again in cancelLoad().
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
    , m_videoOutputSemaphore(nullptr)
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
#endif
    , m_currentTextTrack(0)
    , m_cachedRate(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_shouldBufferData(true)
    , m_cachedIsReadyForDisplay(false)
    , m_haveBeenAskedToCreateLayer(false)
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
}
462
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Detach the resource-loader delegate first so no in-flight load can
    // call back into this (soon to be destroyed) object.
    [m_loaderDelegate.get() setCallback:0];
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];

    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Detach the pull delegate and release the semaphore it signals.
    [m_videoOutputDelegate setCallback:0];
    [m_videoOutput setDelegate:nil queue:0];
    if (m_videoOutputSemaphore)
        dispatch_release(m_videoOutputSemaphore);
#endif

    if (m_videoLayer)
        destroyVideoLayer();

    // cancelLoad() removes the remaining KVO observers and notification
    // registrations and drops the player/item references.
    cancelLoad();
}
484
// Tears down all loading/rendering state. Ordering matters throughout:
// KVO observers must be removed before the observed objects are released.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::cancelLoad(%p)", this);
    tearDownVideoRendering();

    // Stop notification delivery before disconnecting the observer.
    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Detach the legible (caption) output from the item before dropping it.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    // Unregister every KVO key path added for the item, then release it.
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        
        m_avPlayerItem = nil;
    }
    // Likewise for the player: periodic time observer first, then KVO.
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;

        for (NSString *keyName in playerKVOProperties())
            [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = MediaTime::zeroTime();

    // Each cached track was given an "enabled" observer; remove it before
    // releasing the track list.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Detach the Web Audio provider from the item/track being destroyed.
    if (m_provider) {
        m_provider->setPlayerItem(nullptr);
        m_provider->setAudioTrack(nullptr);
    }
#endif

    setIgnoreLoadStateChanges(false);
}
548
// True once createVideoLayer() has been asked to create the AVPlayerLayer.
// (The layer itself is created asynchronously on the main thread.)
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_haveBeenAskedToCreateLayer;
}
553
// A context renderer exists when frames can be painted into a
// GraphicsContext: either via the video output (when available) or via the
// AVAssetImageGenerator fallback.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (m_videoOutput)
        return true;
#endif
    return m_imageGenerator;
}
562
// Creates whichever context renderer this build supports: an
// AVPlayerItemVideoOutput when available, otherwise an image generator.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
571
// Creates the AVAssetImageGenerator used as the context-painting fallback.
// Requires a loaded asset; at most one generator is created.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p)", this);

    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    // Zero tolerance in both directions: frames must match the requested
    // time exactly rather than snapping to the nearest keyframe.
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
}
588
// Destroys all context renderers. Each destroy helper is a no-op when its
// renderer was never created.
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
    destroyOpenGLVideoOutput();
#endif
    destroyImageGenerator();
}
597
// Releases the image-generator fallback renderer, if one was created.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (m_imageGenerator) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator(%p) - destroying  %p", this, m_imageGenerator.get());
        m_imageGenerator = nil;
    }
}
607
// Schedules creation of the AVPlayerLayer on the main thread. The layer is
// created at most once (guarded by m_haveBeenAskedToCreateLayer) and only
// while an AVPlayer exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
        return;

    auto weakThis = createWeakPtr();
    callOnMainThread([this, weakThis] {
        // Bail out if this player was destroyed before the task ran.
        if (!weakThis)
            return;

        // Re-check: state may have changed between scheduling and execution.
        if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
            return;
        m_haveBeenAskedToCreateLayer = true;

        if (!m_videoLayer)
            createAVPlayerLayer();

#if USE(VIDEOTOOLBOX)
        if (!m_videoOutput)
            createVideoOutput();
#endif

        // The rendering mode changed from context to layer; tell the client.
        player()->client().mediaPlayerRenderingModeChanged(player());
    });
}
633
// Creates and configures the AVPlayerLayer. Must only be called once an
// AVPlayer exists; invoked on the main thread from createVideoLayer().
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
{
    if (!m_avPlayer)
        return;

    m_videoLayer = adoptNS([allocAVPlayerLayerInstance() init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];
    [m_videoLayer setBackgroundColor:cachedCGColor(Color::black)];
#ifndef NDEBUG
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
    // Observe readyForDisplay so hasAvailableVideoFrame() can report layer
    // readiness; removed again in destroyVideoLayer().
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
    IntSize defaultSize = snappedIntRect(player()->client().mediaPlayerContentBoxRect()).size();
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoLayer(%p) - returning %p", this, m_videoLayer.get());

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    // The fullscreen layer manager owns the layer's placement in the tree.
    m_videoFullscreenLayerManager->setVideoLayer(m_videoLayer.get(), defaultSize);

#if PLATFORM(IOS)
    // setPIPModeEnabled: may not exist on older systems; check first.
    if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(player()->fullscreenMode() & MediaPlayer::VideoFullscreenModePictureInPicture)];
#endif
#else
    [m_videoLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
#endif
}
662
// Tears down the AVPlayerLayer: remove the readyForDisplay KVO observer
// added in createAVPlayerLayer(), detach the player, and release the layer.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer(%p) - destroying %p", this, m_videoLayer.get());

    // Observer removal must precede releasing the observed layer.
    [m_videoLayer removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer setPlayer:nil];

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    m_videoFullscreenLayerManager->didDestroyVideoLayer();
#endif

    m_videoLayer = nil;
}
679
// Returns the stream's start date in milliseconds since the epoch, or an
// invalid MediaTime when the media carries no start date.
MediaTime MediaPlayerPrivateAVFoundationObjC::getStartDate() const
{
    // Date changes as the track's playback position changes. Must subtract currentTime (offset in seconds) from date offset to get date beginning
    double date = [[m_avPlayerItem currentDate] timeIntervalSince1970] * 1000;

    // No live streams were made during the epoch (1970). AVFoundation returns 0 if the media file doesn't have a start date
    if (!date)
        return MediaTime::invalidTime();

    // Current playback offset, converted to milliseconds to match `date`.
    double currentTime = CMTimeGetSeconds([m_avPlayerItem currentTime]) * 1000;

    // Rounding due to second offset error when subtracting.
    return MediaTime::createWithDouble(round(date - currentTime));
}
694
// When rendering to a layer, readiness is AVPlayerLayer's cached
// readyForDisplay state; otherwise report whether paint ever drew a frame.
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    bool renderingToLayer = currentRenderingMode() == MediaRenderingToLayer;
    return renderingToLayer ? m_cachedIsReadyForDisplay : m_videoFrameHasDrawn;
}
702
703 #if ENABLE(AVF_CAPTIONS)
// Maps a platform text-track kind to the AVFoundation media-characteristic
// array describing it. Under the 2015 manual-selection caption behavior all
// kinds map to the auxiliary-content characteristic.
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    static bool manualSelectionMode = MTEnableCaption2015BehaviorPtr() && MTEnableCaption2015BehaviorPtr()();
    if (manualSelectionMode)
        return @[ AVMediaCharacteristicIsAuxiliaryContent ];

    // FIXME: Match these to correct types:
    if (kind == PlatformTextTrack::Description)
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, nil];

    if (kind == PlatformTextTrack::Forced)
        return [NSArray arrayWithObjects: AVMediaCharacteristicContainsOnlyForcedSubtitles, nil];

    // Caption, Subtitle, and every remaining kind share the same description.
    return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];
}
725     
// An out-of-band caption track's mode changed; forward to trackModeChanged().
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
730     
// Pushes the page's out-of-band text-track modes onto the matching
// AVMediaSelectionOption-backed tracks. Tracks are matched by the unique id
// stored in the option's outOfBandIdentifier.
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    
    for (auto& textTrack : m_textTracks) {
        // Only out-of-band tracks are synchronized here.
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;
        
        RefPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> currentOption = trackPrivate->mediaSelectionOption();
        
        for (auto& track : outOfBandTrackSources) {
            // The unique id was attached to the selection option at creation.
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();
            
            if (![[currentOption.get() outOfBandIdentifier] isEqual: reinterpret_cast<const NSString*>(uniqueID.get())])
                continue;
            
            // Translate the platform mode to the inband-track mode enum;
            // unknown modes default to Hidden.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;
            
            textTrack->setMode(mode);
            break;
        }
    }
}
761 #endif
762
763
// Returns the canonical form of |url| as computed by the NSURLProtocol
// machinery, falling back to the original URL whenever canonicalization fails.
static NSURL *canonicalURL(const String& url)
{
    NSURL *cocoaURL = URL(ParsedURLString, url);
    if (url.isEmpty())
        return cocoaURL;

    RetainPtr<NSURLRequest> originalRequest = adoptNS([[NSURLRequest alloc] initWithURL:cocoaURL]);
    if (!originalRequest)
        return cocoaURL;

    NSURLRequest *canonicalRequest = [NSURLProtocol canonicalRequestForRequest:originalRequest.get()];
    return canonicalRequest ? [canonicalRequest URL] : cocoaURL;
}
780
781 #if PLATFORM(IOS)
// Converts a WebCore Cookie into the NSHTTPCookie representation that
// AVURLAssetHTTPCookiesKey expects.
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    NSMutableDictionary *properties = [NSMutableDictionary dictionaryWithDictionary:@{
        NSHTTPCookieName: cookie.name,
        NSHTTPCookieValue: cookie.value,
        NSHTTPCookieDomain: cookie.domain,
        NSHTTPCookiePath: cookie.path,
        // Cookie::expires appears to be in milliseconds, hence the /1000 — TODO confirm.
        NSHTTPCookieExpires: [NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)],
    }];
    if (cookie.secure)
        properties[NSHTTPCookieSecure] = @YES;
    if (cookie.session)
        properties[NSHTTPCookieDiscard] = @YES;

    return [NSHTTPCookie cookieWithProperties:properties];
}
799 #endif
800
// Creates the AVURLAsset for |url|, assembling the asset options (reference
// restrictions, HTTP headers, out-of-band text tracks, cookies, cache policy)
// and wiring up the resource loader delegate. No-op if an asset already exists.
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
{
    if (m_avAsset)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(%p) - url = %s", this, url.utf8().data());

    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);    

    // Disallow asset-internal references that cross the local/remote boundary.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject: [NSNumber numberWithBool: TRUE] forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

#if PLATFORM(IOS)
    // FIXME: rdar://problem/20354688
    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty() && AVURLAssetClientBundleIdentifierKey)
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];
#endif

#if ENABLE(AVF_CAPTIONS)
    // Hand any out-of-band (sidecar) text tracks to AVFoundation so they show
    // up as ordinary media selection options on the asset.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: reinterpret_cast<const NSString*>(label.get()),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: reinterpret_cast<const NSString*>(language.get()),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: reinterpret_cast<const NSString*>(uniqueID.get()),
                AVOutOfBandAlternateTrackSourceKey: reinterpret_cast<const NSString*>(url.get()),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS)
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS)
    // Forward the page's cookies so media loads are authenticated like any
    // other subresource load.
    Vector<Cookie> cookies;
    if (player()->getRawCookies(URL(ParsedURLString, url), cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    [options setObject:[NSNumber numberWithBool:!player()->client().mediaPlayerShouldUsePersistentCache()] forKey:AVURLAssetUsesNoPersistentCacheKey];

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([allocAVURLAssetInstance() initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    [resourceLoader setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];

#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED > 101100
    // Route media resource loading through WebKit's own loader via NSURLSession
    // when the SPI is available and the setting is enabled.
    if (Settings::isAVFoundationNSURLSessionEnabled()
        && [resourceLoader respondsToSelector:@selector(setURLSession:)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegate)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegateQueue)]) {
        RefPtr<PlatformMediaResourceLoader> mediaResourceLoader = player()->createResourceLoader();
        if (mediaResourceLoader)
            resourceLoader.URLSession = (NSURLSession *)[[[WebCoreNSURLSession alloc] initWithResourceLoader:*mediaResourceLoader delegate:resourceLoader.URLSessionDataDelegate delegateQueue:resourceLoader.URLSessionDataDelegateQueue] autorelease];
    }
#endif

#endif

    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
903
// Installs |item| as the AVPlayer's current item. The replacement must happen
// on the main thread, so the call is bounced there when needed.
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItemType *item)
{
    if (!m_avPlayer)
        return;

    if (pthread_main_np()) {
        [m_avPlayer replaceCurrentItemWithPlayerItem:item];
        return;
    }

    // Keep both objects alive until the asynchronous replacement has run.
    RetainPtr<AVPlayerType> protectedPlayer = m_avPlayer.get();
    RetainPtr<AVPlayerItemType> protectedItem = item;
    dispatch_async(dispatch_get_main_queue(), [protectedPlayer, protectedItem] {
        [protectedPlayer replaceCurrentItemWithPlayerItem:protectedItem.get()];
    });
}
920
// Lazily creates the AVPlayer, registers KVO observers for the player
// properties we cache, applies external-playback state, and attaches the video
// layer and player item if they already exist.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayer(%p)", this);

    setDelayCallbacks(true);

    m_avPlayer = adoptNS([allocAVPlayerInstance() init]);
    for (NSString *keyName in playerKVOProperties())
        [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // WebCore performs its own track selection; keep AVFoundation's automatic
    // media selection out of the way.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET) && !PLATFORM(IOS)
    // Re-apply a playback target that was chosen before the player existed.
    if (m_shouldPlayToPlaybackTarget)
        setShouldPlayToPlaybackTarget(true);
#endif

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
956
// Lazily creates the AVPlayerItem for the current asset, wiring up KVO and
// end-of-playback observation, the legible (caption) output, and the Web Audio
// provider, then attaches the item to the player if one exists.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem(%p)", this);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([allocAVPlayerItemInstance() initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Deliver WebVTT caption cues to us (slightly ahead of their display time)
    // instead of letting AVFoundation render them itself.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([allocAVPlayerItemLegibleOutputInstance() initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    if (m_provider) {
        m_provider->setPlayerItem(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }
#endif

    setDelayCallbacks(false);
}
1002
// Kicks off an asynchronous load of the asset's "playable" property, at most
// once per asset; completion is reported via AssetPlayabilityKnown.
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::checkPlayability(%p)", this);
    auto weakThis = createWeakPtr();

    NSArray *keys = @[@"playable"];
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:keys completionHandler:^{
        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
1019
// Starts asynchronous loading of the asset-level metadata keys and, once the
// track list is available, of each track's metadata keys. A dispatch group
// joins all outstanding requests so metadataLoaded fires on the main thread
// only after every key has finished loading.
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);

    OSObjectPtr<dispatch_group_t> metadataLoadingGroup = adoptOSObject(dispatch_group_create());
    dispatch_group_enter(metadataLoadingGroup.get());
    auto weakThis = createWeakPtr();
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                // The track list loaded; fan out one group member per track.
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup.get());
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup.get());
                    }];
                }
            }
            // Balances the enter above; the group drains once per-track loads finish.
            dispatch_group_leave(metadataLoadingGroup.get());
        });
    }];

    dispatch_group_notify(metadataLoadingGroup.get(), dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });
    });
}
1049
// Maps the cached AVPlayerItem state onto the cross-platform item status,
// checked in decreasing order of specificity.
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    switch (m_cachedItemStatus) {
    case AVPlayerItemStatusUnknown:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    case AVPlayerItemStatusFailed:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    default:
        break;
    }

    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1068
// Exposes the underlying AVPlayer to clients that need the raw platform object.
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformMedia(%p)", this);
    PlatformMedia platformMedia;
    platformMedia.type = PlatformMedia::AVFoundationMediaPlayerType;
    platformMedia.media.avfMediaPlayer = m_avPlayer.get();
    return platformMedia;
}
1077
// Returns the layer web content should composite: the manager's inline layer on
// platforms with fullscreen layer management, otherwise the raw video layer.
// nullptr until a layer has been requested.
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    return m_haveBeenAskedToCreateLayer ? m_videoFullscreenLayerManager->videoInlineLayer() : nullptr;
#else
    return m_haveBeenAskedToCreateLayer ? m_videoLayer.get() : nullptr;
#endif
}
1086
1087 #if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
// Moves video into (or out of) the given fullscreen layer and re-parents the
// text-track representation layer so captions follow the video.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer)
{
    if (m_videoFullscreenLayerManager->videoFullscreenLayer() == videoFullscreenLayer)
        return;

    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer);

    bool enteredFullscreen = m_videoFullscreenLayerManager->videoFullscreenLayer();
    if (enteredFullscreen && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayerManager->videoFullscreenLayer() addSublayer:m_textTrackRepresentationLayer.get()];
    }

    updateDisableExternalPlayback();
}
1102
// Updates the fullscreen video frame and keeps the caption layer bounds in sync.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
    syncTextTrackBounds();
}
1108
// Applies the requested video gravity (aspect-fit / aspect-fill / stretch) to
// the video layer, skipping redundant updates.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;
    if (!m_videoLayer)
        return;

    NSString *videoGravity;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        videoGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        videoGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    }

    if ([m_videoLayer videoGravity] == videoGravity)
        return;

    [m_videoLayer setVideoGravity:videoGravity];
    syncTextTrackBounds();
}
1131
// Tells the video layer whether it is in picture-in-picture (iOS only, via SPI)
// and updates external-playback restrictions accordingly.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenMode(MediaPlayer::VideoFullscreenMode mode)
{
#if PLATFORM(IOS)
    if (m_videoLayer && [m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(mode & MediaPlayer::VideoFullscreenModePictureInPicture)];
    updateDisableExternalPlayback();
#else
    UNUSED_PARAM(mode);
#endif
}
1142
1143 #endif // PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
1144
1145 #if PLATFORM(IOS)
1146 NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
1147 {
1148     if (m_currentMetaData)
1149         return m_currentMetaData.get();
1150     return nil;
1151 }
1152
// Returns the player item's extended access (QoS) log as a string, or the
// empty string when there is no player item.
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemAccessLog *accessLog = [m_avPlayerItem.get() accessLog];
    RetainPtr<NSString> logText = adoptNS([[NSString alloc] initWithData:[accessLog extendedLogData] encoding:[accessLog extendedLogDataStringEncoding]]);
    return logText.get();
}
1163
// Returns the player item's extended error log as a string, or the empty
// string when there is no player item.
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *errorLog = [m_avPlayerItem.get() errorLog];
    RetainPtr<NSString> logText = adoptNS([[NSString alloc] initWithData:[errorLog extendedLogData] encoding:[errorLog extendedLogDataStringEncoding]]);
    return logText.get();
}
1174 #endif
1175
// Shows or hides the video layer. Wrapped in a CATransaction with actions
// disabled so the visibility change is not implicitly animated.
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    if (m_videoLayer)
        [m_videoLayer.get() setHidden:!isVisible];
    [CATransaction commit];
}
1184     
// Begins playback by setting the AVPlayer's rate to the client-requested rate.
// Callbacks are delayed because the rate change fans out several KVO
// notifications that should arrive after our state is consistent.
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPlay(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    // Query the requested rate exactly once so the cached value and the rate
    // handed to AVPlayer cannot diverge if the request changes in between.
    double rate = requestedRate();
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1196
// Pauses playback by driving the AVPlayer's rate to zero (AVFoundation has no
// separate pause state for our purposes). Callbacks are delayed while the rate
// change fans out KVO notifications.
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPause(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = 0;
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
1208
// Returns the media duration, preferring the player item's value over the
// asset's. Indefinite durations (e.g. live streams) map to positive infinity;
// anything else non-numeric maps to an invalid time.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will fetch the
    // answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();
    
    CMTime cmDuration;
    
    // Check the AVItem if we have one and it has loaded duration, some assets never report duration.
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        cmDuration = [m_avPlayerItem.get() duration];
    else
        cmDuration = [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return toMediaTime(cmDuration);

    if (CMTIME_IS_INDEFINITE(cmDuration))
        return MediaTime::positiveInfiniteTime();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %s", this, toString(MediaTime::invalidTime()).utf8().data());
    return MediaTime::invalidTime();
}
1233
// Returns the current playback position clamped to be non-negative; zero when
// metadata or the player item is not yet available.
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime currentCMTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(currentCMTime))
        return MediaTime::zeroTime();

    return std::max(toMediaTime(currentCMTime), MediaTime::zeroTime());
}
1245
// Seeks asynchronously with the given tolerances. Completion (finished == NO
// indicates the seek was interrupted) is reported back on the main thread via
// seekCompleted(), guarded by a weak pointer in case we are destroyed first.
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // A seek can leave a partially delivered metadata cue dangling; flush it.
    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = toCMTime(time);
    CMTime cmBefore = toCMTime(negativeTolerance);
    CMTime cmAfter = toCMTime(positiveTolerance);

    auto weakThis = createWeakPtr();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::seekToTime(%p) - calling seekToTime", this);

    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1274
// Sets the player volume. On iOS this is a no-op; volume there is controlled
// system-wide rather than per player.
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS)
    UNUSED_PARAM(volume);
    return;
#else
    if (!metaDataAvailable())
        return;

    [m_avPlayer.get() setVolume:volume];
#endif
}
1287
// Caption visibility is handled through the text-track machinery in this
// engine, so this override only logs the request.
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - set to %s", this, boolString(closedCaptionsVisible));
}
1297
// Applies a new playback rate to the AVPlayer and caches it so rate() can
// answer without querying the player.
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    setDelayCallbacks(true);
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1305
// Returns the cached playback rate; zero until metadata is available.
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    return metaDataAvailable() ? m_cachedRate : 0;
}
1313
// Chooses the audio time-pitch algorithm: Spectral preserves pitch across rate
// changes, Varispeed lets pitch follow the rate.
void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
{
    if (m_avPlayerItem)
        [m_avPlayerItem setAudioTimePitchAlgorithm:(preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];
}
1319
// Converts the cached loadedTimeRanges of the player item into a
// PlatformTimeRanges, skipping invalid or empty ranges.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto bufferedRanges = std::make_unique<PlatformTimeRanges>();

    if (!m_avPlayerItem)
        return bufferedRanges;

    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;
        bufferedRanges->add(toMediaTime(range.start), toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return bufferedRanges;
}
1334
// Returns the earliest start time among the valid cached seekable ranges, or
// zero when no valid range exists.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    MediaTime earliest = MediaTime::positiveInfiniteTime();
    bool foundValidRange = false;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundValidRange = true;
        MediaTime rangeStart = toMediaTime(range.start);
        if (rangeStart < earliest)
            earliest = rangeStart;
    }
    return foundValidRange ? earliest : MediaTime::zeroTime();
}
1354
// Returns the latest end time among the valid seekable ranges, lazily fetching
// the ranges from the player item when they have not been cached yet.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    MediaTime latest;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        MediaTime rangeEnd = toMediaTime(CMTimeRangeGetEnd(range));
        if (latest < rangeEnd)
            latest = rangeEnd;
    }
    return latest;
}
1372
// Returns the latest end time among the valid cached loaded ranges, or zero
// when nothing has been buffered yet.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    MediaTime latest;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        MediaTime rangeEnd = toMediaTime(CMTimeRangeGetEnd(range));
        if (latest < rangeEnd)
            latest = rangeEnd;
    }

    return latest;
}
1391
// Sums the total sample data length of every cached track. The sum is stored in
// m_cachedTotalBytes so subsequent calls avoid re-walking the track list.
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    if (m_cachedTotalBytes)
        return m_cachedTotalBytes;

    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        m_cachedTotalBytes += [[track assetTrack] totalSampleDataLength];

    return m_cachedTotalBytes;
}
1405
// Replaces the current asset with one created elsewhere.
void MediaPlayerPrivateAVFoundationObjC::setAsset(RetainPtr<id> asset)
{
    m_avAsset = asset;
}
1410
// Aggregates the per-key load status of every metadata key into a single
// overall asset status; once every key is loaded, the "playable" value decides
// between Playable and merely Loaded.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];
#if !LOG_DISABLED
        if (error)
            LOG(Media, "MediaPlayerPrivateAVFoundation::assetStatus - statusOfValueForKey failed for %s, error = %s", [keyName UTF8String], [[error localizedDescription] UTF8String]);
#endif

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading;// At least one key is not loaded yet.
        
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue])
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1439
// Returns the NSError code associated with the asset's "playable" key, or 0
// when there is no asset or no error (messaging nil returns 0).
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    if (!m_avAsset)
        return 0;

    NSError *playableError = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&playableError];
    return [playableError code];
}
1449
// Paints the current video frame into |context|, preferring the video output's
// pixel buffer and falling back to the image generator. Note the else clause
// binds across the #endif, so exactly one painter runs.
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    m_videoFrameHasDrawn = true;
}
1470
// Best-effort software paint of the current frame. Does nothing when already
// rendering through a layer, or before a context renderer (image generator or
// video output) exists.
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    // Rendering to a layer already; a software paint would be redundant.
    if (currentRenderingMode() == MediaRenderingToLayer)
        return;

    if (!hasContextRenderer())
        return;

    paintCurrentFrameInContext(context, rect);
}
1486
// Paints the frame at the current time using AVAssetImageGenerator. The context
// is translated and flipped because CGContextDrawImage uses a bottom-left
// origin.
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext& context, const FloatRect& rect)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(%p)", this);

    RetainPtr<CGImageRef> frameImage = createImageForTimeInRect(currentTime(), rect);
    if (!frameImage)
        return;

    GraphicsContextStateSaver stateSaver(context);
    context.translate(rect.x(), rect.y() + rect.height());
    context.scale(FloatSize(1.0f, -1.0f));
    context.setImageInterpolationQuality(InterpolationLow);
    IntRect destinationRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context.platformContext(), CGRectMake(0, 0, destinationRect.width(), destinationRect.height()), frameImage.get());
}
1501
// Returns the cached, case-insensitive set of MIME types AVURLAsset reports as
// playable; computed once on first use.
static const HashSet<String, ASCIICaseInsensitiveHash>& avfMIMETypes()
{
    static NeverDestroyed<HashSet<String, ASCIICaseInsensitiveHash>> cachedTypes = []() {
        HashSet<String, ASCIICaseInsensitiveHash> mimeTypes;
        for (NSString *mimeType in [AVURLAsset audiovisualMIMETypes])
            mimeTypes.add(mimeType);
        return mimeTypes;
    }();

    return cachedTypes;
}
1513
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
{
    // Make sure an AVAssetImageGenerator exists to snapshot with.
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    // Generate a frame no larger than the paint rect, then copy it into the sRGB
    // color space so it can be drawn directly.
    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    RetainPtr<CGImageRef> generatedImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    RetainPtr<CGImageRef> convertedImage = adoptCF(CGImageCreateCopyWithColorSpace(generatedImage.get(), sRGBColorSpaceRef()));

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return convertedImage;
}
1535
// Reports the MIME types this engine supports by handing back the lazily-built
// set AVFoundation advertises. The function body was missing its closing brace
// in the source as written, which would not compile; restored here.
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& supportedTypes)
{
    supportedTypes = avfMIMETypes();
}

1540
1541 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// FairPlay Streaming (with and without the explicit "1_0" suffix) and Clear Key
// are the only key systems this engine recognizes.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringASCIICase(keySystem, "com.apple.fps")
        || equalIgnoringASCIICase(keySystem, "com.apple.fps.1_0")
        || equalIgnoringASCIICase(keySystem, "org.w3c.clearkey");
}
1548 #endif
1549
// Decides whether this engine can play content described by |parameters|,
// returning IsSupported / MayBeSupported / IsNotSupported per the HTML
// canPlayType() contract.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(ENCRYPTED_MEDIA)
    // From: <http://dvcs.w3.org/hg/html-media/raw-file/eme-v0.1b/encrypted-media/encrypted-media.html#dom-canplaytype>
    // In addition to the steps in the current specification, this method must run the following steps:

    // 1. Check whether the Key System is supported with the specified container and codec type(s) by following the steps for the first matching condition from the following list:
    //    If keySystem is null, continue to the next step.
    if (!parameters.keySystem.isNull() && !parameters.keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        if (equalIgnoringASCIICase(parameters.keySystem, "org.w3c.clearkey") && !parameters.type.isEmpty() && !equalIgnoringASCIICase(parameters.type, "application/x-mpegurl"))
            return MediaPlayer::IsNotSupported;

        // If keySystem contains an unrecognized or unsupported Key System, return the empty string
        if (!keySystemIsSupported(parameters.keySystem))
            return MediaPlayer::IsNotSupported;

        // If the Key System specified by keySystem does not support decrypting the container and/or codec specified in the rest of the type string.
        // (AVFoundation does not provide an API which would allow us to determine this, so this is a no-op)
    }

    // 2. Return "maybe" or "probably" as appropriate per the existing specification of canPlayType().
#endif

#if ENABLE(MEDIA_SOURCE)
    // MSE and MediaStream content is handled by other engines, not this one.
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif
#if ENABLE(MEDIA_STREAM)
    if (parameters.isMediaStream)
        return MediaPlayer::IsNotSupported;
#endif
    if (isUnsupportedMIMEType(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The container type must appear in the static list or in the set AVFoundation reports.
    if (!staticMIMETypeList().contains(parameters.type) && !avfMIMETypes().contains(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    // With codecs present, let AVFoundation judge the full extended MIME type string.
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type, (NSString *)parameters.codecs];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1596
// Returns whether this engine supports the given EME key system, optionally
// constrained to a specific container MIME type.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (!keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        // (Previously returned MediaPlayer::IsNotSupported here — an enum value from
        // a bool function, correct only because that enumerator happens to be 0.)
        if (equalIgnoringASCIICase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringASCIICase(mimeType, "application/x-mpegurl"))
            return false;

        if (!keySystemIsSupported(keySystem))
            return false;

        if (!mimeType.isEmpty() && isUnsupportedMIMEType(mimeType))
            return false;

        // A non-empty MIME type must be one we know how to play.
        if (!mimeType.isEmpty() && !staticMIMETypeList().contains(mimeType) && !avfMIMETypes().contains(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1622
1623 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1624 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Satisfies an AVAssetResourceLoadingRequest from an in-memory key buffer:
// publishes the total length, then responds with the byte range the request
// asked for and marks the request finished.
static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
{
    if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
        [infoRequest setContentLength:keyData->byteLength()];
        [infoRequest setByteRangeAccessSupported:YES];
    }

    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
        long long start = [dataRequest currentOffset];
        // Clamp the requested range's end to the amount of key data available.
        long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);

        // Reject negative or out-of-range requests.
        // NOTE(review): finishing with a nil error signals failure without a specific
        // error code — confirm this is the intended behavior.
        if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
            [request finishLoadingWithError:nil];
            return;
        }

        ASSERT(start <= std::numeric_limits<int>::max());
        ASSERT(end <= std::numeric_limits<int>::max());
        RefPtr<ArrayBuffer> requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
        RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
        [dataRequest respondWithData:nsData.get()];
    }

    [request finishLoading];
}
1650 #endif
1651
// Called when AVFoundation asks WebKit to load a resource it cannot fetch itself.
// Returns true when this object will service the request (possibly asynchronously);
// false tells AVFoundation the request will not be handled.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    // "skd" is a FairPlay Streaming key request.
    if (scheme == "skd") {
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
        // Little-endian 32-bit size prefix.
        initDataView->set<uint32_t>(0, keyURISize, true);

        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, 4, keyURI.length());
        // NOTE(review): sizeof(unsigned char) is 1, so the division is a no-op; the
        // length appears intended in UTF-16 code units — confirm against setRange's contract.
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

#if ENABLE(ENCRYPTED_MEDIA)
        if (!player()->keyNeeded("com.apple.lskd", emptyString(), static_cast<const unsigned char*>(initDataBuffer->data()), initDataBuffer->byteLength()))
#elif ENABLE(ENCRYPTED_MEDIA_V2)
        RefPtr<Uint8Array> initData = Uint8Array::create(initDataBuffer, 0, initDataBuffer->byteLength());
        if (!player()->keyNeeded(initData.get()))
#endif
            return false;

        // Hold the request until key data arrives.
        m_keyURIToRequestMap.set(keyURI, avRequest);
        return true;
#if ENABLE(ENCRYPTED_MEDIA_V2)
    } else if (scheme == "clearkey") {
        String keyID = [[[avRequest request] URL] resourceSpecifier];
        StringView keyIDView(keyID);
        CString utf8EncodedKeyId = UTF8Encoding().encode(keyIDView, URLEncodedEntitiesForUnencodables);

        RefPtr<Uint8Array> initData = Uint8Array::create(utf8EncodedKeyId.length());
        initData->setRange((JSC::Uint8Adaptor::Type*)utf8EncodedKeyId.data(), utf8EncodedKeyId.length(), 0);

        // If the key is already cached, fulfill the request immediately; there is no
        // need to wait for a keyNeeded round trip.
        auto keyData = player()->cachedKeyForKeyId(keyID);
        if (keyData) {
            fulfillRequestWithKeyData(avRequest, keyData.get());
            return false;
        }

        if (!player()->keyNeeded(initData.get()))
            return false;

        m_keyURIToRequestMap.set(keyID, avRequest);
        return true;
#endif
    }
#endif

    // All other schemes are loaded through WebCore's resource loading machinery;
    // the loader is kept alive in m_resourceLoaderMap until the request stops.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1708
// Converts the platform authentication challenge into a WebCore
// AuthenticationChallenge and forwards the decision to the MediaPlayer client.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForResponseToAuthenticationChallenge(NSURLAuthenticationChallenge* nsChallenge)
{
#if USE(CFNETWORK)
    RefPtr<WebCoreNSURLAuthenticationChallengeClient> challengeClient = WebCoreNSURLAuthenticationChallengeClient::create(nsChallenge);
    RetainPtr<CFURLAuthChallengeRef> coreChallenge = adoptCF([nsChallenge _createCFAuthChallenge]);
    AuthenticationChallenge webChallenge(coreChallenge.get(), challengeClient.get());
#else
    AuthenticationChallenge webChallenge(nsChallenge);
#endif

    return player()->shouldWaitForResponseToAuthenticationChallenge(webChallenge);
}
1721
// AVFoundation cancelled this loading request: stop the loader we created for
// it, if any. The loader stays in m_resourceLoaderMap until
// didStopLoadingRequest() removes it.
// (Removed an unused local: the request URL's scheme was computed and never read.)
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest))
        resourceLoader->stopLoading();
}
1731
// The request finished or was torn down; drop our reference to its resource loader.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
1736 #endif
1737
// This engine can only run when both the AVFoundation and CoreMedia frameworks
// can be loaded.
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    if (!AVFoundationLibrary())
        return false;
    return isCoreMediaFrameworkAvailable();
}
1742
// Maps a requested time to the nearest exact media sample time. Currently a
// pass-through in both branches (see FIXME below).
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1751
// How long (in seconds) the cross-platform base class may cache the current
// media time before re-querying AVFoundation.
double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 1010
    // On iOS and OS X 10.10+ no caching is performed — presumably time queries
    // are cheap enough there; confirm before changing.
    return 0;
#else
    return 5;
#endif
}
1760
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayerManager->videoFullscreenLayer())
        return;
#endif

    NSString *newGravity = AVLayerVideoGravityResize;
    if (shouldMaintainAspectRatio())
        newGravity = AVLayerVideoGravityResizeAspect;

    // Wrap the layer update in a transaction with actions disabled so the
    // gravity change is not implicitly animated.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    [m_videoLayer.get() setVideoGravity:newGravity];
    [CATransaction commit];
}
1779
// Returns the first track in |tracks| that reports itself enabled, or nil when
// none do.
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    NSUInteger matchIndex = [tracks indexOfObjectPassingTest:^(id candidate, NSUInteger, BOOL *) {
        return [static_cast<AVAssetTrack*>(candidate) isEnabled];
    }];
    return matchIndex == NSNotFound ? nil : [tracks objectAtIndex:matchIndex];
}
1789
// Rebuilds cached track state (hasVideo/hasAudio/captions, presentation size,
// track wrapper objects) whenever the asset's or player item's track collection
// changes, then notifies the MediaPlayer of any characteristic changes.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the previous primary audio language so a change can be detected below.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    // Batch characteristic-change notifications until the end of this function.
    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif

        // Report the natural size with the track's preferred transform applied.
        presentationSizeDidChange(firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize());
    } else {
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        // Classify each enabled player item track by its asset track's media type.
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // A selected media-selection option also counts as having audio/video.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
#endif
#endif

        // Always says we have video if the AVPlayerLayer is ready for display to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // When media selection groups are available, captions come from the legible group.
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    LOG(Media, "MediaPlayerPrivateAVFoundation:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s",
        this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio provider pointed at the current first enabled audio track.
    if (m_provider)
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
#endif

    setDelayCharacteristicsChangedNotification(false);
}
1895
1896 #if ENABLE(VIDEO_TRACK)
// Diffs the player item's current tracks of |trackType| against the previously
// known track wrapper objects: replaces |oldItems| with the updated set and
// notifies |player| of each removal and addition via the given member functions.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Current platform tracks whose asset track matches the requested media type.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    // Platform tracks backing the wrappers we already have.
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Partition old wrappers into removed items and surviving items.
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    // Wrap each newly appearing platform track.
    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify after oldItems has been updated so callbacks observe consistent state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
1940
1941 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Media-selection-group flavor of the diffing helper above: refreshes the
// group's options for |characteristics|, diffs them against the existing track
// wrappers, then updates |oldItems| and notifies |player| of removals/additions.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, const Vector<String>& characteristics, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions(characteristics);

    // Only add selection options which do not have an associated persistant track.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOptionType* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        // Options backed by a real track are covered by the NSArray-based overload.
        if (![avOption respondsToSelector:@selector(track)] || ![avOption performSelector:@selector(track)])
            newSelectionOptions.add(option);
    }

    // Selection options backing the wrappers we already have.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Partition old wrappers into removed items and surviving items.
    for (auto& oldItem : oldItems) {
        if (oldItem->mediaSelectionOption() && removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    // Wrap each newly appearing selection option.
    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify after oldItems has been updated so callbacks observe consistent state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
2001 #endif
2002
// Reconciles m_audioTracks with the player item's current audio tracks and,
// when available, the audible media-selection group.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    Vector<String> characteristics = player()->preferredAudioCharacteristics();
    // Lazily create the audible selection-group wrapper on first use.
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, characteristics);
    }

    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, characteristics, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
#endif

    // Refresh the cached properties of every surviving track wrapper.
    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateAudioTracks(%p) - audio track count was %lu, is %lu", this, count, m_audioTracks.size());
#endif
}
2029
// Reconciles m_videoTracks with the player item's current video tracks and,
// when available, the visual media-selection group.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create the visual selection-group wrapper on first use.
    if (!m_visualGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, Vector<String>());
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, Vector<String>(), &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
#endif

    // Refresh the cached properties of every surviving video track wrapper.
    // (Fixed copy-paste bug: this loop previously iterated m_audioTracks.)
    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateVideoTracks(%p) - video track count was %lu, is %lu", this, count, m_videoTracks.size());
#endif
}
2055
// A DOM-based text track representation is only required while rendering into a
// fullscreen video layer, where WebKit's normal caption rendering is not visible.
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    if (m_videoFullscreenLayerManager->videoFullscreenLayer())
        return true;
#endif
    return false;
}
2064
// Keeps the caption representation layer's frame in sync with the fullscreen
// video frame. No-op unless a fullscreen layer and a representation layer exist.
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    if (!m_videoFullscreenLayerManager->videoFullscreenLayer() || !m_textTrackRepresentationLayer)
        return;

    FloatRect videoFullscreenFrame = m_videoFullscreenLayerManager->videoFullscreenFrame();
    // Prefer the actual video rect when a video layer exists; otherwise fall back
    // to the entire fullscreen frame.
    CGRect textFrame = m_videoLayer ? [m_videoLayer videoRect] : CGRectMake(0, 0, videoFullscreenFrame.width(), videoFullscreenFrame.height());
    [m_textTrackRepresentationLayer setFrame:textFrame];
#endif
}
2076
// Installs (or removes, when |representation| is null) the platform layer that
// renders text track cues while the video is in fullscreen.
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    PlatformLayer* representationLayer = representation ? representation->platformLayer() : nil;
    // Same layer as before: only its bounds may need updating.
    if (representationLayer == m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        return;
    }

    if (m_textTrackRepresentationLayer)
        [m_textTrackRepresentationLayer removeFromSuperlayer];

    m_textTrackRepresentationLayer = representationLayer;

    // Attach the new layer to the fullscreen layer once its frame is synced.
    if (m_videoFullscreenLayerManager->videoFullscreenLayer() && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayerManager->videoFullscreenLayer() addSublayer:m_textTrackRepresentationLayer.get()];
    }

#else
    UNUSED_PARAM(representation);
#endif
}
2100 #endif // ENABLE(VIDEO_TRACK)
2101
2102 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
// Lazily creates the Web Audio source provider, wired to the first enabled
// audible asset track.
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    if (!m_provider) {
        m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }

    return m_provider.get();
}
2112 #endif
2113
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    // Without an asset there is no natural size to report yet.
    if (!m_avAsset)
        return;

    setNaturalSize(m_cachedPresentationSize);
}
2121     
bool MediaPlayerPrivateAVFoundationObjC::hasSingleSecurityOrigin() const
{
    // The resolved URL is only trustworthy once AVFoundation has finished loading it.
    if (!m_avAsset)
        return false;
    if ([m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] != AVKeyValueStatusLoaded)
        return false;

    // Compare the origin of the URL AVFoundation actually resolved against the
    // origin of the URL that was originally requested.
    Ref<SecurityOrigin> originOfResolvedURL(SecurityOrigin::create(resolvedURL()));
    Ref<SecurityOrigin> originOfRequestedURL(SecurityOrigin::createFromString(assetURL()));
    return originOfResolvedURL.get().isSameSchemeHostPort(&originOfRequestedURL.get());
}
2131
// Reports whether every media load passed its CORS checks. Only answerable when
// loads are routed through our WebCoreNSURLSession; otherwise conservatively false.
bool MediaPlayerPrivateAVFoundationObjC::didPassCORSAccessCheck() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED > 101100
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    if (!Settings::isAVFoundationNSURLSessionEnabled()
        || ![resourceLoader respondsToSelector:@selector(URLSession)])
        return false;

    WebCoreNSURLSession *session = (WebCoreNSURLSession *)resourceLoader.URLSession;
    if ([session respondsToSelector:@selector(didPassCORSAccessChecks)])
        return session.didPassCORSAccessChecks;
#endif
    return false;
}
2146
2147 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Creates the AVPlayerItemVideoOutput used for software painting and attaches
// it to the current player item. No-op without an item or when one already exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    NSDictionary* attributes = nil;
#else
    // Request 32BGRA buffers so CoreGraphics can draw them directly.
    // (Modernized from dictionaryWithObjectsAndKeys: to a literal, matching the
    // literal syntax used elsewhere in this file.)
    NSDictionary* attributes = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#endif
    m_videoOutput = adoptNS([allocAVPlayerItemVideoOutputInstance() initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p) - returning %p", this, m_videoOutput.get());
}
2170
// Detaches the video output from the player item (if any) and releases it.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput(%p) - destroying  %p", this, m_videoOutput.get());

    // Clear the RetainPtr with nil (rather than the integer literal 0) since it
    // holds an Objective-C object.
    m_videoOutput = nil;
}
2182
// Pulls the pixel buffer for the current item time from the video output,
// creating the output on demand. Returns null when no new buffer is available
// for that time.
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::createPixelBuffer()
{
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    // No new frame for this time; return null (was the integer literal 0) so the
    // caller keeps displaying the last valid image.
    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return nullptr;

    return adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
}
2196
// Returns true when a frame can be painted via the video output path: either a
// frame has already been captured (m_lastImage), or the output has a new pixel
// buffer for the current item time.
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;

    if (m_lastImage)
        return true;

    // Create the output on demand so a subsequent paint can succeed.
    if (!m_videoOutput)
        createVideoOutput();

    return [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]];
}
2210
// Pulls the most recent pixel buffer from the video output and converts it to a
// CGImage cached in m_lastImage. Leaves m_lastImage untouched when no new
// buffer is available, so the previous frame can still be painted.
void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
{
    RetainPtr<CVPixelBufferRef> pixelBuffer = createPixelBuffer();

    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
    // for the requested time has already been retrieved. In this case, the last valid image (if any)
    // should be displayed.
    if (!pixelBuffer)
        return;

    // Lazily create the converter that turns CVPixelBuffers into CGImages.
    if (!m_pixelBufferConformer) {
#if USE(VIDEOTOOLBOX)
        NSDictionary *attributes = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#else
        NSDictionary *attributes = nil;
#endif
        m_pixelBufferConformer = std::make_unique<PixelBufferConformerCV>((CFDictionaryRef)attributes);
    }

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    m_lastImage = m_pixelBufferConformer->createImageFromPixelBuffer(pixelBuffer.get());

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateLastImage(%p) - creating buffer took %.4f", this, narrowPrecisionToFloat(duration));
#endif
}
2241
// Paints the current video frame from the video output into the given context,
// applying the track's preferred transform so rotated/flipped media draws correctly.
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext& context, const FloatRect& outputRect)
{
    // If we have never produced a frame, block (bounded wait) for the first one.
    if (m_videoOutput && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();

    updateLastImage();

    if (!m_lastImage)
        return;

    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(%p)", this);

    GraphicsContextStateSaver stateSaver(context);
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
    // Map the destination rect through the inverse transform so that, after
    // concatenating videoTransform below, the image lands exactly in outputRect.
    FloatRect transformedOutputRect = videoTransform.inverse().valueOr(AffineTransform()).mapRect(outputRect);

    context.concatCTM(videoTransform);
    context.drawNativeImage(m_lastImage.get(), imageRect.size(), transformedOutputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();

}
2272
// Creates an AVPlayerItemVideoOutput whose buffers are IOSurface/OpenGL(-ES)
// FBO compatible and attaches it to the player item. No-op when there is no
// player item or an output already exists.
void MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_openGLVideoOutput)
        return;

#if PLATFORM(IOS)
    NSDictionary* attributes = @{(NSString *)kCVPixelBufferIOSurfaceOpenGLESFBOCompatibilityKey: @YES};
#else
    NSDictionary* attributes = @{(NSString *)kCVPixelBufferIOSurfaceOpenGLFBOCompatibilityKey: @YES};
#endif
    m_openGLVideoOutput = adoptNS([allocAVPlayerItemVideoOutputInstance() initWithPixelBufferAttributes:attributes]);
    ASSERT(m_openGLVideoOutput);

    [m_avPlayerItem.get() addOutput:m_openGLVideoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput(%p) - returning %p", this, m_openGLVideoOutput.get());
}
2292
// Detaches the OpenGL video output from the player item (if any) and releases it.
void MediaPlayerPrivateAVFoundationObjC::destroyOpenGLVideoOutput()
{
    if (!m_openGLVideoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_openGLVideoOutput.get()];
    // Log the output actually being destroyed (previously logged m_videoOutput by mistake).
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyOpenGLVideoOutput(%p) - destroying  %p", this, m_openGLVideoOutput.get());

    m_openGLVideoOutput = 0;
}
2304
// Caches the newest pixel buffer from the OpenGL video output in m_lastOpenGLImage.
// Leaves the cached buffer untouched when no new frame has arrived.
void MediaPlayerPrivateAVFoundationObjC::updateLastOpenGLImage()
{
    if (!m_openGLVideoOutput)
        return;

    // Ask the output which item time corresponds to "now" on the host clock.
    CMTime itemTime = [m_openGLVideoOutput itemTimeForHostTime:CACurrentMediaTime()];
    if ([m_openGLVideoOutput hasNewPixelBufferForItemTime:itemTime])
        m_lastOpenGLImage = adoptCF([m_openGLVideoOutput copyPixelBufferForItemTime:itemTime itemTimeForDisplay:nil]);
}
2316
// Copies the current video frame into the caller's GL texture via a CoreVideo
// texture cache. Returns false when the copy cannot be performed (unsupported
// options, no frame available, or texture-cache creation failure).
bool MediaPlayerPrivateAVFoundationObjC::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    // Flipping and alpha premultiplication are not supported by this path.
    if (flipY || premultiplyAlpha)
        return false;

    ASSERT(context);

    if (!m_openGLVideoOutput)
        createOpenGLVideoOutput();

    updateLastOpenGLImage();

    if (!m_lastOpenGLImage)
        return false;

    size_t width = CVPixelBufferGetWidth(m_lastOpenGLImage.get());
    size_t height = CVPixelBufferGetHeight(m_lastOpenGLImage.get());

    // Lazily create the texture cache; bail out if creation fails.
    if (!m_textureCache) {
        m_textureCache = TextureCacheCV::create(*context);
        if (!m_textureCache)
            return false;
    }

    RetainPtr<CVOpenGLTextureRef> videoTexture = m_textureCache->textureFromImage(m_lastOpenGLImage.get(), outputTarget, level, internalFormat, format, type);

    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierCV>(*context);

    return m_videoTextureCopier->copyVideoTextureToPlatformTexture(videoTexture.get(), width, height, outputTexture, outputTarget, level, internalFormat, format, type, premultiplyAlpha, flipY);
}
2348
// Returns the current frame as a native (CGImage-backed) image, refreshing the
// cached image first. May be null if no frame has ever been produced.
NativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage;
}
2354
// Blocks the calling thread until the video output reports that new media data
// is available (outputMediaDataWillChange signals the semaphore), or until a
// one-second timeout elapses.
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    // Lazily create the semaphore used to hand the wakeup across threads.
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for 1 second. (Use DISPATCH_TIME_NOW rather than a bare 0 as the base time.)
    long result = dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(DISPATCH_TIME_NOW, 1 * NSEC_PER_SEC));

    if (result)
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange(%p) timed out", this);
}
2368
// AVPlayerItemOutput callback: new media data is (about to be) available, so
// wake any thread blocked in waitForVideoOutputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutput*)
{
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
2373 #endif
2374
2375 #if ENABLE(ENCRYPTED_MEDIA)
// Generates a streaming content key request for the given key system from the
// initData delivered via AVFoundation's resource loader, and reports the
// resulting key message (or a key error) back to the player.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::generateKeyRequest(const String& keySystem, const unsigned char* initDataPtr, unsigned initDataLength)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    RefPtr<Uint8Array> initData = Uint8Array::create(initDataPtr, initDataLength);
    String keyURI;
    String keyID;
    RefPtr<Uint8Array> certificate;
    if (!extractKeyURIKeyIDAndCertificateFromInitData(initData.get(), keyURI, keyID, certificate))
        return MediaPlayer::InvalidPlayerState;

    // The key URI must correspond to a pending AVAssetResourceLoadingRequest.
    if (!m_keyURIToRequestMap.contains(keyURI))
        return MediaPlayer::InvalidPlayerState;

    String sessionID = createCanonicalUUIDString();

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_keyURIToRequestMap.get(keyURI);

    RetainPtr<NSData> certificateData = adoptNS([[NSData alloc] initWithBytes:certificate->baseAddress() length:certificate->byteLength()]);
    NSString* assetStr = keyID;
    RetainPtr<NSData> assetID = [NSData dataWithBytes: [assetStr cStringUsingEncoding:NSUTF8StringEncoding] length:[assetStr lengthOfBytesUsingEncoding:NSUTF8StringEncoding]];
    NSError* error = nil;
    RetainPtr<NSData> keyRequest = [avRequest.get() streamingContentKeyRequestDataForApp:certificateData.get() contentIdentifier:assetID.get() options:nil error:&error];

    if (!keyRequest) {
        // Surface the underlying error code to the client; messaging a nil
        // underlyingError yields 0, which is acceptable here.
        NSError* underlyingError = [[error userInfo] objectForKey:NSUnderlyingErrorKey];
        player()->keyError(keySystem, sessionID, MediaPlayerClient::DomainError, [underlyingError code]);
        return MediaPlayer::NoError;
    }

    RefPtr<ArrayBuffer> keyRequestBuffer = ArrayBuffer::create([keyRequest.get() bytes], [keyRequest.get() length]);
    RefPtr<Uint8Array> keyRequestArray = Uint8Array::create(keyRequestBuffer, 0, keyRequestBuffer->byteLength());
    player()->keyMessage(keySystem, sessionID, keyRequestArray->data(), keyRequestArray->byteLength(), URL());

    // Move ownership of the AVAssetResourceLoadingRequest from the keyIDToRequestMap to the sessionIDToRequestMap:
    m_sessionIDToRequestMap.set(sessionID, avRequest);
    m_keyURIToRequestMap.remove(keyURI);

    return MediaPlayer::NoError;
}
2417
// Delivers key data for a previously generated key request, completing the
// pending AVAssetResourceLoadingRequest associated with sessionID.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::addKey(const String& keySystem, const unsigned char* keyPtr, unsigned keyLength, const unsigned char* initDataPtr, unsigned initDataLength, const String& sessionID)
{
    // initData is not used by this key system.
    UNUSED_PARAM(initDataPtr);
    UNUSED_PARAM(initDataLength);

    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    RetainPtr<AVAssetResourceLoadingRequest> loadingRequest = m_sessionIDToRequestMap.get(sessionID);
    RetainPtr<NSData> keyData = adoptNS([[NSData alloc] initWithBytes:keyPtr length:keyLength]);

    // Hand the key bytes to AVFoundation and complete the loading request.
    [[loadingRequest.get() dataRequest] respondWithData:keyData.get()];
    [loadingRequest.get() finishLoading];
    m_sessionIDToRequestMap.remove(sessionID);

    player()->keyAdded(keySystem, sessionID);

    return MediaPlayer::NoError;
}
2438
// Cancels a pending key request by discarding the loading request stored for sessionID.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::cancelKeyRequest(const String& keySystem, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    // HashMap::remove() reports whether an entry actually existed for sessionID.
    return m_sessionIDToRequestMap.remove(sessionID) ? MediaPlayer::NoError : MediaPlayer::InvalidPlayerState;
}
2450 #endif
2451
2452 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Removes and returns the pending loading request for keyURI (null if none).
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
2457
// Called when new keys become available: fulfill any pending loading requests
// whose key data is now cached, then drop them from the pending map.
void MediaPlayerPrivateAVFoundationObjC::keyAdded()
{
    // Collect fulfilled ids first and remove afterwards — the map must not be
    // mutated while it is being iterated.
    Vector<String> satisfiedKeyIds;

    for (auto& requestPair : m_keyURIToRequestMap) {
        auto keyData = player()->cachedKeyForKeyId(requestPair.key);
        if (!keyData)
            continue;

        fulfillRequestWithKeyData(requestPair.value.get(), keyData.get());
        satisfiedKeyIds.append(requestPair.key);
    }

    for (auto& keyId : satisfiedKeyIds)
        m_keyURIToRequestMap.remove(keyId);
}
2477
// Creates a CDM session for the given key system, or null when unsupported.
std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem, CDMSessionClient* client)
{
    if (!keySystemIsSupported(keySystem))
        return nullptr;

    return std::make_unique<CDMSessionAVFoundationObjC>(this, client);
}
2485 #endif
2486
2487 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
// Rebuilds the list of legacy closed-caption text tracks from the player item's
// current tracks, reusing existing track objects where possible and reporting
// additions/removals to the client.
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    // Start by assuming every track was removed; tracks we can match against a
    // current AVPlayerItemTrack are taken back off this list.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        // Iterate backwards so remove() does not disturb unvisited indices.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            // NOTE(review): the category check indexes removedTextTracks but the
            // cast below indexes m_textTracks with the same i; the two vectors
            // start identical but diverge as remove() is called — verify intended.
            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(m_textTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;
        
        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2522 #endif
2523
// Returns the asset's audible tracks, but only once the "tracks" key has
// finished loading; nil otherwise (never triggers synchronous key loading).
NSArray* MediaPlayerPrivateAVFoundationObjC::safeAVAssetTracksForAudibleMedia()
{
    if (!m_avAsset || [m_avAsset.get() statusOfValueForKey:@"tracks" error:NULL] != AVKeyValueStatusLoaded)
        return nil;

    return [m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible];
}
2534
2535 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// True once the asset's availableMediaCharacteristicsWithMediaSelectionOptions
// key has loaded, meaning media selection groups can be queried safely.
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    if (!m_avAsset)
        return false;

    return [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2546
// Legible (caption/subtitle) selection group, or nil before selection data loads.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible] : nil;
}
2554
// Audible selection group, or nil before selection data loads.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible] : nil;
}
2562
// Visual selection group, or nil before selection data loads.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual] : nil;
}
2570
// Rebuilds the generic (media-selection based) text track list from the asset's
// legible selection group, preserving track objects that match an existing option.
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Assume all existing tracks were removed; matched tracks are taken back off this list.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        // Iterate backwards so remove() does not disturb unvisited indices.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy closed-caption tracks are not represented by selection options.
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2626
// Lazily creates the single in-band metadata text track (for timed metadata
// cues) and registers it with the player.
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    if (m_metadataTrack)
        return;

    m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
    // Dispatch type string identifying Apple HTTP Live Streaming timed metadata.
    m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
    player()->addTextTrack(m_metadataTrack);
}
2636
// Forwards a batch of cue data (attributed strings and/or native samples) for
// the given media time to the currently selected text track.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    ASSERT(time >= MediaTime::zeroTime());

    // No cues can be delivered without a selected track.
    if (!m_currentTextTrack)
        return;

    m_currentTextTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), reinterpret_cast<CFArrayRef>(nativeSamples), time);
}
2646
// Discards any partially delivered cues on the current text track (e.g. after a seek).
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::flushCues(%p)", this);

    if (!m_currentTextTrack)
        return;
    
    m_currentTextTrack->resetCueValues();
}
2656 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2657
// Selects the given in-band text track (or deselects all when track is null),
// updating AVFoundation's closed-caption display and media selection state.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");
        
    m_currentTextTrack = track;

    if (track) {
        // Route selection by track category: legacy captions use the player's
        // caption display; selection-based tracks use the legible selection group.
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // Deselect any legible option before disabling caption display.
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
    }

}
2686
// Determines (and caches in m_languageOfPrimaryAudioTrack) the language of the
// primary audio track, preferring the currently selected audible media-selection
// option when one exists.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    // Return the cached answer when we have one.
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        // Zero or multiple audio tracks: no single "primary" language exists.
        m_languageOfPrimaryAudioTrack = emptyString();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - %lu audio tracks, returning emptyString()", this, static_cast<unsigned long>(tracks ? [tracks count] : 0));
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

#if !LOG_DISABLED
    if (m_languageOfPrimaryAudioTrack == emptyString())
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
    else
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
#endif

    return m_languageOfPrimaryAudioTrack;
}
2728
2729 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// Returns whether playback is currently routed to a wireless target: on Mac this
// consults the explicit playback target (AVFoundation or mock); on iOS it asks
// the AVPlayer directly.
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    bool wirelessTarget = false;

#if !PLATFORM(IOS)
    if (m_playbackTarget) {
        if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation)
            wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
        else
            wirelessTarget = m_shouldPlayToPlaybackTarget && m_playbackTarget->hasActiveRoute();
    }
#else
    wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
#endif

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless(%p) - returning %s", this, boolString(wirelessTarget));

    return wirelessTarget;
}
2749
// Maps the player's current external playback state to a MediaPlayer target type.
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

#if PLATFORM(IOS)
    switch (wkExernalDeviceTypeForPlayer(m_avPlayer.get())) {
    case wkExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case wkExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case wkExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;

#else
    // On Mac the only wireless target type reported here is AirPlay.
    return MediaPlayer::TargetTypeAirPlay;
#endif
}
2772
// Returns the display name of the wireless playback device, or the empty string
// when there is no player (or, on Mac, no playback target).
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();

    String wirelessTargetName;
#if !PLATFORM(IOS)
    if (m_playbackTarget)
        wirelessTargetName = m_playbackTarget->deviceName();
#else
    wirelessTargetName = wkExernalDeviceDisplayNameForPlayer(m_avPlayer.get());
#endif
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName(%p) - returning %s", this, wirelessTargetName.utf8().data());

    return wirelessTargetName;
}
2789
// Returns true when external (wireless) video playback is disallowed. Refreshes
// the cached allow-flag from the AVPlayer whenever a player exists.
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    if (m_avPlayer) {
        m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled(%p) - returning %s", this, boolString(!m_allowsWirelessVideoPlayback));
    }

    return !m_allowsWirelessVideoPlayback;
}
2800
// Records the new wireless-video preference and pushes it to the AVPlayer if one exists.
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(%p) - %s", this, boolString(disabled));
    m_allowsWirelessVideoPlayback = !disabled;
    if (!m_avPlayer)
        return;

    // Suppress re-entrant observer callbacks while mutating the player.
    setDelayCallbacks(true);
    [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
    setDelayCallbacks(false);
}
2812
2813 #if !PLATFORM(IOS)
// Adopts a new playback target. For AVFoundation targets the output context is
// cached; playback-to-target is switched off if the new target has no active route.
void MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
{
    m_playbackTarget = WTFMove(target);

    // Only AVFoundation targets carry an AVOutputContext; mock targets get null.
    m_outputContext = m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation ? toMediaPlaybackTargetMac(m_playbackTarget.get())->outputContext() : nullptr;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(%p) - target = %p, device name = %s", this, m_outputContext.get(), m_playbackTarget->deviceName().utf8().data());

    if (!m_playbackTarget->hasActiveRoute())
        setShouldPlayToPlaybackTarget(false);
}
2825
// Starts/stops routing playback to the previously set target. AVFoundation
// targets get their output context installed on the AVPlayer; mock targets just
// schedule an asynchronous "target changed" notification (used for testing).
void MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(bool shouldPlay)
{
    if (m_shouldPlayToPlaybackTarget == shouldPlay)
        return;

    m_shouldPlayToPlaybackTarget = shouldPlay;

    if (!m_playbackTarget)
        return;

    if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation) {
        AVOutputContext *newContext = shouldPlay ? m_outputContext.get() : nil;

        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(%p) - target = %p, shouldPlay = %s", this, newContext, boolString(shouldPlay));

        if (!m_avPlayer)
            return;

        RetainPtr<AVOutputContext> currentContext = m_avPlayer.get().outputContext;
        // Avoid touching the player when the output context would not actually change.
        if ((!newContext && !currentContext.get()) || [currentContext.get() isEqual:newContext])
            return;

        setDelayCallbacks(true);
        m_avPlayer.get().outputContext = newContext;
        setDelayCallbacks(false);

        return;
    }

    ASSERT(m_playbackTarget->targetType() == MediaPlaybackTarget::Mock);

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(%p) - target = {Mock}, shouldPlay = %s", this, boolString(shouldPlay));

    // Notify asynchronously on the main thread; the weak pointer guards against
    // the player being destroyed before the notification runs.
    setDelayCallbacks(true);
    auto weakThis = createWeakPtr();
    scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis] {
        if (!weakThis)
            return;
        weakThis->playbackTargetIsWirelessDidChange();
    }));
    setDelayCallbacks(false);
}
2868 #endif // !PLATFORM(IOS)
2869
// On iOS, allow external playback while an external screen is active only when
// the element is in standard fullscreen mode.
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
    if (!m_avPlayer)
        return;

#if PLATFORM(IOS)
    [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:player()->fullscreenMode() & MediaPlayer::VideoFullscreenModeStandard];
#endif
}
2879 #endif
2880
// KVO: the AVPlayerItem status changed; cache it and refresh derived state.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;

    updateStates();
}
2887
// A likelyToKeepUp change is in flight; defer state updates until it completes.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    m_pendingStatusChanges++;
}
2892
// Records the new likelyToKeepUp value; recomputes state only once every
// outstanding will/did-change pair has balanced out.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2901
// A bufferEmpty change is in flight; defer state updates until it completes.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    m_pendingStatusChanges++;
}
2906
// Records the new bufferEmpty value; recomputes state only once every
// outstanding will/did-change pair has balanced out.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2915
// A bufferFull change is in flight; defer state updates until it completes.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    m_pendingStatusChanges++;
}
2920
// Records the new bufferFull value; recomputes state only once every
// outstanding will/did-change pair has balanced out.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2929
// Caches the new seekable ranges, then propagates the change and refreshes state.
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray> seekableRanges)
{
    m_cachedSeekableRanges = seekableRanges;

    seekableTimeRangesChanged();
    updateStates();
}
2937
// Caches the new loaded ranges, then propagates the change and refreshes state.
void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray> loadedRanges)
{
    m_cachedLoadedRanges = loadedRanges;

    loadedTimeRangesChanged();
    updateStates();
}
2945
// Records whether the first frame is ready for display; a transition to ready
// while no video is known forces a track re-scan.
void MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange(bool isReady)
{
    m_cachedIsReadyForDisplay = isReady;
    if (!hasVideo() && isReady)
        tracksChanged();
    updateStates();
}
2953
// A track's enabled state flipped; re-scan tracks and refresh player state.
void MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange(bool)
{
    tracksChanged();
    updateStates();
}
2959
// Enables/disables buffering by attaching or detaching the AVPlayerItem from
// the AVPlayer (a detached item stops AVFoundation from buffering more data).
void MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(bool shouldBuffer)
{
    // Log under the actual method name (message previously said "shouldBufferData").
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(%p) - %s", this, boolString(shouldBuffer));
    if (m_shouldBufferData == shouldBuffer)
        return;

    m_shouldBufferData = shouldBuffer;
    
    if (!m_avPlayer)
        return;

    setAVPlayerItem(shouldBuffer ? m_avPlayerItem.get() : nil);
}
2973
2974 #if ENABLE(DATACUE_VALUE)
// Maps an AVFoundation metadata key space to the corresponding metadata-cue
// type string; returns the empty atom for unrecognized key spaces.
static const AtomicString& metadataType(NSString *avMetadataKeySpace)
{
    static NeverDestroyed<const AtomicString> quickTimeUserData("com.apple.quicktime.udta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> isoUserData("org.mp4ra", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> quickTimeMetadata("com.apple.quicktime.mdta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> iTunesMetadata("com.apple.itunes", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> id3Metadata("org.id3", AtomicString::ConstructFromLiteral);

    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeUserData])
        return quickTimeUserData;
    // AVMetadataKeySpaceISOUserData is null-checked — presumably it may be
    // unavailable (soft-linked) at runtime; verify against the SPI/soft-link layer.
    if (AVMetadataKeySpaceISOUserData && [avMetadataKeySpace isEqualToString:AVMetadataKeySpaceISOUserData])
        return isoUserData;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeMetadata])
        return quickTimeMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceiTunes])
        return iTunesMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceID3])
        return id3Metadata;

    return emptyAtom;
}
2996 #endif
2997
// Handler for KVO notifications of the AVPlayerItem's "timedMetadata" property,
// scheduled onto the main thread by WebCoreAVFMovieObserver. Caches the metadata
// array and, when DATACUE_VALUE is enabled, converts each AVMetadataItem into a
// data cue on the metadata text track.
void MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(RetainPtr<NSArray> metadata, const MediaTime& mediaTime)
{
    // KVO may deliver NSNull instead of nil for a cleared value; normalize here.
    m_currentMetaData = metadata && ![metadata isKindOfClass:[NSNull class]] ? metadata : nil;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(%p) - adding %i cues at time %s", this, m_currentMetaData ? static_cast<int>([m_currentMetaData.get() count]) : 0, toString(mediaTime).utf8().data());

#if ENABLE(DATACUE_VALUE)
    // While seeking the item time is in flux; skip cue creation entirely.
    if (seeking())
        return;

    if (!m_metadataTrack)
        processMetadataTrack();

    // An empty update means the current metadata interval ended: close out any
    // still-pending cues at the given time and stop.
    if (!metadata || [metadata isKindOfClass:[NSNull class]]) {
        m_metadataTrack->updatePendingCueEndTimes(mediaTime);
        return;
    }

    // Set the duration of all incomplete cues before adding new ones.
    MediaTime earliestStartTime = MediaTime::positiveInfiniteTime();
    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        // Item times are clamped to zero before use.
        MediaTime start = std::max(toMediaTime(item.time), MediaTime::zeroTime());
        if (start < earliestStartTime)
            earliestStartTime = start;
    }
    m_metadataTrack->updatePendingCueEndTimes(earliestStartTime);

    // Add one data cue per item. Items without a valid duration get an
    // open-ended (infinite) end time, to be closed by a later update.
    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = std::max(toMediaTime(item.time), MediaTime::zeroTime());
        MediaTime end = MediaTime::positiveInfiniteTime();
        if (CMTIME_IS_VALID(item.duration))
            end = start + toMediaTime(item.duration);

        AtomicString type = nullAtom;
        if (item.keySpace)
            type = metadataType(item.keySpace);

        m_metadataTrack->addDataCue(start, end, SerializedPlatformRepresentationMac::create(item), type);
    }
#endif
}
3039
// Handler for KVO notifications of the AVPlayerItem's "tracks" property,
// scheduled onto the main thread by WebCoreAVFMovieObserver. Refreshes the
// cached track list and rebuilds the per-track "enabled" KVO registrations.
void MediaPlayerPrivateAVFoundationObjC::tracksDidChange(RetainPtr<NSArray> tracks)
{
    // Unregister the "enabled" observers from the outgoing track set before
    // replacing it.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];

    NSArray *assetTracks = [m_avAsset tracks];

    // Keep tracks belonging to the asset itself; for streaming tracks, keep only
    // those not represented by a media selection group (grouped tracks are
    // surfaced through the selection-group code paths instead).
    m_cachedTracks = [tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id obj, NSUInteger, BOOL*) {
        AVAssetTrack* assetTrack = [obj assetTrack];

        if ([assetTracks containsObject:assetTrack])
            return YES;

        // Track is a streaming track. Omit if it belongs to a valid AVMediaSelectionGroup.
        if (!hasLoadedMediaSelectionGroups())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicAudible] && safeMediaSelectionGroupForAudibleMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicVisual] && safeMediaSelectionGroupForVisualMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible] && safeMediaSelectionGroupForLegibleMedia())
            return NO;

        return YES;
    }]];

    // Observe enablement changes on the incoming track set.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track addObserver:m_objcObserver.get() forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayerItemTrack];

    // The track set changed, so any cached total byte count is stale.
    m_cachedTotalBytes = 0;

    tracksChanged();
    updateStates();
}
3077
// Handler for KVO notifications of the AVPlayerItem's "hasEnabledAudio"
// property; caches the value and re-runs track processing.
void MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange(bool hasEnabledAudio)
{
    m_cachedHasEnabledAudio = hasEnabledAudio;

    tracksChanged();
    updateStates();
}
3085
// Handler for KVO notifications of the AVPlayerItem's "presentationSize"
// property; caches the size and notifies the base class of the size change.
void MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange(FloatSize size)
{
    m_cachedPresentationSize = size;

    sizeChanged();
    updateStates();
}
3093
// Handler for KVO notifications of the AVPlayerItem's "duration" property;
// caches the new duration and invalidates the base class's cached value.
void MediaPlayerPrivateAVFoundationObjC::durationDidChange(const MediaTime& duration)
{
    m_cachedDuration = duration;

    invalidateCachedDuration();
}
3100
// Handler for KVO notifications of the AVPlayer's "rate" property; caches the
// rate, then updates ready/network state before announcing the rate change.
void MediaPlayerPrivateAVFoundationObjC::rateDidChange(double rate)
{
    m_cachedRate = rate;

    updateStates();
    rateChanged();
}
3108     
3109 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// Forwards AVPlayer external-playback KVO notifications ("externalPlaybackActive"
// / "allowsExternalPlayback", see playerKVOProperties()) to the
// platform-independent wireless-target handling.
void MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange()
{
    playbackTargetIsWirelessChanged();
}
3114 #endif
3115
// Caches the AVPlayerItem's "canPlayFastForward" value. No further notification
// is sent from here.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange(bool newValue)
{
    m_cachedCanPlayFastForward = newValue;
}
3120
// Caches the AVPlayerItem's "canPlayFastReverse" value. No further notification
// is sent from here.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange(bool newValue)
{
    m_cachedCanPlayFastReverse = newValue;
}
3125
// Returns the asset's resolved (post-redirect) URL once AVFoundation has
// finished loading the "resolvedURL" key; until then, defers to the base class,
// which reports the URL the load was started with.
URL MediaPlayerPrivateAVFoundationObjC::resolvedURL() const
{
    if (m_avAsset && [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] == AVKeyValueStatusLoaded)
        return URL([m_avAsset resolvedURL]);

    return MediaPlayerPrivateAVFoundation::resolvedURL();
}
3133
// Returns the AVAsset property keys the player asks AVFoundation to load
// asynchronously before the asset is used. Built once and intentionally never
// released.
NSArray* assetMetadataKeyNames()
{
    static NSArray* keys;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        keys = [[NSArray alloc] initWithObjects:
            @"duration",
            @"naturalSize",
            @"preferredTransform",
            @"preferredVolume",
            @"preferredRate",
            @"playable",
            @"resolvedURL",
            @"tracks",
            @"availableMediaCharacteristicsWithMediaSelectionOptions",
            nil];
    });
    return keys;
}
3151
// Returns the AVPlayerItem key paths observed via KVO by
// WebCoreAVFMovieObserver. Built once and intentionally never released.
NSArray* itemKVOProperties()
{
    static NSArray* keys;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        keys = [[NSArray alloc] initWithObjects:
            @"presentationSize",
            @"status",
            @"asset",
            @"tracks",
            @"seekableTimeRanges",
            @"loadedTimeRanges",
            @"playbackLikelyToKeepUp",
            @"playbackBufferFull",
            @"playbackBufferEmpty",
            @"duration",
            @"hasEnabledAudio",
            @"timedMetadata",
            @"canPlayFastForward",
            @"canPlayFastReverse",
            nil];
    });
    return keys;
}
3174
// Returns the AVAssetTrack property keys loaded asynchronously for each track.
// Built once and intentionally never released.
NSArray* assetTrackMetadataKeyNames()
{
    static NSArray* keys;
    if (!keys) {
        keys = [[NSArray alloc] initWithObjects:@"totalSampleDataLength",
                    @"mediaType",
                    @"enabled",
                    @"preferredTransform",
                    @"naturalSize",
                    nil];
    }
    return keys;
}
3180
// Returns the AVPlayer key paths observed via KVO by WebCoreAVFMovieObserver.
// The external-playback keys are only observed when wireless playback targets
// are compiled in. Built once and intentionally never released.
NSArray* playerKVOProperties()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"rate",
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
                            @"externalPlaybackActive", @"allowsExternalPlayback",
#endif
                            nil];
    return keys;
}
3190 } // namespace WebCore
3191
// Observer object registered by MediaPlayerPrivateAVFoundationObjC for KVO
// notifications and AVFoundation delegate callbacks. Callbacks may arrive on
// arbitrary threads, so each handler schedules its work onto the main thread
// before touching the player (m_callback). m_callback is a raw, non-owning
// pointer, cleared via -disconnect when the player goes away.
@implementation WebCoreAVFMovieObserver

- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

// Severs the link to the player; later callbacks bail out on the null checks below.
- (void)disconnect
{
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    m_callback = 0;
}

// Asset metadata finished loading; notify the player on the main thread.
- (void)metadataLoaded
{
    if (!m_callback)
        return;

    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
}

// The player item played to its end time.
- (void)didEnd:(NSNotification *)unusedNotification
{
    UNUSED_PARAM(unusedNotification);
    if (!m_callback)
        return;
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
}

// Central KVO dispatch. Binds the appropriate MediaPlayerPrivateAVFoundationObjC
// handler (plus the observed value) into `function`, then schedules it on the
// main thread guarded by a WeakPtr to the player.
- (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context
{
    UNUSED_PARAM(object);
    id newValue = [change valueForKey:NSKeyValueChangeNewKey];

    if (!m_callback)
        return;

    // True for "prior" notifications, which KVO delivers before the value
    // actually changes; a few item properties are handled in both phases.
    bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];

#if !LOG_DISABLED
    if (willChange)
        LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - will change, keyPath = %s", self, [keyPath UTF8String]);
    else {
        RetainPtr<NSString> valueString = adoptNS([[NSString alloc] initWithFormat:@"%@", newValue]);
        LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - did change, keyPath = %s, value = %s", self, [keyPath UTF8String], [valueString.get() UTF8String]);
    }
#endif

    std::function<void ()> function;

    if (context == MediaPlayerAVFoundationObservationContextAVPlayerLayer) {
        if ([keyPath isEqualToString:@"readyForDisplay"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange, m_callback, [newValue boolValue]);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItemTrack) {
        if ([keyPath isEqualToString:@"enabled"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange, m_callback, [newValue boolValue]);
    }

    // "Will change" phase for AVPlayerItem buffering properties.
    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && willChange) {
        if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange, m_callback);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && !willChange) {
        // A value changed for an AVPlayerItem
        if ([keyPath isEqualToString:@"status"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange, m_callback, [newValue intValue]);
        else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"asset"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::setAsset, m_callback, RetainPtr<id>(newValue));
        else if ([keyPath isEqualToString:@"loadedTimeRanges"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"seekableTimeRanges"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"tracks"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::tracksDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"hasEnabledAudio"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"presentationSize"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange, m_callback, FloatSize([newValue sizeValue]));
        else if ([keyPath isEqualToString:@"duration"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::durationDidChange, m_callback, toMediaTime([newValue CMTimeValue]));
        else if ([keyPath isEqualToString:@"timedMetadata"] && newValue) {
            // Capture the item's current time (clamped to zero) so metadata cues
            // are stamped with the moment they arrived.
            MediaTime now;
            CMTime itemTime = [(AVPlayerItemType *)object currentTime];
            if (CMTIME_IS_NUMERIC(itemTime))
                now = std::max(toMediaTime(itemTime), MediaTime::zeroTime());
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::metadataDidArrive, m_callback, RetainPtr<NSArray>(newValue), now);
        } else if ([keyPath isEqualToString:@"canPlayFastReverse"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"canPlayFastForward"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange, m_callback, [newValue boolValue]);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayer && !willChange) {
        // A value changed for an AVPlayer.
        if ([keyPath isEqualToString:@"rate"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::rateDidChange, m_callback, [newValue doubleValue]);
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
        else if ([keyPath isEqualToString:@"externalPlaybackActive"] || [keyPath isEqualToString:@"allowsExternalPlayback"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange, m_callback);
#endif
    }

    // Unrecognized key path / context combination: nothing to do.
    if (!function)
        return;

    auto weakThis = m_callback->createWeakPtr();
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis, function]{
        // weakThis and function both refer to the same MediaPlayerPrivateAVFoundationObjC instance. If the WeakPtr has
        // been cleared, the underlying object has been destroyed, and it is unsafe to call function().
        if (!weakThis)
            return;
        function();
    }));
}

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// AVPlayerItemLegibleOutput delegate: forwards newly emitted attributed strings
// (in-band text cues) to the player on the main thread.
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime
{
    UNUSED_PARAM(output);
    UNUSED_PARAM(nativeSamples);

    if (!m_callback)
        return;

    // Retain self and the arrays so they survive until the main-thread lambda runs.
    RetainPtr<WebCoreAVFMovieObserver> strongSelf = self;
    RetainPtr<NSArray> strongStrings = strings;
    RetainPtr<NSArray> strongSamples = nativeSamples;
    callOnMainThread([strongSelf, strongStrings, strongSamples, itemTime] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (!callback)
            return;
        MediaTime time = std::max(toMediaTime(itemTime), MediaTime::zeroTime());
        callback->processCue(strongStrings.get(), strongSamples.get(), time);
    });
}

// Legible output delegate: the output's cue sequence was flushed; tell the
// player to drop its pending cues.
- (void)outputSequenceWasFlushed:(id)output
{
    UNUSED_PARAM(output);

    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFMovieObserver> strongSelf = self;
    callOnMainThread([strongSelf] {
        if (MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback)
            callback->flushCues();
    });
}
#endif

@end
3361
3362 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// AVAssetResourceLoader delegate that forwards resource-loading and
// authentication callbacks to the player. These callbacks can be delivered on a
// loader work queue, so each hops to the main thread before touching
// m_callback (a raw, non-owning pointer, re-targetable via -setCallback:).
@implementation WebCoreAVFLoaderDelegate

- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

// Returns YES so AVFoundation waits while the request is processed
// asynchronously on the main thread; the request is finished (with an error)
// if the player declines or has gone away.
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    RetainPtr<WebCoreAVFLoaderDelegate> strongSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> strongRequest = loadingRequest;
    callOnMainThread([strongSelf, strongRequest] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        // Player went away before the hop: fail the request so AVFoundation
        // does not wait forever.
        if (!callback) {
            [strongRequest finishLoadingWithError:nil];
            return;
        }

        if (!callback->shouldWaitForLoadingOfResource(strongRequest.get()))
            [strongRequest finishLoadingWithError:nil];
    });

    return YES;
}

// Same hand-off pattern for authentication challenges. Server-trust challenges
// are declined here (returns NO) and left to default handling.
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForResponseToAuthenticationChallenge:(NSURLAuthenticationChallenge *)challenge
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    if ([[[challenge protectionSpace] authenticationMethod] isEqualToString:NSURLAuthenticationMethodServerTrust])
        return NO;

    RetainPtr<WebCoreAVFLoaderDelegate> strongSelf = self;
    RetainPtr<NSURLAuthenticationChallenge> strongChallenge = challenge;
    callOnMainThread([strongSelf, strongChallenge] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        // Player went away: cancel the challenge rather than leave it pending.
        if (!callback) {
            [[strongChallenge sender] cancelAuthenticationChallenge:strongChallenge.get()];
            return;
        }

        if (!callback->shouldWaitForResponseToAuthenticationChallenge(strongChallenge.get()))
            [[strongChallenge sender] cancelAuthenticationChallenge:strongChallenge.get()];
    });

    return YES;
}

// An in-flight loading request was cancelled; let the player clean up any
// associated state.
- (void)resourceLoader:(AVAssetResourceLoader *)resourceLoader didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFLoaderDelegate> strongSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> strongRequest = loadingRequest;
    callOnMainThread([strongSelf, strongRequest] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (callback)
            callback->didCancelLoadingRequest(strongRequest.get());
    });
}

// Re-targets (or clears) the player this delegate reports to.
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    m_callback = callback;
}
@end
3441 #endif
3442
3443 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Pull delegate for the AVPlayerItemVideoOutput: forwards "new video data is
// available" notifications to the player. m_callback is a raw, non-owning
// pointer, re-targetable via -setCallback:.
@implementation WebCoreAVFPullDelegate
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    self = [super init];
    if (self)
        m_callback = callback;
    return self;
}

// Re-targets (or clears) the player this delegate reports to.
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    m_callback = callback;
}

// The video output will have new media data; forward directly (no main-thread
// hop here, unlike the other delegates in this file).
- (void)outputMediaDataWillChange:(AVPlayerItemVideoOutput *)output
{
    if (m_callback)
        m_callback->outputMediaDataWillChange(output);
}

- (void)outputSequenceWasFlushed:(AVPlayerItemVideoOutput *)output
{
    UNUSED_PARAM(output);
    // No-op.
}
@end
3470 #endif
3471
3472 #endif
<