[Mac] Adopt MediaResourceLoader (instead of CachedResourceLoader) in WebCoreNSURLSession.
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27
28 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
29 #import "MediaPlayerPrivateAVFoundationObjC.h"
30
31 #import "AVFoundationSPI.h"
32 #import "AVTrackPrivateAVFObjCImpl.h"
33 #import "AudioSourceProviderAVFObjC.h"
34 #import "AudioTrackPrivateAVFObjC.h"
35 #import "AuthenticationChallenge.h"
36 #import "BlockExceptions.h"
37 #import "CDMSessionAVFoundationObjC.h"
38 #import "Cookie.h"
39 #import "ExceptionCodePlaceholder.h"
40 #import "Extensions3D.h"
41 #import "FloatConversion.h"
42 #import "FloatConversion.h"
43 #import "GraphicsContext.h"
44 #import "GraphicsContext3D.h"
45 #import "GraphicsContextCG.h"
46 #import "InbandMetadataTextTrackPrivateAVF.h"
47 #import "InbandTextTrackPrivateAVFObjC.h"
48 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
49 #import "OutOfBandTextTrackPrivateAVF.h"
50 #import "URL.h"
51 #import "Logging.h"
52 #import "MediaPlaybackTargetMac.h"
53 #import "MediaPlaybackTargetMock.h"
54 #import "MediaSelectionGroupAVFObjC.h"
55 #import "MediaTimeAVFoundation.h"
56 #import "PlatformTimeRanges.h"
57 #import "QuartzCoreSPI.h"
58 #import "SecurityOrigin.h"
59 #import "SerializedPlatformRepresentationMac.h"
60 #import "Settings.h"
61 #import "TextEncoding.h"
62 #import "TextTrackRepresentation.h"
63 #import "UUID.h"
64 #import "VideoTrackPrivateAVFObjC.h"
65 #import "WebCoreAVFResourceLoader.h"
66 #import "WebCoreCALayerExtras.h"
67 #import "WebCoreNSURLSession.h"
68 #import "WebCoreSystemInterface.h"
69 #import <functional>
70 #import <map>
71 #import <objc/runtime.h>
72 #import <runtime/DataView.h>
73 #import <runtime/JSCInlines.h>
74 #import <runtime/TypedArrayInlines.h>
75 #import <runtime/Uint16Array.h>
76 #import <runtime/Uint32Array.h>
77 #import <runtime/Uint8Array.h>
78 #import <wtf/CurrentTime.h>
79 #import <wtf/ListHashSet.h>
80 #import <wtf/NeverDestroyed.h>
81 #import <wtf/OSObjectPtr.h>
82 #import <wtf/text/CString.h>
83 #import <wtf/text/StringBuilder.h>
84
85 #if ENABLE(AVF_CAPTIONS)
86 #include "TextTrack.h"
87 #endif
88
89 #import <AVFoundation/AVFoundation.h>
90
91 #if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
92 #import "VideoFullscreenLayerManager.h"
93 #endif
94
95 #if PLATFORM(IOS)
96 #import "WAKAppKitStubs.h"
97 #import <CoreImage/CoreImage.h>
98 #import <mach/mach_port.h>
99 #else
100 #import <Foundation/NSGeometry.h>
101 #import <QuartzCore/CoreImage.h>
102 #endif
103
104 #if USE(VIDEOTOOLBOX)
105 #import <CoreVideo/CoreVideo.h>
106 #import <VideoToolbox/VideoToolbox.h>
107 #endif
108
109 #if USE(CFNETWORK)
110 #include "CFNSURLConnectionSPI.h"
111 #endif
112
113 #if PLATFORM(IOS)
114 #include <OpenGLES/ES3/glext.h>
115 #endif
116
// NOTE(review): presumably required because WTF::HashSet's iterator does not provide the
// nested typedefs std::iterator_traits expects, and some <algorithm> use below (via
// MediaSelectionGroupAVFObjC) needs value_type — confirm against the HashSet definition.
117 namespace std {
118 template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
119     typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
120 };
121 }
122
// Category declarations exposing AVFoundation properties this file reads.
// NOTE(review): these appear to be SPI / not-yet-public properties declared here so the
// compiler accepts the message sends below — confirm availability against the deployed SDK.
123 #if ENABLE(AVF_CAPTIONS)
124 // Note: This must be defined before our SOFT_LINK macros:
125 @class AVMediaSelectionOption;
126 @interface AVMediaSelectionOption (OutOfBandExtensions)
127 @property (nonatomic, readonly) NSString* outOfBandSource;
128 @property (nonatomic, readonly) NSString* outOfBandIdentifier;
129 @end
130 #endif
131
132 @interface AVURLAsset (WebKitExtensions)
133 @property (nonatomic, readonly) NSURL *resolvedURL;
134 @end
135
// Aliases for the soft-linked AVFoundation classes (the #define blocks below rebind the
// bare class names to soft-link getters, so code uses these *Type aliases for declarations).
// NOTE(review): AVMediaSelectionGroupType / AVMediaSelectionOptionType are declared again
// under HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) further down — the duplicate is benign
// (identical typedefs) but one copy could be removed.
136 typedef AVPlayer AVPlayerType;
137 typedef AVPlayerItem AVPlayerItemType;
138 typedef AVPlayerItemLegibleOutput AVPlayerItemLegibleOutputType;
139 typedef AVPlayerItemVideoOutput AVPlayerItemVideoOutputType;
140 typedef AVMetadataItem AVMetadataItemType;
141 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
142 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
143
144 #pragma mark - Soft Linking
145
146 // Soft-linking headers must be included last since they #define functions, constants, etc.
147 #import "CoreMediaSoftLink.h"
148
149 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
150
151 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
152 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreVideo)
153
154 #if USE(VIDEOTOOLBOX)
155 SOFT_LINK_FRAMEWORK_OPTIONAL(VideoToolbox)
156 #endif
157
158 SOFT_LINK(CoreVideo, CVPixelBufferGetWidth, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
159 SOFT_LINK(CoreVideo, CVPixelBufferGetHeight, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
160 SOFT_LINK(CoreVideo, CVPixelBufferGetBaseAddress, void*, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
161 SOFT_LINK(CoreVideo, CVPixelBufferGetBytesPerRow, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
162 SOFT_LINK(CoreVideo, CVPixelBufferGetDataSize, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
163 SOFT_LINK(CoreVideo, CVPixelBufferGetPixelFormatType, OSType, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
164 SOFT_LINK(CoreVideo, CVPixelBufferLockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
165 SOFT_LINK(CoreVideo, CVPixelBufferUnlockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
166
167 #if USE(VIDEOTOOLBOX)
168 SOFT_LINK(VideoToolbox, VTPixelTransferSessionCreate, OSStatus, (CFAllocatorRef allocator, VTPixelTransferSessionRef *pixelTransferSessionOut), (allocator, pixelTransferSessionOut))
169 SOFT_LINK(VideoToolbox, VTPixelTransferSessionTransferImage, OSStatus, (VTPixelTransferSessionRef session, CVPixelBufferRef sourceBuffer, CVPixelBufferRef destinationBuffer), (session, sourceBuffer, destinationBuffer))
170 #endif
171
172 SOFT_LINK_CLASS(AVFoundation, AVPlayer)
173 SOFT_LINK_CLASS(AVFoundation, AVPlayerItem)
174 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemVideoOutput)
175 SOFT_LINK_CLASS(AVFoundation, AVPlayerLayer)
176 SOFT_LINK_CLASS(AVFoundation, AVURLAsset)
177 SOFT_LINK_CLASS(AVFoundation, AVAssetImageGenerator)
178 SOFT_LINK_CLASS(AVFoundation, AVMetadataItem)
179
180 SOFT_LINK_CLASS(CoreImage, CIContext)
181 SOFT_LINK_CLASS(CoreImage, CIImage)
182
183 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString*)
184 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString*)
185 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
186 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
187 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
188 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
189 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
190 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeMetadata, NSString *)
191 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
192 SOFT_LINK_POINTER(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
193 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
194 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
195 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
196 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
197 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
198 SOFT_LINK_POINTER(CoreVideo, kCVPixelBufferPixelFormatTypeKey, NSString *)
199
200 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
201
202 #define AVPlayer getAVPlayerClass()
203 #define AVPlayerItem getAVPlayerItemClass()
204 #define AVPlayerLayer getAVPlayerLayerClass()
205 #define AVURLAsset getAVURLAssetClass()
206 #define AVAssetImageGenerator getAVAssetImageGeneratorClass()
207 #define AVMetadataItem getAVMetadataItemClass()
208
209 #define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
210 #define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
211 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
212 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
213 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
214 #define AVMediaTypeVideo getAVMediaTypeVideo()
215 #define AVMediaTypeAudio getAVMediaTypeAudio()
216 #define AVMediaTypeMetadata getAVMediaTypeMetadata()
217 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
218 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
219 #define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
220 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
221 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
222 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
223 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
224 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
225 #define kCVPixelBufferPixelFormatTypeKey getkCVPixelBufferPixelFormatTypeKey()
226
227 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
228 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
229 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
230
231 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
232 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
233 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
234
235 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
236 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
237 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
238 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
239
240 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
241 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
242 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
243 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
244 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
245 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
246 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
247 #endif
248
249 #if ENABLE(AVF_CAPTIONS)
250 SOFT_LINK_POINTER(AVFoundation, AVURLAssetHTTPCookiesKey, NSString*)
251 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
252 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
253 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
254 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
255 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
256 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
257 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
258 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
259 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
260 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicIsAuxiliaryContent, NSString*)
261
262 #define AVURLAssetHTTPCookiesKey getAVURLAssetHTTPCookiesKey()
263 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
264 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
265 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
266 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
267 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
268 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
269 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
270 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
271 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
272 #define AVMediaCharacteristicIsAuxiliaryContent getAVMediaCharacteristicIsAuxiliaryContent()
273 #endif
274
275 #if ENABLE(DATACUE_VALUE)
276 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString*)
277 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMetadataKeySpaceISOUserData, NSString*)
278 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString*)
279 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceiTunes, NSString*)
280 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceID3, NSString*)
281
282 #define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
283 #define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
284 #define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
285 #define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
286 #define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
287 #endif
288
289 #if PLATFORM(IOS)
290 SOFT_LINK_POINTER(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
291 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
292 #endif
293
294 #if PLATFORM(IOS)
295 SOFT_LINK(CoreVideo, CVOpenGLESTextureCacheCreate, CVReturn, (CFAllocatorRef allocator, CFDictionaryRef cacheAttributes, CVEAGLContext eaglContext, CFDictionaryRef textureAttributes, CVOpenGLESTextureCacheRef* cacheOut), (allocator, cacheAttributes, eaglContext, textureAttributes, cacheOut))
296 SOFT_LINK(CoreVideo, CVOpenGLESTextureCacheCreateTextureFromImage, CVReturn, (CFAllocatorRef allocator, CVOpenGLESTextureCacheRef textureCache, CVImageBufferRef sourceImage, CFDictionaryRef textureAttributes, GLenum target, GLint internalFormat, GLsizei width, GLsizei height, GLenum format, GLenum type, size_t planeIndex, CVOpenGLESTextureRef* textureOut), (allocator, textureCache, sourceImage, textureAttributes, target, internalFormat, width, height, format, type, planeIndex, textureOut))
297 SOFT_LINK(CoreVideo, CVOpenGLESTextureCacheFlush, void, (CVOpenGLESTextureCacheRef textureCache, CVOptionFlags options), (textureCache, options))
298 SOFT_LINK(CoreVideo, CVOpenGLESTextureGetTarget, GLenum, (CVOpenGLESTextureRef image), (image))
299 SOFT_LINK(CoreVideo, CVOpenGLESTextureGetName, GLuint, (CVOpenGLESTextureRef image), (image))
300 SOFT_LINK_POINTER(CoreVideo, kCVPixelBufferIOSurfaceOpenGLESFBOCompatibilityKey, NSString *)
301 #define kCVPixelBufferIOSurfaceOpenGLESFBOCompatibilityKey getkCVPixelBufferIOSurfaceOpenGLESFBOCompatibilityKey()
302 #else
303 SOFT_LINK(CoreVideo, CVOpenGLTextureCacheCreate, CVReturn, (CFAllocatorRef allocator, CFDictionaryRef cacheAttributes, CGLContextObj cglContext, CGLPixelFormatObj cglPixelFormat, CFDictionaryRef textureAttributes, CVOpenGLTextureCacheRef* cacheOut), (allocator, cacheAttributes, cglContext, cglPixelFormat, textureAttributes, cacheOut))
304 SOFT_LINK(CoreVideo, CVOpenGLTextureCacheCreateTextureFromImage, CVReturn, (CFAllocatorRef allocator, CVOpenGLTextureCacheRef textureCache, CVImageBufferRef sourceImage, CFDictionaryRef attributes, CVOpenGLTextureRef* textureOut), (allocator, textureCache, sourceImage, attributes, textureOut))
305 SOFT_LINK(CoreVideo, CVOpenGLTextureCacheFlush, void, (CVOpenGLTextureCacheRef textureCache, CVOptionFlags options), (textureCache, options))
306 SOFT_LINK(CoreVideo, CVOpenGLTextureGetTarget, GLenum, (CVOpenGLTextureRef image), (image))
307 SOFT_LINK(CoreVideo, CVOpenGLTextureGetName, GLuint, (CVOpenGLTextureRef image), (image))
308 SOFT_LINK_POINTER(CoreVideo, kCVPixelBufferIOSurfaceOpenGLFBOCompatibilityKey, NSString *)
309 #define kCVPixelBufferIOSurfaceOpenGLFBOCompatibilityKey getkCVPixelBufferIOSurfaceOpenGLFBOCompatibilityKey()
310 #endif
311
312 SOFT_LINK_FRAMEWORK(MediaToolbox)
313 SOFT_LINK_OPTIONAL(MediaToolbox, MTEnableCaption2015Behavior, Boolean, (), ())
314
315 using namespace WebCore;
316
// KVO context values: passed as the `context` parameter when registering observers (see
// -observeValueForKeyPath:... below and the addObserver: call in createAVPlayerLayer) so
// the observer can tell which kind of object a change notification came from.
317 enum MediaPlayerAVFoundationObservationContext {
318     MediaPlayerAVFoundationObservationContextPlayerItem,
319     MediaPlayerAVFoundationObservationContextPlayerItemTrack,
320     MediaPlayerAVFoundationObservationContextPlayer,
321     MediaPlayerAVFoundationObservationContextAVPlayerLayer,
322 };
323
// Observer object that receives AVFoundation callbacks — KVO change notifications,
// did-play-to-end notifications, and (where supported) legible-output caption cues —
// and forwards them to the owning MediaPlayerPrivateAVFoundationObjC via m_callback.
// The owner clears the link with -disconnect during cancelLoad() so late callbacks
// after teardown are ignored.
324 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
325 @interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
326 #else
327 @interface WebCoreAVFMovieObserver : NSObject
328 #endif
329 {
330     MediaPlayerPrivateAVFoundationObjC* m_callback;
    // NOTE(review): m_delayCallbacks is presumably a re-entrancy/suppression counter;
    // its uses are outside this chunk — confirm before relying on it.
331     int m_delayCallbacks;
332 }
333 -(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
334 -(void)disconnect;
335 -(void)metadataLoaded;
336 -(void)didEnd:(NSNotification *)notification;
337 -(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
338 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
339 - (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
340 - (void)outputSequenceWasFlushed:(id)output;
341 #endif
342 @end
343
// AVAssetResourceLoader delegate: lets WebCore service the asset's resource-loading
// requests itself (see WebCoreAVFResourceLoader / m_resourceLoaderMap in the destructor).
// The owner pointer is cleared with -setCallback:0 at destruction time.
344 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
345 @interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
346     MediaPlayerPrivateAVFoundationObjC* m_callback;
347 }
348 - (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
349 - (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
350 - (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
351 @end
352 #endif
353
// AVPlayerItemOutput pull delegate: notified when the video output has new media data
// or its output sequence is flushed, and forwards to the owning player-private object.
// The owner clears the link with -setCallback:0 in its destructor.
354 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
355 @interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
356     MediaPlayerPrivateAVFoundationObjC *m_callback;
    // NOTE(review): semaphore presumably used to wait for frame availability; its
    // signal/wait sites are outside this chunk — confirm.
357     dispatch_semaphore_t m_semaphore;
358 }
359 - (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
360 - (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
361 - (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
362 - (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
363 @end
364 #endif
365
366 namespace WebCore {
367
// Forward declarations for file-local helpers defined later in this file (beyond this chunk).
// itemKVOProperties()/playerKVOProperties() supply the key paths observed/unobserved in
// cancelLoad() below.
368 static NSArray *assetMetadataKeyNames();
369 static NSArray *itemKVOProperties();
370 static NSArray *assetTrackMetadataKeyNames();
371 static NSArray *playerKVOProperties();
372 static AVAssetTrack* firstEnabledTrack(NSArray* tracks);
373
374 #if !LOG_DISABLED
// Maps a boolean to its textual form for LOG() output (only compiled when logging is enabled).
static const char *boolString(bool val)
{
    if (val)
        return "true";
    return "false";
}
379 #endif
380
381 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Returns the process-wide serial queue on which AVAssetResourceLoader delegate
// callbacks are delivered. Created exactly once, thread-safely, on first use.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_once_t creationGuard;
    static dispatch_queue_t loaderQueue;
    dispatch_once(&creationGuard, ^{
        loaderQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return loaderQueue;
}
391 #endif
392
393 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Returns the process-wide serial queue used for AVPlayerItemOutput pull-delegate
// callbacks. Lazily created exactly once in a thread-safe manner.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_once_t creationGuard;
    static dispatch_queue_t pullQueue;
    dispatch_once(&creationGuard, ^{
        pullQueue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return pullQueue;
}
403 #endif
404
// Bridges WebCore's AuthenticationClient interface to an NSURLAuthenticationChallenge:
// each AuthenticationClient callback is forwarded to the challenge's sender using the
// corresponding NSURLAuthenticationChallengeSender method. Lifetime is managed through
// RefCounted; the challenge itself is retained for as long as this client lives.
405 #if USE(CFNETWORK)
406 class WebCoreNSURLAuthenticationChallengeClient : public RefCounted<WebCoreNSURLAuthenticationChallengeClient>, public AuthenticationClient {
407 public:
408     static RefPtr<WebCoreNSURLAuthenticationChallengeClient> create(NSURLAuthenticationChallenge *challenge)
409     {
410         return adoptRef(new WebCoreNSURLAuthenticationChallengeClient(challenge));
411     }
412
    // Expose RefCounted's ref/deref publicly; AuthenticationClient's virtual
    // refAuthenticationClient/derefAuthenticationClient below delegate to them.
413     using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::ref;
414     using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::deref;
415
416 private:
417     WebCoreNSURLAuthenticationChallengeClient(NSURLAuthenticationChallenge *challenge)
418         : m_challenge(challenge)
419     {
420         ASSERT(m_challenge);
421     }
422
423     virtual void refAuthenticationClient() override { ref(); }
424     virtual void derefAuthenticationClient() override { deref(); }
425
    // Credential supplied: hand it to the challenge sender.
426     virtual void receivedCredential(const AuthenticationChallenge&, const Credential& credential) override
427     {
428         [[m_challenge sender] useCredential:credential.nsCredential() forAuthenticationChallenge:m_challenge.get()];
429     }
430
431     virtual void receivedRequestToContinueWithoutCredential(const AuthenticationChallenge&) override
432     {
433         [[m_challenge sender] continueWithoutCredentialForAuthenticationChallenge:m_challenge.get()];
434     }
435
436     virtual void receivedCancellation(const AuthenticationChallenge&) override
437     {
438         [[m_challenge sender] cancelAuthenticationChallenge:m_challenge.get()];
439     }
440
    // The next two sender methods are optional protocol additions, so probe with
    // respondsToSelector: before messaging; if absent the request is silently dropped.
441     virtual void receivedRequestToPerformDefaultHandling(const AuthenticationChallenge&) override
442     {
443         if ([[m_challenge sender] respondsToSelector:@selector(performDefaultHandlingForAuthenticationChallenge:)])
444             [[m_challenge sender] performDefaultHandlingForAuthenticationChallenge:m_challenge.get()];
445     }
446
447     virtual void receivedChallengeRejection(const AuthenticationChallenge&) override
448     {
449         if ([[m_challenge sender] respondsToSelector:@selector(rejectProtectionSpaceAndContinueWithChallenge:)])
450             [[m_challenge sender] rejectProtectionSpaceAndContinueWithChallenge:m_challenge.get()];
451     }
452
453     RetainPtr<NSURLAuthenticationChallenge> m_challenge;
454 };
455 #endif
456
// Registers this engine with the MediaPlayer factory when AVFoundation is available
// at runtime. The lambda constructs a fresh private player per MediaPlayer.
// NOTE(review): the three zeroed registrar arguments are unsupported optional entry
// points (their roles are defined by MediaEngineRegistrar, outside this file) — confirm.
457 void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
458 {
459     if (isAvailable())
460         registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player); },
461             getSupportedTypes, supportsType, 0, 0, 0, supportsKeySystem);
462 }
463
// Constructor: wires up the ObjC helper/delegate objects (each holding a back-pointer
// to `this`) and zeroes all cached AVFoundation state. Initializer order must follow
// member declaration order; keep conditional members inside their feature guards.
464 MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
465     : MediaPlayerPrivateAVFoundation(player)
466     , m_weakPtrFactory(this)
467 #if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
468     , m_videoFullscreenLayerManager(VideoFullscreenLayerManager::create())
469     , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
470 #endif
    // KVO/notification observer; disconnected in cancelLoad() before teardown.
471     , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
472     , m_videoFrameHasDrawn(false)
473     , m_haveCheckedPlayability(false)
474 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
475     , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
476     , m_videoOutputSemaphore(nullptr)
477 #endif
478 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
479     , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
480 #endif
481     , m_currentTextTrack(0)
482     , m_cachedRate(0)
483     , m_cachedTotalBytes(0)
484     , m_pendingStatusChanges(0)
485     , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
486     , m_cachedLikelyToKeepUp(false)
487     , m_cachedBufferEmpty(false)
488     , m_cachedBufferFull(false)
489     , m_cachedHasEnabledAudio(false)
490     , m_shouldBufferData(true)
491     , m_cachedIsReadyForDisplay(false)
492     , m_haveBeenAskedToCreateLayer(false)
493 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
494     , m_allowsWirelessVideoPlayback(true)
495 #endif
496 {
497 }
498
// Destructor: severs every delegate back-pointer to `this` BEFORE releasing the ObjC
// objects (they may outlive us briefly on their dispatch queues), then tears down
// rendering and cancels any in-flight load.
499 MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
500 {
501 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Stop the resource-loader delegate from calling back into this (now dying) object,
    // detach it from the asset, and invalidate all outstanding per-request loaders.
502     [m_loaderDelegate.get() setCallback:0];
503     [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];
504
505     for (auto& pair : m_resourceLoaderMap)
506         pair.value->invalidate();
507 #endif
508 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
509     [m_videoOutputDelegate setCallback:0];
510     [m_videoOutput setDelegate:nil queue:0];
    // dispatch_release: NOTE(review) — dispatch objects appear to be manually retained
    // here rather than ARC-managed; confirm the project's -fobjc-arc / OS_OBJECT settings.
511     if (m_videoOutputSemaphore)
512         dispatch_release(m_videoOutputSemaphore);
513 #endif
514
515     if (m_videoLayer)
516         destroyVideoLayer();
517
518     cancelLoad();
519 }
520
// Cancels any in-progress load and releases all AVFoundation objects (asset, player
// item, player), carefully unregistering every KVO observer and notification observer
// first — removing an object while it is still observed would assert/leak observers.
// Load-state change notifications are suppressed for the duration so the cancellation's
// own completion callbacks do not re-enter loading logic.
521 void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
522 {
523     LOG(Media, "MediaPlayerPrivateAVFoundationObjC::cancelLoad(%p)", this);
524     tearDownVideoRendering();
525
    // Detach the ObjC observer from both NSNotificationCenter and this object.
526     [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
527     [m_objcObserver.get() disconnect];
528
529     // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
530     setIgnoreLoadStateChanges(true);
531     if (m_avAsset) {
532         [m_avAsset.get() cancelLoading];
533         m_avAsset = nil;
534     }
535
536     clearTextTracks();
537
538 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Detach the caption (legible) output from the item before dropping it.
539     if (m_legibleOutput) {
540         if (m_avPlayerItem)
541             [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
542         m_legibleOutput = nil;
543     }
544 #endif
545
    // Remove every KVO registration (key paths come from itemKVOProperties() /
    // playerKVOProperties()) before releasing the item and player.
546     if (m_avPlayerItem) {
547         for (NSString *keyName in itemKVOProperties())
548             [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
549         
550         m_avPlayerItem = nil;
551     }
552     if (m_avPlayer) {
553         if (m_timeObserver)
554             [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
555         m_timeObserver = nil;
556
557         for (NSString *keyName in playerKVOProperties())
558             [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
559         m_avPlayer = nil;
560     }
561
562     // Reset cached properties
563     m_pendingStatusChanges = 0;
564     m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
565     m_cachedSeekableRanges = nullptr;
566     m_cachedLoadedRanges = nullptr;
567     m_cachedHasEnabledAudio = false;
568     m_cachedPresentationSize = FloatSize();
569     m_cachedDuration = MediaTime::zeroTime();
570
    // Each cached track was observed for "enabled" changes; unobserve before dropping.
571     for (AVPlayerItemTrack *track in m_cachedTracks.get())
572         [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
573     m_cachedTracks = nullptr;
574
575 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Disconnect the Web Audio source provider from the (now released) item/track.
576     if (m_provider) {
577         m_provider->setPlayerItem(nullptr);
578         m_provider->setAudioTrack(nullptr);
579     }
580 #endif
581
582     setIgnoreLoadStateChanges(false);
583 }
584
// True once createVideoLayer() has been asked to create the AVPlayerLayer (the flag is
// set on the main thread inside createVideoLayer(), possibly before the layer exists).
585 bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
586 {
587     return m_haveBeenAskedToCreateLayer;
588 }
589
590 bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
591 {
592 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
593     if (m_videoOutput)
594         return true;
595 #endif
596     return m_imageGenerator;
597 }
598
// Creates whichever software-paint renderer this build supports: the video-output
// path when AVPlayerItemVideoOutput is available, otherwise an AVAssetImageGenerator.
599 void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
600 {
601 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
602     createVideoOutput();
603 #else
604     createImageGenerator();
605 #endif
606 }
607
// Lazily creates the AVAssetImageGenerator used to snapshot frames for painting.
// No-op until an asset exists, and idempotent once a generator has been created.
608 void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
609 {
610     LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p)", this);
611
612     if (!m_avAsset || m_imageGenerator)
613         return;
614
615     m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];
616
    // Clean-aperture crop, honor the track's preferred transform, and zero time
    // tolerance so generated images correspond to the exact requested time.
617     [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
618     [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
619     [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
620     [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];
621
622     LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
623 }
624
// Tears down every software-paint renderer: video output / OpenGL output when built
// with AVFOUNDATION_VIDEO_OUTPUT, and the image generator in all configurations.
625 void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
626 {
627 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
628     destroyVideoOutput();
629     destroyOpenGLVideoOutput();
630 #endif
631     destroyImageGenerator();
632 }
633
634 void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
635 {
636     if (!m_imageGenerator)
637         return;
638
639     LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator(%p) - destroying  %p", this, m_imageGenerator.get());
640
641     m_imageGenerator = 0;
642 }
643
// Requests creation of the AVPlayerLayer. The actual work is bounced to the main
// thread (layer creation must happen there); a weak pointer guards against this
// object being destroyed before the hop completes, and the preconditions are
// re-checked on the main thread since state may have changed in between.
644 void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
645 {
646     if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
647         return;
648
649     auto weakThis = createWeakPtr();
650     callOnMainThread([this, weakThis] {
651         if (!weakThis)
652             return;
653
654         if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
655             return;
    // Set before creating so re-entrant calls bail out via the check above.
656         m_haveBeenAskedToCreateLayer = true;
657
658         if (!m_videoLayer)
659             createAVPlayerLayer();
660
661 #if USE(VIDEOTOOLBOX)
    // With VideoToolbox we also keep a video output alongside the layer.
662         if (!m_videoOutput)
663             createVideoOutput();
664 #endif
665
    // Tell the client we switched from context painting to layer-backed rendering.
666         player()->client().mediaPlayerRenderingModeChanged(player());
667     });
668 }
669
// Creates and configures the AVPlayerLayer used for video rendering:
// attaches it to the AVPlayer, observes "readyForDisplay" (which backs
// hasAvailableVideoFrame() for the layer path), and hands it either to the
// fullscreen layer manager or sizes it directly, per platform.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
{
    if (!m_avPlayer)
        return;

    m_videoLayer = adoptNS([allocAVPlayerLayerInstance() init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];
    [m_videoLayer setBackgroundColor:cachedCGColor(Color::black)];
#ifndef NDEBUG
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
    IntSize defaultSize = snappedIntRect(player()->client().mediaPlayerContentBoxRect()).size();
    // Log under this function's own name; the message previously said
    // "createVideoLayer", a leftover from before this code was split out.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer(%p) - returning %p", this, m_videoLayer.get());

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    m_videoFullscreenLayerManager->setVideoLayer(m_videoLayer.get(), defaultSize);

#if PLATFORM(IOS)
    // -setPIPModeEnabled: is not available on all OS versions; probe first.
    if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(player()->fullscreenMode() & MediaPlayer::VideoFullscreenModePictureInPicture)];
#endif
#else
    [m_videoLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
#endif
}
698
// Tears down the AVPlayerLayer: removes the readyForDisplay observer added in
// createAVPlayerLayer(), detaches the layer from the player, and informs the
// fullscreen layer manager before dropping the last reference.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer(%p) - destroying %p", this, m_videoLayer.get());

    [m_videoLayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer.get() setPlayer:nil];

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    m_videoFullscreenLayerManager->didDestroyVideoLayer();
#endif

    m_videoLayer = nil;
}
715
// Computes the stream's start date in milliseconds since the epoch by
// subtracting the current playback offset from the item's current date.
MediaTime MediaPlayerPrivateAVFoundationObjC::getStartDate() const
{
    // Date changes as the track's playback position changes. Must subtract currentTime (offset in seconds) from date offset to get date beginning
    double date = [[m_avPlayerItem currentDate] timeIntervalSince1970] * 1000;

    // No live streams were made during the epoch (1970). AVFoundation returns 0 if the media file doesn't have a start date
    if (!date)
        return MediaTime::invalidTime();

    double currentTime = CMTimeGetSeconds([m_avPlayerItem currentTime]) * 1000;

    // Rounding due to second offset error when subtracting.
    return MediaTime::createWithDouble(round(date - currentTime));
}
730
// Answers whether a video frame is available for display. When rendering
// through the AVPlayerLayer we use the cached "readyForDisplay" KVO state;
// otherwise we report whether a frame was ever drawn via the painting path.
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    bool renderingToLayer = currentRenderingMode() == MediaRenderingToLayer;
    return renderingToLayer ? m_cachedIsReadyForDisplay : m_videoFrameHasDrawn;
}
738
739 #if ENABLE(AVF_CAPTIONS)
// Maps a WebKit text-track kind to the AVFoundation media-characteristic
// array used when registering out-of-band alternate tracks.
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    // When the MediaToolbox 2015 caption behavior is active, every
    // out-of-band track is exposed as auxiliary content.
    static bool manualSelectionMode = MTEnableCaption2015BehaviorPtr() && MTEnableCaption2015BehaviorPtr()();
    if (manualSelectionMode)
        return @[ AVMediaCharacteristicIsAuxiliaryContent ];

    // FIXME: Match these to correct types:
    // Array literals below replace the legacy +arrayWithObjects: calls, for
    // consistency with the manualSelectionMode branch above.
    if (kind == PlatformTextTrack::Caption)
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];

    if (kind == PlatformTextTrack::Subtitle)
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];

    if (kind == PlatformTextTrack::Description)
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility ];

    if (kind == PlatformTextTrack::Forced)
        return @[ AVMediaCharacteristicContainsOnlyForcedSubtitles ];

    return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];
}
761     
// Forwards an out-of-band track mode change to the shared trackModeChanged()
// handler in the base class / generic code path.
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
766     
// Pushes the client's out-of-band track-source modes onto the matching
// OutOfBandTextTrackPrivateAVF objects. Tracks are matched by comparing the
// stringified unique ID against the AVMediaSelectionOption's
// outOfBandIdentifier.
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();

    for (auto& textTrack : m_textTracks) {
        // Only out-of-band tracks participate; in-band tracks are managed elsewhere.
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;

        RefPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> currentOption = trackPrivate->mediaSelectionOption();

        for (auto& track : outOfBandTrackSources) {
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();

            if (![[currentOption.get() outOfBandIdentifier] isEqual: reinterpret_cast<const NSString*>(uniqueID.get())])
                continue;

            // Translate the platform mode into the inband-track mode enum;
            // unknown values fall back to Hidden.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;

            textTrack->setMode(mode);
            break;
        }
    }
}
797 #endif
798
799
// Returns the canonical form of |url| by round-tripping it through
// +[NSURLProtocol canonicalRequestForRequest:]. Falls back to the plain
// Cocoa URL whenever any step of canonicalization is unavailable.
static NSURL *canonicalURL(const String& url)
{
    NSURL *cocoaURL = URL(ParsedURLString, url);
    if (url.isEmpty())
        return cocoaURL;

    auto request = adoptNS([[NSURLRequest alloc] initWithURL:cocoaURL]);
    if (!request)
        return cocoaURL;

    if (NSURLRequest *canonicalRequest = [NSURLProtocol canonicalRequestForRequest:request.get()])
        return [canonicalRequest URL];

    return cocoaURL;
}
816
817 #if PLATFORM(IOS)
// Converts a WebCore Cookie into an NSHTTPCookie. The expiry is converted
// from milliseconds to seconds for NSDate; the secure/discard flags are only
// added when set.
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    RetainPtr<NSMutableDictionary> properties = adoptNS([@{
        NSHTTPCookieName: cookie.name,
        NSHTTPCookieValue: cookie.value,
        NSHTTPCookieDomain: cookie.domain,
        NSHTTPCookiePath: cookie.path,
        NSHTTPCookieExpires: [NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)],
    } mutableCopy]);
    if (cookie.secure)
        [properties setObject:@YES forKey:NSHTTPCookieSecure];
    if (cookie.session)
        [properties setObject:@YES forKey:NSHTTPCookieDiscard];

    return [NSHTTPCookie cookieWithProperties:properties.get()];
}
835 #endif
836
// Builds the AVURLAsset for |url|, assembling the asset-options dictionary
// (reference restrictions, HTTP headers, out-of-band tracks, iOS cookies and
// network interface), and wires up the resource loader delegate — preferring
// the NSURLSession-based MediaResourceLoader path when available and enabled.
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
{
    if (m_avAsset)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(%p) - url = %s", this, url.utf8().data());

    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);    

    // Forbid local<->remote cross references inside the asset.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    // Propagate the page's Referer and User-Agent to AVFoundation's loads.
    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject: [NSNumber numberWithBool: TRUE] forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

#if PLATFORM(IOS)
    // FIXME: rdar://problem/20354688
    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty() && AVURLAssetClientBundleIdentifierKey)
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];
#endif

#if ENABLE(AVF_CAPTIONS)
    // Register out-of-band text tracks with the asset so AVFoundation can
    // present them in its media-selection machinery.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: reinterpret_cast<const NSString*>(label.get()),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: reinterpret_cast<const NSString*>(language.get()),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: reinterpret_cast<const NSString*>(uniqueID.get()),
                AVOutOfBandAlternateTrackSourceKey: reinterpret_cast<const NSString*>(url.get()),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS)
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS)
    // Hand the document's cookies for this URL to AVFoundation, since its
    // loads do not go through WebKit's networking stack here.
    Vector<Cookie> cookies;
    if (player()->getRawCookies(URL(ParsedURLString, url), cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([allocAVURLAssetInstance() initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    [resourceLoader setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];

#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 101100
    // When the (SPI) URLSession hooks exist on the resource loader and the
    // setting is enabled, route media loads through a WebCoreNSURLSession
    // backed by the player's PlatformMediaResourceLoader.
    if (Settings::isAVFoundationNSURLSessionEnabled()
        && [resourceLoader respondsToSelector:@selector(setURLSession:)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegate)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegateQueue)]) {
        RefPtr<PlatformMediaResourceLoader> mediaResourceLoader = player()->createResourceLoader();
        if (mediaResourceLoader)
            resourceLoader.URLSession = (NSURLSession *)[[[WebCoreNSURLSession alloc] initWithResourceLoader:*mediaResourceLoader delegate:resourceLoader.URLSessionDataDelegate delegateQueue:resourceLoader.URLSessionDataDelegateQueue] autorelease];
    }
#endif

#endif

    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
937
// Installs |item| (which may be nil) as the AVPlayer's current item. The
// replace call is always performed on the main thread; off-main-thread
// callers are bounced via dispatch_async with strong references that keep
// both the player and the item alive across the hop.
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItemType *item)
{
    if (!m_avPlayer)
        return;

    if (pthread_main_np()) {
        [m_avPlayer replaceCurrentItemWithPlayerItem:item];
        return;
    }

    RetainPtr<AVPlayerType> strongPlayer = m_avPlayer.get();
    RetainPtr<AVPlayerItemType> strongItem = item;
    dispatch_async(dispatch_get_main_queue(), [strongPlayer, strongItem] {
        [strongPlayer replaceCurrentItemWithPlayerItem:strongItem.get()];
    });
}
954
// Creates the AVPlayer, registers KVO for all player properties listed in
// playerKVOProperties(), applies external-playback settings, and attaches
// the layer and any pre-existing player item.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayer(%p)", this);

    setDelayCallbacks(true);

    m_avPlayer = adoptNS([allocAVPlayerInstance() init]);
    for (NSString *keyName in playerKVOProperties())
        [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // WebKit drives media selection itself; disable AVFoundation's automatic
    // selection criteria.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET) && !PLATFORM(IOS)
    // Re-apply a playback target that was chosen before the player existed.
    if (m_shouldPlayToPlaybackTarget)
        setShouldPlayToPlaybackTarget(true);
#endif

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
990
// Creates the AVPlayerItem from the current asset, registers the did-end
// notification and per-item KVO, configures pitch handling, and (where
// supported) attaches the legible output for caption delivery and the
// audio-source provider.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem(%p)", this);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([allocAVPlayerItemInstance() initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Deliver WebVTT cue data via an AVPlayerItemLegibleOutput; rendering is
    // suppressed because WebKit draws captions itself.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([allocAVPlayerItemLegibleOutputInstance() initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio source provider pointed at the new item/track.
    if (m_provider) {
        m_provider->setPlayerItem(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }
#endif

    setDelayCallbacks(false);
}
1036
// Asynchronously loads the asset's "playable" key (at most once per asset)
// and posts AssetPlayabilityKnown on the main thread when it resolves.
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::checkPlayability(%p)", this);
    auto weakThis = createWeakPtr();

    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"playable"] completionHandler:^{
        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
1053
// Starts asynchronous loading of the asset-level metadata keys and, once the
// "tracks" key is loaded, the per-track metadata keys for every track. A
// dispatch group joins all nested loads so that metadataLoaded is delivered
// (on the main thread) only after everything has finished.
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);

    OSObjectPtr<dispatch_group_t> metadataLoadingGroup = adoptOSObject(dispatch_group_create());
    // Enter once for the asset-level load; matched by the leave at the end of
    // the main-thread lambda below.
    dispatch_group_enter(metadataLoadingGroup.get());
    auto weakThis = createWeakPtr();
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    // One enter/leave pair per track-level load.
                    dispatch_group_enter(metadataLoadingGroup.get());
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup.get());
                    }];
                }
            }
            dispatch_group_leave(metadataLoadingGroup.get());
        });
    }];

    dispatch_group_notify(metadataLoadingGroup.get(), dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });
    });
}
1083
// Translates the cached AVPlayerItem status (plus the buffering flags kept up
// to date via KVO) into the cross-platform ItemStatus enum. The order of the
// buffering checks is significant: likely-to-keep-up takes precedence over
// buffer-full, which takes precedence over buffer-empty.
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    if (m_cachedItemStatus == AVPlayerItemStatusUnknown)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    if (m_cachedItemStatus == AVPlayerItemStatusFailed)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1102
// Packages the underlying AVPlayer into a PlatformMedia struct for callers
// that need direct access to the platform player object.
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformMedia(%p)", this);
    PlatformMedia platformMedia;
    platformMedia.type = PlatformMedia::AVFoundationMediaPlayerType;
    platformMedia.media.avfMediaPlayer = m_avPlayer.get();
    return platformMedia;
}
1111
// Returns the layer the compositor should attach, or null if layer rendering
// was never requested. When the fullscreen layer manager is in use, the
// inline layer comes from it rather than from m_videoLayer directly.
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    return m_haveBeenAskedToCreateLayer ? m_videoFullscreenLayerManager->videoInlineLayer() : nullptr;
#else
    return m_haveBeenAskedToCreateLayer ? m_videoLayer.get() : nullptr;
#endif
}
1120
1121 #if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
// Hands the fullscreen host layer to the layer manager and re-parents the
// text-track representation layer into it, then refreshes external-playback
// state. No-op when the layer is unchanged.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer)
{
    if (m_videoFullscreenLayerManager->videoFullscreenLayer() == videoFullscreenLayer)
        return;

    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer);

    if (m_videoFullscreenLayerManager->videoFullscreenLayer() && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayerManager->videoFullscreenLayer() addSublayer:m_textTrackRepresentationLayer.get()];
    }

    updateDisableExternalPlayback();
}
1136
// Propagates the fullscreen frame to the layer manager and keeps the caption
// layer's bounds in sync with the new geometry.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
    syncTextTrackBounds();
}
1142
// Records the requested fullscreen gravity and applies the corresponding
// AVLayerVideoGravity to the video layer, skipping the (expensive) layer
// update when the value is unchanged.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;
    if (!m_videoLayer)
        return;

    NSString *videoGravity;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        videoGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        videoGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
        // Match the original fallback of aspect-preserving resize.
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    }

    if ([m_videoLayer videoGravity] == videoGravity)
        return;

    [m_videoLayer setVideoGravity:videoGravity];
    syncTextTrackBounds();
}
1165
// On iOS, toggles the layer's PiP mode from the fullscreen mode bits (probing
// for the SPI selector first) and refreshes external-playback state; a no-op
// elsewhere.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenMode(MediaPlayer::VideoFullscreenMode mode)
{
#if PLATFORM(IOS)
    if (m_videoLayer && [m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(mode & MediaPlayer::VideoFullscreenModePictureInPicture)];
    updateDisableExternalPlayback();
#else
    UNUSED_PARAM(mode);
#endif
}
1176
1177 #endif // PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
1178
1179 #if PLATFORM(IOS)
// Returns the most recently cached timed metadata, or nil when none exists.
// RetainPtr::get() already yields nil for an empty pointer, so no explicit
// null check is needed.
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    return m_currentMetaData.get();
}
1186
// Returns the AVPlayerItem's extended access (network/QoS) log as a String;
// empty when no player item exists yet.
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    if (!m_avPlayerItem)
        return emptyString();
    
    AVPlayerItemAccessLog *log = [m_avPlayerItem.get() accessLog];
    RetainPtr<NSString> logString = adoptNS([[NSString alloc] initWithData:[log extendedLogData] encoding:[log extendedLogDataStringEncoding]]);

    return logString.get();
}
1197
// Returns the AVPlayerItem's extended error log as a String; empty when no
// player item exists yet. Mirrors accessLog() above.
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *log = [m_avPlayerItem.get() errorLog];
    RetainPtr<NSString> logString = adoptNS([[NSString alloc] initWithData:[log extendedLogData] encoding:[log extendedLogDataStringEncoding]]);

    return logString.get();
}
1208 #endif
1209
// Shows/hides the video layer inside a CATransaction with implicit actions
// disabled so visibility changes don't animate.
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    if (m_videoLayer)
        [m_videoLayer.get() setHidden:!isVisible];
    [CATransaction commit];
}
1218     
// Starts playback by applying the requested rate to the AVPlayer. The rate
// is cached first so rate() reflects the new value before the KVO
// notification arrives.
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPlay(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = requestedRate();
    [m_avPlayer.get() setRate:requestedRate()];
    setDelayCallbacks(false);
}
1230
// Pauses playback by setting the AVPlayer's rate to 0, caching the value
// first so rate() is immediately consistent.
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPause(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = 0;
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
1242
// Reports the media duration, preferring the player item's value (available
// once it is ready to play) over the asset's. Indefinite durations (live
// streams) map to positive infinity; non-numeric ones to invalid time.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will fetch the
    // answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();
    
    CMTime cmDuration;
    
    // Check the AVItem if we have one and it has loaded duration, some assets never report duration.
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        cmDuration = [m_avPlayerItem.get() duration];
    else
        cmDuration = [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return toMediaTime(cmDuration);

    if (CMTIME_IS_INDEFINITE(cmDuration))
        return MediaTime::positiveInfiniteTime();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %s", this, toString(MediaTime::invalidTime()).utf8().data());
    return MediaTime::invalidTime();
}
1267
// Returns the item's current playback position, clamped to be non-negative;
// zero when metadata/item are unavailable or the time is non-numeric.
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime itemTime = [m_avPlayerItem.get() currentTime];
    if (CMTIME_IS_NUMERIC(itemTime))
        return std::max(toMediaTime(itemTime), MediaTime::zeroTime());

    return MediaTime::zeroTime();
}
1279
// Performs a tolerance-bounded seek on the player item. Partial caption cues
// are flushed first, and completion is reported back on the main thread via
// seekCompleted() — guarded by a weak pointer in case we are destroyed before
// the seek finishes.
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = toCMTime(time);
    CMTime cmBefore = toCMTime(negativeTolerance);
    CMTime cmAfter = toCMTime(positiveTolerance);

    auto weakThis = createWeakPtr();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::seekToTime(%p) - calling seekToTime", this);

    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1308
// Applies the requested volume to the AVPlayer. Intentionally a no-op on iOS,
// where the system controls media volume.
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS)
    UNUSED_PARAM(volume);
    return;
#else
    if (!metaDataAvailable())
        return;

    [m_avPlayer.get() setVolume:volume];
#endif
}
1321
// Only logs the request: caption visibility is handled through the text
// track machinery rather than a player-level switch in this backend.
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - set to %s", this, boolString(closedCaptionsVisible));
}
1331
// Sets the playback rate, caching it first so rate() reflects the new value
// before the corresponding KVO notification is processed.
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    setDelayCallbacks(true);
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1339
// Returns the cached playback rate (maintained via KVO and the set-rate
// paths), or 0 when metadata is not yet available.
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    return metaDataAvailable() ? m_cachedRate : 0;
}
1347
// Selects the item's time-pitch algorithm: spectral (pitch-preserving) when
// requested, varispeed otherwise. No-op until a player item exists;
// createAVPlayerItem() applies the same setting for new items.
void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
{
    if (m_avPlayerItem)
        [m_avPlayerItem setAudioTimePitchAlgorithm:(preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];
}
1353
// Converts the cached loadedTimeRanges (kept current via KVO) into a
// PlatformTimeRanges object, skipping invalid or empty CMTimeRanges.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto timeRanges = std::make_unique<PlatformTimeRanges>();

    if (!m_avPlayerItem)
        return timeRanges;

    for (NSValue *thisRangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (CMTIMERANGE_IS_VALID(timeRange) && !CMTIMERANGE_IS_EMPTY(timeRange))
            timeRanges->add(toMediaTime(timeRange.start), toMediaTime(CMTimeRangeGetEnd(timeRange)));
    }
    return timeRanges;
}
1368
// Returns the earliest seekable time across the cached seekable ranges, or
// zero when there are no (valid) ranges.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    MediaTime minTimeSeekable = MediaTime::positiveInfiniteTime();
    bool hasValidRange = false;
    for (NSValue *thisRangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(timeRange) || CMTIMERANGE_IS_EMPTY(timeRange))
            continue;

        hasValidRange = true;
        MediaTime startOfRange = toMediaTime(timeRange.start);
        if (minTimeSeekable > startOfRange)
            minTimeSeekable = startOfRange;
    }
    // If every range was invalid/empty, report zero rather than +infinity.
    return hasValidRange ? minTimeSeekable : MediaTime::zeroTime();
}
1388
// Returns the latest seekable time across the seekable ranges. Unlike the
// min-time variant, this lazily (re)fetches the ranges from the player item
// when the cache is empty.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    MediaTime maxTimeSeekable;
    for (NSValue *thisRangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(timeRange) || CMTIMERANGE_IS_EMPTY(timeRange))
            continue;
        
        MediaTime endOfRange = toMediaTime(CMTimeRangeGetEnd(timeRange));
        if (maxTimeSeekable < endOfRange)
            maxTimeSeekable = endOfRange;
    }
    return maxTimeSeekable;
}
1406
// Returns the furthest time for which media data has been loaded, computed
// as the maximum end point over the cached loaded ranges.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    MediaTime maxTimeLoaded;
    for (NSValue *thisRangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(timeRange) || CMTIMERANGE_IS_EMPTY(timeRange))
            continue;
        
        MediaTime endOfRange = toMediaTime(CMTimeRangeGetEnd(timeRange));
        if (maxTimeLoaded < endOfRange)
            maxTimeLoaded = endOfRange;
    }

    return maxTimeLoaded;   
}
1425
// Returns the total sample-data size of all tracks, summed once and then
// cached (m_cachedTotalBytes is mutable so this const accessor can memoize).
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    if (m_cachedTotalBytes)
        return m_cachedTotalBytes;

    for (AVPlayerItemTrack *thisTrack in m_cachedTracks.get())
        m_cachedTotalBytes += [[thisTrack assetTrack] totalSampleDataLength];

    return m_cachedTotalBytes;
}
1439
// Stores the (type-erased) AVAsset this player renders; callers pass either
// an AVURLAsset or other AVAsset subclass wrapped in a RetainPtr.
void MediaPlayerPrivateAVFoundationObjC::setAsset(RetainPtr<id> asset)
{
    m_avAsset = asset;
}
1444
// Maps the loading status of the metadata keys requested from the asset to
// the engine-neutral AssetStatus enumeration. The most severe per-key
// status wins; a fully-loaded asset is further classified as Playable when
// AVFoundation reports the "playable" key true.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];
#if !LOG_DISABLED
        if (error)
            LOG(Media, "MediaPlayerPrivateAVFoundation::assetStatus - statusOfValueForKey failed for %s, error = %s", [keyName UTF8String], [[error localizedDescription] UTF8String]);
#endif

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading;// At least one key is not loaded yet.
        
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    // All keys are loaded; distinguish a playable asset from a merely-loaded one.
    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue])
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1473
// Error code AVFoundation associated with loading the "playable" key, or 0
// when there is no asset or no error.
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    if (!m_avAsset)
        return 0;

    NSError *loadError = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&loadError];
    // Messaging nil returns 0, but the explicit check documents the intent.
    return loadError ? [loadError code] : 0;
}
1483
// Paints the current video frame into |context| at |rect|, preferring the
// AVPlayerItemVideoOutput path (when a frame is available) over the slower
// AVAssetImageGenerator path. Callbacks are delayed around the paint so
// AVFoundation notifications cannot reenter us mid-draw.
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    // Record that at least one frame has been drawn (queried elsewhere,
    // e.g. to decide whether a still frame exists).
    m_videoFrameHasDrawn = true;
}
1504
// Best-effort paint entry point: draws only when we have metadata, are not
// already compositing into a layer, and a context renderer (image generator
// or video output) already exists.
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable())
        return;
    if (context.paintingDisabled())
        return;

    // Already rendering to a layer; the compositor handles display.
    if (currentRenderingMode() == MediaRenderingToLayer)
        return;

    // paint() is best effort: do not create a renderer just for this call.
    if (!hasContextRenderer())
        return;

    paintCurrentFrameInContext(context, rect);
}
1520
// Draws the frame for the current time via AVAssetImageGenerator. CG images
// have a flipped coordinate system relative to GraphicsContext, so the
// context is translated/scaled before drawing.
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext& context, const FloatRect& rect)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(%p)", this);

    RetainPtr<CGImageRef> image = createImageForTimeInRect(currentTime(), rect);
    if (!image)
        return;

    GraphicsContextStateSaver stateSaver(context);
    context.translate(rect.x(), rect.y() + rect.height());
    context.scale(FloatSize(1.0f, -1.0f));
    context.setImageInterpolationQuality(InterpolationLow);
    IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context.platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
}
1535
// Process-wide, lazily built set of MIME types AVFoundation reports it can
// play. Stored with a case-insensitive hash so lookups ignore case.
static const HashSet<String, ASCIICaseInsensitiveHash>& avfMIMETypes()
{
    static NeverDestroyed<HashSet<String, ASCIICaseInsensitiveHash>> cachedTypes = []() {
        HashSet<String, ASCIICaseInsensitiveHash> mimeTypes;
        for (NSString *mimeType in [AVURLAsset audiovisualMIMETypes])
            mimeTypes.add(mimeType);
        return mimeTypes;
    }();

    return cachedTypes;
}
1547
// Creates a CGImage of the frame at |time|, sized to fit |rect|, using the
// lazily created AVAssetImageGenerator. The raw image is copied into the
// sRGB color space before being returned.
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    // Cap the generated image to the paint rect so we never decode more
    // pixels than will be drawn.
    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), sRGBColorSpaceRef()));

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return image;
}
1569
// Copies the set of AVFoundation-playable MIME types into |supportedTypes|.
// Note: the function body was missing its closing brace, leaving the file
// syntactically invalid; restored here.
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& supportedTypes)
{
    supportedTypes = avfMIMETypes();
}

1575 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// True for the key systems this engine understands: Apple FairPlay
// Streaming ("com.apple.fps", "com.apple.fps.1_0") and W3C Clear Key.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringASCIICase(keySystem, "com.apple.fps")
        || equalIgnoringASCIICase(keySystem, "com.apple.fps.1_0")
        || equalIgnoringASCIICase(keySystem, "org.w3c.clearkey");
}
1582 #endif
1583
// Implements canPlayType()-style support probing for this engine, including
// the EME key-system pre-checks when ENCRYPTED_MEDIA is enabled. Media
// Source and Media Stream content are handled by other engines.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(ENCRYPTED_MEDIA)
    // From: <http://dvcs.w3.org/hg/html-media/raw-file/eme-v0.1b/encrypted-media/encrypted-media.html#dom-canplaytype>
    // In addition to the steps in the current specification, this method must run the following steps:

    // 1. Check whether the Key System is supported with the specified container and codec type(s) by following the steps for the first matching condition from the following list:
    //    If keySystem is null, continue to the next step.
    if (!parameters.keySystem.isNull() && !parameters.keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        if (equalIgnoringASCIICase(parameters.keySystem, "org.w3c.clearkey") && !parameters.type.isEmpty() && !equalIgnoringASCIICase(parameters.type, "application/x-mpegurl"))
            return MediaPlayer::IsNotSupported;

        // If keySystem contains an unrecognized or unsupported Key System, return the empty string
        if (!keySystemIsSupported(parameters.keySystem))
            return MediaPlayer::IsNotSupported;

        // If the Key System specified by keySystem does not support decrypting the container and/or codec specified in the rest of the type string.
        // (AVFoundation does not provide an API which would allow us to determine this, so this is a no-op)
    }

    // 2. Return "maybe" or "probably" as appropriate per the existing specification of canPlayType().
#endif

#if ENABLE(MEDIA_SOURCE)
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif
#if ENABLE(MEDIA_STREAM)
    if (parameters.isMediaStream)
        return MediaPlayer::IsNotSupported;
#endif
    if (isUnsupportedMIMEType(parameters.type))
        return MediaPlayer::IsNotSupported;

    // Reject types neither on the static allow-list nor reported playable by AVFoundation.
    if (!staticMIMETypeList().contains(parameters.type) && !avfMIMETypes().contains(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    // With codecs present, ask AVFoundation about the full extended MIME type.
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type, (NSString *)parameters.codecs];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1630
// Returns whether this engine supports |keySystem| (optionally constrained
// by |mimeType|). Mirrors the key-system checks in supportsType().
// Fix: the "Clear Key requires HLS" branch returned MediaPlayer::IsNotSupported
// (an enum value) from a bool function; it only worked because that
// enumerator happens to be 0. Return false explicitly.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (!keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        if (equalIgnoringASCIICase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringASCIICase(mimeType, "application/x-mpegurl"))
            return false;

        if (!keySystemIsSupported(keySystem))
            return false;

        if (!mimeType.isEmpty() && isUnsupportedMIMEType(mimeType))
            return false;

        // A non-empty MIME type must be on the static list or playable by AVFoundation.
        if (!mimeType.isEmpty() && !staticMIMETypeList().contains(mimeType) && !avfMIMETypes().contains(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1656
1657 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1658 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Satisfies an AVAssetResourceLoadingRequest directly from in-memory key
// data: fills in the content-information request (total length, range
// support) and answers the data request with the slice the loader asked
// for, then marks the whole request finished.
static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
{
    if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
        [infoRequest setContentLength:keyData->byteLength()];
        [infoRequest setByteRangeAccessSupported:YES];
    }

    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
        long long start = [dataRequest currentOffset];
        // Clamp the requested range's end to the key data's length.
        long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);

        // An out-of-range request cannot be satisfied; fail it (nil error).
        if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
            [request finishLoadingWithError:nil];
            return;
        }

        ASSERT(start <= std::numeric_limits<int>::max());
        ASSERT(end <= std::numeric_limits<int>::max());
        RefPtr<ArrayBuffer> requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
        RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
        [dataRequest respondWithData:nsData.get()];
    }

    [request finishLoading];
}
1684 #endif
1685
// AVAssetResourceLoader delegate hook. Returns true when we will service
// |avRequest| asynchronously (the request is remembered until fulfilled or
// cancelled) and false when AVFoundation should handle it itself.
// "skd" and "clearkey" URLs are EME key requests; anything else is loaded
// through a WebCoreAVFResourceLoader.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (scheme == "skd") {
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
        initDataView->set<uint32_t>(0, keyURISize, true);

        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, 4, keyURI.length());
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

        // Notify the page a key is needed; if it declines, let AVFoundation fail the load.
#if ENABLE(ENCRYPTED_MEDIA)
        if (!player()->keyNeeded("com.apple.lskd", emptyString(), static_cast<const unsigned char*>(initDataBuffer->data()), initDataBuffer->byteLength()))
#elif ENABLE(ENCRYPTED_MEDIA_V2)
        RefPtr<Uint8Array> initData = Uint8Array::create(initDataBuffer, 0, initDataBuffer->byteLength());
        if (!player()->keyNeeded(initData.get()))
#endif
            return false;

        // Park the request until the key arrives (looked up by key URI).
        m_keyURIToRequestMap.set(keyURI, avRequest);
        return true;
#if ENABLE(ENCRYPTED_MEDIA_V2)
    } else if (scheme == "clearkey") {
        String keyID = [[[avRequest request] URL] resourceSpecifier];
        StringView keyIDView(keyID);
        CString utf8EncodedKeyId = UTF8Encoding().encode(keyIDView, URLEncodedEntitiesForUnencodables);

        RefPtr<Uint8Array> initData = Uint8Array::create(utf8EncodedKeyId.length());
        initData->setRange((JSC::Uint8Adaptor::Type*)utf8EncodedKeyId.data(), utf8EncodedKeyId.length(), 0);

        // A cached key can satisfy the request immediately, without waiting.
        auto keyData = player()->cachedKeyForKeyId(keyID);
        if (keyData) {
            fulfillRequestWithKeyData(avRequest, keyData.get());
            return false;
        }

        if (!player()->keyNeeded(initData.get()))
            return false;

        m_keyURIToRequestMap.set(keyID, avRequest);
        return true;
#endif
    }
#endif

    // Ordinary media resource: load it through WebCore's loader machinery.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1742
// Converts the NSURLAuthenticationChallenge from AVFoundation into a
// WebCore AuthenticationChallenge (via CFNetwork types when USE(CFNETWORK))
// and forwards it to the MediaPlayer client for a decision.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForResponseToAuthenticationChallenge(NSURLAuthenticationChallenge* nsChallenge)
{
#if USE(CFNETWORK)
    RefPtr<WebCoreNSURLAuthenticationChallengeClient> client = WebCoreNSURLAuthenticationChallengeClient::create(nsChallenge);
    RetainPtr<CFURLAuthChallengeRef> cfChallenge = adoptCF([nsChallenge _createCFAuthChallenge]);
    AuthenticationChallenge challenge(cfChallenge.get(), client.get());
#else
    AuthenticationChallenge challenge(nsChallenge);
#endif

    return player()->shouldWaitForResponseToAuthenticationChallenge(challenge);
}
1755
// AVFoundation cancelled |avRequest|: stop the WebCore loader servicing it,
// if any. (The map entry is removed later, in didStopLoadingRequest.)
// Fix: removed a local `scheme` that was computed from the request URL but
// never used.
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest))
        resourceLoader->stopLoading();
}
1765
// Drops the bookkeeping entry for a request whose loader has finished or
// been stopped; releases our reference to the loader.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
1770 #endif
1771
// This engine can be used only when both the AVFoundation and CoreMedia
// frameworks can be (soft-)loaded at runtime.
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    return AVFoundationLibrary() && isCoreMediaFrameworkAvailable();
}
1776
// Would snap |timeValue| to the nearest exact media sample time; currently
// an identity mapping (see FIXME below).
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1785
// How long (seconds) a cached currentTime may be served before re-querying
// AVFoundation. Newer OS versions are cheap to query, so no caching there;
// older Mac OS X versions cache for 5 seconds.
double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 1010
    return 0;
#else
    return 5;
#endif
}
1794
// Applies the aspect-ratio policy to the AVPlayerLayer's videoGravity,
// inside a CATransaction with implicit animations disabled so the change
// is not animated.
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayerManager->videoFullscreenLayer())
        return;
#endif

    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    NSString* gravity = shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize;
    [m_videoLayer.get() setVideoGravity:gravity];
    [CATransaction commit];
}
1813
// Returns the first enabled track in |tracks|, or nil when none is enabled.
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    for (AVAssetTrack* track in tracks) {
        if ([track isEnabled])
            return track;
    }
    return nil;
}
1823
1824 void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
1825 {
1826     String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
1827     m_languageOfPrimaryAudioTrack = String();
1828
1829     if (!m_avAsset)
1830         return;
1831
1832     setDelayCharacteristicsChangedNotification(true);
1833
1834     bool haveCCTrack = false;
1835     bool hasCaptions = false;
1836
1837     // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
1838     // asked about those fairly fequently.
1839     if (!m_avPlayerItem) {
1840         // We don't have a player item yet, so check with the asset because some assets support inspection
1841         // prior to becoming ready to play.
1842         AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
1843         setHasVideo(firstEnabledVideoTrack);
1844         setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
1845 #if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1846         hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
1847 #endif
1848
1849         presentationSizeDidChange(firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize());
1850     } else {
1851         bool hasVideo = false;
1852         bool hasAudio = false;
1853         bool hasMetaData = false;
1854         for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
1855             if ([track isEnabled]) {
1856                 AVAssetTrack *assetTrack = [track assetTrack];
1857                 NSString *mediaType = [assetTrack mediaType];
1858                 if ([mediaType isEqualToString:AVMediaTypeVideo])
1859                     hasVideo = true;
1860                 else if ([mediaType isEqualToString:AVMediaTypeAudio])
1861                     hasAudio = true;
1862                 else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
1863 #if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1864                     hasCaptions = true;
1865 #endif
1866                     haveCCTrack = true;
1867                 } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
1868                     hasMetaData = true;
1869                 }
1870             }
1871         }
1872
1873 #if ENABLE(VIDEO_TRACK)
1874         updateAudioTracks();
1875         updateVideoTracks();
1876
1877 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1878         hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
1879         hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
1880 #endif
1881 #endif
1882
1883         // Always says we have video if the AVPlayerLayer is ready for diaplay to work around
1884         // an AVFoundation bug which causes it to sometimes claim a track is disabled even
1885         // when it is not.
1886         setHasVideo(hasVideo || m_cachedIsReadyForDisplay);
1887
1888         setHasAudio(hasAudio);
1889 #if ENABLE(DATACUE_VALUE)
1890         if (hasMetaData)
1891             processMetadataTrack();
1892 #endif
1893     }
1894
1895 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1896     AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
1897     if (legibleGroup && m_cachedTracks) {
1898         hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
1899         if (hasCaptions)
1900             processMediaSelectionOptions();
1901     }
1902 #endif
1903
1904 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1905     if (!hasCaptions && haveCCTrack)
1906         processLegacyClosedCaptionsTracks();
1907 #elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1908     if (haveCCTrack)
1909         processLegacyClosedCaptionsTracks();
1910 #endif
1911
1912     setHasClosedCaptions(hasCaptions);
1913
1914     LOG(Media, "MediaPlayerPrivateAVFoundation:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s",
1915         this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));
1916
1917     sizeChanged();
1918
1919     if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
1920         characteristicsChanged();
1921
1922 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
1923     if (m_provider)
1924         m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
1925 #endif
1926
1927     setDelayCharacteristicsChangedNotification(false);
1928 }
1929
1930 #if ENABLE(VIDEO_TRACK)
// Diffs the AVPlayerItemTracks of type |trackType| in |tracks| against the
// existing wrapper objects in |oldItems|: wrappers whose track disappeared
// are removed (and reported via |removedFunction|), new tracks get fresh
// wrappers from |itemFactory| (reported via |addedFunction|), and
// |oldItems| is updated in place to the surviving + added set.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // New set: only the tracks whose asset track matches |trackType|.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Partition existing wrappers into removed vs. surviving.
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the MediaPlayer only after |oldItems| reflects the new state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
1974
1975 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Media-selection-group flavor of the diff above: refreshes |group|'s
// options for |characteristics|, then diffs the options that have no
// persistent track against the wrappers in |oldItems|, creating/removing
// wrappers and notifying the MediaPlayer accordingly.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, const Vector<String>& characteristics, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions(characteristics);

    // Only add selection options which do not have an associated persistent track
    // (options with a track are covered by the track-based diff).
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOptionType* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        // -track is not available on all OS versions; treat "responds but nil" the same as absent.
        if (![avOption respondsToSelector:@selector(track)] || ![avOption performSelector:@selector(track)])
            newSelectionOptions.add(option);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Partition existing wrappers into removed vs. surviving.
    for (auto& oldItem : oldItems) {
        if (oldItem->mediaSelectionOption() && removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);
    
    // Notify the MediaPlayer only after |oldItems| reflects the new state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);
    
    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
2035 #endif
2036
// Reconciles m_audioTracks with the current AVPlayerItemTracks and, when
// available, the audible media-selection group, then refreshes each
// surviving track's cached properties.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create the audible selection group with the page's preferred characteristics.
    Vector<String> characteristics = player()->preferredAudioCharacteristics();
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, characteristics);
    }

    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, characteristics, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
#endif

    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateAudioTracks(%p) - audio track count was %lu, is %lu", this, count, m_audioTracks.size());
#endif
}
2063
// Reconciles m_videoTracks with the current AVPlayerItemTracks and, when
// available, the visual media-selection group, then refreshes each
// surviving track's cached properties.
// Fix: the final resetPropertiesFromTrack() loop iterated m_audioTracks
// (copy/paste from updateAudioTracks()); it must iterate m_videoTracks.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create the visual selection group (no preferred characteristics).
    if (!m_visualGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, Vector<String>());
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, Vector<String>(), &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
#endif

    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateVideoTracks(%p) - video track count was %lu, is %lu", this, count, m_videoTracks.size());
#endif
}
2089
// A rendered (bitmap) text-track representation is needed only while a
// fullscreen video layer is active, where DOM-rendered cues are not visible.
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    if (m_videoFullscreenLayerManager->videoFullscreenLayer())
        return true;
#endif
    return false;
}
2098
// Resizes the text-track representation layer to match the video's rect in
// fullscreen (falling back to the full fullscreen frame when there is no
// video layer). No-op unless both fullscreen layer and text layer exist.
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    if (!m_videoFullscreenLayerManager->videoFullscreenLayer() || !m_textTrackRepresentationLayer)
        return;

    FloatRect videoFullscreenFrame = m_videoFullscreenLayerManager->videoFullscreenFrame();
    CGRect textFrame = m_videoLayer ? [m_videoLayer videoRect] : CGRectMake(0, 0, videoFullscreenFrame.width(), videoFullscreenFrame.height());
    [m_textTrackRepresentationLayer setFrame:textFrame];
#endif
}
2110
// Installs (or removes, when |representation| is null) the platform layer
// that renders text-track cues, parenting it under the fullscreen video
// layer and keeping its bounds in sync.
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    PlatformLayer* representationLayer = representation ? representation->platformLayer() : nil;
    // Same layer as before: only the bounds may need updating.
    if (representationLayer == m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        return;
    }

    if (m_textTrackRepresentationLayer)
        [m_textTrackRepresentationLayer removeFromSuperlayer];

    m_textTrackRepresentationLayer = representationLayer;

    if (m_videoFullscreenLayerManager->videoFullscreenLayer() && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayerManager->videoFullscreenLayer() addSublayer:m_textTrackRepresentationLayer.get()];
    }

#else
    UNUSED_PARAM(representation);
#endif
}
2134 #endif // ENABLE(VIDEO_TRACK)
2135
2136 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
// Lazily creates the Web Audio source provider for this player item and
// points it at the first enabled audible track.
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    if (!m_provider) {
        m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }

    return m_provider.get();
}
2146 #endif
2147
// Propagates the cached presentation size as the natural size once an
// asset exists; no-op before that.
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (m_avAsset)
        setNaturalSize(m_cachedPresentationSize);
}
2155     
// True when the asset's resolved URL shares scheme/host/port with the URL
// originally requested (i.e. loading involved no cross-origin redirect).
// Returns false until the "resolvedURL" key has finished loading.
bool MediaPlayerPrivateAVFoundationObjC::hasSingleSecurityOrigin() const 
{
    if (!m_avAsset || [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] != AVKeyValueStatusLoaded)
        return false;
    
    Ref<SecurityOrigin> resolvedOrigin(SecurityOrigin::create(resolvedURL()));
    Ref<SecurityOrigin> requestedOrigin(SecurityOrigin::createFromString(assetURL()));
    return resolvedOrigin.get().isSameSchemeHostPort(&requestedOrigin.get());
}
2165
2166 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p)", this);

    if (m_videoOutput || !m_avPlayerItem)
        return;

#if USE(VIDEOTOOLBOX)
    // With VideoToolbox available we convert formats ourselves; let AVFoundation pick.
    NSDictionary* attributes = nil;
#else
    // Request BGRA buffers so they can be wrapped directly in CGImages.
    NSDictionary* attributes = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#endif
    m_videoOutput = adoptNS([allocAVPlayerItemVideoOutputInstance() initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p) - returning %p", this, m_videoOutput.get());
}
2189
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    // No-op when the output was never created.
    if (!m_videoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem removeOutput:m_videoOutput.get()];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput(%p) - destroying  %p", this, m_videoOutput.get());

    m_videoOutput = nil;
}
2201
// Pulls the pixel buffer for the player item's current time from the video output,
// creating the output on demand. Returns null when no new frame is available or the
// copy fails. On the VideoToolbox path the buffer is transcoded to 32BGRA because
// timely delivery of ARGB pixels from the output cannot be guaranteed.
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::createPixelBuffer()
{
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    // hasNewPixelBufferForItemTime: is false when the frame for this time was already
    // copied out; the caller is expected to keep using the last image in that case.
    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return 0;

    RetainPtr<CVPixelBufferRef> buffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
    if (!buffer)
        return 0;

#if USE(VIDEOTOOLBOX)
    // Create a VTPixelTransferSession, if necessary, as we cannot guarantee timely delivery of ARGB pixels.
    if (!m_pixelTransferSession) {
        VTPixelTransferSessionRef session = 0;
        VTPixelTransferSessionCreate(kCFAllocatorDefault, &session);
        m_pixelTransferSession = adoptCF(session);
    }

    // NOTE(review): the CVPixelBufferCreate return code is not checked; if creation
    // failed, outputBuffer would be used uninitialized — confirm this cannot happen here.
    CVPixelBufferRef outputBuffer;
    CVPixelBufferCreate(kCFAllocatorDefault, CVPixelBufferGetWidth(buffer.get()), CVPixelBufferGetHeight(buffer.get()), kCVPixelFormatType_32BGRA, 0, &outputBuffer);
    VTPixelTransferSessionTransferImage(m_pixelTransferSession.get(), buffer.get(), outputBuffer);
    buffer = adoptCF(outputBuffer);
#endif

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createPixelBuffer(%p) - creating buffer took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return buffer;
}
2242
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;

    // A previously captured image counts as an available frame.
    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    CMTime itemTime = [m_avPlayerItem currentTime];
    return [m_videoOutput hasNewPixelBufferForItemTime:itemTime];
}
2256
// CGDataProvider "get byte pointer" callback: locks the pixel buffer for read-only
// access and hands back its base address. Balanced by the release-byte-pointer callback.
static const void* CVPixelBufferGetBytePointerCallback(void* info)
{
    auto buffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferLockBaseAddress(buffer, kCVPixelBufferLock_ReadOnly);
    return CVPixelBufferGetBaseAddress(buffer);
}
2263
// CGDataProvider "release byte pointer" callback: undoes the read-only lock taken in
// CVPixelBufferGetBytePointerCallback.
static void CVPixelBufferReleaseBytePointerCallback(void* info, const void*)
{
    auto buffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferUnlockBaseAddress(buffer, kCVPixelBufferLock_ReadOnly);
}
2269
// CGDataProvider teardown callback: releases the retain taken on the pixel buffer when
// the provider was created (see createImageFromPixelBuffer).
static void CVPixelBufferReleaseInfoCallback(void* info)
{
    CFRelease(static_cast<CVPixelBufferRef>(info));
}
2275
// Wraps a 32BGRA pixel buffer in a CGImage without copying pixel data: the
// CGDataProvider reads straight out of the buffer's base address, locking it for
// the duration of each access via the callbacks above.
static RetainPtr<CGImageRef> createImageFromPixelBuffer(CVPixelBufferRef pixelBuffer)
{
    // pixelBuffer will be of type kCVPixelFormatType_32BGRA.
    ASSERT(CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_32BGRA);

    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
    size_t byteLength = CVPixelBufferGetDataSize(pixelBuffer);
    // BGRA in memory is alpha-first ARGB read as 32-bit little-endian words.
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaFirst;

    CFRetain(pixelBuffer); // Balanced by CVPixelBufferReleaseInfoCallback in providerCallbacks.
    CGDataProviderDirectCallbacks providerCallbacks = { 0, CVPixelBufferGetBytePointerCallback, CVPixelBufferReleaseBytePointerCallback, 0, CVPixelBufferReleaseInfoCallback };
    RetainPtr<CGDataProviderRef> provider = adoptCF(CGDataProviderCreateDirect(pixelBuffer, byteLength, &providerCallbacks));

    return adoptCF(CGImageCreate(width, height, 8, 32, bytesPerRow, sRGBColorSpaceRef(), bitmapInfo, provider.get(), NULL, false, kCGRenderingIntentDefault));
}
2293
void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
{
    // createPixelBuffer() legitimately returns null when the frame for the current
    // time has already been retrieved; in that case keep the last valid image (if any)
    // so it can still be displayed.
    if (RetainPtr<CVPixelBufferRef> pixelBuffer = createPixelBuffer())
        m_lastImage = createImageFromPixelBuffer(pixelBuffer.get());
}
2304
// Paints the current video frame (pulled through the AVPlayerItemVideoOutput) into
// the given graphics context, honoring the video track's preferred transform.
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext& context, const FloatRect& outputRect)
{
    // If the output exists but has produced nothing yet, block briefly (up to 1s)
    // for a frame so the first paint is not blank.
    if (m_videoOutput && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();

    updateLastImage();

    if (!m_lastImage)
        return;

    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(%p)", this);

    GraphicsContextStateSaver stateSaver(context);
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
    // Map the destination rect back through the inverse transform so that applying
    // videoTransform to the CTM below lands the image in outputRect.
    FloatRect transformedOutputRect = videoTransform.inverse().valueOr(AffineTransform()).mapRect(outputRect);

    context.concatCTM(videoTransform);
    context.drawNativeImage(m_lastImage.get(), imageRect.size(), transformedOutputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();

}
2335
void MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput(%p)", this);

    if (m_openGLVideoOutput || !m_avPlayerItem)
        return;

    // Ask for IOSurface-backed buffers compatible with GL framebuffer objects.
#if PLATFORM(IOS)
    NSDictionary* pixelBufferAttributes = @{(NSString *)kCVPixelBufferIOSurfaceOpenGLESFBOCompatibilityKey: @YES};
#else
    NSDictionary* pixelBufferAttributes = @{(NSString *)kCVPixelBufferIOSurfaceOpenGLFBOCompatibilityKey: @YES};
#endif
    m_openGLVideoOutput = adoptNS([allocAVPlayerItemVideoOutputInstance() initWithPixelBufferAttributes:pixelBufferAttributes]);
    ASSERT(m_openGLVideoOutput);

    [m_avPlayerItem.get() addOutput:m_openGLVideoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput(%p) - returning %p", this, m_openGLVideoOutput.get());
}
2355
// Detaches and releases the OpenGL video output, if one was created.
void MediaPlayerPrivateAVFoundationObjC::destroyOpenGLVideoOutput()
{
    if (!m_openGLVideoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_openGLVideoOutput.get()];
    // Log the OpenGL output being destroyed. (Previously this logged m_videoOutput,
    // a copy-paste error from destroyVideoOutput.)
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyOpenGLVideoOutput(%p) - destroying  %p", this, m_openGLVideoOutput.get());

    m_openGLVideoOutput = 0;
}
2367
void MediaPlayerPrivateAVFoundationObjC::updateLastOpenGLImage()
{
    if (!m_openGLVideoOutput)
        return;

    // Only pull a buffer when the output has a fresh frame for the current host time;
    // otherwise keep the previously captured image.
    CMTime itemTime = [m_openGLVideoOutput itemTimeForHostTime:CACurrentMediaTime()];
    if ([m_openGLVideoOutput hasNewPixelBufferForItemTime:itemTime])
        m_lastOpenGLImage = adoptCF([m_openGLVideoOutput copyPixelBufferForItemTime:itemTime itemTimeForDisplay:nil]);
}
2379
2380 #if !LOG_DISABLED
2381
2382 #define STRINGIFY_PAIR(e) e, #e
// Lazily-populated map from GL enum values to their symbolic names; used only for
// logging in copyVideoTextureToPlatformTexture(). Removed an unused local
// (std::map stringMap) and duplicate emplace() entries that were no-ops.
static std::map<uint32_t, const char*>& enumToStringMap()
{
    static NeverDestroyed<std::map<uint32_t, const char*>> map;
    if (map.get().empty()) {
        map.get().emplace(STRINGIFY_PAIR(GL_RGB));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA));
        map.get().emplace(STRINGIFY_PAIR(GL_LUMINANCE_ALPHA));
        map.get().emplace(STRINGIFY_PAIR(GL_LUMINANCE));
        map.get().emplace(STRINGIFY_PAIR(GL_ALPHA));
        map.get().emplace(STRINGIFY_PAIR(GL_R8));
        map.get().emplace(STRINGIFY_PAIR(GL_R16F));
        map.get().emplace(STRINGIFY_PAIR(GL_R32F));
        map.get().emplace(STRINGIFY_PAIR(GL_R8UI));
        map.get().emplace(STRINGIFY_PAIR(GL_R8I));
        map.get().emplace(STRINGIFY_PAIR(GL_R16UI));
        map.get().emplace(STRINGIFY_PAIR(GL_R16I));
        map.get().emplace(STRINGIFY_PAIR(GL_R32UI));
        map.get().emplace(STRINGIFY_PAIR(GL_R32I));
        map.get().emplace(STRINGIFY_PAIR(GL_RG8));
        map.get().emplace(STRINGIFY_PAIR(GL_RG16F));
        map.get().emplace(STRINGIFY_PAIR(GL_RG32F));
        map.get().emplace(STRINGIFY_PAIR(GL_RG8UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RG8I));
        map.get().emplace(STRINGIFY_PAIR(GL_RG16UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RG16I));
        map.get().emplace(STRINGIFY_PAIR(GL_RG32UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RG32I));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB8));
        map.get().emplace(STRINGIFY_PAIR(GL_SRGB8));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA8));
        map.get().emplace(STRINGIFY_PAIR(GL_SRGB8_ALPHA8));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA4));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB10_A2));
        map.get().emplace(STRINGIFY_PAIR(GL_DEPTH_COMPONENT16));
        map.get().emplace(STRINGIFY_PAIR(GL_DEPTH_COMPONENT24));
        map.get().emplace(STRINGIFY_PAIR(GL_DEPTH_COMPONENT32F));
        map.get().emplace(STRINGIFY_PAIR(GL_DEPTH24_STENCIL8));
        map.get().emplace(STRINGIFY_PAIR(GL_DEPTH32F_STENCIL8));
        map.get().emplace(STRINGIFY_PAIR(GL_RED));
        map.get().emplace(STRINGIFY_PAIR(GL_RG_INTEGER));
        map.get().emplace(STRINGIFY_PAIR(GL_DEPTH_STENCIL));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_BYTE));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_SHORT_5_6_5));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_SHORT_4_4_4_4));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_SHORT_5_5_5_1));
        map.get().emplace(STRINGIFY_PAIR(GL_BYTE));
        map.get().emplace(STRINGIFY_PAIR(GL_HALF_FLOAT));
        map.get().emplace(STRINGIFY_PAIR(GL_FLOAT));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_SHORT));
        map.get().emplace(STRINGIFY_PAIR(GL_SHORT));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_INT));
        map.get().emplace(STRINGIFY_PAIR(GL_INT));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_INT_2_10_10_10_REV));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_INT_24_8));
        map.get().emplace(STRINGIFY_PAIR(GL_FLOAT_32_UNSIGNED_INT_24_8_REV));

#if PLATFORM(IOS)
        map.get().emplace(STRINGIFY_PAIR(GL_RED_INTEGER));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB_INTEGER));
        map.get().emplace(STRINGIFY_PAIR(GL_RG8_SNORM));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB565));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB8_SNORM));
        map.get().emplace(STRINGIFY_PAIR(GL_R11F_G11F_B10F));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB9_E5));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB16F));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB32F));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB8UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB8I));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB16UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB16I));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB32UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB32I));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA8_SNORM));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA16F));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA32F));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA8UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA8I));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB10_A2UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA16UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA16I));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA32I));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA32UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB5_A1));
        map.get().emplace(STRINGIFY_PAIR(GL_RG));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA_INTEGER));
        map.get().emplace(STRINGIFY_PAIR(GL_DEPTH_COMPONENT));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_INT_10F_11F_11F_REV));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_INT_5_9_9_9_REV));
#endif
    }
    return map.get();
}
2481
2482 #endif // !LOG_DISABLED
2483
// Copies the current video frame into the caller's GL texture by wrapping the frame in
// a CV GL texture, attaching it to a temporary read framebuffer, and copyTexImage2D-ing
// into the output texture. Returns false on any failure. Fix: the framebuffer-incomplete
// path previously returned without restoring the saved texture/framebuffer bindings or
// deleting the temporary framebuffer, leaking a GL object and corrupting caller state.
bool MediaPlayerPrivateAVFoundationObjC::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    // This path copies pixels verbatim; flips and alpha changes are not supported.
    if (flipY || premultiplyAlpha)
        return false;

    ASSERT(context);

    if (!m_openGLVideoOutput)
        createOpenGLVideoOutput();

    updateLastOpenGLImage();

    if (!m_lastOpenGLImage)
        return false;

    // Lazily create the platform texture cache used to wrap CVPixelBuffers in GL textures.
    if (!m_openGLTextureCache) {
#if PLATFORM(IOS)
        CVOpenGLESTextureCacheRef cache = nullptr;
        CVReturn error = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, nullptr, context->platformGraphicsContext3D(), nullptr, &cache);
#else
        CVOpenGLTextureCacheRef cache = nullptr;
        CVReturn error = CVOpenGLTextureCacheCreate(kCFAllocatorDefault, nullptr, context->platformGraphicsContext3D(), CGLGetPixelFormat(context->platformGraphicsContext3D()), nullptr, &cache);
#endif
        if (error != kCVReturnSuccess)
            return false;
        m_openGLTextureCache = adoptCF(cache);
    }

    size_t width = CVPixelBufferGetWidth(m_lastOpenGLImage.get());
    size_t height = CVPixelBufferGetHeight(m_lastOpenGLImage.get());

    // Wrap the pixel buffer in a GL texture owned by the cache.
#if PLATFORM(IOS)
    CVOpenGLESTextureRef bareVideoTexture = nullptr;
    if (kCVReturnSuccess != CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, m_openGLTextureCache.get(), m_lastOpenGLImage.get(), nullptr, outputTarget, internalFormat, width, height, format, type, level, &bareVideoTexture))
        return false;
    RetainPtr<CVOpenGLESTextureRef> videoTexture = adoptCF(bareVideoTexture);
    Platform3DObject videoTextureName = CVOpenGLESTextureGetName(videoTexture.get());
    GC3Denum videoTextureTarget = CVOpenGLESTextureGetTarget(videoTexture.get());
#else
    CVOpenGLTextureRef bareVideoTexture = nullptr;
    if (kCVReturnSuccess != CVOpenGLTextureCacheCreateTextureFromImage(kCFAllocatorDefault, m_openGLTextureCache.get(), m_lastOpenGLImage.get(), nullptr, &bareVideoTexture))
        return false;
    RetainPtr<CVOpenGLTextureRef> videoTexture = adoptCF(bareVideoTexture);
    Platform3DObject videoTextureName = CVOpenGLTextureGetName(videoTexture.get());
    GC3Denum videoTextureTarget = CVOpenGLTextureGetTarget(videoTexture.get());
#endif

    // Flush the texture cache on the main queue once this copy is done.
    auto weakThis = createWeakPtr();
    dispatch_async(dispatch_get_main_queue(), [weakThis] {
        if (!weakThis)
            return;

        if (auto cache = weakThis->m_openGLTextureCache.get())
#if PLATFORM(IOS)
            CVOpenGLESTextureCacheFlush(cache, 0);
#else
            CVOpenGLTextureCacheFlush(cache, 0);
#endif
    });

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::copyVideoTextureToPlatformTexture(%p) - internalFormat: %s, format: %s, type: %s", this, enumToStringMap()[internalFormat], enumToStringMap()[format], enumToStringMap()[type]);

    // Save the original bound texture & framebuffer names so we can re-bind them after copying the video texture.
    GC3Dint boundTexture = 0;
    GC3Dint boundReadFramebuffer = 0;
    context->getIntegerv(GraphicsContext3D::TEXTURE_BINDING_2D, &boundTexture);
    context->getIntegerv(GraphicsContext3D::READ_FRAMEBUFFER_BINDING, &boundReadFramebuffer);

    context->bindTexture(videoTextureTarget, videoTextureName);
    context->texParameteri(GraphicsContext3D::TEXTURE_2D, GraphicsContext3D::TEXTURE_MIN_FILTER, GraphicsContext3D::LINEAR);
    context->texParameterf(GraphicsContext3D::TEXTURE_2D, GraphicsContext3D::TEXTURE_WRAP_S, GraphicsContext3D::CLAMP_TO_EDGE);
    context->texParameterf(GraphicsContext3D::TEXTURE_2D, GraphicsContext3D::TEXTURE_WRAP_T, GraphicsContext3D::CLAMP_TO_EDGE);

    // Create a framebuffer object to represent the video texture's memory.
    Platform3DObject readFramebuffer = context->createFramebuffer();

    // Make that framebuffer the read source from which drawing commands will read voxels.
    context->bindFramebuffer(GraphicsContext3D::READ_FRAMEBUFFER, readFramebuffer);

    // Allocate uninitialized memory for the output texture.
    context->bindTexture(outputTarget, outputTexture);
    context->texParameteri(GraphicsContext3D::TEXTURE_2D, GraphicsContext3D::TEXTURE_MIN_FILTER, GraphicsContext3D::LINEAR);
    context->texParameterf(GraphicsContext3D::TEXTURE_2D, GraphicsContext3D::TEXTURE_WRAP_S, GraphicsContext3D::CLAMP_TO_EDGE);
    context->texParameterf(GraphicsContext3D::TEXTURE_2D, GraphicsContext3D::TEXTURE_WRAP_T, GraphicsContext3D::CLAMP_TO_EDGE);
    context->texImage2DDirect(outputTarget, level, internalFormat, width, height, 0, format, type, nullptr);

    // Attach the video texture to the framebuffer.
    context->framebufferTexture2D(GraphicsContext3D::READ_FRAMEBUFFER, GraphicsContext3D::COLOR_ATTACHMENT0, videoTextureTarget, videoTextureName, level);

    GC3Denum status = context->checkFramebufferStatus(GraphicsContext3D::READ_FRAMEBUFFER);
    if (status != GraphicsContext3D::FRAMEBUFFER_COMPLETE) {
        // Restore the caller's bindings and release the temporary framebuffer before bailing.
        context->bindTexture(outputTarget, boundTexture);
        context->bindFramebuffer(GraphicsContext3D::READ_FRAMEBUFFER, boundReadFramebuffer);
        context->deleteFramebuffer(readFramebuffer);
        return false;
    }

    // Copy texture from the read framebuffer (and thus the video texture) to the output texture.
    context->copyTexImage2D(outputTarget, level, internalFormat, 0, 0, width, height, 0);

    // Restore the previous texture and framebuffer bindings.
    context->bindTexture(outputTarget, boundTexture);
    context->bindFramebuffer(GraphicsContext3D::READ_FRAMEBUFFER, boundReadFramebuffer);

    // Clean up after ourselves.
    context->deleteFramebuffer(readFramebuffer);

    return !context->getError();
}
2589
// Refreshes m_lastImage from the video output and returns it (may be null if no
// frame has ever been captured).
PassNativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage.get();
}
2595
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Block until outputMediaDataWillChange() signals the semaphore, or give up
    // after one second (dispatch_semaphore_wait returns nonzero on timeout).
    if (dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(0, 1 * NSEC_PER_SEC)))
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange(%p) timed out", this);
}
2609
// Video-output delegate callback; unblocks waitForVideoOutputMediaDataWillChange().
// NOTE(review): runs on the pull-delegate queue, presumably — confirm against
// globalPullDelegateQueue() in createVideoOutput().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutput*)
{
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
2614 #endif
2615
2616 #if ENABLE(ENCRYPTED_MEDIA)
// EME v1: builds a streaming content key request from the init data (key URI, key ID,
// and app certificate), reports it to the page via keyMessage(), and moves the pending
// AVAssetResourceLoadingRequest under a freshly minted session ID so addKey() can
// later complete it.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::generateKeyRequest(const String& keySystem, const unsigned char* initDataPtr, unsigned initDataLength)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    RefPtr<Uint8Array> initData = Uint8Array::create(initDataPtr, initDataLength);
    String keyURI;
    String keyID;
    RefPtr<Uint8Array> certificate;
    if (!extractKeyURIKeyIDAndCertificateFromInitData(initData.get(), keyURI, keyID, certificate))
        return MediaPlayer::InvalidPlayerState;

    // A resource-loading request for this key URI must already be pending.
    if (!m_keyURIToRequestMap.contains(keyURI))
        return MediaPlayer::InvalidPlayerState;

    String sessionID = createCanonicalUUIDString();

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_keyURIToRequestMap.get(keyURI);

    RetainPtr<NSData> certificateData = adoptNS([[NSData alloc] initWithBytes:certificate->baseAddress() length:certificate->byteLength()]);
    NSString* assetStr = keyID;
    RetainPtr<NSData> assetID = [NSData dataWithBytes: [assetStr cStringUsingEncoding:NSUTF8StringEncoding] length:[assetStr lengthOfBytesUsingEncoding:NSUTF8StringEncoding]];
    NSError* error = 0;
    RetainPtr<NSData> keyRequest = [avRequest.get() streamingContentKeyRequestDataForApp:certificateData.get() contentIdentifier:assetID.get() options:nil error:&error];

    if (!keyRequest) {
        // Surface the underlying platform error code to the page.
        NSError* underlyingError = [[error userInfo] objectForKey:NSUnderlyingErrorKey];
        player()->keyError(keySystem, sessionID, MediaPlayerClient::DomainError, [underlyingError code]);
        return MediaPlayer::NoError;
    }

    RefPtr<ArrayBuffer> keyRequestBuffer = ArrayBuffer::create([keyRequest.get() bytes], [keyRequest.get() length]);
    RefPtr<Uint8Array> keyRequestArray = Uint8Array::create(keyRequestBuffer, 0, keyRequestBuffer->byteLength());
    player()->keyMessage(keySystem, sessionID, keyRequestArray->data(), keyRequestArray->byteLength(), URL());

    // Move ownership of the AVAssetResourceLoadingRequest from the keyURIToRequestMap to the sessionIDToRequestMap:
    m_sessionIDToRequestMap.set(sessionID, avRequest);
    m_keyURIToRequestMap.remove(keyURI);

    return MediaPlayer::NoError;
}
2658
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::addKey(const String& keySystem, const unsigned char* keyPtr, unsigned keyLength, const unsigned char* initDataPtr, unsigned initDataLength, const String& sessionID)
{
    UNUSED_PARAM(initDataPtr);
    UNUSED_PARAM(initDataLength);

    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    // Deliver the key bytes to the pending resource-loading request and complete it.
    RetainPtr<AVAssetResourceLoadingRequest> pendingRequest = m_sessionIDToRequestMap.get(sessionID);
    RetainPtr<NSData> keyData = adoptNS([[NSData alloc] initWithBytes:keyPtr length:keyLength]);
    [[pendingRequest.get() dataRequest] respondWithData:keyData.get()];
    [pendingRequest.get() finishLoading];
    m_sessionIDToRequestMap.remove(sessionID);

    player()->keyAdded(keySystem, sessionID);
    return MediaPlayer::NoError;
}
2679
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::cancelKeyRequest(const String& keySystem, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    // Dropping the stored request abandons the outstanding loading request for this
    // session; remove() returns false when the session was never registered.
    if (!m_sessionIDToRequestMap.remove(sessionID))
        return MediaPlayer::InvalidPlayerState;

    return MediaPlayer::NoError;
}
2691 #endif
2692
2693 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Removes and returns the pending loading request for the given key URI (null
// RetainPtr when no request is pending).
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
2698
void MediaPlayerPrivateAVFoundationObjC::keyAdded()
{
    // Satisfy every pending request whose key is now cached; removal is deferred to a
    // second pass so the map is not mutated while being iterated.
    Vector<String> fulfilledKeyIds;

    for (auto& pair : m_keyURIToRequestMap) {
        if (auto keyData = player()->cachedKeyForKeyId(pair.key)) {
            fulfillRequestWithKeyData(pair.value.get(), keyData.get());
            fulfilledKeyIds.append(pair.key);
        }
    }

    for (auto& keyId : fulfilledKeyIds)
        m_keyURIToRequestMap.remove(keyId);
}
2718
std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem, CDMSessionClient* client)
{
    // Only hand out a session for key systems this player can actually service.
    if (keySystemIsSupported(keySystem))
        return std::make_unique<CDMSessionAVFoundationObjC>(this, client);
    return nullptr;
}
2726 #endif
2727
2728 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
// Reconciles the player's legacy (pre-legible-output) closed-caption text tracks with
// the closed-caption tracks currently present on the player item: tracks that are
// still present are kept, new ones are appended, and any left over are reported as
// removed via processNewAndRemovedTextTracks().
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Clear any automatic legible selection so captions are driven by our own tracks.
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    // Start by assuming every existing track was removed; matches found below are
    // taken back out of this list.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        // Iterate backwards so remove(i - 1) does not disturb unvisited indices.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(m_textTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;
        
        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2763 #endif
2764
NSArray* MediaPlayerPrivateAVFoundationObjC::safeAVAssetTracksForAudibleMedia()
{
    // Only touch the tracks property once AVFoundation reports it loaded; asking
    // earlier would block or return incomplete data.
    if (!m_avAsset || [m_avAsset.get() statusOfValueForKey:@"tracks" error:NULL] != AVKeyValueStatusLoaded)
        return nil;

    return [m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible];
}
2775
2776 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    // True only once the asset's media-selection metadata has finished loading.
    if (!m_avAsset)
        return false;

    return [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2787
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    // Nil until the asset's media-selection metadata has loaded.
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible] : nil;
}
2795
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    // Nil until the asset's media-selection metadata has loaded.
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible] : nil;
}
2803
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    // Nil until the asset's media-selection metadata has loaded.
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual] : nil;
}
2811
// Reconciles our text tracks with the asset's legible media-selection options:
// existing tracks whose option is still offered are kept, new options produce new
// tracks (in-band or, with AVF_CAPTIONS, out-of-band), and leftovers are reported
// as removed via processNewAndRemovedTextTracks().
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Start by assuming every current track was removed; matches below take tracks
    // back out of this list.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        // Iterate backwards so remove(i - 1) does not disturb unvisited indices.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy closed-caption tracks are handled elsewhere; skip them here.
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2867
// Lazily creates the single in-band metadata text track (dispatch type
// "com.apple.streaming") and registers it with the MediaPlayer.
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    // Only one metadata track is ever created per player.
    if (!m_metadataTrack) {
        m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
        m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
        player()->addTextTrack(m_metadataTrack);
    }
}
2877
// Forwards a batch of attributed strings / native samples at the given time to
// the currently-selected text track; cues are dropped when no track is selected.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    ASSERT(time >= MediaTime::zeroTime());

    if (m_currentTextTrack)
        m_currentTextTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), reinterpret_cast<CFArrayRef>(nativeSamples), time);
}
2887
// Discards any partially-accumulated cue state on the selected text track.
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::flushCues(%p)", this);

    // Nothing to flush when no text track is selected.
    if (m_currentTextTrack)
        m_currentTextTrack->resetCueValues();
}
2897 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2898
// Selects (or, for a null track, deselects) the given text track with
// AVFoundation: legacy closed captions toggle closed-caption display on the
// player, while selection-group-backed tracks select their media option.
// Change: pass nil rather than the integer literal 0 as the ObjC object
// argument to -selectMediaOption:, matching the call in
// processMediaSelectionOptions(); dropped a stray blank line.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");

    m_currentTextTrack = track;

    if (track) {
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
        // Deselect any legible option and turn off legacy closed-caption display.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
    }
}
2927
// Returns the locale identifier of the primary/selected audio track, computing
// it lazily and caching it in m_languageOfPrimaryAudioTrack (assigned from this
// const method, so presumably a mutable member — TODO confirm in the header).
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    // Return the cached answer if we already computed one.
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        // Zero or multiple audio tracks: no single "primary" language; cache empty.
        m_languageOfPrimaryAudioTrack = emptyString();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - %lu audio tracks, returning emptyString()", this, static_cast<unsigned long>(tracks ? [tracks count] : 0));
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

#if !LOG_DISABLED
    if (m_languageOfPrimaryAudioTrack == emptyString())
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
    else
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
#endif

    return m_languageOfPrimaryAudioTrack;
}
2969
2970 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// Returns whether playback is currently routed to a wireless/external target.
// On Mac this consults the explicitly-set MediaPlaybackTarget; on iOS it asks
// AVPlayer directly about external playback.
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    bool wirelessTarget = false;

#if !PLATFORM(IOS)
    if (m_playbackTarget) {
        if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation)
            wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
        else
            // Non-AVFoundation (e.g. Mock) target: wireless iff we were asked to
            // play to it and it still has an active route.
            wirelessTarget = m_shouldPlayToPlaybackTarget && m_playbackTarget->hasActiveRoute();
    }
#else
    wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
#endif

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless(%p) - returning %s", this, boolString(wirelessTarget));

    return wirelessTarget;
}
2990
// Maps the player's current external-playback route to a MediaPlayer target
// type. Without an AVPlayer there is no route, hence TargetTypeNone.
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

#if PLATFORM(IOS)
    switch (wkExernalDeviceTypeForPlayer(m_avPlayer.get())) {
    case wkExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case wkExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case wkExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;

#else
    // On Mac the only external playback type this code reports is AirPlay.
    return MediaPlayer::TargetTypeAirPlay;
#endif
}
3013
// Returns the display name of the current wireless playback target: the
// MediaPlaybackTarget's device name on Mac, the external-device name from the
// player on iOS; empty when there is no player (or, on Mac, no target).
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();

    String wirelessTargetName;
#if !PLATFORM(IOS)
    if (m_playbackTarget)
        wirelessTargetName = m_playbackTarget->deviceName();
#else
    wirelessTargetName = wkExernalDeviceDisplayNameForPlayer(m_avPlayer.get());
#endif
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName(%p) - returning %s", this, wirelessTargetName.utf8().data());

    return wirelessTargetName;
}
3030
// Returns whether wireless (external) video playback is disabled. When an
// AVPlayer exists, first refreshes the cached m_allowsWirelessVideoPlayback
// from its allowsExternalPlayback property (assigned in a const method, so
// presumably a mutable member — TODO confirm in the header).
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;

    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled(%p) - returning %s", this, boolString(!m_allowsWirelessVideoPlayback));

    return !m_allowsWirelessVideoPlayback;
}
3041
// Records the new wireless-video-playback policy and, when an AVPlayer exists,
// pushes the inverse onto its allowsExternalPlayback property.
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(%p) - %s", this, boolString(disabled));
    m_allowsWirelessVideoPlayback = !disabled;
    if (m_avPlayer) {
        // Defer notification callbacks while poking the player property.
        setDelayCallbacks(true);
        [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
        setDelayCallbacks(false);
    }
}
3053
3054 #if !PLATFORM(IOS)
// Adopts a new playback target. Only an AVFoundation-type target carries an
// AVOutputContext; for other target types (e.g. Mock) the context is cleared.
void MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
{
    m_playbackTarget = WTFMove(target);

    m_outputContext = m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation ? toMediaPlaybackTargetMac(m_playbackTarget.get())->outputContext() : nullptr;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(%p) - target = %p, device name = %s", this, m_outputContext.get(), m_playbackTarget->deviceName().utf8().data());

    // A target without an active route cannot be played to.
    if (!m_playbackTarget->hasActiveRoute())
        setShouldPlayToPlaybackTarget(false);
}
3066
// Starts or stops routing playback to the previously-set playback target by
// installing (or clearing) the target's AVOutputContext on the AVPlayer. Mock
// targets have no context; they just trigger a wireless-state notification.
void MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(bool shouldPlay)
{
    if (m_shouldPlayToPlaybackTarget == shouldPlay)
        return;

    m_shouldPlayToPlaybackTarget = shouldPlay;

    if (!m_playbackTarget)
        return;

    if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation) {
        AVOutputContext *newContext = shouldPlay ? m_outputContext.get() : nil;

        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(%p) - target = %p, shouldPlay = %s", this, newContext, boolString(shouldPlay));

        if (!m_avPlayer)
            return;

        // Skip the assignment when the context is unchanged; nil == nil counts as equal.
        RetainPtr<AVOutputContext> currentContext = m_avPlayer.get().outputContext;
        if ((!newContext && !currentContext.get()) || [currentContext.get() isEqual:newContext])
            return;

        setDelayCallbacks(true);
        m_avPlayer.get().outputContext = newContext;
        setDelayCallbacks(false);

        return;
    }

    ASSERT(m_playbackTarget->targetType() == MediaPlaybackTarget::Mock);

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(%p) - target = {Mock}, shouldPlay = %s", this, boolString(shouldPlay));

    // Notify asynchronously on the main thread; the WeakPtr guards against this
    // player being destroyed before the notification runs.
    setDelayCallbacks(true);
    auto weakThis = createWeakPtr();
    scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis] {
        if (!weakThis)
            return;
        weakThis->playbackTargetIsWirelessDidChange();
    }));
    setDelayCallbacks(false);
}
3109 #endif // !PLATFORM(IOS)
3110
// On iOS, allow external playback while an external screen is active only when
// the element is in standard fullscreen mode; no-op elsewhere or without a player.
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
    if (!m_avPlayer)
        return;

#if PLATFORM(IOS)
    [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:player()->fullscreenMode() & MediaPlayer::VideoFullscreenModeStandard];
#endif
}
3120 #endif
3121
// KVO handler: caches the AVPlayerItem status and re-evaluates player state.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;

    updateStates();
}
3128
// Marks a pending likelyToKeepUp change; updateStates() is deferred until the
// matching playbackLikelyToKeepUpDidChange() decrements the counter to zero.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    m_pendingStatusChanges++;
}
3133
// Caches the new likelyToKeepUp value; updateStates() runs only once all
// outstanding will/did-change pairs have balanced out.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
3142
// Marks a pending bufferEmpty change; updateStates() is deferred until the
// matching playbackBufferEmptyDidChange() decrements the counter to zero.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    m_pendingStatusChanges++;
}
3147
// Caches the new bufferEmpty value; updateStates() runs only once all
// outstanding will/did-change pairs have balanced out.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
3156
// Marks a pending bufferFull change; updateStates() is deferred until the
// matching playbackBufferFullDidChange() decrements the counter to zero.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    m_pendingStatusChanges++;
}
3161
// Caches the new bufferFull value; updateStates() runs only once all
// outstanding will/did-change pairs have balanced out.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
3170
// KVO handler: caches the item's new seekable ranges, then notifies the base
// class and re-evaluates player state.
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray> seekableRanges)
{
    m_cachedSeekableRanges = seekableRanges;

    seekableTimeRangesChanged();
    updateStates();
}
3178
// KVO handler: caches the item's new loaded (buffered) ranges, then notifies
// the base class and re-evaluates player state.
void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray> loadedRanges)
{
    m_cachedLoadedRanges = loadedRanges;

    loadedTimeRangesChanged();
    updateStates();
}
3186
// KVO handler: a first video frame is (or is no longer) ready for display.
// If video becomes ready before any video track was detected, re-run track
// processing so hasVideo() gets refreshed.
void MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange(bool isReady)
{
    m_cachedIsReadyForDisplay = isReady;
    if (!hasVideo() && isReady)
        tracksChanged();
    updateStates();
}
3194
// KVO handler for an AVPlayerItemTrack's "enabled" property: re-run track
// processing and re-evaluate player state (the new value itself is unused).
void MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange(bool)
{
    tracksChanged();
    updateStates();
}
3200
// Enables or disables data buffering by attaching or detaching the retained
// AVPlayerItem from the AVPlayer; the item is kept either way so buffering can
// resume later. Change: the LOG string previously said "shouldBufferData",
// which does not match this function's name and hurts log greppability.
void MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(bool shouldBuffer)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(%p) - %s", this, boolString(shouldBuffer));
    if (m_shouldBufferData == shouldBuffer)
        return;

    m_shouldBufferData = shouldBuffer;

    if (!m_avPlayer)
        return;

    setAVPlayerItem(shouldBuffer ? m_avPlayerItem.get() : nil);
}
3214
3215 #if ENABLE(DATACUE_VALUE)
// Maps an AVFoundation metadata key space to the metadata-type atom exposed on
// data cues; returns the empty atom for unrecognized key spaces.
static const AtomicString& metadataType(NSString *avMetadataKeySpace)
{
    static NeverDestroyed<const AtomicString> quickTimeUserDataType("com.apple.quicktime.udta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> isoUserDataType("org.mp4ra", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> quickTimeMetadataType("com.apple.quicktime.mdta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> iTunesMetadataType("com.apple.itunes", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> id3MetadataType("org.id3", AtomicString::ConstructFromLiteral);

    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeUserData])
        return quickTimeUserDataType;
    // AVMetadataKeySpaceISOUserData may be null at runtime (presumably on older
    // OS versions), so guard before comparing against it.
    if (AVMetadataKeySpaceISOUserData && [avMetadataKeySpace isEqualToString:AVMetadataKeySpaceISOUserData])
        return isoUserDataType;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeMetadata])
        return quickTimeMetadataType;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceiTunes])
        return iTunesMetadataType;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceID3])
        return id3MetadataType;

    return emptyAtom;
}
3237 #endif
3238
// Handles newly-arrived timed metadata from the AVPlayerItem: caches it and,
// when DATACUE_VALUE is enabled, turns each AVMetadataItem into a data cue on
// the in-band metadata text track.
void MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(RetainPtr<NSArray> metadata, const MediaTime& mediaTime)
{
    // NSNull in the array slot means "no metadata"; normalize it to nil.
    m_currentMetaData = metadata && ![metadata isKindOfClass:[NSNull class]] ? metadata : nil;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(%p) - adding %i cues at time %s", this, m_currentMetaData ? static_cast<int>([m_currentMetaData.get() count]) : 0, toString(mediaTime).utf8().data());

#if ENABLE(DATACUE_VALUE)
    // Don't emit cues while seeking; the times would be misleading.
    if (seeking())
        return;

    if (!m_metadataTrack)
        processMetadataTrack();

    // No new metadata: just close out any still-open cues at the current time.
    if (!metadata || [metadata isKindOfClass:[NSNull class]]) {
        m_metadataTrack->updatePendingCueEndTimes(mediaTime);
        return;
    }

    // Set the duration of all incomplete cues before adding new ones.
    MediaTime earliestStartTime = MediaTime::positiveInfiniteTime();
    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = std::max(toMediaTime(item.time), MediaTime::zeroTime());
        if (start < earliestStartTime)
            earliestStartTime = start;
    }
    m_metadataTrack->updatePendingCueEndTimes(earliestStartTime);

    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = std::max(toMediaTime(item.time), MediaTime::zeroTime());
        // Items without a valid duration become open-ended cues, to be closed
        // later by updatePendingCueEndTimes().
        MediaTime end = MediaTime::positiveInfiniteTime();
        if (CMTIME_IS_VALID(item.duration))
            end = start + toMediaTime(item.duration);

        AtomicString type = nullAtom;
        if (item.keySpace)
            type = metadataType(item.keySpace);

        m_metadataTrack->addDataCue(start, end, SerializedPlatformRepresentationMac::create(item), type);
    }
#endif
}
3280
// KVO handler for the AVPlayerItem's "tracks": rebuilds m_cachedTracks,
// keeping "enabled" observers balanced and filtering out streaming tracks that
// are already represented by a media selection group.
void MediaPlayerPrivateAVFoundationObjC::tracksDidChange(RetainPtr<NSArray> tracks)
{
    // Stop observing the outgoing tracks before replacing the cache.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];

    NSArray *assetTracks = [m_avAsset tracks];

    m_cachedTracks = [tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id obj, NSUInteger, BOOL*) {
        AVAssetTrack* assetTrack = [obj assetTrack];

        // Tracks belonging directly to the asset are always kept.
        if ([assetTracks containsObject:assetTrack])
            return YES;

        // Track is a streaming track. Omit if it belongs to a valid AVMediaSelectionGroup.
        if (!hasLoadedMediaSelectionGroups())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicAudible] && safeMediaSelectionGroupForAudibleMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicVisual] && safeMediaSelectionGroupForVisualMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible] && safeMediaSelectionGroupForLegibleMedia())
            return NO;

        return YES;
    }]];

    // Observe "enabled" on the new track set.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track addObserver:m_objcObserver.get() forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayerItemTrack];

    // The track set changed, so the previously computed total byte size is stale.
    m_cachedTotalBytes = 0;

    tracksChanged();
    updateStates();
}
3318
// KVO handler: caches the item's hasEnabledAudio flag, then re-runs track
// processing and re-evaluates player state.
void MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange(bool hasEnabledAudio)
{
    m_cachedHasEnabledAudio = hasEnabledAudio;

    tracksChanged();
    updateStates();
}
3326
// KVO handler: caches the item's new presentation size, then propagates the
// size change and re-evaluates player state.
void MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange(FloatSize size)
{
    m_cachedPresentationSize = size;

    sizeChanged();
    updateStates();
}
3334
// KVO handler: caches the item's new duration and invalidates the duration
// cached by the base class.
void MediaPlayerPrivateAVFoundationObjC::durationDidChange(const MediaTime& duration)
{
    m_cachedDuration = duration;

    invalidateCachedDuration();
}
3341
// KVO handler: caches the player's new rate, then re-evaluates state and
// notifies of the rate change.
void MediaPlayerPrivateAVFoundationObjC::rateDidChange(double rate)
{
    m_cachedRate = rate;

    updateStates();
    rateChanged();
}
3349     
3350 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// Forwards a wireless-target change notification to the base class.
void MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange()
{
    playbackTargetIsWirelessChanged();
}
3355 #endif
3356
// KVO handler: caches the item's canPlayFastForward flag.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange(bool newValue)
{
    m_cachedCanPlayFastForward = newValue;
}
3361
// KVO handler: caches the item's canPlayFastReverse flag.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange(bool newValue)
{
    m_cachedCanPlayFastReverse = newValue;
}
3366
// Returns the asset's resolved URL once AVFoundation has finished loading that
// key; until then, falls back to the base class's notion of the URL.
URL MediaPlayerPrivateAVFoundationObjC::resolvedURL() const
{
    if (!m_avAsset || [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] != AVKeyValueStatusLoaded)
        return MediaPlayerPrivateAVFoundation::resolvedURL();

    return URL([m_avAsset resolvedURL]);
}
3374
// AVAsset keys loaded asynchronously before the asset is used. The array is
// created once and intentionally never released.
// Change: the previous check-then-assign lazy init was not thread-safe; use a
// function-local static initializer (thread-safe since C++11), matching the
// pattern already used by assetTrackMetadataKeyNames() and playerKVOProperties().
NSArray* assetMetadataKeyNames()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"duration",
                @"naturalSize",
                @"preferredTransform",
                @"preferredVolume",
                @"preferredRate",
                @"playable",
                @"resolvedURL",
                @"tracks",
                @"availableMediaCharacteristicsWithMediaSelectionOptions",
               nil];
    return keys;
}
3392
// AVPlayerItem key paths observed via KVO. The array is created once and
// intentionally never released.
// Change: the previous check-then-assign lazy init was not thread-safe; use a
// function-local static initializer (thread-safe since C++11), matching the
// pattern already used by assetTrackMetadataKeyNames() and playerKVOProperties().
NSArray* itemKVOProperties()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"presentationSize",
            @"status",
            @"asset",
            @"tracks",
            @"seekableTimeRanges",
            @"loadedTimeRanges",
            @"playbackLikelyToKeepUp",
            @"playbackBufferFull",
            @"playbackBufferEmpty",
            @"duration",
            @"hasEnabledAudio",
            @"timedMetadata",
            @"canPlayFastForward",
            @"canPlayFastReverse",
            nil];
    return keys;
}
3415
// AVAssetTrack keys loaded before per-track metadata is consumed. Created once
// via thread-safe static initialization; intentionally never released.
NSArray* assetTrackMetadataKeyNames()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"totalSampleDataLength", @"mediaType", @"enabled", @"preferredTransform", @"naturalSize", nil];
    return keys;
}
3421
// AVPlayer key paths observed via KVO; the external-playback keys are included
// only when wireless playback targets are enabled. Created once via thread-safe
// static initialization; intentionally never released.
NSArray* playerKVOProperties()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"rate",
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
                            @"externalPlaybackActive", @"allowsExternalPlayback",
#endif
                            nil];
    return keys;
}
3431 } // namespace WebCore
3432
3433 @implementation WebCoreAVFMovieObserver
3434
// Designated initializer: stores the (unretained) back-pointer to the player
// that owns this observer.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (self)
        m_callback = callback;
    return self;
}
3443
// Severs the link back to the player so any queued notifications become
// no-ops, and cancels pending performSelector requests targeting self.
- (void)disconnect
{
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    m_callback = 0;
}
3449
// Forwards AVAsset metadata-load completion to the player as a scheduled
// main-thread notification; dropped silently after -disconnect.
- (void)metadataLoaded
{
    if (!m_callback)
        return;

    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
}
3457