Use NeverDestroyed instead of DEPRECATED_DEFINE_STATIC_LOCAL
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27
28 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
29 #import "MediaPlayerPrivateAVFoundationObjC.h"
30
31 #import "AVFoundationSPI.h"
32 #import "AVTrackPrivateAVFObjCImpl.h"
33 #import "AudioSourceProviderAVFObjC.h"
34 #import "AudioTrackPrivateAVFObjC.h"
35 #import "AuthenticationChallenge.h"
36 #import "BlockExceptions.h"
37 #import "CDMSessionAVFoundationObjC.h"
38 #import "Cookie.h"
39 #import "ExceptionCodePlaceholder.h"
40 #import "Extensions3D.h"
41 #import "FloatConversion.h"
42 #import "FloatConversion.h"
43 #import "GraphicsContext.h"
44 #import "GraphicsContext3D.h"
45 #import "GraphicsContextCG.h"
46 #import "InbandMetadataTextTrackPrivateAVF.h"
47 #import "InbandTextTrackPrivateAVFObjC.h"
48 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
49 #import "OutOfBandTextTrackPrivateAVF.h"
50 #import "URL.h"
51 #import "Logging.h"
52 #import "MediaPlaybackTargetMac.h"
53 #import "MediaPlaybackTargetMock.h"
54 #import "MediaSelectionGroupAVFObjC.h"
55 #import "MediaTimeAVFoundation.h"
56 #import "PlatformTimeRanges.h"
57 #import "QuartzCoreSPI.h"
58 #import "SecurityOrigin.h"
59 #import "SerializedPlatformRepresentationMac.h"
60 #import "TextEncoding.h"
61 #import "TextTrackRepresentation.h"
62 #import "UUID.h"
63 #import "VideoTrackPrivateAVFObjC.h"
64 #import "WebCoreAVFResourceLoader.h"
65 #import "WebCoreCALayerExtras.h"
66 #import "WebCoreSystemInterface.h"
67 #import <functional>
68 #import <map>
69 #import <objc/runtime.h>
70 #import <runtime/DataView.h>
71 #import <runtime/JSCInlines.h>
72 #import <runtime/TypedArrayInlines.h>
73 #import <runtime/Uint16Array.h>
74 #import <runtime/Uint32Array.h>
75 #import <runtime/Uint8Array.h>
76 #import <wtf/CurrentTime.h>
77 #import <wtf/ListHashSet.h>
78 #import <wtf/NeverDestroyed.h>
79 #import <wtf/OSObjectPtr.h>
80 #import <wtf/text/CString.h>
81 #import <wtf/text/StringBuilder.h>
82
83 #if ENABLE(AVF_CAPTIONS)
84 #include "TextTrack.h"
85 #endif
86
87 #import <AVFoundation/AVFoundation.h>
88 #if PLATFORM(IOS)
89 #import "WAKAppKitStubs.h"
90 #import <CoreImage/CoreImage.h>
91 #import <mach/mach_port.h>
92 #else
93 #import <Foundation/NSGeometry.h>
94 #import <QuartzCore/CoreImage.h>
95 #endif
96
97 #if USE(VIDEOTOOLBOX)
98 #import <CoreVideo/CoreVideo.h>
99 #import <VideoToolbox/VideoToolbox.h>
100 #endif
101
102 #if USE(CFNETWORK)
103 #include "CFNSURLConnectionSPI.h"
104 #endif
105
106 #if PLATFORM(IOS)
107 #include <OpenGLES/ES3/glext.h>
108 #endif
109
namespace std {
// NOTE(review): presumably provided because WTF::HashSet's iterator does not
// declare the standard iterator typedefs that some std facilities require;
// confirm which std use below depends on value_type.
template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
    typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
};
}
115
// Container layer that keeps its sublayers (e.g. the AVPlayerLayer) sized and
// positioned to match itself, so the video layer always fills the container.
@interface WebVideoContainerLayer : CALayer
@end

@implementation WebVideoContainerLayer

- (void)setBounds:(CGRect)bounds
{
    [super setBounds:bounds];
    // Resize every sublayer to cover the full container bounds.
    for (CALayer* layer in self.sublayers)
        layer.frame = bounds;
}

- (void)setPosition:(CGPoint)position
{
    if (!CATransform3DIsIdentity(self.transform)) {
        // Pre-apply the transform added in the WebProcess to fix <rdar://problem/18316542> to the position.
        position = CGPointApplyAffineTransform(position, CATransform3DGetAffineTransform(self.transform));
    }
    [super setPosition:position];
}
@end
137
138 #if ENABLE(AVF_CAPTIONS)
139 // Note: This must be defined before our SOFT_LINK macros:
140 @class AVMediaSelectionOption;
141 @interface AVMediaSelectionOption (OutOfBandExtensions)
142 @property (nonatomic, readonly) NSString* outOfBandSource;
143 @property (nonatomic, readonly) NSString* outOfBandIdentifier;
144 @end
145 #endif
146
147 @interface AVURLAsset (WebKitExtensions)
148 @property (nonatomic, readonly) NSURL *resolvedURL;
149 @end
150
151 typedef AVPlayer AVPlayerType;
152 typedef AVPlayerItem AVPlayerItemType;
153 typedef AVPlayerItemLegibleOutput AVPlayerItemLegibleOutputType;
154 typedef AVPlayerItemVideoOutput AVPlayerItemVideoOutputType;
155 typedef AVMetadataItem AVMetadataItemType;
156 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
157 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
158
159 #pragma mark - Soft Linking
160
161 // Soft-linking headers must be included last since they #define functions, constants, etc.
162 #import "CoreMediaSoftLink.h"
163
164 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
165
166 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
167 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreVideo)
168
169 #if USE(VIDEOTOOLBOX)
170 SOFT_LINK_FRAMEWORK_OPTIONAL(VideoToolbox)
171 #endif
172
173 SOFT_LINK(CoreVideo, CVPixelBufferGetWidth, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
174 SOFT_LINK(CoreVideo, CVPixelBufferGetHeight, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
175 SOFT_LINK(CoreVideo, CVPixelBufferGetBaseAddress, void*, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
176 SOFT_LINK(CoreVideo, CVPixelBufferGetBytesPerRow, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
177 SOFT_LINK(CoreVideo, CVPixelBufferGetDataSize, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
178 SOFT_LINK(CoreVideo, CVPixelBufferGetPixelFormatType, OSType, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
179 SOFT_LINK(CoreVideo, CVPixelBufferLockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
180 SOFT_LINK(CoreVideo, CVPixelBufferUnlockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
181
182 #if USE(VIDEOTOOLBOX)
183 SOFT_LINK(VideoToolbox, VTPixelTransferSessionCreate, OSStatus, (CFAllocatorRef allocator, VTPixelTransferSessionRef *pixelTransferSessionOut), (allocator, pixelTransferSessionOut))
184 SOFT_LINK(VideoToolbox, VTPixelTransferSessionTransferImage, OSStatus, (VTPixelTransferSessionRef session, CVPixelBufferRef sourceBuffer, CVPixelBufferRef destinationBuffer), (session, sourceBuffer, destinationBuffer))
185 #endif
186
187 SOFT_LINK_CLASS(AVFoundation, AVPlayer)
188 SOFT_LINK_CLASS(AVFoundation, AVPlayerItem)
189 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemVideoOutput)
190 SOFT_LINK_CLASS(AVFoundation, AVPlayerLayer)
191 SOFT_LINK_CLASS(AVFoundation, AVURLAsset)
192 SOFT_LINK_CLASS(AVFoundation, AVAssetImageGenerator)
193 SOFT_LINK_CLASS(AVFoundation, AVMetadataItem)
194
195 SOFT_LINK_CLASS(CoreImage, CIContext)
196 SOFT_LINK_CLASS(CoreImage, CIImage)
197
198 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString*)
199 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString*)
200 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
201 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
202 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
203 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
204 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
205 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeMetadata, NSString *)
206 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
207 SOFT_LINK_POINTER(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
208 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
209 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
210 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
211 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
212 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
213 SOFT_LINK_POINTER(CoreVideo, kCVPixelBufferPixelFormatTypeKey, NSString *)
214
215 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
216
217 #define AVPlayer getAVPlayerClass()
218 #define AVPlayerItem getAVPlayerItemClass()
219 #define AVPlayerLayer getAVPlayerLayerClass()
220 #define AVURLAsset getAVURLAssetClass()
221 #define AVAssetImageGenerator getAVAssetImageGeneratorClass()
222 #define AVMetadataItem getAVMetadataItemClass()
223
224 #define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
225 #define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
226 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
227 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
228 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
229 #define AVMediaTypeVideo getAVMediaTypeVideo()
230 #define AVMediaTypeAudio getAVMediaTypeAudio()
231 #define AVMediaTypeMetadata getAVMediaTypeMetadata()
232 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
233 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
234 #define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
235 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
236 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
237 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
238 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
239 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
240 #define kCVPixelBufferPixelFormatTypeKey getkCVPixelBufferPixelFormatTypeKey()
241
242 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
243 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
244 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
245
246 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
247 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
248 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
249
250 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
251 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
252 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
253 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
254
255 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
256 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
257 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
258 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
259 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
260 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
261 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
262 #endif
263
264 #if ENABLE(AVF_CAPTIONS)
265 SOFT_LINK_POINTER(AVFoundation, AVURLAssetHTTPCookiesKey, NSString*)
266 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
267 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
268 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
269 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
270 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
271 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
272 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
273 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
274 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
275
276 #define AVURLAssetHTTPCookiesKey getAVURLAssetHTTPCookiesKey()
277 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
278 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
279 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
280 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
281 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
282 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
283 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
284 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
285 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
286 #endif
287
288 #if ENABLE(DATACUE_VALUE)
289 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString*)
290 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMetadataKeySpaceISOUserData, NSString*)
291 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString*)
292 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceiTunes, NSString*)
293 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceID3, NSString*)
294
295 #define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
296 #define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
297 #define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
298 #define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
299 #define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
300 #endif
301
302 #if PLATFORM(IOS)
303 SOFT_LINK_POINTER(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
304 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
305 #endif
306
307 #if PLATFORM(IOS)
308 SOFT_LINK(CoreVideo, CVOpenGLESTextureCacheCreate, CVReturn, (CFAllocatorRef allocator, CFDictionaryRef cacheAttributes, CVEAGLContext eaglContext, CFDictionaryRef textureAttributes, CVOpenGLESTextureCacheRef* cacheOut), (allocator, cacheAttributes, eaglContext, textureAttributes, cacheOut))
309 SOFT_LINK(CoreVideo, CVOpenGLESTextureCacheCreateTextureFromImage, CVReturn, (CFAllocatorRef allocator, CVOpenGLESTextureCacheRef textureCache, CVImageBufferRef sourceImage, CFDictionaryRef textureAttributes, GLenum target, GLint internalFormat, GLsizei width, GLsizei height, GLenum format, GLenum type, size_t planeIndex, CVOpenGLESTextureRef* textureOut), (allocator, textureCache, sourceImage, textureAttributes, target, internalFormat, width, height, format, type, planeIndex, textureOut))
310 SOFT_LINK(CoreVideo, CVOpenGLESTextureCacheFlush, void, (CVOpenGLESTextureCacheRef textureCache, CVOptionFlags options), (textureCache, options))
311 SOFT_LINK(CoreVideo, CVOpenGLESTextureGetTarget, GLenum, (CVOpenGLESTextureRef image), (image))
312 SOFT_LINK(CoreVideo, CVOpenGLESTextureGetName, GLuint, (CVOpenGLESTextureRef image), (image))
313 SOFT_LINK_POINTER(CoreVideo, kCVPixelBufferIOSurfaceOpenGLESFBOCompatibilityKey, NSString *)
314 #define kCVPixelBufferIOSurfaceOpenGLESFBOCompatibilityKey getkCVPixelBufferIOSurfaceOpenGLESFBOCompatibilityKey()
315 #else
316 SOFT_LINK(CoreVideo, CVOpenGLTextureCacheCreate, CVReturn, (CFAllocatorRef allocator, CFDictionaryRef cacheAttributes, CGLContextObj cglContext, CGLPixelFormatObj cglPixelFormat, CFDictionaryRef textureAttributes, CVOpenGLTextureCacheRef* cacheOut), (allocator, cacheAttributes, cglContext, cglPixelFormat, textureAttributes, cacheOut))
317 SOFT_LINK(CoreVideo, CVOpenGLTextureCacheCreateTextureFromImage, CVReturn, (CFAllocatorRef allocator, CVOpenGLTextureCacheRef textureCache, CVImageBufferRef sourceImage, CFDictionaryRef attributes, CVOpenGLTextureRef* textureOut), (allocator, textureCache, sourceImage, attributes, textureOut))
318 SOFT_LINK(CoreVideo, CVOpenGLTextureCacheFlush, void, (CVOpenGLTextureCacheRef textureCache, CVOptionFlags options), (textureCache, options))
319 SOFT_LINK(CoreVideo, CVOpenGLTextureGetTarget, GLenum, (CVOpenGLTextureRef image), (image))
320 SOFT_LINK(CoreVideo, CVOpenGLTextureGetName, GLuint, (CVOpenGLTextureRef image), (image))
321 SOFT_LINK_POINTER(CoreVideo, kCVPixelBufferIOSurfaceOpenGLFBOCompatibilityKey, NSString *)
322 #define kCVPixelBufferIOSurfaceOpenGLFBOCompatibilityKey getkCVPixelBufferIOSurfaceOpenGLFBOCompatibilityKey()
323 #endif
324
325 using namespace WebCore;
326
327 enum MediaPlayerAVFoundationObservationContext {
328     MediaPlayerAVFoundationObservationContextPlayerItem,
329     MediaPlayerAVFoundationObservationContextPlayerItemTrack,
330     MediaPlayerAVFoundationObservationContextPlayer,
331     MediaPlayerAVFoundationObservationContextAVPlayerLayer,
332 };
333
334 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
335 @interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
336 #else
337 @interface WebCoreAVFMovieObserver : NSObject
338 #endif
339 {
340     MediaPlayerPrivateAVFoundationObjC* m_callback;
341     int m_delayCallbacks;
342 }
343 -(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
344 -(void)disconnect;
345 -(void)metadataLoaded;
346 -(void)didEnd:(NSNotification *)notification;
347 -(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
348 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
349 - (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
350 - (void)outputSequenceWasFlushed:(id)output;
351 #endif
352 @end
353
354 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
355 @interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
356     MediaPlayerPrivateAVFoundationObjC* m_callback;
357 }
358 - (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
359 - (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
360 - (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
361 @end
362 #endif
363
364 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
365 @interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
366     MediaPlayerPrivateAVFoundationObjC *m_callback;
367     dispatch_semaphore_t m_semaphore;
368 }
369 - (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
370 - (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
371 - (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
372 - (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
373 @end
374 #endif
375
376 namespace WebCore {
377
378 static NSArray *assetMetadataKeyNames();
379 static NSArray *itemKVOProperties();
380 static NSArray *assetTrackMetadataKeyNames();
381 static NSArray *playerKVOProperties();
382 static AVAssetTrack* firstEnabledTrack(NSArray* tracks);
383
#if !LOG_DISABLED
// Printable representation of a boolean, for LOG() messages.
static const char *boolString(bool val)
{
    if (val)
        return "true";
    return "false";
}
#endif
390
391 #if ENABLE(ENCRYPTED_MEDIA_V2)
392 typedef HashMap<MediaPlayer*, MediaPlayerPrivateAVFoundationObjC*> PlayerToPrivateMapType;
393 static PlayerToPrivateMapType& playerToPrivateMap()
394 {
395     static NeverDestroyed<PlayerToPrivateMapType> map;
396     return map;
397 };
398 #endif
399
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Lazily-created shared serial queue on which AVAssetResourceLoader delegate
// callbacks are delivered.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t queue;
    static dispatch_once_t once;
    dispatch_once(&once, ^{
        queue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return queue;
}
#endif
411
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Lazily-created shared serial queue on which AVPlayerItemOutput pull-delegate
// callbacks are delivered.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_queue_t queue;
    static dispatch_once_t once;
    dispatch_once(&once, ^{
        queue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return queue;
}
#endif
423
#if USE(CFNETWORK)
// Adapts WebCore's AuthenticationClient interface to an
// NSURLAuthenticationChallenge: each client response is forwarded to the
// challenge's sender.
class WebCoreNSURLAuthenticationChallengeClient : public RefCounted<WebCoreNSURLAuthenticationChallengeClient>, public AuthenticationClient {
public:
    static RefPtr<WebCoreNSURLAuthenticationChallengeClient> create(NSURLAuthenticationChallenge *challenge)
    {
        return adoptRef(new WebCoreNSURLAuthenticationChallengeClient(challenge));
    }

    // Expose RefCounted's ref/deref publicly; the AuthenticationClient
    // overrides below delegate to them.
    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::ref;
    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::deref;

private:
    WebCoreNSURLAuthenticationChallengeClient(NSURLAuthenticationChallenge *challenge)
        : m_challenge(challenge)
    {
        ASSERT(m_challenge);
    }

    virtual void refAuthenticationClient() override { ref(); }
    virtual void derefAuthenticationClient() override { deref(); }

    virtual void receivedCredential(const AuthenticationChallenge&, const Credential& credential) override
    {
        [[m_challenge sender] useCredential:credential.nsCredential() forAuthenticationChallenge:m_challenge.get()];
    }

    virtual void receivedRequestToContinueWithoutCredential(const AuthenticationChallenge&) override
    {
        [[m_challenge sender] continueWithoutCredentialForAuthenticationChallenge:m_challenge.get()];
    }

    virtual void receivedCancellation(const AuthenticationChallenge&) override
    {
        [[m_challenge sender] cancelAuthenticationChallenge:m_challenge.get()];
    }

    virtual void receivedRequestToPerformDefaultHandling(const AuthenticationChallenge&) override
    {
        // This selector is not implemented by all senders; check before calling.
        if ([[m_challenge sender] respondsToSelector:@selector(performDefaultHandlingForAuthenticationChallenge:)])
            [[m_challenge sender] performDefaultHandlingForAuthenticationChallenge:m_challenge.get()];
    }

    virtual void receivedChallengeRejection(const AuthenticationChallenge&) override
    {
        // This selector is not implemented by all senders; check before calling.
        if ([[m_challenge sender] respondsToSelector:@selector(rejectProtectionSpaceAndContinueWithChallenge:)])
            [[m_challenge sender] rejectProtectionSpaceAndContinueWithChallenge:m_challenge.get()];
    }

    // The challenge being serviced; retained for this client's lifetime.
    RetainPtr<NSURLAuthenticationChallenge> m_challenge;
};
#endif
475
// Registers this engine with the MediaPlayer factory, but only when the
// soft-linked AVFoundation framework is actually available at runtime.
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (isAvailable())
        registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player); },
            getSupportedTypes, supportsType, 0, 0, 0, supportsKeySystem);
}
482
// Constructs the AVFoundation-backed player. The observer/delegate helper
// objects are created eagerly here; the AVPlayer/AVPlayerItem themselves are
// created later, during loading.
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_weakPtrFactory(this)
#if PLATFORM(IOS)
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
#endif
    // KVO/notification observer for the AVPlayer, AVPlayerItem and layer.
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
    , m_videoOutputSemaphore(nullptr)
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
#endif
    , m_currentTextTrack(0)
    , m_cachedRate(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_shouldBufferData(true)
    , m_cachedIsReadyForDisplay(false)
    , m_haveBeenAskedToCreateLayer(false)
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    // Let encrypted-media code map the MediaPlayer back to this object.
    playerToPrivateMap().set(player, this);
#endif
}
519
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    playerToPrivateMap().remove(player());
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Detach the loader delegate first so no resource-loading callback can
    // re-enter this (now dying) object.
    [m_loaderDelegate.get() setCallback:0];
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];

    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    [m_videoOutputDelegate setCallback:0];
    [m_videoOutput setDelegate:nil queue:0];
    // Balance the semaphore's creation (presumably dispatch_semaphore_create
    // elsewhere in this file — constructed as nullptr above).
    if (m_videoOutputSemaphore)
        dispatch_release(m_videoOutputSemaphore);
#endif

    if (m_videoLayer)
        destroyVideoLayer();

    // Tears down the remaining AVFoundation objects and KVO registrations.
    cancelLoad();
}
544
// Aborts any in-progress load and tears down all AVFoundation objects, KVO
// registrations and cached state, returning the player to an unloaded state.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::cancelLoad(%p)", this);
    tearDownVideoRendering();

    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Detach the legible (caption) output from the item before releasing it.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    // Remove every KVO registration made on the item before dropping it;
    // removing while still registered is required by KVO.
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        
        m_avPlayerItem = nil;
    }
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;

        for (NSString *keyName in playerKVOProperties())
            [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = MediaTime::zeroTime();

    // Each cached track was observed for "enabled"; unregister before clearing.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    if (m_provider) {
        m_provider->setPlayerItem(nullptr);
        m_provider->setAudioTrack(nullptr);
    }
#endif

    setIgnoreLoadStateChanges(false);
}
608
// True once createVideoLayer() has been asked to set up layer-based rendering.
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_haveBeenAskedToCreateLayer;
}
613
614 bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
615 {
616 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
617     if (m_videoOutput)
618         return true;
619 #endif
620     return m_imageGenerator;
621 }
622
// Creates whichever context renderer this configuration supports: the
// AVPlayerItemVideoOutput path when available, otherwise an image generator.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
631
// Lazily creates the AVAssetImageGenerator used to paint frames when no video
// output object is in use. No-op until an asset exists or if already created.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p)", this);

    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    // Zero tolerances request frames at the exact requested time.
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
}
648
// Destroys all context-rendering machinery (video outputs where supported,
// plus the image generator fallback).
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
    destroyOpenGLVideoOutput();
#endif
    destroyImageGenerator();
}
657
658 void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
659 {
660     if (!m_imageGenerator)
661         return;
662
663     LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator(%p) - destroying  %p", this, m_imageGenerator.get());
664
665     m_imageGenerator = 0;
666 }
667
// Requests layer-based rendering. The actual layer creation is deferred to
// the main thread; m_haveBeenAskedToCreateLayer guards against doing it twice.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
        return;

    auto weakThis = createWeakPtr();
    callOnMainThread([this, weakThis] {
        // This object may have been destroyed before the task ran.
        if (!weakThis)
            return;

        // Re-check: state may have changed before this main-thread task ran.
        if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
            return;
        m_haveBeenAskedToCreateLayer = true;

        if (!m_videoLayer)
            createAVPlayerLayer();

#if USE(VIDEOTOOLBOX)
        if (!m_videoOutput)
            createVideoOutput();
#endif

        player()->client().mediaPlayerRenderingModeChanged(player());
    });
}
693
// Creates the AVPlayerLayer, attaches it to the AVPlayer, and (on iOS) wraps
// it in a WebVideoContainerLayer or inserts it into the fullscreen layer.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
{
    if (!m_avPlayer)
        return;

    m_videoLayer = adoptNS([allocAVPlayerLayerInstance() init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];
    [m_videoLayer setBackgroundColor:cachedCGColor(Color::black)];
#ifndef NDEBUG
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
    // Observe readyForDisplay so we learn when the first frame can be shown.
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
    IntSize defaultSize = snappedIntRect(player()->client().mediaPlayerContentBoxRect()).size();
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoLayer(%p) - returning %p", this, m_videoLayer.get());

#if PLATFORM(IOS)
    [m_videoLayer web_disableAllActions];
    m_videoInlineLayer = adoptNS([[WebVideoContainerLayer alloc] init]);
#ifndef NDEBUG
    [m_videoInlineLayer setName:@"WebVideoContainerLayer"];
#endif
    [m_videoInlineLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
    // Parent the player layer under the fullscreen layer when one is active,
    // otherwise under the inline container (which keeps it sized to itself).
    if (m_videoFullscreenLayer) {
        [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
        [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
    } else {
        [m_videoInlineLayer insertSublayer:m_videoLayer.get() atIndex:0];
        [m_videoLayer setFrame:m_videoInlineLayer.get().bounds];
    }
    // setPIPModeEnabled: is not present on all OS versions; check first.
    if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(player()->fullscreenMode() & MediaPlayer::VideoFullscreenModePictureInPicture)];
#else
    [m_videoLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
#endif
}
731
// Tears down the AVPlayerLayer, unregistering the readyForDisplay observer
// and detaching the layer from the player before releasing it.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer(%p) - destroying %p", this, m_videoLayer.get());

    // Remove the KVO observer added in createAVPlayerLayer() before releasing.
    [m_videoLayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer.get() setPlayer:nil];

#if PLATFORM(IOS)
    if (m_videoFullscreenLayer)
        [m_videoLayer removeFromSuperlayer];
    m_videoInlineLayer = nil;
#endif

    m_videoLayer = nil;
}
750
// Returns the media's wall-clock start date as milliseconds since the epoch
// (wrapped in a MediaTime), or an invalid time when the media carries no
// date metadata.
MediaTime MediaPlayerPrivateAVFoundationObjC::getStartDate() const
{
    // Date changes as the track's playback position changes. Must subtract currentTime (offset in seconds) from date offset to get date beginning
    double date = [[m_avPlayerItem currentDate] timeIntervalSince1970] * 1000;

    // No live streams were made during the epoch (1970). AVFoundation returns 0 if the media file doesn't have a start date
    if (!date)
        return MediaTime::invalidTime();

    double currentTime = CMTimeGetSeconds([m_avPlayerItem currentTime]) * 1000;

    // Rounding due to second offset error when subtracting.
    return MediaTime::createWithDouble(round(date - currentTime));
}
765
// Reports whether a video frame is ready to be shown. When rendering through
// an AVPlayerLayer, readiness comes from the cached readyForDisplay KVO value;
// otherwise it reflects whether we have painted a frame ourselves.
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    bool renderingToLayer = currentRenderingMode() == MediaRenderingToLayer;
    return renderingToLayer ? m_cachedIsReadyForDisplay : m_videoFrameHasDrawn;
}
773
774 #if ENABLE(AVF_CAPTIONS)
// Maps a WebCore text-track kind to the AVFoundation media-characteristic
// array used for out-of-band track descriptions.
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    // FIXME: Match these to correct types:
    switch (kind) {
    case PlatformTextTrack::Description:
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility ];
    case PlatformTextTrack::Forced:
        return @[ AVMediaCharacteristicContainsOnlyForcedSubtitles ];
    case PlatformTextTrack::Caption:
    case PlatformTextTrack::Subtitle:
    default:
        // Captions, subtitles, and every other kind currently share the same
        // characteristic (see FIXME above).
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];
    }
}
792     
// Entry point invoked when an out-of-band text track's mode changes;
// simply forwards to trackModeChanged().
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
797     
// Pushes the page-requested mode of each out-of-band track source onto the
// matching platform text track, correlating the two by the unique identifier
// stored in the AVMediaSelectionOption's outOfBandIdentifier.
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    
    for (auto& textTrack : m_textTracks) {
        // Only out-of-band tracks are driven by the page's track sources.
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;
        
        RetainPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> currentOption = trackPrivate->mediaSelectionOption();
        
        for (auto& track : outOfBandTrackSources) {
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();
            
            // Match the selection option to its source via the unique ID.
            if (![[currentOption.get() outOfBandIdentifier] isEqual: reinterpret_cast<const NSString*>(uniqueID.get())])
                continue;
            
            // Translate the page-level mode to the platform track mode;
            // unknown modes fall back to Hidden.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;
            
            textTrack->setMode(mode);
            break;
        }
    }
}
828 #endif
829
830
// Converts a URL string to an NSURL, canonicalized through the URL loading
// system (NSURLProtocol). Falls back to the plain conversion when the string
// is empty or no request/canonical form can be produced.
static NSURL *canonicalURL(const String& url)
{
    NSURL *result = URL(ParsedURLString, url);
    if (url.isEmpty())
        return result;

    RetainPtr<NSURLRequest> request = adoptNS([[NSURLRequest alloc] initWithURL:result]);
    if (!request)
        return result;

    if (NSURLRequest *canonicalRequest = [NSURLProtocol canonicalRequestForRequest:request.get()])
        return [canonicalRequest URL];

    return result;
}
847
848 #if PLATFORM(IOS)
// Converts a WebCore Cookie into an NSHTTPCookie via the property-dictionary
// factory. Cookie::expires is in milliseconds; NSDate wants seconds.
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    RetainPtr<NSMutableDictionary> properties = adoptNS([@{
        NSHTTPCookieName: cookie.name,
        NSHTTPCookieValue: cookie.value,
        NSHTTPCookieDomain: cookie.domain,
        NSHTTPCookiePath: cookie.path,
        NSHTTPCookieExpires: [NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)],
    } mutableCopy]);

    // Secure and Discard are flag-style keys: present only when set.
    if (cookie.secure)
        [properties.get() setObject:@YES forKey:NSHTTPCookieSecure];
    if (cookie.session)
        [properties.get() setObject:@YES forKey:NSHTTPCookieDiscard];

    return [NSHTTPCookie cookieWithProperties:properties.get()];
}
866 #endif
867
// Creates the AVURLAsset for the given URL, assembling the options dictionary
// (reference restrictions, HTTP headers, out-of-band tracks, cookies, and
// platform-specific keys) before allocation. No-op if an asset already exists.
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
{
    if (m_avAsset)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(%p) - url = %s", this, url.utf8().data());

    // Suppress observer callbacks while we mutate player state.
    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);    

    // Forbid cross-origin local<->remote references within the asset.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    // Propagate the page's Referer and User-Agent to media loads.
    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject: [NSNumber numberWithBool: TRUE] forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

#if PLATFORM(IOS)
    // FIXME: rdar://problem/20354688
    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty() && AVURLAssetClientBundleIdentifierKey)
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];
#endif

#if ENABLE(AVF_CAPTIONS)
    // Describe each out-of-band text track so AVFoundation can offer it as a
    // media selection option.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: reinterpret_cast<const NSString*>(label.get()),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: reinterpret_cast<const NSString*>(language.get()),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: reinterpret_cast<const NSString*>(uniqueID.get()),
                AVOutOfBandAlternateTrackSourceKey: reinterpret_cast<const NSString*>(url.get()),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS)
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS)
    // Hand the document's cookies for this URL to the media stack.
    Vector<Cookie> cookies;
    if (player()->getRawCookies(URL(ParsedURLString, url), cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([allocAVURLAssetInstance() initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Route resource loading through our delegate (for key/media requests).
    [[m_avAsset.get() resourceLoader] setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];
#endif

    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
955
// Installs the given item on the AVPlayer. The replacement must happen on the
// main thread; when called off it, the work is dispatched asynchronously with
// strong references keeping both objects alive until it runs.
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItemType *item)
{
    if (!m_avPlayer)
        return;

    if (pthread_main_np()) {
        [m_avPlayer replaceCurrentItemWithPlayerItem:item];
        return;
    }

    RetainPtr<AVPlayerType> strongPlayer = m_avPlayer.get();
    RetainPtr<AVPlayerItemType> strongItem = item;
    dispatch_async(dispatch_get_main_queue(), [strongPlayer, strongItem] {
        [strongPlayer replaceCurrentItemWithPlayerItem:strongItem.get()];
    });
}
972
// Allocates the AVPlayer, registers KVO observers for every cached player
// property, applies feature-dependent configuration, and attaches any
// previously created layer and item. No-op if the player already exists.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayer(%p)", this);

    // Coalesce the KVO storm triggered by the setup below.
    setDelayCallbacks(true);

    m_avPlayer = adoptNS([allocAVPlayerInstance() init]);
    for (NSString *keyName in playerKVOProperties())
        [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // WebCore drives media selection itself; don't let AVFoundation auto-pick.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET) && !PLATFORM(IOS)
    // Re-apply a playback target chosen before the player existed.
    if (m_shouldPlayToPlaybackTarget)
        setShouldPlayToPlaybackTarget(true);
#endif

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
1008
// Creates the AVPlayerItem for the current asset, wiring up the end-of-play
// notification, per-property KVO, pitch algorithm, legible (caption) output,
// and the Web Audio provider. No-op if the item already exists.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem(%p)", this);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([allocAVPlayerItemInstance() initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // Prior notifications let us detect about-to-change state as well.
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Deliver WebVTT cues to our observer instead of letting AVFoundation
    // render them; cues arrive a couple of seconds ahead of their time.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([allocAVPlayerItemLegibleOutputInstance() initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio source provider pointed at the new item/track.
    if (m_provider) {
        m_provider->setPlayerItem(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }
#endif

    setDelayCallbacks(false);
}
1054
// Kicks off asynchronous loading of the asset's "playable" key, once per
// asset, and posts AssetPlayabilityKnown on the main thread when it resolves.
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::checkPlayability(%p)", this);
    auto weakThis = createWeakPtr();

    // The completion handler may run on an arbitrary queue; hop to the main
    // thread and guard against this object having been destroyed.
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"playable"] completionHandler:^{
        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
1071
// Starts asynchronous loading of the asset's metadata keys and, once the
// tracks are available, each track's metadata keys. A dispatch group tracks
// all outstanding loads; when the group drains, metadataLoaded is delivered
// to the observer on the main thread.
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);

    OSObjectPtr<dispatch_group_t> metadataLoadingGroup = adoptOSObject(dispatch_group_create());
    // Enter for the asset-level load; left after track loads are scheduled.
    dispatch_group_enter(metadataLoadingGroup.get());
    auto weakThis = createWeakPtr();
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                // One group entry per track; each leaves when its keys load.
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup.get());
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup.get());
                    }];
                }
            }
            dispatch_group_leave(metadataLoadingGroup.get());
        });
    }];

    // Fires once every enter above has been balanced by a leave.
    dispatch_group_notify(metadataLoadingGroup.get(), dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });
    });
}
1101
// Maps the cached AVPlayerItem state onto the cross-platform ItemStatus enum.
// The buffering flags are consulted in priority order: likely-to-keep-up,
// buffer-full, then buffer-empty.
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    switch (m_cachedItemStatus) {
    case AVPlayerItemStatusUnknown:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    case AVPlayerItemStatusFailed:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    default:
        break;
    }

    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1120
// Exposes the underlying AVPlayer to callers via the cross-platform
// PlatformMedia wrapper.
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformMedia(%p)", this);
    PlatformMedia media;
    media.type = PlatformMedia::AVFoundationMediaPlayerType;
    media.media.avfMediaPlayer = m_avPlayer.get();
    return media;
}
1129
// Returns the layer the compositor should host, or null until layer creation
// has been requested. On iOS the player layer lives inside a container layer,
// which is what gets exposed.
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    if (!m_haveBeenAskedToCreateLayer)
        return nullptr;

#if PLATFORM(IOS)
    return m_videoInlineLayer.get();
#else
    return m_videoLayer.get();
#endif
}
1138
1139 #if PLATFORM(IOS)
// Reparents the video layer into (or out of) the fullscreen hosting layer.
// When the layer moves between CAContexts, a fence port synchronizes the two
// contexts so the transition commits atomically on screen.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer)
{
    if (m_videoFullscreenLayer == videoFullscreenLayer)
        return;

    m_videoFullscreenLayer = videoFullscreenLayer;

    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    
    CAContext *oldContext = [m_videoLayer context];
    CAContext *newContext = nil;
    
    if (m_videoFullscreenLayer && m_videoLayer) {
        // Entering fullscreen: host the video layer in the fullscreen layer.
        [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
        [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
        newContext = [m_videoFullscreenLayer context];
    } else if (m_videoInlineLayer && m_videoLayer) {
        // Leaving fullscreen: return the video layer to the inline container.
        [m_videoLayer setFrame:[m_videoInlineLayer bounds]];
        [m_videoLayer removeFromSuperlayer];
        [m_videoInlineLayer insertSublayer:m_videoLayer.get() atIndex:0];
        newContext = [m_videoInlineLayer context];
    } else if (m_videoLayer)
        [m_videoLayer removeFromSuperlayer];

    // Fence the two contexts so both sides commit the reparenting together.
    if (oldContext && newContext && oldContext != newContext) {
        mach_port_t fencePort = [oldContext createFencePort];
        [newContext setFencePort:fencePort];
        mach_port_deallocate(mach_task_self(), fencePort);
    }
    [CATransaction commit];

    // Captions follow the video into the fullscreen layer.
    if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
    }

    updateDisableExternalPlayback();
}
1179
// Records the fullscreen frame and, while fullscreen is active, resizes the
// video layer and text-track representation to match.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenFrame = frame;
    if (!m_videoFullscreenLayer)
        return;

    if (m_videoLayer)
        [m_videoLayer setFrame:CGRectMake(0, 0, frame.width(), frame.height())];

    syncTextTrackBounds();
}
1191
// Applies the requested scaling mode to the AVPlayerLayer, skipping the
// (relayout-triggering) setter when the value is unchanged.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;
    if (!m_videoLayer)
        return;

    NSString *videoGravity;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        videoGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        videoGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    }

    if ([m_videoLayer videoGravity] == videoGravity)
        return;

    [m_videoLayer setVideoGravity:videoGravity];
    syncTextTrackBounds();
}
1214
// Forwards the picture-in-picture bit of the fullscreen mode to the player
// layer; setPIPModeEnabled: is probed since it is not available everywhere.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenMode(MediaPlayer::VideoFullscreenMode mode)
{
    if (m_videoLayer && [m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(mode & MediaPlayer::VideoFullscreenModePictureInPicture)];
}
1220
// Returns the most recently cached timed-metadata payload, or nil when none
// has been received.
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    return m_currentMetaData ? m_currentMetaData.get() : nil;
}
1227
// Returns the item's access log serialized as a string, or the empty string
// when no item exists. If the item has no access log, the messages below all
// target nil and a null String results.
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    if (!m_avPlayerItem)
        return emptyString();
    
    AVPlayerItemAccessLog *log = [m_avPlayerItem.get() accessLog];
    RetainPtr<NSString> logString = adoptNS([[NSString alloc] initWithData:[log extendedLogData] encoding:[log extendedLogDataStringEncoding]]);

    return logString.get();
}
1238
// Returns the item's error log serialized as a string, or the empty string
// when no item exists. Mirrors accessLog() above.
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *log = [m_avPlayerItem.get() errorLog];
    RetainPtr<NSString> logString = adoptNS([[NSString alloc] initWithData:[log extendedLogData] encoding:[log extendedLogDataStringEncoding]]);

    return logString.get();
}
1249 #endif
1250
// Shows or hides the video layer inside an implicit-animation-free
// transaction so the visibility change is not animated.
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    if (m_videoLayer)
        [m_videoLayer.get() setHidden:!isVisible];
    [CATransaction commit];
}
1259     
// Starts playback by setting the player's rate to the requested rate.
// Callbacks are delayed so the resulting KVO notifications coalesce.
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPlay(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    double rate = requestedRate();
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1271
// Pauses playback by zeroing the player's rate, with callbacks delayed so the
// resulting KVO notifications coalesce.
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPause(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = 0;
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
1283
// Returns the media duration, preferring the player item (when ready) over
// the asset. Indefinite durations (live streams) map to positive infinity;
// anything non-numeric maps to an invalid time.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will fetch the
    // answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();
    
    CMTime cmDuration;
    
    // Check the AVItem if we have one and it has loaded duration, some assets never report duration.
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        cmDuration = [m_avPlayerItem.get() duration];
    else
        cmDuration = [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return toMediaTime(cmDuration);

    if (CMTIME_IS_INDEFINITE(cmDuration))
        return MediaTime::positiveInfiniteTime();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %s", this, toString(MediaTime::invalidTime()).utf8().data());
    return MediaTime::invalidTime();
}
1308
// Returns the current playback position, clamped to zero for non-numeric or
// negative item times; zero when metadata or the item is unavailable.
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime itemTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(itemTime))
        return MediaTime::zeroTime();

    return std::max(toMediaTime(itemTime), MediaTime::zeroTime());
}
1320
// Seeks the player item to `time` within the given tolerances. Partial
// metadata cues are flushed first, and completion is reported back on the
// main thread via seekCompleted() once AVFoundation finishes the seek.
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = toCMTime(time);
    CMTime cmBefore = toCMTime(negativeTolerance);
    CMTime cmAfter = toCMTime(positiveTolerance);

    auto weakThis = createWeakPtr();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::seekToTime(%p) - calling seekToTime", this);

    // The completion handler may fire on any queue and possibly after this
    // object is gone; hop to the main thread and check the weak pointer.
    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1349
// Applies the page-requested volume to the AVPlayer. On iOS the volume is
// deliberately not set here (the parameter is ignored).
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS)
    UNUSED_PARAM(volume);
    return;
#else
    if (!metaDataAvailable())
        return;

    [m_avPlayer.get() setVolume:volume];
#endif
}
1362
// NOTE(review): this override only logs; no caption state is changed here.
// Presumably caption visibility is driven through the text-track machinery
// elsewhere — confirm before adding behavior.
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - set to %s", this, boolString(closedCaptionsVisible));
}
1372
// Caches and applies a new playback rate, delaying callbacks so the KVO
// notifications from the rate change coalesce.
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    setDelayCallbacks(true);
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1380
// Returns the cached playback rate, or 0 until metadata is available.
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    return metaDataAvailable() ? m_cachedRate : 0;
}
1388
// Selects the audio time-pitch algorithm: Spectral keeps pitch constant
// across rate changes, Varispeed lets it shift with the rate.
void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
{
    if (!m_avPlayerItem)
        return;

    NSString *algorithm = preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed;
    [m_avPlayerItem setAudioTimePitchAlgorithm:algorithm];
}
1394
// Converts the cached loaded-time ranges (boxed CMTimeRanges) into a
// PlatformTimeRanges object, skipping invalid or empty ranges.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto bufferedRanges = std::make_unique<PlatformTimeRanges>();

    if (!m_avPlayerItem)
        return bufferedRanges;

    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        bufferedRanges->add(toMediaTime(range.start), toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return bufferedRanges;
}
1409
// Returns the earliest seekable position: the smallest start time over all
// valid, non-empty cached seekable ranges, or zero when there are none.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    MediaTime earliest = MediaTime::positiveInfiniteTime();
    bool foundValidRange = false;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundValidRange = true;
        earliest = std::min(earliest, toMediaTime(range.start));
    }
    return foundValidRange ? earliest : MediaTime::zeroTime();
}
1429
// Returns the latest seekable position: the largest end time over all valid,
// non-empty seekable ranges. The cache is refreshed from the player item
// directly if KVO has not populated it yet.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    MediaTime latest;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latest = std::max(latest, toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return latest;
}
1447
// Returns the furthest buffered position: the largest end time over all
// valid, non-empty cached loaded ranges, or zero when nothing is cached.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    MediaTime latest;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latest = std::max(latest, toMediaTime(CMTimeRangeGetEnd(range)));
    }

    return latest;
}
1466
// Returns the total sample-data size of all tracks, memoized in m_cachedTotalBytes.
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    // Return the memoized total when it has already been computed.
    if (m_cachedTotalBytes)
        return m_cachedTotalBytes;

    // Sum the sample data length of every cached track's underlying asset track.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        m_cachedTotalBytes += [[track assetTrack] totalSampleDataLength];

    return m_cachedTotalBytes;
}
1480
// Stores the AVAsset (kept as an opaque id) backing this player.
void MediaPlayerPrivateAVFoundationObjC::setAsset(RetainPtr<id> asset)
{
    m_avAsset = asset;
}
1485
// Derives an aggregate asset status from the load state of every metadata key
// we asked the asset to load asynchronously.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];
#if !LOG_DISABLED
        if (error)
            LOG(Media, "MediaPlayerPrivateAVFoundation::assetStatus - statusOfValueForKey failed for %s, error = %s", [keyName UTF8String], [[error localizedDescription] UTF8String]);
#endif

        // At least one key has not finished loading yet.
        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading;

        // At least one key could not be loaded.
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed;

        // Loading of at least one key was cancelled.
        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled;
    }

    // Every key loaded; distinguish a playable asset from a merely loaded one.
    return [[m_avAsset.get() valueForKey:@"playable"] boolValue] ? MediaPlayerAVAssetStatusPlayable : MediaPlayerAVAssetStatusLoaded;
}
1514
// Returns the error code reported for the asset's "playable" key, or 0 when
// there is no asset or no error.
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    if (!m_avAsset)
        return 0;

    NSError *error = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&error];
    // Messaging nil would also return 0; be explicit about the no-error case.
    return error ? [error code] : 0;
}
1524
// Draws the current video frame into |context| at |rect|, preferring the
// AVPlayerItemVideoOutput path when it already has a frame and falling back
// to the image-generator path otherwise.
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    // Delay callbacks so AVFoundation notifications cannot reenter us mid-paint.
    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    m_videoFrameHasDrawn = true;
}
1545
// Software-paint entry point. Best effort: paints only when metadata is
// available, painting is enabled, we are not already rendering to a layer,
// and a context renderer (image generator or video output) already exists.
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    bool canPaint = metaDataAvailable()
        && !context.paintingDisabled()
        // We can ignore the request if we are already rendering to a layer.
        && currentRenderingMode() != MediaRenderingToLayer
        // Only paint if we already have an image generator or video output available.
        && hasContextRenderer();

    if (canPaint)
        paintCurrentFrameInContext(context, rect);
}
1561
// Paints the frame for the current time using the AVAssetImageGenerator path.
// CGImages draw bottom-up, so the context is flipped before drawing.
// Fix: removed the dead store `image = 0;` — the RetainPtr goes out of scope
// immediately afterwards, so the manual clear was redundant.
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext& context, const FloatRect& rect)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(%p)", this);

    RetainPtr<CGImageRef> image = createImageForTimeInRect(currentTime(), rect);
    if (!image)
        return;

    GraphicsContextStateSaver stateSaver(context);
    // Flip the coordinate system so the CGImage draws right side up.
    context.translate(rect.x(), rect.y() + rect.height());
    context.scale(FloatSize(1.0f, -1.0f));
    context.setImageInterpolationQuality(InterpolationLow);
    IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context.platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
}
1577
// Returns the lowercased set of MIME types AVFoundation reports as playable.
// Computed once and cached in a NeverDestroyed singleton.
static const HashSet<String>& avfMIMETypes()
{
    static NeverDestroyed<HashSet<String>> cachedTypes = [] {
        HashSet<String> mimeTypes;
        for (NSString *mimeType in [AVURLAsset audiovisualMIMETypes])
            mimeTypes.add([mimeType lowercaseString]);
        return mimeTypes;
    }();

    return cachedTypes;
}
1593
// Creates a CGImage for media time |time|, sized to fit |rect|, using the
// asset image generator. The raw image is copied into the sRGB color space
// before being returned.
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    // 600 is a timescale that is an integer multiple of common video frame rates.
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), sRGBColorSpaceRef()));

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return image;
}
1615
// Reports every MIME type the AVFoundation media engine can play.
// Fix: the function was missing its closing brace, leaving its body open
// into the following #if block.
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String>& supportedTypes)
{
    supportedTypes = avfMIMETypes();
}

1621 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// True for the key systems this engine handles: FairPlay Streaming (both
// spellings) and Clear Key.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringASCIICase(keySystem, "com.apple.fps")
        || equalIgnoringASCIICase(keySystem, "com.apple.fps.1_0")
        || equalIgnoringASCIICase(keySystem, "org.w3c.clearkey");
}
1628 #endif
1629
// Answers MediaPlayer's canPlayType() question for this engine: returns
// IsNotSupported, MayBeSupported, or IsSupported for the given container,
// codecs and (when EME is enabled) key system.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(ENCRYPTED_MEDIA)
    // From: <http://dvcs.w3.org/hg/html-media/raw-file/eme-v0.1b/encrypted-media/encrypted-media.html#dom-canplaytype>
    // In addition to the steps in the current specification, this method must run the following steps:

    // 1. Check whether the Key System is supported with the specified container and codec type(s) by following the steps for the first matching condition from the following list:
    //    If keySystem is null, continue to the next step.
    if (!parameters.keySystem.isNull() && !parameters.keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        if (equalIgnoringASCIICase(parameters.keySystem, "org.w3c.clearkey") && !parameters.type.isEmpty() && !equalIgnoringASCIICase(parameters.type, "application/x-mpegurl"))
            return MediaPlayer::IsNotSupported;

        // If keySystem contains an unrecognized or unsupported Key System, return the empty string
        if (!keySystemIsSupported(parameters.keySystem))
            return MediaPlayer::IsNotSupported;

        // If the Key System specified by keySystem does not support decrypting the container and/or codec specified in the rest of the type string.
        // (AVFoundation does not provide an API which would allow us to determine this, so this is a no-op)
    }

    // 2. Return "maybe" or "probably" as appropriate per the existing specification of canPlayType().
#endif

#if ENABLE(MEDIA_SOURCE)
    // MSE and MediaStream content is handled by dedicated engines, not this one.
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif
#if ENABLE(MEDIA_STREAM)
    if (parameters.isMediaStream)
        return MediaPlayer::IsNotSupported;
#endif
    if (isUnsupportedMIMEType(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The type must appear either in the static list or in AVFoundation's own list.
    if (!staticMIMETypeList().contains(parameters.type) && !avfMIMETypes().contains(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    // With codecs present, let AVFoundation judge the full extended MIME type.
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type, (NSString *)parameters.codecs];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1676
// Returns whether this engine supports the given EME key system combined
// with the given MIME type.
// Fix: the HLS check previously returned MediaPlayer::IsNotSupported (an
// enum value) from a bool function; it only behaved correctly because
// IsNotSupported happens to be 0. Return false explicitly.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (!keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        if (equalIgnoringASCIICase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringASCIICase(mimeType, "application/x-mpegurl"))
            return false;

        if (!keySystemIsSupported(keySystem))
            return false;

        if (!mimeType.isEmpty() && isUnsupportedMIMEType(mimeType))
            return false;

        // A non-empty type must be known to the static list or to AVFoundation.
        if (!mimeType.isEmpty() && !staticMIMETypeList().contains(mimeType) && !avfMIMETypes().contains(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1702
1703 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1704 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Satisfies an AVAssetResourceLoadingRequest directly from in-memory key
// data: fills in the content information, responds with the requested byte
// range of |keyData|, and marks the request finished.
static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
{
    if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
        [infoRequest setContentLength:keyData->byteLength()];
        [infoRequest setByteRangeAccessSupported:YES];
    }

    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
        long long start = [dataRequest currentOffset];
        // Clamp the end of the requested range to the size of the key data.
        long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);

        // A negative range, or one starting past the end of the data, cannot be satisfied.
        if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
            [request finishLoadingWithError:nil];
            return;
        }

        ASSERT(start <= std::numeric_limits<int>::max());
        ASSERT(end <= std::numeric_limits<int>::max());
        RefPtr<ArrayBuffer> requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
        RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
        [dataRequest respondWithData:nsData.get()];
    }

    [request finishLoading];
}
1730 #endif
1731
// AVAssetResourceLoaderDelegate hook: decides whether WebCore will service
// a resource loading request. Key requests ("skd" / "clearkey" schemes) are
// routed through the EME keyNeeded machinery; everything else is handed to
// a WebCoreAVFResourceLoader. Returns true when we will handle the request
// asynchronously.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (scheme == "skd") {
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
        // Little-endian size prefix.
        initDataView->set<uint32_t>(0, keyURISize, true);

        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, 4, keyURI.length());
        // NOTE(review): "/ sizeof(unsigned char)" is 1, so this copies
        // keyURI.length() UTF-16 units — presumably the intent, but the
        // divisor looks like a leftover; confirm.
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

#if ENABLE(ENCRYPTED_MEDIA)
        if (!player()->keyNeeded("com.apple.lskd", emptyString(), static_cast<const unsigned char*>(initDataBuffer->data()), initDataBuffer->byteLength()))
#elif ENABLE(ENCRYPTED_MEDIA_V2)
        RefPtr<Uint8Array> initData = Uint8Array::create(initDataBuffer, 0, initDataBuffer->byteLength());
        if (!player()->keyNeeded(initData.get()))
#endif
            return false;

        // Park the request until the key arrives.
        m_keyURIToRequestMap.set(keyURI, avRequest);
        return true;
#if ENABLE(ENCRYPTED_MEDIA_V2)
    } else if (scheme == "clearkey") {
        String keyID = [[[avRequest request] URL] resourceSpecifier];
        StringView keyIDView(keyID);
        CString utf8EncodedKeyId = UTF8Encoding().encode(keyIDView, URLEncodedEntitiesForUnencodables);

        RefPtr<Uint8Array> initData = Uint8Array::create(utf8EncodedKeyId.length());
        initData->setRange((JSC::Uint8Adaptor::Type*)utf8EncodedKeyId.data(), utf8EncodedKeyId.length(), 0);

        // If the key is already cached, answer the request immediately.
        auto keyData = player()->cachedKeyForKeyId(keyID);
        if (keyData) {
            fulfillRequestWithKeyData(avRequest, keyData.get());
            return false;
        }

        if (!player()->keyNeeded(initData.get()))
            return false;

        m_keyURIToRequestMap.set(keyID, avRequest);
        return true;
#endif
    }
#endif

    // Non-key resources are loaded through WebCore's loader.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1788
// AVAssetResourceLoaderDelegate authentication hook: wraps the NSURL
// challenge in a WebCore AuthenticationChallenge (via CFNetwork when that
// stack is in use) and defers the decision to the MediaPlayer client.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForResponseToAuthenticationChallenge(NSURLAuthenticationChallenge* nsChallenge)
{
#if USE(CFNETWORK)
    RefPtr<WebCoreNSURLAuthenticationChallengeClient> client = WebCoreNSURLAuthenticationChallengeClient::create(nsChallenge);
    RetainPtr<CFURLAuthChallengeRef> cfChallenge = adoptCF([nsChallenge _createCFAuthChallenge]);
    AuthenticationChallenge challenge(cfChallenge.get(), client.get());
#else
    AuthenticationChallenge challenge(nsChallenge);
#endif

    return player()->shouldWaitForResponseToAuthenticationChallenge(challenge);
}
1801
// Called when AVFoundation cancels an in-flight resource loading request;
// stops the WebCore loader servicing it, if any.
// Fix: removed the unused local `scheme`, which was computed and never read.
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest))
        resourceLoader->stopLoading();
}
1811
// Called when AVFoundation is done with a resource loading request; drops
// our map entry, releasing the associated WebCoreAVFResourceLoader.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
1816 #endif
1817
// This engine is usable only when both AVFoundation and CoreMedia can be loaded.
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    if (!AVFoundationLibrary())
        return false;
    return isCoreMediaFrameworkAvailable();
}
1822
// Maps a time value onto the media timeline. Both branches currently return
// the input unchanged; see the FIXME below.
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1831
// How long (in seconds) a cached media time may be reused before re-querying
// AVFoundation. Disabled (0) on iOS and on OS X 10.10 and later.
double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 1010
    return 0;
#else
    return 5;
#endif
}
1840
// Applies the appropriate video gravity (preserve aspect ratio or stretch)
// to the video layer inside a CATransaction with implicit animations disabled.
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

#if PLATFORM(IOS)
    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayer)
        return;
#endif

    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    NSString* gravity = shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize;
    [m_videoLayer.get() setVideoGravity:gravity];
    [CATransaction commit];
}
1859
// Returns the first track in |tracks| whose isEnabled is YES, or nil if none is.
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    for (AVAssetTrack* track in tracks) {
        if ([track isEnabled])
            return track;
    }
    return nil;
}
1869
// Reconciles WebCore's view of the media's tracks with AVFoundation's after
// the track collection changes: updates hasVideo/hasAudio/captions state,
// the track lists, the presentation size, and fires characteristicsChanged()
// when the primary audio language changed.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the previous primary audio language so we can detect a change below.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    // Batch characteristic-changed notifications until the end of this method.
    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif

        // Apply the track's preferred transform so rotated media reports its display size.
        presentationSizeDidChange(firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize());
    } else {
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // Media selection groups can supply audio/video even when no plain track does.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
#endif
#endif

        // Always say we have video if the AVPlayerLayer is ready for display to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Fall back to legacy closed-caption tracks when selection options produced none.
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    LOG(Media, "MediaPlayerPrivateAVFoundation:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s",
        this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio provider pointed at the currently-enabled audible track.
    if (m_provider)
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
#endif

    setDelayCharacteristicsChangedNotification(false);
}
1975
1976 #if ENABLE(VIDEO_TRACK)
// Diffs the AVPlayerItemTracks of type |trackType| in |tracks| against the
// existing |oldItems|, creating items via |itemFactory| for newly-appearing
// tracks and notifying |player| of removals and additions. |oldItems| is
// updated in place to the surviving plus newly-created items.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Collect the current tracks of the requested media type.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Partition the old items into removed vs. surviving.
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    // Wrap each newly-appearing track in a fresh item.
    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the player only after oldItems is in its final state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
2020
2021 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Overload for media selection groups: refreshes |group|'s options for the
// given |characteristics|, diffs the options without a persistent track
// against |oldItems|, creates items for newly-added options via
// |itemFactory|, and notifies |player| of removals and additions.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, const Vector<String>& characteristics, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions(characteristics);

    // Only add selection options which do not have an associated persistent track;
    // options backed by a track are handled by the AVPlayerItemTrack overload.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOptionType* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        // -track is optional API, hence the respondsToSelector: check.
        if (![avOption respondsToSelector:@selector(track)] || ![avOption performSelector:@selector(track)])
            newSelectionOptions.add(option);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Partition the old items into removed vs. surviving.
    for (auto& oldItem : oldItems) {
        if (oldItem->mediaSelectionOption() && removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);
    
    // Notify the player only after oldItems is in its final state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);
    
    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
2081 #endif
2082
// Rebuilds m_audioTracks from the cached AVPlayerItemTracks and, where media
// selection groups are available, from the audible selection group; then
// refreshes each audio track's cached properties.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    Vector<String> characteristics = player()->preferredAudioCharacteristics();
    // Lazily create the audible selection group wrapper.
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, characteristics);
    }

    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, characteristics, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
#endif

    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateAudioTracks(%p) - audio track count was %lu, is %lu", this, count, m_audioTracks.size());
#endif
}
2109
// Rebuilds m_videoTracks from the cached AVPlayerItemTracks and, where media
// selection groups are available, from the visual selection group; then
// refreshes each video track's cached properties.
// Fix: the reset loop iterated m_audioTracks (copy/paste from
// updateAudioTracks()); it must iterate m_videoTracks.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create the visual selection group wrapper.
    if (!m_visualGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, Vector<String>());
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, Vector<String>(), &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
#endif

    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateVideoTracks(%p) - video track count was %lu, is %lu", this, count, m_videoTracks.size());
#endif
}
2135
// On iOS a separate text track representation layer is needed while a
// fullscreen video layer exists; elsewhere it never is.
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
#if PLATFORM(IOS)
    return !!m_videoFullscreenLayer;
#else
    return false;
#endif
}
2144
// (iOS) Sizes the text track representation layer to match the video: the
// video layer's videoRect when one exists, otherwise the fullscreen frame.
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
#if PLATFORM(IOS)
    if (!m_videoFullscreenLayer || !m_textTrackRepresentationLayer)
        return;
    
    CGRect textFrame = m_videoLayer ? [m_videoLayer videoRect] : CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height());
    [m_textTrackRepresentationLayer setFrame:textFrame];
#endif
}
2155
// (iOS) Installs |representation|'s platform layer as a sublayer of the
// fullscreen video layer, replacing any previous representation layer.
// A no-op on other platforms.
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
#if PLATFORM(IOS)
    PlatformLayer* representationLayer = representation ? representation->platformLayer() : nil;
    // Same layer as before: just make sure its bounds are current.
    if (representationLayer == m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        return;
    }

    if (m_textTrackRepresentationLayer)
        [m_textTrackRepresentationLayer removeFromSuperlayer];

    m_textTrackRepresentationLayer = representationLayer;

    if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
    }

#else
    UNUSED_PARAM(representation);
#endif
}
2179 #endif // ENABLE(VIDEO_TRACK)
2180
2181 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
// Lazily creates the Web Audio source provider wired to the current player
// item and its first enabled audible track.
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    if (!m_provider) {
        m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }

    return m_provider.get();
}
2191 #endif
2192
// Propagates the cached presentation size as the natural size once an asset exists.
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (m_avAsset)
        setNaturalSize(m_cachedPresentationSize);
}
2200     
// Returns true only when the asset's resolved URL has the same scheme, host
// and port as the originally-requested URL (i.e. loading did not cross
// origins). Requires the "resolvedURL" key to have finished loading.
bool MediaPlayerPrivateAVFoundationObjC::hasSingleSecurityOrigin() const 
{
    if (!m_avAsset || [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] != AVKeyValueStatusLoaded)
        return false;
    
    Ref<SecurityOrigin> resolvedOrigin(SecurityOrigin::create(resolvedURL()));
    Ref<SecurityOrigin> requestedOrigin(SecurityOrigin::createFromString(assetURL()));
    return resolvedOrigin.get().isSameSchemeHostPort(&requestedOrigin.get());
}
2210
2211 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p)", this);

    // Requires a player item; a no-op when an output already exists.
    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // VideoToolbox converts pixel formats for us later, so accept the decoder's native format.
    NSDictionary* attributes = nil;
#else
    // Without VideoToolbox, ask the output for 32BGRA directly.
    NSDictionary* attributes = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#endif
    m_videoOutput = adoptNS([allocAVPlayerItemVideoOutputInstance() initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p) - returning %p", this, m_videoOutput.get());
}
2234
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    // Detach and release the AVPlayerItemVideoOutput, if any.
    if (!m_videoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput(%p) - destroying  %p", this, m_videoOutput.get());

    // Use nullptr rather than 0 to clear the RetainPtr, matching the rest of the file.
    m_videoOutput = nullptr;
}
2246
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::createPixelBuffer()
{
    // Pulls the pixel buffer for the current item time from the video output,
    // converting it to 32BGRA via VideoToolbox when that path is enabled.
    // Returns null when no new frame is available.
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return 0;

    RetainPtr<CVPixelBufferRef> buffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
    if (!buffer)
        return 0;

#if USE(VIDEOTOOLBOX)
    // Create a VTPixelTransferSession, if necessary, as we cannot guarantee timely delivery of ARGB pixels.
    if (!m_pixelTransferSession) {
        VTPixelTransferSessionRef session = 0;
        VTPixelTransferSessionCreate(kCFAllocatorDefault, &session);
        m_pixelTransferSession = adoptCF(session);
    }

    // outputBuffer was previously left uninitialized and the CVPixelBufferCreate result
    // ignored, so a failed allocation would hand a garbage pointer to
    // VTPixelTransferSessionTransferImage. Initialize it and bail on failure instead.
    CVPixelBufferRef outputBuffer = nullptr;
    if (CVPixelBufferCreate(kCFAllocatorDefault, CVPixelBufferGetWidth(buffer.get()), CVPixelBufferGetHeight(buffer.get()), kCVPixelFormatType_32BGRA, 0, &outputBuffer) != kCVReturnSuccess || !outputBuffer)
        return 0;
    VTPixelTransferSessionTransferImage(m_pixelTransferSession.get(), buffer.get(), outputBuffer);
    buffer = adoptCF(outputBuffer);
#endif

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createPixelBuffer(%p) - creating buffer took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return buffer;
}
2287
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    // No player item means no frames can ever be available.
    if (!m_avPlayerItem)
        return false;

    // A previously captured image counts as an available frame.
    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    CMTime itemTime = [m_avPlayerItem currentTime];
    return [m_videoOutput hasNewPixelBufferForItemTime:itemTime];
}
2301
static const void* CVPixelBufferGetBytePointerCallback(void* info)
{
    // CGDataProvider "get byte pointer" callback: lock the pixel buffer for CPU
    // reads and hand back its base address.
    auto pixelBuffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
    return CVPixelBufferGetBaseAddress(pixelBuffer);
}
2308
static void CVPixelBufferReleaseBytePointerCallback(void* info, const void*)
{
    // CGDataProvider "release byte pointer" callback: balances the lock taken in
    // CVPixelBufferGetBytePointerCallback.
    auto pixelBuffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
}
2314
static void CVPixelBufferReleaseInfoCallback(void* info)
{
    // Called when the CGDataProvider is destroyed; balances the CFRetain taken
    // when the provider was created in createImageFromPixelBuffer().
    CFRelease(static_cast<CVPixelBufferRef>(info));
}
2320
// Wraps a 32BGRA pixel buffer in a CGImage without copying: the data provider reads
// directly from the buffer's memory, locking/unlocking it around each access via the
// callbacks above, and releases the retain taken here when the provider is destroyed.
static RetainPtr<CGImageRef> createImageFromPixelBuffer(CVPixelBufferRef pixelBuffer)
{
    // pixelBuffer will be of type kCVPixelFormatType_32BGRA.
    ASSERT(CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_32BGRA);

    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
    size_t byteLength = CVPixelBufferGetDataSize(pixelBuffer);
    // Little-endian BGRA maps to ARGB-first in CG's big-endian view.
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaFirst;

    CFRetain(pixelBuffer); // Balanced by CVPixelBufferReleaseInfoCallback in providerCallbacks.
    CGDataProviderDirectCallbacks providerCallbacks = { 0, CVPixelBufferGetBytePointerCallback, CVPixelBufferReleaseBytePointerCallback, 0, CVPixelBufferReleaseInfoCallback };
    RetainPtr<CGDataProviderRef> provider = adoptCF(CGDataProviderCreateDirect(pixelBuffer, byteLength, &providerCallbacks));

    return adoptCF(CGImageCreate(width, height, 8, 32, bytesPerRow, sRGBColorSpaceRef(), bitmapInfo, provider.get(), NULL, false, kCGRenderingIntentDefault));
}
2338
void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
{
    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
    // for the requested time has already been retrieved. In this case, the last valid image (if any)
    // should be displayed, so only overwrite m_lastImage when a fresh buffer arrives.
    if (RetainPtr<CVPixelBufferRef> pixelBuffer = createPixelBuffer())
        m_lastImage = createImageFromPixelBuffer(pixelBuffer.get());
}
2349
// Paints the most recent video frame from the AVPlayerItemVideoOutput into the
// graphics context, applying the track's preferred transform (e.g. rotation).
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext& context, const FloatRect& outputRect)
{
    // If an output exists but has produced nothing yet, block (up to the semaphore
    // timeout) for the first frame so paint does not draw blank.
    if (m_videoOutput && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();

    updateLastImage();

    if (!m_lastImage)
        return;

    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(%p)", this);

    GraphicsContextStateSaver stateSaver(context);
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
    // Map the destination rect through the inverse transform (identity when the
    // transform is not invertible) so the concatCTM below lands it correctly.
    FloatRect transformedOutputRect = videoTransform.inverse().valueOr(AffineTransform()).mapRect(outputRect);

    context.concatCTM(videoTransform);
    context.drawNativeImage(m_lastImage.get(), imageRect.size(), transformedOutputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();

}
2380
void MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput(%p)", this);

    // Requires a player item; a no-op when the OpenGL output already exists.
    if (!m_avPlayerItem || m_openGLVideoOutput)
        return;

    // Request IOSurface-backed buffers compatible with OpenGL(ES) framebuffer binding.
#if PLATFORM(IOS)
    NSDictionary* pixelBufferAttributes = @{(NSString *)kCVPixelBufferIOSurfaceOpenGLESFBOCompatibilityKey: @YES};
#else
    NSDictionary* pixelBufferAttributes = @{(NSString *)kCVPixelBufferIOSurfaceOpenGLFBOCompatibilityKey: @YES};
#endif
    m_openGLVideoOutput = adoptNS([allocAVPlayerItemVideoOutputInstance() initWithPixelBufferAttributes:pixelBufferAttributes]);
    ASSERT(m_openGLVideoOutput);

    [m_avPlayerItem.get() addOutput:m_openGLVideoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput(%p) - returning %p", this, m_openGLVideoOutput.get());
}
2400
void MediaPlayerPrivateAVFoundationObjC::destroyOpenGLVideoOutput()
{
    // Detach and release the OpenGL-oriented AVPlayerItemVideoOutput, if any.
    if (!m_openGLVideoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_openGLVideoOutput.get()];
    // Log the OpenGL output pointer; this previously logged m_videoOutput.get() by
    // copy/paste mistake from destroyVideoOutput().
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyOpenGLVideoOutput(%p) - destroying  %p", this, m_openGLVideoOutput.get());

    m_openGLVideoOutput = nullptr;
}
2412
void MediaPlayerPrivateAVFoundationObjC::updateLastOpenGLImage()
{
    if (!m_openGLVideoOutput)
        return;

    // Convert "now" into the item's timebase, and keep the previous image unless a
    // new pixel buffer has actually been produced since we last copied one.
    CMTime itemTime = [m_openGLVideoOutput itemTimeForHostTime:CACurrentMediaTime()];
    if (![m_openGLVideoOutput hasNewPixelBufferForItemTime:itemTime])
        return;

    m_lastOpenGLImage = adoptCF([m_openGLVideoOutput copyPixelBufferForItemTime:itemTime itemTimeForDisplay:nil]);
}
2424
2425 #if !LOG_DISABLED
2426
2427 #define STRINGIFY_PAIR(e) e, #e
// Maps OpenGL(ES) enum values to their names for logging. Built lazily on first use;
// NeverDestroyed avoids an exit-time destructor for the static map.
static std::map<uint32_t, const char*>& enumToStringMap()
{
    static NeverDestroyed<std::map<uint32_t, const char*>> map;
    if (map.get().empty()) {
        // Note: the now-removed local `std::map stringMap` was dead code left over from
        // the DEPRECATED_DEFINE_STATIC_LOCAL conversion. emplace() ignores duplicate
        // keys, so enums listed both as internal formats and as formats are harmless.
        map.get().emplace(STRINGIFY_PAIR(GL_RGB));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA));
        map.get().emplace(STRINGIFY_PAIR(GL_LUMINANCE_ALPHA));
        map.get().emplace(STRINGIFY_PAIR(GL_LUMINANCE));
        map.get().emplace(STRINGIFY_PAIR(GL_ALPHA));
        map.get().emplace(STRINGIFY_PAIR(GL_R8));
        map.get().emplace(STRINGIFY_PAIR(GL_R16F));
        map.get().emplace(STRINGIFY_PAIR(GL_R32F));
        map.get().emplace(STRINGIFY_PAIR(GL_R8UI));
        map.get().emplace(STRINGIFY_PAIR(GL_R8I));
        map.get().emplace(STRINGIFY_PAIR(GL_R16UI));
        map.get().emplace(STRINGIFY_PAIR(GL_R16I));
        map.get().emplace(STRINGIFY_PAIR(GL_R32UI));
        map.get().emplace(STRINGIFY_PAIR(GL_R32I));
        map.get().emplace(STRINGIFY_PAIR(GL_RG8));
        map.get().emplace(STRINGIFY_PAIR(GL_RG16F));
        map.get().emplace(STRINGIFY_PAIR(GL_RG32F));
        map.get().emplace(STRINGIFY_PAIR(GL_RG8UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RG8I));
        map.get().emplace(STRINGIFY_PAIR(GL_RG16UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RG16I));
        map.get().emplace(STRINGIFY_PAIR(GL_RG32UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RG32I));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB8));
        map.get().emplace(STRINGIFY_PAIR(GL_SRGB8));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA8));
        map.get().emplace(STRINGIFY_PAIR(GL_SRGB8_ALPHA8));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA4));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB10_A2));
        map.get().emplace(STRINGIFY_PAIR(GL_DEPTH_COMPONENT16));
        map.get().emplace(STRINGIFY_PAIR(GL_DEPTH_COMPONENT24));
        map.get().emplace(STRINGIFY_PAIR(GL_DEPTH_COMPONENT32F));
        map.get().emplace(STRINGIFY_PAIR(GL_DEPTH24_STENCIL8));
        map.get().emplace(STRINGIFY_PAIR(GL_DEPTH32F_STENCIL8));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA));
        map.get().emplace(STRINGIFY_PAIR(GL_LUMINANCE_ALPHA));
        map.get().emplace(STRINGIFY_PAIR(GL_LUMINANCE));
        map.get().emplace(STRINGIFY_PAIR(GL_ALPHA));
        map.get().emplace(STRINGIFY_PAIR(GL_RED));
        map.get().emplace(STRINGIFY_PAIR(GL_RG_INTEGER));
        map.get().emplace(STRINGIFY_PAIR(GL_DEPTH_STENCIL));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_BYTE));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_SHORT_5_6_5));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_SHORT_4_4_4_4));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_SHORT_5_5_5_1));
        map.get().emplace(STRINGIFY_PAIR(GL_BYTE));
        map.get().emplace(STRINGIFY_PAIR(GL_HALF_FLOAT));
        map.get().emplace(STRINGIFY_PAIR(GL_FLOAT));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_SHORT));
        map.get().emplace(STRINGIFY_PAIR(GL_SHORT));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_INT));
        map.get().emplace(STRINGIFY_PAIR(GL_INT));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_INT_2_10_10_10_REV));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_INT_24_8));
        map.get().emplace(STRINGIFY_PAIR(GL_FLOAT_32_UNSIGNED_INT_24_8_REV));

#if PLATFORM(IOS)
        map.get().emplace(STRINGIFY_PAIR(GL_RED_INTEGER));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB_INTEGER));
        map.get().emplace(STRINGIFY_PAIR(GL_RG8_SNORM));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB565));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB8_SNORM));
        map.get().emplace(STRINGIFY_PAIR(GL_R11F_G11F_B10F));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB9_E5));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB16F));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB32F));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB8UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB8I));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB16UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB16I));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB32UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB32I));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA8_SNORM));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA16F));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA32F));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA8UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA8I));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB10_A2UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA16UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA16I));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA32I));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA32UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB5_A1));
        map.get().emplace(STRINGIFY_PAIR(GL_RG));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA_INTEGER));
        map.get().emplace(STRINGIFY_PAIR(GL_DEPTH_COMPONENT));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_INT_10F_11F_11F_REV));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_INT_5_9_9_9_REV));
#endif
    }
    return map.get();
}
2526
2527 #endif // !LOG_DISABLED
2528
// Copies the current video frame into the caller's GL texture by wrapping the
// IOSurface-backed pixel buffer in a texture via a CVOpenGL(ES)TextureCache and
// blitting through a temporary read framebuffer. Alpha premultiplication and
// Y-flip are not supported by this path. Returns false on any failure.
bool MediaPlayerPrivateAVFoundationObjC::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    if (flipY || premultiplyAlpha)
        return false;

    ASSERT(context);

    if (!m_openGLVideoOutput)
        createOpenGLVideoOutput();

    updateLastOpenGLImage();

    if (!m_lastOpenGLImage)
        return false;

    // Lazily create a texture cache tied to the caller's GL context.
    if (!m_openGLTextureCache) {
#if PLATFORM(IOS)
        CVOpenGLESTextureCacheRef cache = nullptr;
        CVReturn error = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, nullptr, context->platformGraphicsContext3D(), nullptr, &cache);
#else
        CVOpenGLTextureCacheRef cache = nullptr;
        CVReturn error = CVOpenGLTextureCacheCreate(kCFAllocatorDefault, nullptr, context->platformGraphicsContext3D(), CGLGetPixelFormat(context->platformGraphicsContext3D()), nullptr, &cache);
#endif
        if (error != kCVReturnSuccess)
            return false;
        m_openGLTextureCache = adoptCF(cache);
    }

    size_t width = CVPixelBufferGetWidth(m_lastOpenGLImage.get());
    size_t height = CVPixelBufferGetHeight(m_lastOpenGLImage.get());

    // Wrap the pixel buffer in a GL texture without copying its contents.
#if PLATFORM(IOS)
    CVOpenGLESTextureRef bareVideoTexture = nullptr;
    if (kCVReturnSuccess != CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, m_openGLTextureCache.get(), m_lastOpenGLImage.get(), nullptr, outputTarget, internalFormat, width, height, format, type, level, &bareVideoTexture))
        return false;
    RetainPtr<CVOpenGLESTextureRef> videoTexture = adoptCF(bareVideoTexture);
    Platform3DObject videoTextureName = CVOpenGLESTextureGetName(videoTexture.get());
    GC3Denum videoTextureTarget = CVOpenGLESTextureGetTarget(videoTexture.get());
#else
    CVOpenGLTextureRef bareVideoTexture = nullptr;
    if (kCVReturnSuccess != CVOpenGLTextureCacheCreateTextureFromImage(kCFAllocatorDefault, m_openGLTextureCache.get(), m_lastOpenGLImage.get(), nullptr, &bareVideoTexture))
        return false;
    RetainPtr<CVOpenGLTextureRef> videoTexture = adoptCF(bareVideoTexture);
    Platform3DObject videoTextureName = CVOpenGLTextureGetName(videoTexture.get());
    GC3Denum videoTextureTarget = CVOpenGLTextureGetTarget(videoTexture.get());
#endif

    // Schedule a texture cache flush on the main queue once this frame is consumed.
    auto weakThis = createWeakPtr();
    dispatch_async(dispatch_get_main_queue(), [weakThis] {
        if (!weakThis)
            return;

        if (auto cache = weakThis->m_openGLTextureCache.get())
#if PLATFORM(IOS)
            CVOpenGLESTextureCacheFlush(cache, 0);
#else
            CVOpenGLTextureCacheFlush(cache, 0);
#endif
    });

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::copyVideoTextureToPlatformTexture(%p) - internalFormat: %s, format: %s, type: %s", this, enumToStringMap()[internalFormat], enumToStringMap()[format], enumToStringMap()[type]);

    // Save the original bound texture & framebuffer names so we can re-bind them after copying the video texture.
    GC3Dint boundTexture = 0;
    GC3Dint boundReadFramebuffer = 0;
    context->getIntegerv(GraphicsContext3D::TEXTURE_BINDING_2D, &boundTexture);
    context->getIntegerv(GraphicsContext3D::READ_FRAMEBUFFER_BINDING, &boundReadFramebuffer);

    context->bindTexture(videoTextureTarget, videoTextureName);
    context->texParameteri(GraphicsContext3D::TEXTURE_2D, GraphicsContext3D::TEXTURE_MIN_FILTER, GraphicsContext3D::LINEAR);
    context->texParameterf(GraphicsContext3D::TEXTURE_2D, GraphicsContext3D::TEXTURE_WRAP_S, GraphicsContext3D::CLAMP_TO_EDGE);
    context->texParameterf(GraphicsContext3D::TEXTURE_2D, GraphicsContext3D::TEXTURE_WRAP_T, GraphicsContext3D::CLAMP_TO_EDGE);

    // Create a framebuffer object to represent the video texture's memory.
    Platform3DObject readFramebuffer = context->createFramebuffer();

    // Make that framebuffer the read source from which drawing commands will read pixels.
    context->bindFramebuffer(GraphicsContext3D::READ_FRAMEBUFFER, readFramebuffer);

    // Allocate uninitialized memory for the output texture.
    context->bindTexture(outputTarget, outputTexture);
    context->texParameteri(GraphicsContext3D::TEXTURE_2D, GraphicsContext3D::TEXTURE_MIN_FILTER, GraphicsContext3D::LINEAR);
    context->texParameterf(GraphicsContext3D::TEXTURE_2D, GraphicsContext3D::TEXTURE_WRAP_S, GraphicsContext3D::CLAMP_TO_EDGE);
    context->texParameterf(GraphicsContext3D::TEXTURE_2D, GraphicsContext3D::TEXTURE_WRAP_T, GraphicsContext3D::CLAMP_TO_EDGE);
    context->texImage2DDirect(outputTarget, level, internalFormat, width, height, 0, format, type, nullptr);

    // Attach the video texture to the framebuffer.
    context->framebufferTexture2D(GraphicsContext3D::READ_FRAMEBUFFER, GraphicsContext3D::COLOR_ATTACHMENT0, videoTextureTarget, videoTextureName, level);

    GC3Denum status = context->checkFramebufferStatus(GraphicsContext3D::READ_FRAMEBUFFER);
    if (status != GraphicsContext3D::FRAMEBUFFER_COMPLETE) {
        // Previously this early return leaked readFramebuffer and left the caller's
        // texture/framebuffer bindings clobbered; restore and clean up before failing.
        context->bindTexture(outputTarget, boundTexture);
        context->bindFramebuffer(GraphicsContext3D::READ_FRAMEBUFFER, boundReadFramebuffer);
        context->deleteFramebuffer(readFramebuffer);
        return false;
    }

    // Copy texture from the read framebuffer (and thus the video texture) to the output texture.
    context->copyTexImage2D(outputTarget, level, internalFormat, 0, 0, width, height, 0);

    // Restore the previous texture and framebuffer bindings.
    context->bindTexture(outputTarget, boundTexture);
    context->bindFramebuffer(GraphicsContext3D::READ_FRAMEBUFFER, boundReadFramebuffer);

    // Clean up after ourselves.
    context->deleteFramebuffer(readFramebuffer);

    return !context->getError();
}
2634
PassNativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    // Refresh m_lastImage from the video output, then return whatever we have
    // (possibly the previous frame, or null if no frame was ever produced).
    updateLastImage();
    return m_lastImage.get();
}
2640
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    // Lazily create the semaphore that outputMediaDataWillChange() signals.
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for 1 second.
    if (dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(0, 1 * NSEC_PER_SEC)))
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange(%p) timed out", this);
}
2654
// Delegate callback from the video output's pull delegate queue; wakes up
// waitForVideoOutputMediaDataWillChange(), which blocks on this semaphore.
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutput*)
{
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
2659 #endif
2660
2661 #if ENABLE(ENCRYPTED_MEDIA)
// Legacy EME generateKeyRequest(): extracts the key URI/ID and app certificate from
// the initData, asks AVFoundation for a streaming content key request blob, and
// forwards it to the page via keyMessage() under a freshly minted session ID.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::generateKeyRequest(const String& keySystem, const unsigned char* initDataPtr, unsigned initDataLength)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    RefPtr<Uint8Array> initData = Uint8Array::create(initDataPtr, initDataLength);
    String keyURI;
    String keyID;
    RefPtr<Uint8Array> certificate;
    if (!extractKeyURIKeyIDAndCertificateFromInitData(initData.get(), keyURI, keyID, certificate))
        return MediaPlayer::InvalidPlayerState;

    // A resource loading request for this key URI must already be pending.
    if (!m_keyURIToRequestMap.contains(keyURI))
        return MediaPlayer::InvalidPlayerState;

    String sessionID = createCanonicalUUIDString();

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_keyURIToRequestMap.get(keyURI);

    RetainPtr<NSData> certificateData = adoptNS([[NSData alloc] initWithBytes:certificate->baseAddress() length:certificate->byteLength()]);
    NSString* assetStr = keyID;
    RetainPtr<NSData> assetID = [NSData dataWithBytes: [assetStr cStringUsingEncoding:NSUTF8StringEncoding] length:[assetStr lengthOfBytesUsingEncoding:NSUTF8StringEncoding]];
    NSError* error = 0;
    RetainPtr<NSData> keyRequest = [avRequest.get() streamingContentKeyRequestDataForApp:certificateData.get() contentIdentifier:assetID.get() options:nil error:&error];

    if (!keyRequest) {
        // Surface the platform's underlying error code to the page; the call itself
        // is still reported as NoError per the legacy EME contract.
        NSError* underlyingError = [[error userInfo] objectForKey:NSUnderlyingErrorKey];
        player()->keyError(keySystem, sessionID, MediaPlayerClient::DomainError, [underlyingError code]);
        return MediaPlayer::NoError;
    }

    RefPtr<ArrayBuffer> keyRequestBuffer = ArrayBuffer::create([keyRequest.get() bytes], [keyRequest.get() length]);
    RefPtr<Uint8Array> keyRequestArray = Uint8Array::create(keyRequestBuffer, 0, keyRequestBuffer->byteLength());
    player()->keyMessage(keySystem, sessionID, keyRequestArray->data(), keyRequestArray->byteLength(), URL());

    // Move ownership of the AVAssetResourceLoadingRequest from the keyIDToRequestMap to the sessionIDToRequestMap:
    m_sessionIDToRequestMap.set(sessionID, avRequest);
    m_keyURIToRequestMap.remove(keyURI);

    return MediaPlayer::NoError;
}
2703
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::addKey(const String& keySystem, const unsigned char* keyPtr, unsigned keyLength, const unsigned char* initDataPtr, unsigned initDataLength, const String& sessionID)
{
    // initData is unused by this key system; the key bytes alone satisfy the request.
    UNUSED_PARAM(initDataPtr);
    UNUSED_PARAM(initDataLength);

    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    // Deliver the key bytes to the pending AVFoundation loading request and finish it.
    RetainPtr<AVAssetResourceLoadingRequest> pendingRequest = m_sessionIDToRequestMap.get(sessionID);
    RetainPtr<NSData> keyData = adoptNS([[NSData alloc] initWithBytes:keyPtr length:keyLength]);
    [[pendingRequest.get() dataRequest] respondWithData:keyData.get()];
    [pendingRequest.get() finishLoading];
    m_sessionIDToRequestMap.remove(sessionID);

    player()->keyAdded(keySystem, sessionID);

    return MediaPlayer::NoError;
}
2724
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::cancelKeyRequest(const String& keySystem, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    // Dropping the stored loading request abandons the session.
    m_sessionIDToRequestMap.remove(sessionID);
    return MediaPlayer::NoError;
}
2736 #endif
2737
2738 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Removes and returns the pending resource loading request for keyURI, or a null
// RetainPtr if no request is registered for that URI.
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
2743
void MediaPlayerPrivateAVFoundationObjC::keyAdded()
{
    // Satisfy every pending loading request for which the player now holds a cached key.
    Vector<String> satisfiedKeyIds;

    for (auto& entry : m_keyURIToRequestMap) {
        auto keyData = player()->cachedKeyForKeyId(entry.key);
        if (!keyData)
            continue;

        fulfillRequestWithKeyData(entry.value.get(), keyData.get());
        satisfiedKeyIds.append(entry.key);
    }

    // Remove satisfied entries after the walk; mutating the map mid-iteration is unsafe.
    for (auto& keyId : satisfiedKeyIds)
        m_keyURIToRequestMap.remove(keyId);
}
2763
std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem, CDMSessionClient* client)
{
    // Only key systems we recognize get a CDM session; everything else yields null.
    if (!keySystemIsSupported(keySystem))
        return nullptr;
    return std::make_unique<CDMSessionAVFoundationObjC>(this, client);
}
2771 #endif
2772
2773 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
// Rebuilds the text track list for legacy (pre-legible-output) closed caption tracks:
// any cached player item track of type closed caption that we do not already know
// about becomes a new InbandTextTrackPrivateLegacyAVFObjC; tracks no longer present
// are reported as removed via processNewAndRemovedTextTracks().
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Clear any automatic legible selection so the legacy tracks drive captioning.
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    // Start by assuming every known track was removed; matches below rescue them.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        // Iterate backwards so remove(i - 1) does not disturb unvisited indices.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(m_textTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;

        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2808 #endif
2809
NSArray* MediaPlayerPrivateAVFoundationObjC::safeAVAssetTracksForAudibleMedia()
{
    // Only query tracks once AVFoundation reports the "tracks" key loaded;
    // asking earlier risks blocking on synchronous loading.
    if (!m_avAsset || [m_avAsset.get() statusOfValueForKey:@"tracks" error:NULL] != AVKeyValueStatusLoaded)
        return nil;

    return [m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible];
}
2820
2821 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    // Selection groups are only safe to query after the characteristics key has loaded.
    return m_avAsset && [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2832
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    // nil until the asset's media selection metadata has finished loading.
    if (!hasLoadedMediaSelectionGroups())
        return nil;
    return [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible];
}
2840
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    // nil until the asset's media selection metadata has finished loading.
    if (!hasLoadedMediaSelectionGroups())
        return nil;
    return [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
}
2848
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    // nil until the asset's media selection metadata has finished loading.
    if (!hasLoadedMediaSelectionGroups())
        return nil;
    return [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual];
}
2856
// Diffs the legible media selection group's playable options against our current
// text track list: options we already track are kept, unseen options create new
// tracks (out-of-band or in-band), and tracks whose option disappeared are reported
// as removed via processNewAndRemovedTextTracks().
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Start by assuming every known track was removed; matches below rescue them.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        // Iterate backwards so remove(i - 1) does not disturb unvisited indices.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy closed caption tracks are handled elsewhere; skip them here.
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }

            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2912
// Lazily creates the single in-band metadata text track (dispatch type
// "com.apple.streaming") and registers it with the player. Subsequent calls
// are no-ops once the track exists.
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    if (!m_metadataTrack) {
        m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
        m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
        player()->addTextTrack(m_metadataTrack);
    }
}
2922
// Forwards a batch of cue data (attributed strings and native samples, as
// delivered by the legible output) to the currently selected text track, if any.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    if (m_currentTextTrack)
        m_currentTextTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), reinterpret_cast<CFArrayRef>(nativeSamples), time);
}
2930
// Discards any partially accumulated cue state on the current text track,
// e.g. after a seek invalidates in-flight cues.
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::flushCues(%p)", this);

    if (m_currentTextTrack)
        m_currentTextTrack->resetCueValues();
}
2940 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2941
// Makes |track| the selected text track. Legacy closed captions are toggled on
// the AVPlayer itself; all other categories are selected through the legible
// media selection group. Passing a null track deselects captions entirely.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");

    m_currentTextTrack = track;

    if (track) {
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // Use nil (not the integer literal 0) for the Objective-C object argument,
        // matching the deselection call in processMediaSelectionOptions().
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
    }
}
2970
// Returns the language identifier of the primary audio track, caching the
// answer in m_languageOfPrimaryAudioTrack. A null cached string means "not yet
// computed"; an empty string is a valid cached result meaning "no discernible
// language".
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    // Without a player item there is nothing to inspect; do not cache so a later
    // call can retry once the item exists.
    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        m_languageOfPrimaryAudioTrack = emptyString();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - %lu audio tracks, returning emptyString()", this, static_cast<unsigned long>(tracks ? [tracks count] : 0));
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

#if !LOG_DISABLED
    // isEmpty() is the idiomatic WTF::String check (true for both null and empty);
    // comparing against emptyString() did a needless content comparison.
    if (m_languageOfPrimaryAudioTrack.isEmpty())
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
    else
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
#endif

    return m_languageOfPrimaryAudioTrack;
}
3012
3013 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
3014 bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
3015 {
3016     bool wirelessTarget = false;
3017
3018 #if !PLATFORM(IOS)
3019     if (m_playbackTarget) {