[Mac] Remove unused playerToPrivateMap()
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27
28 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
29 #import "MediaPlayerPrivateAVFoundationObjC.h"
30
31 #import "AVFoundationSPI.h"
32 #import "AVTrackPrivateAVFObjCImpl.h"
33 #import "AudioSourceProviderAVFObjC.h"
34 #import "AudioTrackPrivateAVFObjC.h"
35 #import "AuthenticationChallenge.h"
36 #import "BlockExceptions.h"
37 #import "CDMSessionAVFoundationObjC.h"
38 #import "Cookie.h"
39 #import "ExceptionCodePlaceholder.h"
40 #import "Extensions3D.h"
41 #import "FloatConversion.h"
42 #import "FloatConversion.h"
43 #import "GraphicsContext.h"
44 #import "GraphicsContext3D.h"
45 #import "GraphicsContextCG.h"
46 #import "InbandMetadataTextTrackPrivateAVF.h"
47 #import "InbandTextTrackPrivateAVFObjC.h"
48 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
49 #import "OutOfBandTextTrackPrivateAVF.h"
50 #import "URL.h"
51 #import "Logging.h"
52 #import "MediaPlaybackTargetMac.h"
53 #import "MediaPlaybackTargetMock.h"
54 #import "MediaSelectionGroupAVFObjC.h"
55 #import "MediaTimeAVFoundation.h"
56 #import "PlatformTimeRanges.h"
57 #import "QuartzCoreSPI.h"
58 #import "SecurityOrigin.h"
59 #import "SerializedPlatformRepresentationMac.h"
60 #import "TextEncoding.h"
61 #import "TextTrackRepresentation.h"
62 #import "UUID.h"
63 #import "VideoTrackPrivateAVFObjC.h"
64 #import "WebCoreAVFResourceLoader.h"
65 #import "WebCoreCALayerExtras.h"
66 #import "WebCoreSystemInterface.h"
67 #import <functional>
68 #import <map>
69 #import <objc/runtime.h>
70 #import <runtime/DataView.h>
71 #import <runtime/JSCInlines.h>
72 #import <runtime/TypedArrayInlines.h>
73 #import <runtime/Uint16Array.h>
74 #import <runtime/Uint32Array.h>
75 #import <runtime/Uint8Array.h>
76 #import <wtf/CurrentTime.h>
77 #import <wtf/ListHashSet.h>
78 #import <wtf/NeverDestroyed.h>
79 #import <wtf/OSObjectPtr.h>
80 #import <wtf/text/CString.h>
81 #import <wtf/text/StringBuilder.h>
82
83 #if ENABLE(AVF_CAPTIONS)
84 #include "TextTrack.h"
85 #endif
86
87 #import <AVFoundation/AVFoundation.h>
88 #if PLATFORM(IOS)
89 #import "WAKAppKitStubs.h"
90 #import <CoreImage/CoreImage.h>
91 #import <mach/mach_port.h>
92 #else
93 #import <Foundation/NSGeometry.h>
94 #import <QuartzCore/CoreImage.h>
95 #endif
96
97 #if USE(VIDEOTOOLBOX)
98 #import <CoreVideo/CoreVideo.h>
99 #import <VideoToolbox/VideoToolbox.h>
100 #endif
101
102 #if USE(CFNETWORK)
103 #include "CFNSURLConnectionSPI.h"
104 #endif
105
106 #if PLATFORM(IOS)
107 #include <OpenGLES/ES3/glext.h>
108 #endif
109
// WTF's HashSet iterator does not provide the nested typedefs that the
// standard library expects; this minimal specialization supplies value_type
// so the media-selection-option sets can be passed to <algorithm> helpers.
// NOTE(review): only value_type is defined — presumably that is all the call
// sites require; confirm before using these iterators with other algorithms.
namespace std {
template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
    typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
};
}
115
// Container layer that keeps its sublayers (the AVPlayerLayer) sized and
// positioned to match itself, compensating for a transform applied elsewhere.
@interface WebVideoContainerLayer : CALayer
@end

@implementation WebVideoContainerLayer

- (void)setBounds:(CGRect)bounds
{
    [super setBounds:bounds];

    // Propagate our bounds to every sublayer so the video layer always fills
    // this container.
    for (CALayer* sublayer in self.sublayers)
        sublayer.frame = bounds;
}

- (void)setPosition:(CGPoint)position
{
    // Pre-apply the transform added in the WebProcess to fix <rdar://problem/18316542> to the position.
    if (!CATransform3DIsIdentity(self.transform))
        position = CGPointApplyAffineTransform(position, CATransform3DGetAffineTransform(self.transform));

    [super setPosition:position];
}
@end
137
138 #if ENABLE(AVF_CAPTIONS)
139 // Note: This must be defined before our SOFT_LINK macros:
140 @class AVMediaSelectionOption;
141 @interface AVMediaSelectionOption (OutOfBandExtensions)
142 @property (nonatomic, readonly) NSString* outOfBandSource;
143 @property (nonatomic, readonly) NSString* outOfBandIdentifier;
144 @end
145 #endif
146
147 @interface AVURLAsset (WebKitExtensions)
148 @property (nonatomic, readonly) NSURL *resolvedURL;
149 @end
150
151 typedef AVPlayer AVPlayerType;
152 typedef AVPlayerItem AVPlayerItemType;
153 typedef AVPlayerItemLegibleOutput AVPlayerItemLegibleOutputType;
154 typedef AVPlayerItemVideoOutput AVPlayerItemVideoOutputType;
155 typedef AVMetadataItem AVMetadataItemType;
156 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
157 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
158
159 #pragma mark - Soft Linking
160
161 // Soft-linking headers must be included last since they #define functions, constants, etc.
162 #import "CoreMediaSoftLink.h"
163
164 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
165
166 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
167 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreVideo)
168
169 #if USE(VIDEOTOOLBOX)
170 SOFT_LINK_FRAMEWORK_OPTIONAL(VideoToolbox)
171 #endif
172
173 SOFT_LINK(CoreVideo, CVPixelBufferGetWidth, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
174 SOFT_LINK(CoreVideo, CVPixelBufferGetHeight, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
175 SOFT_LINK(CoreVideo, CVPixelBufferGetBaseAddress, void*, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
176 SOFT_LINK(CoreVideo, CVPixelBufferGetBytesPerRow, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
177 SOFT_LINK(CoreVideo, CVPixelBufferGetDataSize, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
178 SOFT_LINK(CoreVideo, CVPixelBufferGetPixelFormatType, OSType, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
179 SOFT_LINK(CoreVideo, CVPixelBufferLockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
180 SOFT_LINK(CoreVideo, CVPixelBufferUnlockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
181
182 #if USE(VIDEOTOOLBOX)
183 SOFT_LINK(VideoToolbox, VTPixelTransferSessionCreate, OSStatus, (CFAllocatorRef allocator, VTPixelTransferSessionRef *pixelTransferSessionOut), (allocator, pixelTransferSessionOut))
184 SOFT_LINK(VideoToolbox, VTPixelTransferSessionTransferImage, OSStatus, (VTPixelTransferSessionRef session, CVPixelBufferRef sourceBuffer, CVPixelBufferRef destinationBuffer), (session, sourceBuffer, destinationBuffer))
185 #endif
186
187 SOFT_LINK_CLASS(AVFoundation, AVPlayer)
188 SOFT_LINK_CLASS(AVFoundation, AVPlayerItem)
189 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemVideoOutput)
190 SOFT_LINK_CLASS(AVFoundation, AVPlayerLayer)
191 SOFT_LINK_CLASS(AVFoundation, AVURLAsset)
192 SOFT_LINK_CLASS(AVFoundation, AVAssetImageGenerator)
193 SOFT_LINK_CLASS(AVFoundation, AVMetadataItem)
194
195 SOFT_LINK_CLASS(CoreImage, CIContext)
196 SOFT_LINK_CLASS(CoreImage, CIImage)
197
198 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString*)
199 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString*)
200 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
201 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
202 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
203 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
204 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
205 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeMetadata, NSString *)
206 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
207 SOFT_LINK_POINTER(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
208 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
209 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
210 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
211 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
212 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
213 SOFT_LINK_POINTER(CoreVideo, kCVPixelBufferPixelFormatTypeKey, NSString *)
214
215 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
216
217 #define AVPlayer getAVPlayerClass()
218 #define AVPlayerItem getAVPlayerItemClass()
219 #define AVPlayerLayer getAVPlayerLayerClass()
220 #define AVURLAsset getAVURLAssetClass()
221 #define AVAssetImageGenerator getAVAssetImageGeneratorClass()
222 #define AVMetadataItem getAVMetadataItemClass()
223
224 #define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
225 #define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
226 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
227 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
228 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
229 #define AVMediaTypeVideo getAVMediaTypeVideo()
230 #define AVMediaTypeAudio getAVMediaTypeAudio()
231 #define AVMediaTypeMetadata getAVMediaTypeMetadata()
232 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
233 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
234 #define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
235 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
236 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
237 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
238 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
239 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
240 #define kCVPixelBufferPixelFormatTypeKey getkCVPixelBufferPixelFormatTypeKey()
241
242 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
243 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
244 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
245
246 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
247 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
248 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
249
250 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
251 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
252 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
253 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
254
255 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
256 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
257 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
258 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
259 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
260 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
261 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
262 #endif
263
264 #if ENABLE(AVF_CAPTIONS)
265 SOFT_LINK_POINTER(AVFoundation, AVURLAssetHTTPCookiesKey, NSString*)
266 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
267 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
268 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
269 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
270 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
271 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
272 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
273 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
274 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
275
276 #define AVURLAssetHTTPCookiesKey getAVURLAssetHTTPCookiesKey()
277 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
278 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
279 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
280 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
281 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
282 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
283 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
284 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
285 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
286 #endif
287
288 #if ENABLE(DATACUE_VALUE)
289 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString*)
290 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMetadataKeySpaceISOUserData, NSString*)
291 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString*)
292 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceiTunes, NSString*)
293 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceID3, NSString*)
294
295 #define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
296 #define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
297 #define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
298 #define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
299 #define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
300 #endif
301
302 #if PLATFORM(IOS)
303 SOFT_LINK_POINTER(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
304 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
305 #endif
306
307 #if PLATFORM(IOS)
308 SOFT_LINK(CoreVideo, CVOpenGLESTextureCacheCreate, CVReturn, (CFAllocatorRef allocator, CFDictionaryRef cacheAttributes, CVEAGLContext eaglContext, CFDictionaryRef textureAttributes, CVOpenGLESTextureCacheRef* cacheOut), (allocator, cacheAttributes, eaglContext, textureAttributes, cacheOut))
309 SOFT_LINK(CoreVideo, CVOpenGLESTextureCacheCreateTextureFromImage, CVReturn, (CFAllocatorRef allocator, CVOpenGLESTextureCacheRef textureCache, CVImageBufferRef sourceImage, CFDictionaryRef textureAttributes, GLenum target, GLint internalFormat, GLsizei width, GLsizei height, GLenum format, GLenum type, size_t planeIndex, CVOpenGLESTextureRef* textureOut), (allocator, textureCache, sourceImage, textureAttributes, target, internalFormat, width, height, format, type, planeIndex, textureOut))
310 SOFT_LINK(CoreVideo, CVOpenGLESTextureCacheFlush, void, (CVOpenGLESTextureCacheRef textureCache, CVOptionFlags options), (textureCache, options))
311 SOFT_LINK(CoreVideo, CVOpenGLESTextureGetTarget, GLenum, (CVOpenGLESTextureRef image), (image))
312 SOFT_LINK(CoreVideo, CVOpenGLESTextureGetName, GLuint, (CVOpenGLESTextureRef image), (image))
313 SOFT_LINK_POINTER(CoreVideo, kCVPixelBufferIOSurfaceOpenGLESFBOCompatibilityKey, NSString *)
314 #define kCVPixelBufferIOSurfaceOpenGLESFBOCompatibilityKey getkCVPixelBufferIOSurfaceOpenGLESFBOCompatibilityKey()
315 #else
316 SOFT_LINK(CoreVideo, CVOpenGLTextureCacheCreate, CVReturn, (CFAllocatorRef allocator, CFDictionaryRef cacheAttributes, CGLContextObj cglContext, CGLPixelFormatObj cglPixelFormat, CFDictionaryRef textureAttributes, CVOpenGLTextureCacheRef* cacheOut), (allocator, cacheAttributes, cglContext, cglPixelFormat, textureAttributes, cacheOut))
317 SOFT_LINK(CoreVideo, CVOpenGLTextureCacheCreateTextureFromImage, CVReturn, (CFAllocatorRef allocator, CVOpenGLTextureCacheRef textureCache, CVImageBufferRef sourceImage, CFDictionaryRef attributes, CVOpenGLTextureRef* textureOut), (allocator, textureCache, sourceImage, attributes, textureOut))
318 SOFT_LINK(CoreVideo, CVOpenGLTextureCacheFlush, void, (CVOpenGLTextureCacheRef textureCache, CVOptionFlags options), (textureCache, options))
319 SOFT_LINK(CoreVideo, CVOpenGLTextureGetTarget, GLenum, (CVOpenGLTextureRef image), (image))
320 SOFT_LINK(CoreVideo, CVOpenGLTextureGetName, GLuint, (CVOpenGLTextureRef image), (image))
321 SOFT_LINK_POINTER(CoreVideo, kCVPixelBufferIOSurfaceOpenGLFBOCompatibilityKey, NSString *)
322 #define kCVPixelBufferIOSurfaceOpenGLFBOCompatibilityKey getkCVPixelBufferIOSurfaceOpenGLFBOCompatibilityKey()
323 #endif
324
325 using namespace WebCore;
326
// KVO context values. Each addObserver: call in this file passes one of these
// so that observeValueForKeyPath: can tell which kind of observed object
// (player item, item track, player, or player layer) a change came from.
enum MediaPlayerAVFoundationObservationContext {
    MediaPlayerAVFoundationObservationContextPlayerItem,
    MediaPlayerAVFoundationObservationContextPlayerItemTrack,
    MediaPlayerAVFoundationObservationContextPlayer,
    MediaPlayerAVFoundationObservationContextAVPlayerLayer,
};
333
// Observer object that receives KVO notifications, end-of-playback
// notifications, and (when available) legible-output caption callbacks on
// behalf of the C++ player. m_callback is a raw back-pointer; -disconnect
// clears it before the player is destroyed (see cancelLoad()).
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
#else
@interface WebCoreAVFMovieObserver : NSObject
#endif
{
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Not retained; cleared via -disconnect.
    int m_delayCallbacks;
}
-(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
-(void)disconnect;
-(void)metadataLoaded;
-(void)didEnd:(NSNotification *)notification;
// NOTE(review): keyPath is declared with an implicit id type here; the
// canonical KVO signature types it (NSString *) — confirm this is intentional.
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;
#endif
@end
353
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Forwards AVAssetResourceLoader requests to the owning player so WebCore can
// service the loads itself. m_callback is a raw pointer; the player's
// destructor calls -setCallback: with 0 before the player goes away.
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    MediaPlayerPrivateAVFoundationObjC* m_callback;
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
@end
#endif
363
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Pull delegate for the AVPlayerItemVideoOutput path. m_callback is a raw
// pointer cleared via -setCallback: in the player's destructor.
// NOTE(review): m_semaphore is presumably used to signal frame availability
// to a waiting thread — the methods that use it are outside this view; verify.
@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    MediaPlayerPrivateAVFoundationObjC *m_callback;
    dispatch_semaphore_t m_semaphore;
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
@end
#endif
375
376 namespace WebCore {
377
378 static NSArray *assetMetadataKeyNames();
379 static NSArray *itemKVOProperties();
380 static NSArray *assetTrackMetadataKeyNames();
381 static NSArray *playerKVOProperties();
382 static AVAssetTrack* firstEnabledTrack(NSArray* tracks);
383
384 #if !LOG_DISABLED
// Render a bool as "true"/"false" for log messages.
static const char *boolString(bool val)
{
    if (val)
        return "true";
    return "false";
}
389 #endif
390
391 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Lazily-created, process-wide serial queue on which all
// WebCoreAVFLoaderDelegate resource-loading callbacks are delivered.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_once_t creationToken;
    static dispatch_queue_t loaderQueue;
    dispatch_once(&creationToken, ^{
        loaderQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return loaderQueue;
}
401 #endif
402
403 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Lazily-created, process-wide serial queue on which all
// WebCoreAVFPullDelegate video-output callbacks are delivered.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_once_t creationToken;
    static dispatch_queue_t pullQueue;
    dispatch_once(&creationToken, ^{
        pullQueue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return pullQueue;
}
413 #endif
414
415 #if USE(CFNETWORK)
// Adapts WebCore's AuthenticationClient interface onto an
// NSURLAuthenticationChallenge: each AuthenticationClient callback is
// forwarded to the challenge's sender. Used only on the CFNetwork path.
class WebCoreNSURLAuthenticationChallengeClient : public RefCounted<WebCoreNSURLAuthenticationChallengeClient>, public AuthenticationClient {
public:
    static RefPtr<WebCoreNSURLAuthenticationChallengeClient> create(NSURLAuthenticationChallenge *challenge)
    {
        return adoptRef(new WebCoreNSURLAuthenticationChallengeClient(challenge));
    }

    // Expose RefCounted's ref/deref publicly so AuthenticationClient's
    // ref-counting hooks below can delegate to them.
    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::ref;
    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::deref;

private:
    WebCoreNSURLAuthenticationChallengeClient(NSURLAuthenticationChallenge *challenge)
        : m_challenge(challenge)
    {
        ASSERT(m_challenge);
    }

    virtual void refAuthenticationClient() override { ref(); }
    virtual void derefAuthenticationClient() override { deref(); }

    // The client supplied a credential; continue the challenge with it.
    virtual void receivedCredential(const AuthenticationChallenge&, const Credential& credential) override
    {
        [[m_challenge sender] useCredential:credential.nsCredential() forAuthenticationChallenge:m_challenge.get()];
    }

    virtual void receivedRequestToContinueWithoutCredential(const AuthenticationChallenge&) override
    {
        [[m_challenge sender] continueWithoutCredentialForAuthenticationChallenge:m_challenge.get()];
    }

    virtual void receivedCancellation(const AuthenticationChallenge&) override
    {
        [[m_challenge sender] cancelAuthenticationChallenge:m_challenge.get()];
    }

    // These two sender methods are optional protocol members, so check
    // respondsToSelector: before messaging (a missed check would throw).
    virtual void receivedRequestToPerformDefaultHandling(const AuthenticationChallenge&) override
    {
        if ([[m_challenge sender] respondsToSelector:@selector(performDefaultHandlingForAuthenticationChallenge:)])
            [[m_challenge sender] performDefaultHandlingForAuthenticationChallenge:m_challenge.get()];
    }

    virtual void receivedChallengeRejection(const AuthenticationChallenge&) override
    {
        if ([[m_challenge sender] respondsToSelector:@selector(rejectProtectionSpaceAndContinueWithChallenge:)])
            [[m_challenge sender] rejectProtectionSpaceAndContinueWithChallenge:m_challenge.get()];
    }

    RetainPtr<NSURLAuthenticationChallenge> m_challenge; // Retained; asserted non-null at construction.
};
465 #endif
466
// Register this engine with the media-engine registry, but only when the
// soft-linked AVFoundation stack is actually available at runtime.
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player); },
        getSupportedTypes, supportsType, 0, 0, 0, supportsKeySystem);
}
473
// Constructor. The ObjC helper objects (KVO/notification observer, video
// output pull delegate, resource loader delegate) are created eagerly so they
// can be attached as soon as an AVPlayer/AVPlayerItem exists; all cached
// state starts in its "nothing loaded" configuration.
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_weakPtrFactory(this)
#if PLATFORM(IOS)
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
#endif
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
    , m_videoOutputSemaphore(nullptr)
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
#endif
    , m_currentTextTrack(0)
    , m_cachedRate(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_shouldBufferData(true)
    , m_cachedIsReadyForDisplay(false)
    , m_haveBeenAskedToCreateLayer(false)
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
}
507
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Detach the loader delegate first so no resource-loading callback can
    // re-enter this object mid-teardown, then invalidate outstanding loaders.
    [m_loaderDelegate.get() setCallback:0];
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];

    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    [m_videoOutputDelegate setCallback:0];
    [m_videoOutput setDelegate:nil queue:0];
    // Explicit dispatch_release — assumes this file manages dispatch objects
    // manually (non-ARC/OS_OBJECT retain counting); TODO confirm build flags.
    if (m_videoOutputSemaphore)
        dispatch_release(m_videoOutputSemaphore);
#endif

    // Tear down the layer before cancelLoad() so KVO on the layer is removed
    // while m_objcObserver is still attached.
    if (m_videoLayer)
        destroyVideoLayer();

    cancelLoad();
}
529
// Abort any in-flight load and detach from all AVFoundation objects:
// notifications, KVO observers, asset loading, text tracks, legible output,
// the player item, the player, and all cached property state. The order
// matters — every observer registration made elsewhere in this file is
// unregistered here before the observed object is released.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::cancelLoad(%p)", this);
    tearDownVideoRendering();

    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Remove the caption output from the item (if both still exist) before
    // dropping our reference to it.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    // Unregister every KVO key observed on the item, then the time observer
    // and every KVO key observed on the player, before releasing each.
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];

        m_avPlayerItem = nil;
    }
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;

        for (NSString *keyName in playerKVOProperties())
            [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = MediaTime::zeroTime();

    // Each cached track had an "enabled" observer added; remove it before
    // dropping the track list.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    if (m_provider) {
        m_provider->setPlayerItem(nullptr);
        m_provider->setAudioTrack(nullptr);
    }
#endif

    setIgnoreLoadStateChanges(false);
}
593
// True once createVideoLayer() has committed to the AVPlayerLayer rendering
// path (the layer itself may still be pending main-thread creation).
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_haveBeenAskedToCreateLayer;
}
598
// True when we can paint frames into a graphics context — either through an
// AVPlayerItemVideoOutput (preferred, when available) or through an
// AVAssetImageGenerator fallback.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (m_videoOutput)
        return true;
#endif
    return m_imageGenerator;
}
607
// Create whichever context renderer this build supports: the video-output
// path when HAVE(AVFOUNDATION_VIDEO_OUTPUT), otherwise the image-generator
// fallback (see hasContextRenderer()).
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
616
// Create the AVAssetImageGenerator used to paint frames in software.
// No-op until an asset exists, and idempotent once a generator was made.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p)", this);

    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    // Clean-aperture, transform-corrected output; zero before/after tolerance
    // forces the generator to return the frame at exactly the requested time.
    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
}
633
// Tear down every context-rendering path that might exist: video output and
// OpenGL video output (when built), plus the image-generator fallback.
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
    destroyOpenGLVideoOutput();
#endif
    destroyImageGenerator();
}
642
// Release the AVAssetImageGenerator used for software painting, if any.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (!m_imageGenerator)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator(%p) - destroying  %p", this, m_imageGenerator.get());

    // Clear with nullptr (was `= 0`) for consistency with how the other
    // cached smart pointers in this file are reset (see cancelLoad()).
    m_imageGenerator = nullptr;
}
652
// Switch to layer-backed rendering. The actual layer creation is deferred to
// the main thread via callOnMainThread; the weak pointer guards against this
// object being destroyed before the deferred task runs.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
        return;

    auto weakThis = createWeakPtr();
    callOnMainThread([this, weakThis] {
        if (!weakThis)
            return;

        // Re-check: state may have changed between scheduling and execution.
        if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
            return;
        m_haveBeenAskedToCreateLayer = true;

        if (!m_videoLayer)
            createAVPlayerLayer();

#if USE(VIDEOTOOLBOX)
        // The VideoToolbox configuration also keeps a video output alongside
        // the layer.
        if (!m_videoOutput)
            createVideoOutput();
#endif

        // Let the client know painting switched from context to layer mode.
        player()->client().mediaPlayerRenderingModeChanged(player());
    });
}
678
// Create the AVPlayerLayer attached to m_avPlayer, register for its
// readyForDisplay KVO notification, and (on iOS) wrap it in the
// WebVideoContainerLayer / fullscreen layer hierarchy.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
{
    if (!m_avPlayer)
        return;

    m_videoLayer = adoptNS([allocAVPlayerLayerInstance() init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];
    [m_videoLayer setBackgroundColor:cachedCGColor(Color::black)];
#ifndef NDEBUG
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
    // Observed so we learn when the layer can actually display video;
    // removed again in destroyVideoLayer().
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
    IntSize defaultSize = snappedIntRect(player()->client().mediaPlayerContentBoxRect()).size();
    // Fixed: this log line previously said "createVideoLayer", misattributing
    // the message to the caller.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer(%p) - returning %p", this, m_videoLayer.get());

#if PLATFORM(IOS)
    [m_videoLayer web_disableAllActions];
    m_videoInlineLayer = adoptNS([[WebVideoContainerLayer alloc] init]);
#ifndef NDEBUG
    [m_videoInlineLayer setName:@"WebVideoContainerLayer"];
#endif
    [m_videoInlineLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
    // Parent the player layer under the fullscreen layer when one exists,
    // otherwise under the inline container.
    if (m_videoFullscreenLayer) {
        [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
        [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
    } else {
        [m_videoInlineLayer insertSublayer:m_videoLayer.get() atIndex:0];
        [m_videoLayer setFrame:m_videoInlineLayer.get().bounds];
    }
    // setPIPModeEnabled: is not present on all OS versions; probe first.
    if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(player()->fullscreenMode() & MediaPlayer::VideoFullscreenModePictureInPicture)];
#else
    [m_videoLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
#endif
}
716
// Tears down the AVPlayerLayer: removes the "readyForDisplay" observer added
// in createAVPlayerLayer(), detaches the player, and (on iOS) drops the inline
// container layer.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer(%p) - destroying %p", this, m_videoLayer.get());

    [m_videoLayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer.get() setPlayer:nil];

#if PLATFORM(IOS)
    if (m_videoFullscreenLayer)
        [m_videoLayer removeFromSuperlayer];
    m_videoInlineLayer = nil;
#endif

    m_videoLayer = nil;
}
735
// Returns the wall-clock date of media time zero, in milliseconds since the
// epoch, or an invalid time when the media carries no start date.
MediaTime MediaPlayerPrivateAVFoundationObjC::getStartDate() const
{
    // The item's date advances with playback, so the playhead offset must be
    // subtracted back out to recover the stream's start date.
    double dateMilliseconds = [[m_avPlayerItem currentDate] timeIntervalSince1970] * 1000;

    // AVFoundation reports 0 when the media file has no start date; no live
    // stream was made during the 1970 epoch, so treat zero as "no date".
    if (!dateMilliseconds)
        return MediaTime::invalidTime();

    double playheadMilliseconds = CMTimeGetSeconds([m_avPlayerItem currentTime]) * 1000;

    // Round to compensate for the second-offset error the subtraction introduces.
    return MediaTime::createWithDouble(round(dateMilliseconds - playheadMilliseconds));
}
750
// True when a frame is ready to show for whichever rendering path is active.
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    bool renderingToLayer = currentRenderingMode() == MediaRenderingToLayer;
    return renderingToLayer ? m_cachedIsReadyForDisplay : m_videoFrameHasDrawn;
}
758
759 #if ENABLE(AVF_CAPTIONS)
// Maps a WebCore text-track kind onto the AVFoundation media-characteristic
// strings used to describe an out-of-band track. Unknown kinds fall through
// to the caption/subtitle characteristic.
// FIXME: Match these to correct types:
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    if (kind == PlatformTextTrack::Caption)
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];

    if (kind == PlatformTextTrack::Subtitle)
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];

    if (kind == PlatformTextTrack::Description)
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility ];

    if (kind == PlatformTextTrack::Forced)
        return @[ AVMediaCharacteristicContainsOnlyForcedSubtitles ];

    return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];
}
777     
// Called when an out-of-band text track's mode changes; forwards to the
// shared trackModeChanged() handling.
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
782     
// Pushes the mode (showing/hidden/disabled) chosen by the page for each
// out-of-band track source onto the matching platform text track. Tracks are
// matched by the unique identifier stored in the AV media selection option.
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    
    for (auto& textTrack : m_textTracks) {
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;
        
        RefPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> currentOption = trackPrivate->mediaSelectionOption();
        
        for (auto& track : outOfBandTrackSources) {
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();
            
            // If currentOption is nil the message returns nil, isEqual: returns NO,
            // and this source is skipped — relies on Objective-C nil messaging.
            if (![[currentOption.get() outOfBandIdentifier] isEqual: reinterpret_cast<const NSString*>(uniqueID.get())])
                continue;
            
            // Translate the platform-track mode to the inband-track mode;
            // anything unrecognized defaults to Hidden.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;
            
            textTrack->setMode(mode);
            break;
        }
    }
}
813 #endif
814
815
// Runs the URL through NSURLProtocol canonicalization so it matches the form
// the loader will ultimately request; falls back to the unmodified URL at any
// point where canonicalization is not possible.
static NSURL *canonicalURL(const String& url)
{
    NSURL *originalURL = URL(ParsedURLString, url);
    if (url.isEmpty())
        return originalURL;

    RetainPtr<NSURLRequest> originalRequest = adoptNS([[NSURLRequest alloc] initWithURL:originalURL]);
    if (!originalRequest)
        return originalURL;

    NSURLRequest *canonicalizedRequest = [NSURLProtocol canonicalRequestForRequest:originalRequest.get()];
    if (!canonicalizedRequest)
        return originalURL;

    return [canonicalizedRequest URL];
}
832
833 #if PLATFORM(IOS)
// Converts a WebCore Cookie into the NSHTTPCookie form AVFoundation expects.
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    // Build from a dictionary literal instead of alloc/init + setDictionary:.
    RetainPtr<NSMutableDictionary> properties = adoptNS([@{
        NSHTTPCookieName: cookie.name,
        NSHTTPCookieValue: cookie.value,
        NSHTTPCookieDomain: cookie.domain,
        NSHTTPCookiePath: cookie.path,
        NSHTTPCookieExpires: [NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)], // expires is in milliseconds
    } mutableCopy]);
    if (cookie.secure)
        [properties setObject:@YES forKey:NSHTTPCookieSecure];
    if (cookie.session)
        [properties setObject:@YES forKey:NSHTTPCookieDiscard];

    return [NSHTTPCookie cookieWithProperties:properties.get()];
}
851 #endif
852
// Creates the AVURLAsset for the given URL, configuring reference
// restrictions, HTTP headers, out-of-band text tracks, and (on iOS) cookies
// and the bound network interface before handing the URL to AVFoundation.
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
{
    if (m_avAsset)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(%p) - url = %s", this, url.utf8().data());

    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);    

    // Forbid local media from referencing remote media and vice versa.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject: [NSNumber numberWithBool: TRUE] forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

#if PLATFORM(IOS)
    // FIXME: rdar://problem/20354688
    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty() && AVURLAssetClientBundleIdentifierKey)
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];
#endif

#if ENABLE(AVF_CAPTIONS)
    // Describe any out-of-band text tracks so AVFoundation exposes them as
    // media selection options alongside the in-band tracks.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: reinterpret_cast<const NSString*>(label.get()),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: reinterpret_cast<const NSString*>(language.get()),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: reinterpret_cast<const NSString*>(uniqueID.get()),
                AVOutOfBandAlternateTrackSourceKey: reinterpret_cast<const NSString*>(url.get()),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS)
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS)
    // Forward the page's cookies for this URL so media loads carry the same
    // cookie state as the document.
    Vector<Cookie> cookies;
    if (player()->getRawCookies(URL(ParsedURLString, url), cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([allocAVURLAssetInstance() initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    [[m_avAsset.get() resourceLoader] setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];
#endif

    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
940
// Installs the given item as the AVPlayer's current item. The swap happens on
// the main thread; when called from another thread it is dispatched
// asynchronously, with RetainPtr captures keeping player and item alive.
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItemType *item)
{
    if (!m_avPlayer)
        return;

    if (pthread_main_np()) {
        [m_avPlayer replaceCurrentItemWithPlayerItem:item];
        return;
    }

    RetainPtr<AVPlayerType> strongPlayer = m_avPlayer.get();
    RetainPtr<AVPlayerItemType> strongItem = item;
    dispatch_async(dispatch_get_main_queue(), [strongPlayer, strongItem] {
        [strongPlayer replaceCurrentItemWithPlayerItem:strongItem.get()];
    });
}
957
// Creates and configures the AVPlayer, registering KVO observers for every
// property in playerKVOProperties() and attaching the current item/layer.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayer(%p)", this);

    setDelayCallbacks(true);

    m_avPlayer = adoptNS([allocAVPlayerInstance() init]);
    for (NSString *keyName in playerKVOProperties())
        [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Media selection is driven by WebCore, not by AVFoundation's automatic criteria.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET) && !PLATFORM(IOS)
    // Re-apply a playback target that was chosen before the player existed.
    if (m_shouldPlayToPlaybackTarget)
        setShouldPlayToPlaybackTarget(true);
#endif

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
993
// Creates the AVPlayerItem from the current asset and wires up everything
// that hangs off it: the end-of-playback notification, KVO observers, pitch
// algorithm, the legible (caption) output, and the Web Audio source provider.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem(%p)", this);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([allocAVPlayerItemInstance() initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // NSKeyValueObservingOptionPrior delivers a "will change" notification
    // before each change in addition to the "did change" one.
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Deliver WebVTT cues to m_objcObserver on the main queue ahead of their
    // display time, without letting AVFoundation render them itself.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([allocAVPlayerItemLegibleOutputInstance() initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio source provider pointed at the new item.
    if (m_provider) {
        m_provider->setPlayerItem(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }
#endif

    setDelayCallbacks(false);
}
1039
// Starts an asynchronous load of the asset's "playable" key (at most once);
// the result is reported on the main thread via the AssetPlayabilityKnown
// notification.
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::checkPlayability(%p)", this);
    auto weakThis = createWeakPtr();

    // The weak pointer stops the completion handler from touching a destroyed player.
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"playable"] completionHandler:^{
        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
1056
// Requests asynchronous loading of the asset-level metadata keys and, once
// the asset's tracks are known, the per-track metadata keys. A dispatch group
// tracks every outstanding load so metadataLoaded is delivered on the main
// thread only after all of them have finished.
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);

    OSObjectPtr<dispatch_group_t> metadataLoadingGroup = adoptOSObject(dispatch_group_create());
    // Balanced by the leave at the end of the completion handler below.
    dispatch_group_enter(metadataLoadingGroup.get());
    auto weakThis = createWeakPtr();
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                // One enter/leave pair per track keeps the group open until
                // every track's metadata has loaded.
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup.get());
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup.get());
                    }];
                }
            }
            dispatch_group_leave(metadataLoadingGroup.get());
        });
    }];

    dispatch_group_notify(metadataLoadingGroup.get(), dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });
    });
}
1086
// Maps the cached AVPlayerItem state onto the cross-platform item status,
// checking the most specific states first.
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    switch (m_cachedItemStatus) {
    case AVPlayerItemStatusUnknown:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    case AVPlayerItemStatusFailed:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    default:
        break;
    }

    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1105
// Exposes the underlying AVPlayer to the cross-platform PlatformMedia wrapper.
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformMedia(%p)", this);
    PlatformMedia platformMedia;
    platformMedia.type = PlatformMedia::AVFoundationMediaPlayerType;
    platformMedia.media.avfMediaPlayer = m_avPlayer.get();
    return platformMedia;
}
1114
// Returns the layer to composite for this player, or null until layer
// creation has been requested. On iOS this is the inline container layer
// rather than the AVPlayerLayer itself.
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    if (!m_haveBeenAskedToCreateLayer)
        return nullptr;

#if PLATFORM(IOS)
    return m_videoInlineLayer.get();
#else
    return m_videoLayer.get();
#endif
}
1123
1124 #if PLATFORM(IOS)
// Moves the video layer between the inline container and the fullscreen host
// layer. A CA fence port synchronizes the commit when the layer crosses
// between two CAContexts.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer)
{
    if (m_videoFullscreenLayer == videoFullscreenLayer)
        return;

    m_videoFullscreenLayer = videoFullscreenLayer;

    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    
    CAContext *oldContext = [m_videoLayer context];
    CAContext *newContext = nil;
    
    if (m_videoFullscreenLayer && m_videoLayer) {
        // Entering fullscreen: reparent under the fullscreen layer.
        [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
        [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
        newContext = [m_videoFullscreenLayer context];
    } else if (m_videoInlineLayer && m_videoLayer) {
        // Leaving fullscreen: reparent back under the inline container.
        [m_videoLayer setFrame:[m_videoInlineLayer bounds]];
        [m_videoLayer removeFromSuperlayer];
        [m_videoInlineLayer insertSublayer:m_videoLayer.get() atIndex:0];
        newContext = [m_videoInlineLayer context];
    } else if (m_videoLayer)
        [m_videoLayer removeFromSuperlayer];

    if (oldContext && newContext && oldContext != newContext) {
        // Fence the two contexts so both sides commit the move together.
        mach_port_t fencePort = [oldContext createFencePort];
        [newContext setFencePort:fencePort];
        mach_port_deallocate(mach_task_self(), fencePort);
    }
    [CATransaction commit];

    if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
    }

    updateDisableExternalPlayback();
}
1164
// Records the fullscreen frame and resizes the video layer (and text track
// representation) to match when a fullscreen layer is active.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenFrame = frame;
    if (!m_videoFullscreenLayer)
        return;

    if (m_videoLayer)
        [m_videoLayer setFrame:CGRectMake(0, 0, frame.width(), frame.height())];

    syncTextTrackBounds();
}
1176
// Applies the requested fullscreen gravity to the AVPlayerLayer, skipping the
// update when the layer already uses the equivalent AVFoundation gravity.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;
    if (!m_videoLayer)
        return;

    NSString *videoGravity;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        videoGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        videoGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    }

    if ([m_videoLayer videoGravity] == videoGravity)
        return;

    [m_videoLayer setVideoGravity:videoGravity];
    syncTextTrackBounds();
}
1199
// Propagates the picture-in-picture bit of the fullscreen mode to the layer,
// where the OS version supports -setPIPModeEnabled:.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenMode(MediaPlayer::VideoFullscreenMode mode)
{
    if (!m_videoLayer)
        return;

    if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(mode & MediaPlayer::VideoFullscreenModePictureInPicture)];
}
1205
// Returns the most recently cached timed metadata, or nil when none exists.
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    return m_currentMetaData ? m_currentMetaData.get() : nil;
}
1212
// Returns the AVPlayerItem's access log as text, or the empty string when no
// item exists yet.
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemAccessLog *accessLog = [m_avPlayerItem.get() accessLog];
    RetainPtr<NSString> logText = adoptNS([[NSString alloc] initWithData:[accessLog extendedLogData] encoding:[accessLog extendedLogDataStringEncoding]]);
    return logText.get();
}
1223
// Returns the AVPlayerItem's error log as text, or the empty string when no
// item exists yet. Mirrors accessLog().
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *errorLog = [m_avPlayerItem.get() errorLog];
    RetainPtr<NSString> logText = adoptNS([[NSString alloc] initWithData:[errorLog extendedLogData] encoding:[errorLog extendedLogDataStringEncoding]]);
    return logText.get();
}
1234 #endif
1235
// Shows or hides the video layer inside an implicit-animation-free transaction.
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    if (m_videoLayer)
        [m_videoLayer setHidden:!isVisible];
    [CATransaction commit];
}
1244     
// Starts playback by applying the requested rate to the AVPlayer.
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPlay(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    // Query the requested rate once and reuse it so the cached value and the
    // rate handed to AVFoundation cannot disagree.
    double rate = requestedRate();
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1256
// Pauses playback; in AVFoundation pausing is setting the rate to zero.
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPause(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = 0;
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
1268
// Returns the media duration: numeric durations map directly, indefinite
// durations (live streams) map to positive infinity, and anything else is
// reported as invalid.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will fetch the
    // answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();
    
    CMTime cmDuration;
    
    // Check the AVItem if we have one and it has loaded duration, some assets never report duration.
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        cmDuration = [m_avPlayerItem.get() duration];
    else
        cmDuration = [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return toMediaTime(cmDuration);

    if (CMTIME_IS_INDEFINITE(cmDuration))
        return MediaTime::positiveInfiniteTime();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %s", this, toString(MediaTime::invalidTime()).utf8().data());
    return MediaTime::invalidTime();
}
1293
// Current playback position, clamped so a time before zero is never reported.
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime playerTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(playerTime))
        return MediaTime::zeroTime();

    return std::max(toMediaTime(playerTime), MediaTime::zeroTime());
}
1305
// Seeks the player item to |time| within the given tolerances; completion is
// reported asynchronously through seekCompleted() on the main thread.
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // Partially-delivered metadata cues become stale across a seek.
    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = toCMTime(time);
    CMTime cmBefore = toCMTime(negativeTolerance);
    CMTime cmAfter = toCMTime(positiveTolerance);

    auto weakThis = createWeakPtr();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::seekToTime(%p) - calling seekToTime", this);

    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1334
// Applies the volume to the AVPlayer. On iOS the parameter is deliberately
// ignored and the method is a no-op.
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS)
    UNUSED_PARAM(volume);
    return;
#else
    if (!metaDataAvailable())
        return;

    [m_avPlayer.get() setVolume:volume];
#endif
}
1347
// Currently only logs the request; aside from the log message the parameter
// is unused in this implementation.
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - set to %s", this, boolString(closedCaptionsVisible));
}
1357
// Sets the playback rate on the AVPlayer, caching the value so rate() can
// answer without querying AVFoundation. Callbacks are delayed while the
// change is applied.
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    setDelayCallbacks(true);
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1365
// Returns the cached playback rate, or 0 before metadata is available.
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    return metaDataAvailable() ? m_cachedRate : 0;
}
1373
// Chooses the time-pitch algorithm: spectral preserves pitch across rate
// changes, varispeed does not.
void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
{
    if (!m_avPlayerItem)
        return;

    NSString *algorithm = preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed;
    [m_avPlayerItem setAudioTimePitchAlgorithm:algorithm];
}
1379
// Converts the cached loaded ranges into PlatformTimeRanges, dropping any
// range that is invalid or empty.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto ranges = std::make_unique<PlatformTimeRanges>();

    if (!m_avPlayerItem)
        return ranges;

    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange loadedRange = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(loadedRange) || CMTIMERANGE_IS_EMPTY(loadedRange))
            continue;

        ranges->add(toMediaTime(loadedRange.start), toMediaTime(CMTimeRangeGetEnd(loadedRange)));
    }
    return ranges;
}
1394
// Earliest seekable time across all valid seekable ranges, or zero when the
// item reports none.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    bool foundValidRange = false;
    MediaTime earliest = MediaTime::positiveInfiniteTime();
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange seekableRange = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(seekableRange) || CMTIMERANGE_IS_EMPTY(seekableRange))
            continue;

        foundValidRange = true;
        earliest = std::min(earliest, toMediaTime(seekableRange.start));
    }
    return foundValidRange ? earliest : MediaTime::zeroTime();
}
1414
// Latest seekable time across all valid seekable ranges. Populates the cache
// from the item on demand when it has not been filled yet.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    MediaTime latest;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange seekableRange = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(seekableRange) || CMTIMERANGE_IS_EMPTY(seekableRange))
            continue;

        latest = std::max(latest, toMediaTime(CMTimeRangeGetEnd(seekableRange)));
    }
    return latest;
}
1432
// Latest buffered time across all valid loaded ranges, or zero when nothing
// has been cached yet. Mirrors platformMaxTimeSeekable().
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    MediaTime latest;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange loadedRange = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(loadedRange) || CMTIMERANGE_IS_EMPTY(loadedRange))
            continue;

        latest = std::max(latest, toMediaTime(CMTimeRangeGetEnd(loadedRange)));
    }

    return latest;
}
1451
// Total sample-data size across all cached tracks, computed lazily and then
// served from m_cachedTotalBytes.
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    if (!m_cachedTotalBytes) {
        for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get())
            m_cachedTotalBytes += [[playerItemTrack assetTrack] totalSampleDataLength];
    }

    return m_cachedTotalBytes;
}
1465
// Stores the AVAsset created during loading. Held as an untyped RetainPtr<id>
// because AVFoundation classes are soft-linked.
void MediaPlayerPrivateAVFoundationObjC::setAsset(RetainPtr<id> asset)
{
    m_avAsset = asset;
}
1470
// Maps AVFoundation's per-key loading state onto the engine-independent
// AssetStatus enumeration. The first key that is still loading, failed, or
// cancelled determines the result; otherwise the asset's "playable" value
// decides between Playable and Loaded.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    // Check each metadata key the loader asked AVFoundation to resolve.
    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];
#if !LOG_DISABLED
        if (error)
            LOG(Media, "MediaPlayerPrivateAVFoundation::assetStatus - statusOfValueForKey failed for %s, error = %s", [keyName UTF8String], [[error localizedDescription] UTF8String]);
#endif

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading;// At least one key is not loaded yet.
        
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue])
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1499
// Returns the NSError code reported while loading the asset's "playable"
// key, or 0 when there is no asset or no error occurred.
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    if (!m_avAsset)
        return 0;

    NSError *loadError = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&loadError];
    // Messaging nil would also yield 0; the explicit check documents intent.
    return loadError ? [loadError code] : 0;
}
1509
// Paints the current video frame into the graphics context, preferring the
// AVPlayerItemVideoOutput path when a frame is available and falling back to
// the AVAssetImageGenerator path otherwise.
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    // Defer engine callbacks for the duration of the paint (see setDelayCallbacks()).
    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    m_videoFrameHasDrawn = true;
}
1530
// Best-effort paint entry point: draws the current frame only when not
// already rendering to a layer and a context renderer (image generator or
// video output) already exists.
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    // We can ignore the request if we are already rendering to a layer, and
    // since paint() is best effort we only proceed when a renderer exists.
    bool canPaintNow = currentRenderingMode() != MediaRenderingToLayer && hasContextRenderer();
    if (canPaintNow)
        paintCurrentFrameInContext(context, rect);
}
1546
// Snapshots the current time via the image generator and draws it into the
// context, flipping the coordinate system so the CGImage appears upright.
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext& context, const FloatRect& rect)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(%p)", this);

    RetainPtr<CGImageRef> snapshot = createImageForTimeInRect(currentTime(), rect);
    if (!snapshot)
        return;

    GraphicsContextStateSaver stateSaver(context);
    // Flip vertically: CG images have a bottom-left origin.
    context.translate(rect.x(), rect.y() + rect.height());
    context.scale(FloatSize(1.0f, -1.0f));
    context.setImageInterpolationQuality(InterpolationLow);
    IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context.platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), snapshot.get());
    snapshot = nullptr;
}
1562
// Lazily builds the set of MIME types AVFoundation reports as playable,
// lower-cased so lookups are effectively case-insensitive. Computed once and
// cached for the lifetime of the process.
static const HashSet<String>& avfMIMETypes()
{
    static NeverDestroyed<HashSet<String>> cachedTypes = [] () {
        HashSet<String> mimeTypes;
        for (NSString *mimeType in [AVURLAsset audiovisualMIMETypes])
            mimeTypes.add([mimeType lowercaseString]);
        return mimeTypes;
    }();

    return cachedTypes;
}
1578
// Synchronously generates a CGImage of the frame at |time| via
// AVAssetImageGenerator, copied into the sRGB color space. May return a null
// image when generation fails (errors are ignored here).
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    // Constrain the generated image to the size of the paint rect.
    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), sRGBColorSpaceRef()));

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return image;
}
1600
// Reports the set of MIME types this engine can play (AVFoundation's
// audiovisual MIME types, cached and lower-cased by avfMIMETypes()).
// Note: the visible source was missing this function's closing brace, which
// is a syntax error; it is restored here.
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String>& supportedTypes)
{
    supportedTypes = avfMIMETypes();
}

#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// True for the key systems this engine understands: Apple FairPlay Streaming
// ("com.apple.fps", "com.apple.fps.1_0") and W3C Clear Key.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringASCIICase(keySystem, "com.apple.fps")
        || equalIgnoringASCIICase(keySystem, "com.apple.fps.1_0")
        || equalIgnoringASCIICase(keySystem, "org.w3c.clearkey");
}
#endif
1614
// Engine capability probe used during media engine selection. Answers
// IsNotSupported / MayBeSupported / IsSupported for the given container type,
// codecs, and (when EME is enabled) key system.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(ENCRYPTED_MEDIA)
    // From: <http://dvcs.w3.org/hg/html-media/raw-file/eme-v0.1b/encrypted-media/encrypted-media.html#dom-canplaytype>
    // In addition to the steps in the current specification, this method must run the following steps:

    // 1. Check whether the Key System is supported with the specified container and codec type(s) by following the steps for the first matching condition from the following list:
    //    If keySystem is null, continue to the next step.
    if (!parameters.keySystem.isNull() && !parameters.keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        if (equalIgnoringASCIICase(parameters.keySystem, "org.w3c.clearkey") && !parameters.type.isEmpty() && !equalIgnoringASCIICase(parameters.type, "application/x-mpegurl"))
            return MediaPlayer::IsNotSupported;

        // If keySystem contains an unrecognized or unsupported Key System, return the empty string
        if (!keySystemIsSupported(parameters.keySystem))
            return MediaPlayer::IsNotSupported;

        // If the Key System specified by keySystem does not support decrypting the container and/or codec specified in the rest of the type string.
        // (AVFoundation does not provide an API which would allow us to determine this, so this is a no-op)
    }

    // 2. Return "maybe" or "probably" as appropriate per the existing specification of canPlayType().
#endif

#if ENABLE(MEDIA_SOURCE)
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif
#if ENABLE(MEDIA_STREAM)
    if (parameters.isMediaStream)
        return MediaPlayer::IsNotSupported;
#endif
    if (isUnsupportedMIMEType(parameters.type))
        return MediaPlayer::IsNotSupported;

    // Reject types that neither the static list nor AVFoundation claims to play.
    if (!staticMIMETypeList().contains(parameters.type) && !avfMIMETypes().contains(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    // With codecs present, ask AVFoundation about the full extended MIME type.
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type, (NSString *)parameters.codecs];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1661
// Returns whether this engine supports the given key system, optionally
// constrained by a MIME type. Mirrors the key-system checks performed in
// supportsType(). Fix: the Clear Key rejection previously returned
// MediaPlayer::IsNotSupported (a SupportsType enumerator) from this bool
// function — a copy/paste from supportsType(); it now returns false.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (!keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        if (equalIgnoringASCIICase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringASCIICase(mimeType, "application/x-mpegurl"))
            return false;

        if (!keySystemIsSupported(keySystem))
            return false;

        if (!mimeType.isEmpty() && isUnsupportedMIMEType(mimeType))
            return false;

        if (!mimeType.isEmpty() && !staticMIMETypeList().contains(mimeType) && !avfMIMETypes().contains(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1687
1688 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
#if ENABLE(ENCRYPTED_MEDIA_V2)
// Satisfies an AVAssetResourceLoadingRequest with bytes from |keyData|,
// honoring the request's current offset and requested length, then marks the
// request finished. Finishes with an error when the requested range does not
// intersect the key data.
static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
{
    if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
        [infoRequest setContentLength:keyData->byteLength()];
        [infoRequest setByteRangeAccessSupported:YES];
    }

    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
        long long start = [dataRequest currentOffset];
        // Clamp the end of the response to the available key data.
        long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);

        if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
            [request finishLoadingWithError:nil];
            return;
        }

        ASSERT(start <= std::numeric_limits<int>::max());
        ASSERT(end <= std::numeric_limits<int>::max());
        RefPtr<ArrayBuffer> requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
        RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
        [dataRequest respondWithData:nsData.get()];
    }

    [request finishLoading];
}
#endif
1716
// Resource-loader delegate entry point. Returns true when WebKit takes
// responsibility for loading |avRequest| (EME key requests, or generic media
// loads routed through WebCoreAVFResourceLoader); false when the request
// should not be waited on.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (scheme == "skd") {
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
        initDataView->set<uint32_t>(0, keyURISize, true);

        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, 4, keyURI.length());
        // NOTE(review): dividing by sizeof(unsigned char) is a no-op (== 1);
        // the intended element count is presumably just keyURI.length() — confirm.
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

#if ENABLE(ENCRYPTED_MEDIA)
        if (!player()->keyNeeded("com.apple.lskd", emptyString(), static_cast<const unsigned char*>(initDataBuffer->data()), initDataBuffer->byteLength()))
#elif ENABLE(ENCRYPTED_MEDIA_V2)
        RefPtr<Uint8Array> initData = Uint8Array::create(initDataBuffer, 0, initDataBuffer->byteLength());
        if (!player()->keyNeeded(initData.get()))
#endif
            return false;

        // Remember the request so a CDM session can fulfill it once a key arrives.
        m_keyURIToRequestMap.set(keyURI, avRequest);
        return true;
#if ENABLE(ENCRYPTED_MEDIA_V2)
    } else if (scheme == "clearkey") {
        String keyID = [[[avRequest request] URL] resourceSpecifier];
        StringView keyIDView(keyID);
        CString utf8EncodedKeyId = UTF8Encoding().encode(keyIDView, URLEncodedEntitiesForUnencodables);

        RefPtr<Uint8Array> initData = Uint8Array::create(utf8EncodedKeyId.length());
        initData->setRange((JSC::Uint8Adaptor::Type*)utf8EncodedKeyId.data(), utf8EncodedKeyId.length(), 0);

        // Answer immediately from the player's key cache when possible.
        auto keyData = player()->cachedKeyForKeyId(keyID);
        if (keyData) {
            fulfillRequestWithKeyData(avRequest, keyData.get());
            return false;
        }

        if (!player()->keyNeeded(initData.get()))
            return false;

        m_keyURIToRequestMap.set(keyID, avRequest);
        return true;
#endif
    }
#endif

    // Not a key request: hand the load off to a WebCoreAVFResourceLoader.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1773
// Wraps the platform NSURLAuthenticationChallenge in a WebCore
// AuthenticationChallenge (via CFNetwork when so configured) and forwards
// the decision to the MediaPlayer client.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForResponseToAuthenticationChallenge(NSURLAuthenticationChallenge* nsChallenge)
{
#if USE(CFNETWORK)
    RefPtr<WebCoreNSURLAuthenticationChallengeClient> client = WebCoreNSURLAuthenticationChallengeClient::create(nsChallenge);
    RetainPtr<CFURLAuthChallengeRef> cfChallenge = adoptCF([nsChallenge _createCFAuthChallenge]);
    AuthenticationChallenge challenge(cfChallenge.get(), client.get());
#else
    AuthenticationChallenge challenge(nsChallenge);
#endif

    return player()->shouldWaitForResponseToAuthenticationChallenge(challenge);
}
1786
// Called when AVFoundation cancels an in-flight resource loading request;
// stops the matching WebCoreAVFResourceLoader, if one was registered.
// Fix: removed an unused local (the request URL's scheme was computed but
// never read).
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest))
        resourceLoader->stopLoading();
}
1796
// Drops the bookkeeping entry for a request whose load has stopped; the
// mapped WebCoreAVFResourceLoader is released along with it.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
1801 #endif
1802
// The engine is usable only when both the AVFoundation and CoreMedia
// frameworks can be soft-linked at runtime.
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    return AVFoundationLibrary() && isCoreMediaFrameworkAvailable();
}
1807
// Identity mapping for now: both branches of the previous metaDataAvailable()
// check returned the input unchanged, so the check was dead code and has been
// removed.
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1816
// How long (in seconds) a cached currentTime value may be reused; 0 disables
// time caching on iOS and on OS X 10.10 and later.
double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 1010
    return 0;
#else
    return 5;
#endif
}
1825
// Applies the current aspect-ratio preference to the video layer's gravity,
// inside a CATransaction with implicit animations disabled.
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

#if PLATFORM(IOS)
    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayer)
        return;
#endif

    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    NSString* gravity = shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize;
    [m_videoLayer.get() setVideoGravity:gravity];
    [CATransaction commit];
}
1844
// Returns the first track in |tracks| whose isEnabled flag is set, or nil
// when no track is enabled.
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    NSUInteger enabledIndex = [tracks indexOfObjectPassingTest:^(id track, NSUInteger, BOOL *) {
        return [static_cast<AVAssetTrack*>(track) isEnabled];
    }];
    return enabledIndex == NSNotFound ? nil : [tracks objectAtIndex:enabledIndex];
}
1854
// Recomputes hasVideo/hasAudio/hasCaptions and the engine's track lists after
// AVFoundation reports a change in the asset's or player item's tracks.
// Characteristics-changed notifications are batched for the duration via
// setDelayCharacteristicsChangedNotification().
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the previous primary-audio language so we can detect a change below.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly fequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif

        presentationSizeDidChange(firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize());
    } else {
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        // Classify each enabled player item track by its asset track's media type.
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // A selected media-selection option also counts as having audio/video.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
#endif
#endif

        // Always says we have video if the AVPlayerLayer is ready for diaplay to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    LOG(Media, "MediaPlayerPrivateAVFoundation:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s",
        this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    if (m_provider)
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
#endif

    setDelayCharacteristicsChangedNotification(false);
}
1960
1961 #if ENABLE(VIDEO_TRACK)
// Diffs the AVPlayerItemTracks of |trackType| within |tracks| against the
// previously known |oldItems|: wraps newly appeared tracks via |itemFactory|,
// rewrites |oldItems| in place, then notifies |player| of removals followed
// by additions.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Partition existing items into those whose track disappeared and those to keep.
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
2005
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Media-selection-group variant of the diff above: refreshes the group's
// options for the given |characteristics|, diffs the options without a
// persistent track against |oldItems|, rewrites |oldItems| in place, then
// notifies |player| of removals followed by additions.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, const Vector<String>& characteristics, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions(characteristics);

    // Only add selection options which do not have an associated persistant track.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOptionType* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        if (![avOption respondsToSelector:@selector(track)] || ![avOption performSelector:@selector(track)])
            newSelectionOptions.add(option);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Partition existing items into those whose option disappeared and those to keep.
    for (auto& oldItem : oldItems) {
        if (oldItem->mediaSelectionOption() && removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);
    
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);
    
    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
#endif
2067
// Reconciles m_audioTracks with the current AVPlayerItemTracks and (when
// available) the audible media-selection group, then refreshes each track's
// cached properties.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    Vector<String> characteristics = player()->preferredAudioCharacteristics();
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, characteristics);
    }

    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, characteristics, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
#endif

    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateAudioTracks(%p) - audio track count was %lu, is %lu", this, count, m_audioTracks.size());
#endif
}
2094
// Reconciles m_videoTracks with the current AVPlayerItemTracks and (when
// available) the visual media-selection group, then refreshes each track's
// cached properties.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (!m_visualGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, Vector<String>());
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, Vector<String>(), &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
#endif

    // Refresh cached properties on the *video* tracks. (This previously
    // iterated m_audioTracks — a copy/paste error from updateAudioTracks().)
    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateVideoTracks(%p) - video track count was %lu, is %lu", this, count, m_videoTracks.size());
#endif
}
2120
// A text track representation is only required on iOS while a fullscreen
// video layer is active; false everywhere else.
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
#if PLATFORM(IOS)
    if (m_videoFullscreenLayer)
        return true;
#endif
    return false;
}
2129
// iOS-only: resizes the caption layer to match the video layer's displayed
// rect, or the fullscreen frame when no video layer exists. No-op elsewhere.
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
#if PLATFORM(IOS)
    if (!m_videoFullscreenLayer || !m_textTrackRepresentationLayer)
        return;
    
    CGRect textFrame = m_videoLayer ? [m_videoLayer videoRect] : CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height());
    [m_textTrackRepresentationLayer setFrame:textFrame];
#endif
}
2140
// iOS-only: installs (or removes) the platform layer that renders text track
// cues, parenting it under the fullscreen layer when both exist. Passing the
// current representation simply re-syncs its bounds.
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
#if PLATFORM(IOS)
    PlatformLayer* representationLayer = representation ? representation->platformLayer() : nil;
    if (representationLayer == m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        return;
    }

    if (m_textTrackRepresentationLayer)
        [m_textTrackRepresentationLayer removeFromSuperlayer];

    m_textTrackRepresentationLayer = representationLayer;

    if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
    }

#else
    UNUSED_PARAM(representation);
#endif
}
2164 #endif // ENABLE(VIDEO_TRACK)
2165
#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
// Lazily creates the Web Audio source provider for this player item and
// points it at the first enabled audible asset track.
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    if (!m_provider) {
        m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }

    return m_provider.get();
}
#endif
2177
// Pushes the cached presentation size to the cross-platform base class as
// the natural size. No-op until an asset exists.
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (!m_avAsset)
        return;

    setNaturalSize(m_cachedPresentationSize);
}
2185     
// True when the requested URL and the resolved (post-redirect) URL share the
// same scheme/host/port, i.e. loading did not cross a security origin.
// Requires the asset's "resolvedURL" key to have finished loading.
bool MediaPlayerPrivateAVFoundationObjC::hasSingleSecurityOrigin() const 
{
    if (!m_avAsset || [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] != AVKeyValueStatusLoaded)
        return false;
    
    Ref<SecurityOrigin> resolvedOrigin(SecurityOrigin::create(resolvedURL()));
    Ref<SecurityOrigin> requestedOrigin(SecurityOrigin::createFromString(assetURL()));
    return resolvedOrigin.get().isSameSchemeHostPort(&requestedOrigin.get());
}
2195
2196 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Creates an AVPlayerItemVideoOutput, wires up the pull delegate, and
// attaches the output to the player item. No-op when there is no player item
// or an output already exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // nil attributes defer the pixel format choice to the output itself.
    NSDictionary* attributes = nil;
#else
    NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                nil];
#endif
    m_videoOutput = adoptNS([allocAVPlayerItemVideoOutputInstance() initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p) - returning %p", this, m_videoOutput.get());
}
2219
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    // Detach and release the AVPlayerItemVideoOutput created by createVideoOutput().
    if (!m_videoOutput)
        return;

    // The player item may already have been torn down; only detach when it exists.
    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput(%p) - destroying  %p", this, m_videoOutput.get());

    m_videoOutput = nullptr;
}
2231
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::createPixelBuffer()
{
    // Pull the pixel buffer for the player item's current time from the video
    // output (creating the output on demand). Returns null when the output has
    // no new frame for that time.
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return 0;

    RetainPtr<CVPixelBufferRef> buffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
    if (!buffer)
        return 0;

#if USE(VIDEOTOOLBOX)
    // Create a VTPixelTransferSession, if necessary, as we cannot guarantee timely delivery of ARGB pixels.
    if (!m_pixelTransferSession) {
        VTPixelTransferSessionRef session = 0;
        VTPixelTransferSessionCreate(kCFAllocatorDefault, &session);
        m_pixelTransferSession = adoptCF(session);
    }

    // Convert the frame to 32BGRA. Only adopt the destination buffer when
    // CVPixelBufferCreate succeeded; the previous code passed an uninitialized
    // CVPixelBufferRef to the transfer session on failure. On failure we fall
    // back to the buffer as vended by the video output.
    CVPixelBufferRef outputBuffer = nullptr;
    if (CVPixelBufferCreate(kCFAllocatorDefault, CVPixelBufferGetWidth(buffer.get()), CVPixelBufferGetHeight(buffer.get()), kCVPixelFormatType_32BGRA, 0, &outputBuffer) == kCVReturnSuccess) {
        VTPixelTransferSessionTransferImage(m_pixelTransferSession.get(), buffer.get(), outputBuffer);
        buffer = adoptCF(outputBuffer);
    }
#endif

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createPixelBuffer(%p) - creating buffer took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return buffer;
}
2272
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    // True when a frame can be painted: either we already captured one
    // (m_lastImage) or the video output has a new buffer for the current time.
    if (!m_avPlayerItem)
        return false;

    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    return [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]];
}
2286
// CGDataProvider "get byte pointer" callback: locks the pixel buffer's base
// address for read-only access. Balanced by CVPixelBufferReleaseBytePointerCallback.
static const void* CVPixelBufferGetBytePointerCallback(void* info)
{
    CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
    return CVPixelBufferGetBaseAddress(pixelBuffer);
}
2293
// CGDataProvider "release byte pointer" callback: unlocks the base address
// locked by CVPixelBufferGetBytePointerCallback.
static void CVPixelBufferReleaseBytePointerCallback(void* info, const void*)
{
    CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
}
2299
// CGDataProvider "release info" callback: balances the CFRetain performed in
// createImageFromPixelBuffer() when the provider is destroyed.
static void CVPixelBufferReleaseInfoCallback(void* info)
{
    CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(info);
    CFRelease(pixelBuffer);
}
2305
// Wraps a 32BGRA pixel buffer in a CGImage without copying the pixels: a
// direct-access CGDataProvider reads straight from the buffer's base address,
// and the provider keeps the buffer alive via the retain/release callbacks above.
static RetainPtr<CGImageRef> createImageFromPixelBuffer(CVPixelBufferRef pixelBuffer)
{
    // pixelBuffer will be of type kCVPixelFormatType_32BGRA.
    ASSERT(CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_32BGRA);

    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
    size_t byteLength = CVPixelBufferGetDataSize(pixelBuffer);
    // BGRA in little-endian byte order == ARGB with 32-bit little-endian packing.
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaFirst;

    CFRetain(pixelBuffer); // Balanced by CVPixelBufferReleaseInfoCallback in providerCallbacks.
    CGDataProviderDirectCallbacks providerCallbacks = { 0, CVPixelBufferGetBytePointerCallback, CVPixelBufferReleaseBytePointerCallback, 0, CVPixelBufferReleaseInfoCallback };
    RetainPtr<CGDataProviderRef> provider = adoptCF(CGDataProviderCreateDirect(pixelBuffer, byteLength, &providerCallbacks));

    return adoptCF(CGImageCreate(width, height, 8, 32, bytesPerRow, sRGBColorSpaceRef(), bitmapInfo, provider.get(), NULL, false, kCGRenderingIntentDefault));
}
2323
void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
{
    // Refresh m_lastImage from the video output, keeping the previous image
    // when no new pixel buffer is available for the current time.
    RetainPtr<CVPixelBufferRef> pixelBuffer = createPixelBuffer();

    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
    // for the requested time has already been retrieved. In this case, the last valid image (if any)
    // should be displayed.
    if (pixelBuffer)
        m_lastImage = createImageFromPixelBuffer(pixelBuffer.get());
}
2334
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext& context, const FloatRect& outputRect)
{
    // Paint the most recent video frame into outputRect, honoring the track's
    // preferred transform (e.g. rotation stored in the media).
    if (m_videoOutput && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();

    updateLastImage();

    if (!m_lastImage)
        return;

    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(%p)", this);

    GraphicsContextStateSaver stateSaver(context);
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
    // Map the destination rect back through the inverse transform so that after
    // concatenating videoTransform the image lands in outputRect. valueOr()
    // falls back to identity when the transform is not invertible.
    FloatRect transformedOutputRect = videoTransform.inverse().valueOr(AffineTransform()).mapRect(outputRect);

    context.concatCTM(videoTransform);
    context.drawNativeImage(m_lastImage.get(), imageRect.size(), transformedOutputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();

}
2365
void MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput()
{
    // Create and attach a second AVPlayerItemVideoOutput whose buffers are
    // IOSurface-backed and FBO-compatible, for the GL texture-copy path.
    // No-op when there is no player item or the output already exists.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_openGLVideoOutput)
        return;

#if PLATFORM(IOS)
    NSDictionary* attributes = @{(NSString *)kCVPixelBufferIOSurfaceOpenGLESFBOCompatibilityKey: @YES};
#else
    NSDictionary* attributes = @{(NSString *)kCVPixelBufferIOSurfaceOpenGLFBOCompatibilityKey: @YES};
#endif
    m_openGLVideoOutput = adoptNS([allocAVPlayerItemVideoOutputInstance() initWithPixelBufferAttributes:attributes]);
    ASSERT(m_openGLVideoOutput);

    [m_avPlayerItem.get() addOutput:m_openGLVideoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput(%p) - returning %p", this, m_openGLVideoOutput.get());
}
2385
void MediaPlayerPrivateAVFoundationObjC::destroyOpenGLVideoOutput()
{
    // Detach and release the AVPlayerItemVideoOutput created by createOpenGLVideoOutput().
    if (!m_openGLVideoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_openGLVideoOutput.get()];
    // Log the OpenGL output being destroyed (was incorrectly logging m_videoOutput).
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyOpenGLVideoOutput(%p) - destroying  %p", this, m_openGLVideoOutput.get());

    m_openGLVideoOutput = nullptr;
}
2397
void MediaPlayerPrivateAVFoundationObjC::updateLastOpenGLImage()
{
    // Refresh m_lastOpenGLImage with the pixel buffer for the item time that
    // corresponds to "now"; keeps the previous buffer when nothing new exists.
    if (!m_openGLVideoOutput)
        return;

    CMTime currentTime = [m_openGLVideoOutput itemTimeForHostTime:CACurrentMediaTime()];
    if (![m_openGLVideoOutput hasNewPixelBufferForItemTime:currentTime])
        return;

    m_lastOpenGLImage = adoptCF([m_openGLVideoOutput copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
}
2409
2410 #if !LOG_DISABLED
2411
// Maps a GL enum value to its source-text name, for logging only.
#define STRINGIFY_PAIR(e) e, #e
static std::map<uint32_t, const char*>& enumToStringMap()
{
    // Lazily populated, leak-on-exit (NeverDestroyed) lookup table of GL enum
    // names used by copyVideoTextureToPlatformTexture() logging. Duplicate
    // values (e.g. GL_RGB appears in both the format and internal-format
    // groups) are harmless: emplace keeps the first entry.
    static NeverDestroyed<std::map<uint32_t, const char*>> map;
    if (map.get().empty()) {
        map.get().emplace(STRINGIFY_PAIR(GL_RGB));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA));
        map.get().emplace(STRINGIFY_PAIR(GL_LUMINANCE_ALPHA));
        map.get().emplace(STRINGIFY_PAIR(GL_LUMINANCE));
        map.get().emplace(STRINGIFY_PAIR(GL_ALPHA));
        map.get().emplace(STRINGIFY_PAIR(GL_R8));
        map.get().emplace(STRINGIFY_PAIR(GL_R16F));
        map.get().emplace(STRINGIFY_PAIR(GL_R32F));
        map.get().emplace(STRINGIFY_PAIR(GL_R8UI));
        map.get().emplace(STRINGIFY_PAIR(GL_R8I));
        map.get().emplace(STRINGIFY_PAIR(GL_R16UI));
        map.get().emplace(STRINGIFY_PAIR(GL_R16I));
        map.get().emplace(STRINGIFY_PAIR(GL_R32UI));
        map.get().emplace(STRINGIFY_PAIR(GL_R32I));
        map.get().emplace(STRINGIFY_PAIR(GL_RG8));
        map.get().emplace(STRINGIFY_PAIR(GL_RG16F));
        map.get().emplace(STRINGIFY_PAIR(GL_RG32F));
        map.get().emplace(STRINGIFY_PAIR(GL_RG8UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RG8I));
        map.get().emplace(STRINGIFY_PAIR(GL_RG16UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RG16I));
        map.get().emplace(STRINGIFY_PAIR(GL_RG32UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RG32I));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB8));
        map.get().emplace(STRINGIFY_PAIR(GL_SRGB8));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA8));
        map.get().emplace(STRINGIFY_PAIR(GL_SRGB8_ALPHA8));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA4));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB10_A2));
        map.get().emplace(STRINGIFY_PAIR(GL_DEPTH_COMPONENT16));
        map.get().emplace(STRINGIFY_PAIR(GL_DEPTH_COMPONENT24));
        map.get().emplace(STRINGIFY_PAIR(GL_DEPTH_COMPONENT32F));
        map.get().emplace(STRINGIFY_PAIR(GL_DEPTH24_STENCIL8));
        map.get().emplace(STRINGIFY_PAIR(GL_DEPTH32F_STENCIL8));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA));
        map.get().emplace(STRINGIFY_PAIR(GL_LUMINANCE_ALPHA));
        map.get().emplace(STRINGIFY_PAIR(GL_LUMINANCE));
        map.get().emplace(STRINGIFY_PAIR(GL_ALPHA));
        map.get().emplace(STRINGIFY_PAIR(GL_RED));
        map.get().emplace(STRINGIFY_PAIR(GL_RG_INTEGER));
        map.get().emplace(STRINGIFY_PAIR(GL_DEPTH_STENCIL));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_BYTE));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_SHORT_5_6_5));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_SHORT_4_4_4_4));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_SHORT_5_5_5_1));
        map.get().emplace(STRINGIFY_PAIR(GL_BYTE));
        map.get().emplace(STRINGIFY_PAIR(GL_HALF_FLOAT));
        map.get().emplace(STRINGIFY_PAIR(GL_FLOAT));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_SHORT));
        map.get().emplace(STRINGIFY_PAIR(GL_SHORT));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_INT));
        map.get().emplace(STRINGIFY_PAIR(GL_INT));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_INT_2_10_10_10_REV));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_INT_24_8));
        map.get().emplace(STRINGIFY_PAIR(GL_FLOAT_32_UNSIGNED_INT_24_8_REV));

#if PLATFORM(IOS)
        // These enums are only defined in the OpenGL ES headers.
        map.get().emplace(STRINGIFY_PAIR(GL_RED_INTEGER));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB_INTEGER));
        map.get().emplace(STRINGIFY_PAIR(GL_RG8_SNORM));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB565));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB8_SNORM));
        map.get().emplace(STRINGIFY_PAIR(GL_R11F_G11F_B10F));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB9_E5));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB16F));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB32F));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB8UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB8I));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB16UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB16I));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB32UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB32I));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA8_SNORM));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA16F));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA32F));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA8UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA8I));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB10_A2UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA16UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA16I));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA32I));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA32UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB5_A1));
        map.get().emplace(STRINGIFY_PAIR(GL_RG));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA_INTEGER));
        map.get().emplace(STRINGIFY_PAIR(GL_DEPTH_COMPONENT));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_INT_10F_11F_11F_REV));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_INT_5_9_9_9_REV));
#endif
    }
    return map.get();
}
2511
2512 #endif // !LOG_DISABLED
2513
bool MediaPlayerPrivateAVFoundationObjC::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    // Copies the current video frame into the caller's texture by wrapping the
    // frame's pixel buffer in a CV texture-cache texture, attaching that texture
    // to a read framebuffer, and copyTexImage2D-ing into outputTexture.
    // Premultiplied alpha and Y-flip are not supported by this path.
    if (flipY || premultiplyAlpha)
        return false;

    ASSERT(context);

    if (!m_openGLVideoOutput)
        createOpenGLVideoOutput();

    updateLastOpenGLImage();

    if (!m_lastOpenGLImage)
        return false;

    // Lazily create the platform texture cache used to wrap pixel buffers.
    if (!m_openGLTextureCache) {
#if PLATFORM(IOS)
        CVOpenGLESTextureCacheRef cache = nullptr;
        CVReturn error = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, nullptr, context->platformGraphicsContext3D(), nullptr, &cache);
#else
        CVOpenGLTextureCacheRef cache = nullptr;
        CVReturn error = CVOpenGLTextureCacheCreate(kCFAllocatorDefault, nullptr, context->platformGraphicsContext3D(), CGLGetPixelFormat(context->platformGraphicsContext3D()), nullptr, &cache);
#endif
        if (error != kCVReturnSuccess)
            return false;
        m_openGLTextureCache = adoptCF(cache);
    }

    size_t width = CVPixelBufferGetWidth(m_lastOpenGLImage.get());
    size_t height = CVPixelBufferGetHeight(m_lastOpenGLImage.get());

    // Wrap the pixel buffer in a GL texture via the cache.
#if PLATFORM(IOS)
    CVOpenGLESTextureRef bareVideoTexture = nullptr;
    if (kCVReturnSuccess != CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, m_openGLTextureCache.get(), m_lastOpenGLImage.get(), nullptr, outputTarget, internalFormat, width, height, format, type, level, &bareVideoTexture))
        return false;
    RetainPtr<CVOpenGLESTextureRef> videoTexture = adoptCF(bareVideoTexture);
    Platform3DObject videoTextureName = CVOpenGLESTextureGetName(videoTexture.get());
    GC3Denum videoTextureTarget = CVOpenGLESTextureGetTarget(videoTexture.get());
#else
    CVOpenGLTextureRef bareVideoTexture = nullptr;
    if (kCVReturnSuccess != CVOpenGLTextureCacheCreateTextureFromImage(kCFAllocatorDefault, m_openGLTextureCache.get(), m_lastOpenGLImage.get(), nullptr, &bareVideoTexture))
        return false;
    RetainPtr<CVOpenGLTextureRef> videoTexture = adoptCF(bareVideoTexture);
    Platform3DObject videoTextureName = CVOpenGLTextureGetName(videoTexture.get());
    GC3Denum videoTextureTarget = CVOpenGLTextureGetTarget(videoTexture.get());
#endif

    // Flush the texture cache on the main queue so unused cache entries can be recycled.
    auto weakThis = createWeakPtr();
    dispatch_async(dispatch_get_main_queue(), [weakThis] {
        if (!weakThis)
            return;

        if (auto cache = weakThis->m_openGLTextureCache.get())
#if PLATFORM(IOS)
            CVOpenGLESTextureCacheFlush(cache, 0);
#else
            CVOpenGLTextureCacheFlush(cache, 0);
#endif
    });

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::copyVideoTextureToPlatformTexture(%p) - internalFormat: %s, format: %s, type: %s", this, enumToStringMap()[internalFormat], enumToStringMap()[format], enumToStringMap()[type]);

    // Save the original bound texture & framebuffer names so we can re-bind them after copying the video texture.
    GC3Dint boundTexture = 0;
    GC3Dint boundReadFramebuffer = 0;
    context->getIntegerv(GraphicsContext3D::TEXTURE_BINDING_2D, &boundTexture);
    context->getIntegerv(GraphicsContext3D::READ_FRAMEBUFFER_BINDING, &boundReadFramebuffer);

    context->bindTexture(videoTextureTarget, videoTextureName);
    context->texParameteri(GraphicsContext3D::TEXTURE_2D, GraphicsContext3D::TEXTURE_MIN_FILTER, GraphicsContext3D::LINEAR);
    context->texParameterf(GraphicsContext3D::TEXTURE_2D, GraphicsContext3D::TEXTURE_WRAP_S, GraphicsContext3D::CLAMP_TO_EDGE);
    context->texParameterf(GraphicsContext3D::TEXTURE_2D, GraphicsContext3D::TEXTURE_WRAP_T, GraphicsContext3D::CLAMP_TO_EDGE);

    // Create a framebuffer object to represent the video texture's memory.
    Platform3DObject readFramebuffer = context->createFramebuffer();

    // Make that framebuffer the read source from which drawing commands will read pixels.
    context->bindFramebuffer(GraphicsContext3D::READ_FRAMEBUFFER, readFramebuffer);

    // Allocate uninitialized memory for the output texture.
    context->bindTexture(outputTarget, outputTexture);
    context->texParameteri(GraphicsContext3D::TEXTURE_2D, GraphicsContext3D::TEXTURE_MIN_FILTER, GraphicsContext3D::LINEAR);
    context->texParameterf(GraphicsContext3D::TEXTURE_2D, GraphicsContext3D::TEXTURE_WRAP_S, GraphicsContext3D::CLAMP_TO_EDGE);
    context->texParameterf(GraphicsContext3D::TEXTURE_2D, GraphicsContext3D::TEXTURE_WRAP_T, GraphicsContext3D::CLAMP_TO_EDGE);
    context->texImage2DDirect(outputTarget, level, internalFormat, width, height, 0, format, type, nullptr);

    // Attach the video texture to the framebuffer.
    context->framebufferTexture2D(GraphicsContext3D::READ_FRAMEBUFFER, GraphicsContext3D::COLOR_ATTACHMENT0, videoTextureTarget, videoTextureName, level);

    GC3Denum status = context->checkFramebufferStatus(GraphicsContext3D::READ_FRAMEBUFFER);
    if (status != GraphicsContext3D::FRAMEBUFFER_COMPLETE) {
        // Restore the caller's bindings and release the framebuffer; the
        // previous code leaked readFramebuffer on this failure path.
        context->bindTexture(outputTarget, boundTexture);
        context->bindFramebuffer(GraphicsContext3D::READ_FRAMEBUFFER, boundReadFramebuffer);
        context->deleteFramebuffer(readFramebuffer);
        return false;
    }

    // Copy texture from the read framebuffer (and thus the video texture) to the output texture.
    context->copyTexImage2D(outputTarget, level, internalFormat, 0, 0, width, height, 0);

    // Restore the previous texture and framebuffer bindings.
    context->bindTexture(outputTarget, boundTexture);
    context->bindFramebuffer(GraphicsContext3D::READ_FRAMEBUFFER, boundReadFramebuffer);

    // Clean up after ourselves.
    context->deleteFramebuffer(readFramebuffer);

    return !context->getError();
}
2619
PassNativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    // Refresh and hand out the cached CGImage for the current frame; may be
    // null when no frame has ever been captured.
    updateLastImage();
    return m_lastImage.get();
}
2625
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    // Block (up to 1 s) until the video output reports new media data; the
    // semaphore is signaled by outputMediaDataWillChange() below.
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for 1 second.
    long result = dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(0, 1 * NSEC_PER_SEC));

    // A non-zero result means the wait timed out rather than being signaled.
    if (result)
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange(%p) timed out", this);
}
2639
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutput*)
{
    // Wakes waitForVideoOutputMediaDataWillChange(); called by the video
    // output delegate when new media data becomes available.
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
2644 #endif
2645
2646 #if ENABLE(ENCRYPTED_MEDIA)
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::generateKeyRequest(const String& keySystem, const unsigned char* initDataPtr, unsigned initDataLength)
{
    // EME v1 entry point: parses key URI / key ID / app certificate out of the
    // init data, asks AVFoundation for a streaming content key request, and
    // reports the request bytes to the client via keyMessage().
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    RefPtr<Uint8Array> initData = Uint8Array::create(initDataPtr, initDataLength);
    String keyURI;
    String keyID;
    RefPtr<Uint8Array> certificate;
    if (!extractKeyURIKeyIDAndCertificateFromInitData(initData.get(), keyURI, keyID, certificate))
        return MediaPlayer::InvalidPlayerState;

    // There must be a pending resource-loading request for this key URI.
    if (!m_keyURIToRequestMap.contains(keyURI))
        return MediaPlayer::InvalidPlayerState;

    String sessionID = createCanonicalUUIDString();

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_keyURIToRequestMap.get(keyURI);

    RetainPtr<NSData> certificateData = adoptNS([[NSData alloc] initWithBytes:certificate->baseAddress() length:certificate->byteLength()]);
    NSString* assetStr = keyID;
    RetainPtr<NSData> assetID = [NSData dataWithBytes: [assetStr cStringUsingEncoding:NSUTF8StringEncoding] length:[assetStr lengthOfBytesUsingEncoding:NSUTF8StringEncoding]];
    NSError* error = 0;
    RetainPtr<NSData> keyRequest = [avRequest.get() streamingContentKeyRequestDataForApp:certificateData.get() contentIdentifier:assetID.get() options:nil error:&error];

    if (!keyRequest) {
        // Surface the underlying error code to the client; still NoError so
        // the pipeline is not torn down.
        NSError* underlyingError = [[error userInfo] objectForKey:NSUnderlyingErrorKey];
        player()->keyError(keySystem, sessionID, MediaPlayerClient::DomainError, [underlyingError code]);
        return MediaPlayer::NoError;
    }

    RefPtr<ArrayBuffer> keyRequestBuffer = ArrayBuffer::create([keyRequest.get() bytes], [keyRequest.get() length]);
    RefPtr<Uint8Array> keyRequestArray = Uint8Array::create(keyRequestBuffer, 0, keyRequestBuffer->byteLength());
    player()->keyMessage(keySystem, sessionID, keyRequestArray->data(), keyRequestArray->byteLength(), URL());

    // Move ownership of the AVAssetResourceLoadingRequest from the keyIDToRequestMap to the sessionIDToRequestMap:
    m_sessionIDToRequestMap.set(sessionID, avRequest);
    m_keyURIToRequestMap.remove(keyURI);

    return MediaPlayer::NoError;
}
2688
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::addKey(const String& keySystem, const unsigned char* keyPtr, unsigned keyLength, const unsigned char* initDataPtr, unsigned initDataLength, const String& sessionID)
{
    // EME v1: deliver key bytes to the pending AVAssetResourceLoadingRequest
    // stored for this session by generateKeyRequest(), then finish the request.
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_sessionIDToRequestMap.get(sessionID);
    RetainPtr<NSData> keyData = adoptNS([[NSData alloc] initWithBytes:keyPtr length:keyLength]);
    [[avRequest.get() dataRequest] respondWithData:keyData.get()];
    [avRequest.get() finishLoading];
    m_sessionIDToRequestMap.remove(sessionID);

    player()->keyAdded(keySystem, sessionID);

    // Init data is not needed on this path; the request was matched by sessionID.
    UNUSED_PARAM(initDataPtr);
    UNUSED_PARAM(initDataLength);
    return MediaPlayer::NoError;
}
2709
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::cancelKeyRequest(const String& keySystem, const String& sessionID)
{
    // Forget the pending loading request for this session, abandoning the key exchange.
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    // HashMap::remove() reports whether an entry existed, so a separate
    // contains() check is unnecessary.
    if (!m_sessionIDToRequestMap.remove(sessionID))
        return MediaPlayer::InvalidPlayerState;

    return MediaPlayer::NoError;
}
2721 #endif
2722
2723 #if ENABLE(ENCRYPTED_MEDIA_V2)
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    // Remove and return the pending loading request for this key URI (null if none).
    return m_keyURIToRequestMap.take(keyURI);
}
2728
void MediaPlayerPrivateAVFoundationObjC::keyAdded()
{
    // A key became available: satisfy every pending loading request whose key
    // data the client now has cached, then drop those requests from the map.
    Vector<String> satisfiedKeyIds;

    for (auto& entry : m_keyURIToRequestMap) {
        auto keyData = player()->cachedKeyForKeyId(entry.key);
        if (!keyData)
            continue;

        fulfillRequestWithKeyData(entry.value.get(), keyData.get());
        satisfiedKeyIds.append(entry.key);
    }

    // Removal is deferred so the map is not mutated while being iterated.
    for (auto& keyId : satisfiedKeyIds)
        m_keyURIToRequestMap.remove(keyId);
}
2748
std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem, CDMSessionClient* client)
{
    // Hand back an AVFoundation-backed CDM session, or null for an unsupported key system.
    if (keySystemIsSupported(keySystem))
        return std::make_unique<CDMSessionAVFoundationObjC>(this, client);

    return nullptr;
}
2756 #endif
2757
2758 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
    // Reconcile m_textTracks against the closed-caption tracks currently in
    // the player item: keep matches, add new tracks, and report the leftovers
    // in removedTextTracks as removed.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    // Start by assuming every known track was removed; matches are pruned below.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        // Iterate backwards so remove(i - 1) does not disturb unvisited entries.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(m_textTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;
        
        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2793 #endif
2794
NSArray* MediaPlayerPrivateAVFoundationObjC::safeAVAssetTracksForAudibleMedia()
{
    // The asset's tracks may only be queried after the "tracks" key has
    // finished loading; answer nil otherwise.
    if (!m_avAsset || [m_avAsset.get() statusOfValueForKey:@"tracks" error:NULL] != AVKeyValueStatusLoaded)
        return nil;

    return [m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible];
}
2805
2806 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    // True once the asset exists and its media-selection metadata has loaded.
    return m_avAsset
        && [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2817
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    // nil until the asset's media-selection metadata has loaded.
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible] : nil;
}
2825
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    // nil until the asset's media-selection metadata has loaded.
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible] : nil;
}
2833
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    // nil until the asset's media-selection metadata has loaded.
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual] : nil;
}
2841
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    // Reconcile m_textTracks against the legible media-selection options in
    // the asset: keep matches, append new options (in-band and out-of-band),
    // and report leftovers in removedTextTracks as removed.
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Start by assuming every known track was removed; matches are pruned below.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        // Iterate backwards so remove(i - 1) does not disturb unvisited entries.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy closed-caption tracks are matched by processLegacyClosedCaptionsTracks(), not here.
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2897
// Lazily creates the single in-band metadata text track; later calls are no-ops.
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    if (!m_metadataTrack) {
        m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
        m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
        player()->addTextTrack(m_metadataTrack);
    }
}
2907
// Forwards incoming cue data to the currently selected text track; cues arriving
// while no track is selected are dropped.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    if (m_currentTextTrack)
        m_currentTextTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), reinterpret_cast<CFArrayRef>(nativeSamples), time);
}
2915
// Clears any cue state accumulated on the currently selected text track.
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::flushCues(%p)", this);

    if (m_currentTextTrack)
        m_currentTextTrack->resetCueValues();
}
2925 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2926
// Makes `track` the active text track (or deselects all text tracks when nil),
// routing the selection to either the legacy closed-caption display flag or the
// appropriate AVFoundation media selection option depending on the track category.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");

    m_currentTextTrack = track;

    if (track) {
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // Pass nil (not the integer literal 0) for the Objective-C object argument,
        // matching the deselection call in processMediaSelectionOptions().
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
    }
}
2955
// Returns the language of the primary audio track, caching the result in
// m_languageOfPrimaryAudioTrack. Prefers the currently selected audible media
// selection option; falls back to the asset's single audio track, or the empty
// string when the language cannot be determined.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    // A non-null cached value means this was already computed; reuse it.
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        m_languageOfPrimaryAudioTrack = emptyString();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - %lu audio tracks, returning emptyString()", this, static_cast<unsigned long>(tracks ? [tracks count] : 0));
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

#if !LOG_DISABLED
    // Per WebKit style, test emptiness with isEmpty() rather than comparing against emptyString().
    if (m_languageOfPrimaryAudioTrack.isEmpty())
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
    else
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
#endif

    return m_languageOfPrimaryAudioTrack;
}
2997
2998 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// Reports whether playback is currently routed to a wireless (external) target.
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    bool wirelessTarget = false;

#if !PLATFORM(IOS)
    // On Mac an explicit playback target may be set. An AVFoundation target counts as
    // wireless when the AVPlayer reports active external playback; any other target type
    // counts as wireless when we were told to play to it and it has an active route.
    if (m_playbackTarget) {
        if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation)
            wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
        else
            wirelessTarget = m_shouldPlayToPlaybackTarget && m_playbackTarget->hasActiveRoute();
    }
#else
    // On iOS, rely solely on the AVPlayer's external playback state.
    wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
#endif

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless(%p) - returning %s", this, boolString(wirelessTarget));

    return wirelessTarget;
}