PiP and external playback are mutually exclusive.
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27
28 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
29 #import "MediaPlayerPrivateAVFoundationObjC.h"
30
31 #import "AVFoundationSPI.h"
32 #import "AVTrackPrivateAVFObjCImpl.h"
33 #import "AudioSourceProviderAVFObjC.h"
34 #import "AudioTrackPrivateAVFObjC.h"
35 #import "AuthenticationChallenge.h"
36 #import "BlockExceptions.h"
37 #import "CDMSessionAVFoundationObjC.h"
38 #import "Cookie.h"
39 #import "ExceptionCodePlaceholder.h"
40 #import "Extensions3D.h"
41 #import "FloatConversion.h"
42 #import "FloatConversion.h"
43 #import "GraphicsContext.h"
44 #import "GraphicsContext3D.h"
45 #import "GraphicsContextCG.h"
46 #import "InbandMetadataTextTrackPrivateAVF.h"
47 #import "InbandTextTrackPrivateAVFObjC.h"
48 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
49 #import "OutOfBandTextTrackPrivateAVF.h"
50 #import "URL.h"
51 #import "Logging.h"
52 #import "MediaPlaybackTargetMac.h"
53 #import "MediaPlaybackTargetMock.h"
54 #import "MediaSelectionGroupAVFObjC.h"
55 #import "MediaTimeAVFoundation.h"
56 #import "PlatformTimeRanges.h"
57 #import "QuartzCoreSPI.h"
58 #import "SecurityOrigin.h"
59 #import "SerializedPlatformRepresentationMac.h"
60 #import "TextEncoding.h"
61 #import "TextTrackRepresentation.h"
62 #import "UUID.h"
63 #import "VideoTrackPrivateAVFObjC.h"
64 #import "WebCoreAVFResourceLoader.h"
65 #import "WebCoreCALayerExtras.h"
66 #import "WebCoreSystemInterface.h"
67 #import <functional>
68 #import <map>
69 #import <objc/runtime.h>
70 #import <runtime/DataView.h>
71 #import <runtime/JSCInlines.h>
72 #import <runtime/TypedArrayInlines.h>
73 #import <runtime/Uint16Array.h>
74 #import <runtime/Uint32Array.h>
75 #import <runtime/Uint8Array.h>
76 #import <wtf/CurrentTime.h>
77 #import <wtf/ListHashSet.h>
78 #import <wtf/NeverDestroyed.h>
79 #import <wtf/OSObjectPtr.h>
80 #import <wtf/text/CString.h>
81 #import <wtf/text/StringBuilder.h>
82
83 #if ENABLE(AVF_CAPTIONS)
84 #include "TextTrack.h"
85 #endif
86
87 #import <AVFoundation/AVFoundation.h>
88
89 #if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
90 #import "VideoFullscreenLayerManager.h"
91 #endif
92
93 #if PLATFORM(IOS)
94 #import "WAKAppKitStubs.h"
95 #import <CoreImage/CoreImage.h>
96 #import <mach/mach_port.h>
97 #else
98 #import <Foundation/NSGeometry.h>
99 #import <QuartzCore/CoreImage.h>
100 #endif
101
102 #if USE(VIDEOTOOLBOX)
103 #import <CoreVideo/CoreVideo.h>
104 #import <VideoToolbox/VideoToolbox.h>
105 #endif
106
107 #if USE(CFNETWORK)
108 #include "CFNSURLConnectionSPI.h"
109 #endif
110
111 #if PLATFORM(IOS)
112 #include <OpenGLES/ES3/glext.h>
113 #endif
114
// Specialize std::iterator_traits for WTF::HashSet's iterator over media
// selection options — presumably so <algorithm>-style utilities accept this
// iterator type, which does not declare the traits itself. TODO(review): confirm
// which standard algorithm below relies on this.
115 namespace std {
116 template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
117     typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
118 };
119 }
120
121 #if ENABLE(AVF_CAPTIONS)
122 // Note: This must be defined before our SOFT_LINK macros:
123 @class AVMediaSelectionOption;
124 @interface AVMediaSelectionOption (OutOfBandExtensions)
125 @property (nonatomic, readonly) NSString* outOfBandSource;
126 @property (nonatomic, readonly) NSString* outOfBandIdentifier;
127 @end
128 #endif
129
130 @interface AVURLAsset (WebKitExtensions)
131 @property (nonatomic, readonly) NSURL *resolvedURL;
132 @end
133
134 typedef AVPlayer AVPlayerType;
135 typedef AVPlayerItem AVPlayerItemType;
136 typedef AVPlayerItemLegibleOutput AVPlayerItemLegibleOutputType;
137 typedef AVPlayerItemVideoOutput AVPlayerItemVideoOutputType;
138 typedef AVMetadataItem AVMetadataItemType;
139 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
140 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
141
142 #pragma mark - Soft Linking
143
144 // Soft-linking headers must be included last since they #define functions, constants, etc.
145 #import "CoreMediaSoftLink.h"
146
147 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
148
149 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
150 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreVideo)
151
152 #if USE(VIDEOTOOLBOX)
153 SOFT_LINK_FRAMEWORK_OPTIONAL(VideoToolbox)
154 #endif
155
156 SOFT_LINK(CoreVideo, CVPixelBufferGetWidth, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
157 SOFT_LINK(CoreVideo, CVPixelBufferGetHeight, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
158 SOFT_LINK(CoreVideo, CVPixelBufferGetBaseAddress, void*, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
159 SOFT_LINK(CoreVideo, CVPixelBufferGetBytesPerRow, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
160 SOFT_LINK(CoreVideo, CVPixelBufferGetDataSize, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
161 SOFT_LINK(CoreVideo, CVPixelBufferGetPixelFormatType, OSType, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
162 SOFT_LINK(CoreVideo, CVPixelBufferLockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
163 SOFT_LINK(CoreVideo, CVPixelBufferUnlockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
164
165 #if USE(VIDEOTOOLBOX)
166 SOFT_LINK(VideoToolbox, VTPixelTransferSessionCreate, OSStatus, (CFAllocatorRef allocator, VTPixelTransferSessionRef *pixelTransferSessionOut), (allocator, pixelTransferSessionOut))
167 SOFT_LINK(VideoToolbox, VTPixelTransferSessionTransferImage, OSStatus, (VTPixelTransferSessionRef session, CVPixelBufferRef sourceBuffer, CVPixelBufferRef destinationBuffer), (session, sourceBuffer, destinationBuffer))
168 #endif
169
170 SOFT_LINK_CLASS(AVFoundation, AVPlayer)
171 SOFT_LINK_CLASS(AVFoundation, AVPlayerItem)
172 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemVideoOutput)
173 SOFT_LINK_CLASS(AVFoundation, AVPlayerLayer)
174 SOFT_LINK_CLASS(AVFoundation, AVURLAsset)
175 SOFT_LINK_CLASS(AVFoundation, AVAssetImageGenerator)
176 SOFT_LINK_CLASS(AVFoundation, AVMetadataItem)
177
178 SOFT_LINK_CLASS(CoreImage, CIContext)
179 SOFT_LINK_CLASS(CoreImage, CIImage)
180
181 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString*)
182 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString*)
183 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
184 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
185 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
186 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
187 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
188 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeMetadata, NSString *)
189 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
190 SOFT_LINK_POINTER(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
191 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
192 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
193 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
194 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
195 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
196 SOFT_LINK_POINTER(CoreVideo, kCVPixelBufferPixelFormatTypeKey, NSString *)
197
198 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
199
200 #define AVPlayer getAVPlayerClass()
201 #define AVPlayerItem getAVPlayerItemClass()
202 #define AVPlayerLayer getAVPlayerLayerClass()
203 #define AVURLAsset getAVURLAssetClass()
204 #define AVAssetImageGenerator getAVAssetImageGeneratorClass()
205 #define AVMetadataItem getAVMetadataItemClass()
206
207 #define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
208 #define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
209 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
210 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
211 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
212 #define AVMediaTypeVideo getAVMediaTypeVideo()
213 #define AVMediaTypeAudio getAVMediaTypeAudio()
214 #define AVMediaTypeMetadata getAVMediaTypeMetadata()
215 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
216 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
217 #define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
218 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
219 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
220 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
221 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
222 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
223 #define kCVPixelBufferPixelFormatTypeKey getkCVPixelBufferPixelFormatTypeKey()
224
225 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
226 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
227 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
228
229 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
230 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
231 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
232
233 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
234 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
235 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
236 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
237
238 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
239 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
240 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
241 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
242 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
243 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
244 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
245 #endif
246
247 #if ENABLE(AVF_CAPTIONS)
248 SOFT_LINK_POINTER(AVFoundation, AVURLAssetHTTPCookiesKey, NSString*)
249 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
250 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
251 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
252 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
253 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
254 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
255 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
256 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
257 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
258 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicIsAuxiliaryContent, NSString*)
259
260 #define AVURLAssetHTTPCookiesKey getAVURLAssetHTTPCookiesKey()
261 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
262 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
263 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
264 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
265 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
266 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
267 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
268 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
269 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
270 #define AVMediaCharacteristicIsAuxiliaryContent getAVMediaCharacteristicIsAuxiliaryContent()
271 #endif
272
273 #if ENABLE(DATACUE_VALUE)
274 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString*)
275 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMetadataKeySpaceISOUserData, NSString*)
276 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString*)
277 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceiTunes, NSString*)
278 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceID3, NSString*)
279
280 #define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
281 #define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
282 #define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
283 #define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
284 #define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
285 #endif
286
287 #if PLATFORM(IOS)
288 SOFT_LINK_POINTER(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
289 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
290 #endif
291
292 #if PLATFORM(IOS)
293 SOFT_LINK(CoreVideo, CVOpenGLESTextureCacheCreate, CVReturn, (CFAllocatorRef allocator, CFDictionaryRef cacheAttributes, CVEAGLContext eaglContext, CFDictionaryRef textureAttributes, CVOpenGLESTextureCacheRef* cacheOut), (allocator, cacheAttributes, eaglContext, textureAttributes, cacheOut))
294 SOFT_LINK(CoreVideo, CVOpenGLESTextureCacheCreateTextureFromImage, CVReturn, (CFAllocatorRef allocator, CVOpenGLESTextureCacheRef textureCache, CVImageBufferRef sourceImage, CFDictionaryRef textureAttributes, GLenum target, GLint internalFormat, GLsizei width, GLsizei height, GLenum format, GLenum type, size_t planeIndex, CVOpenGLESTextureRef* textureOut), (allocator, textureCache, sourceImage, textureAttributes, target, internalFormat, width, height, format, type, planeIndex, textureOut))
295 SOFT_LINK(CoreVideo, CVOpenGLESTextureCacheFlush, void, (CVOpenGLESTextureCacheRef textureCache, CVOptionFlags options), (textureCache, options))
296 SOFT_LINK(CoreVideo, CVOpenGLESTextureGetTarget, GLenum, (CVOpenGLESTextureRef image), (image))
297 SOFT_LINK(CoreVideo, CVOpenGLESTextureGetName, GLuint, (CVOpenGLESTextureRef image), (image))
298 SOFT_LINK_POINTER(CoreVideo, kCVPixelBufferIOSurfaceOpenGLESFBOCompatibilityKey, NSString *)
299 #define kCVPixelBufferIOSurfaceOpenGLESFBOCompatibilityKey getkCVPixelBufferIOSurfaceOpenGLESFBOCompatibilityKey()
300 #else
301 SOFT_LINK(CoreVideo, CVOpenGLTextureCacheCreate, CVReturn, (CFAllocatorRef allocator, CFDictionaryRef cacheAttributes, CGLContextObj cglContext, CGLPixelFormatObj cglPixelFormat, CFDictionaryRef textureAttributes, CVOpenGLTextureCacheRef* cacheOut), (allocator, cacheAttributes, cglContext, cglPixelFormat, textureAttributes, cacheOut))
302 SOFT_LINK(CoreVideo, CVOpenGLTextureCacheCreateTextureFromImage, CVReturn, (CFAllocatorRef allocator, CVOpenGLTextureCacheRef textureCache, CVImageBufferRef sourceImage, CFDictionaryRef attributes, CVOpenGLTextureRef* textureOut), (allocator, textureCache, sourceImage, attributes, textureOut))
303 SOFT_LINK(CoreVideo, CVOpenGLTextureCacheFlush, void, (CVOpenGLTextureCacheRef textureCache, CVOptionFlags options), (textureCache, options))
304 SOFT_LINK(CoreVideo, CVOpenGLTextureGetTarget, GLenum, (CVOpenGLTextureRef image), (image))
305 SOFT_LINK(CoreVideo, CVOpenGLTextureGetName, GLuint, (CVOpenGLTextureRef image), (image))
306 SOFT_LINK_POINTER(CoreVideo, kCVPixelBufferIOSurfaceOpenGLFBOCompatibilityKey, NSString *)
307 #define kCVPixelBufferIOSurfaceOpenGLFBOCompatibilityKey getkCVPixelBufferIOSurfaceOpenGLFBOCompatibilityKey()
308 #endif
309
310 SOFT_LINK_FRAMEWORK(MediaToolbox)
311 SOFT_LINK_OPTIONAL(MediaToolbox, MTEnableCaption2015Behavior, Boolean, (), ())
312
313 using namespace WebCore;
314
// KVO context tokens passed when registering observers so that
// -observeValueForKeyPath:... can route each change notification to the right
// handler (player item, item track, player, or player layer).
315 enum MediaPlayerAVFoundationObservationContext {
316     MediaPlayerAVFoundationObservationContextPlayerItem,
317     MediaPlayerAVFoundationObservationContextPlayerItemTrack,
318     MediaPlayerAVFoundationObservationContextPlayer,
319     MediaPlayerAVFoundationObservationContextAVPlayerLayer,
320 };
321
// Objective-C observer that forwards KVO changes, end-of-playback
// notifications, and (when available) legible-output caption data back to a
// MediaPlayerPrivateAVFoundationObjC instance via its raw m_callback pointer.
// -disconnect must be called before the C++ object is destroyed.
322 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
323 @interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
324 #else
325 @interface WebCoreAVFMovieObserver : NSObject
326 #endif
327 {
328     MediaPlayerPrivateAVFoundationObjC* m_callback;
329     int m_delayCallbacks;
330 }
331 -(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
332 -(void)disconnect;
333 -(void)metadataLoaded;
334 -(void)didEnd:(NSNotification *)notification;
335 -(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
336 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
337 - (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
338 - (void)outputSequenceWasFlushed:(id)output;
339 #endif
340 @end
341
// AVAssetResourceLoaderDelegate that hands media resource-loading requests to
// the owning MediaPlayerPrivateAVFoundationObjC. setCallback:0 detaches it so
// late delegate callbacks after player destruction are ignored.
342 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
343 @interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
344     MediaPlayerPrivateAVFoundationObjC* m_callback;
345 }
346 - (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
347 - (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
348 - (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
349 @end
350 #endif
351
// AVPlayerItemOutputPullDelegate used with the video output: notifies the
// owning player when new media data will become available so frames can be
// pulled. The semaphore appears to coordinate with frame-pull waits —
// TODO(review): confirm against the outputMediaDataWillChange: implementation.
352 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
353 @interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
354     MediaPlayerPrivateAVFoundationObjC *m_callback;
355     dispatch_semaphore_t m_semaphore;
356 }
357 - (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
358 - (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
359 - (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
360 - (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
361 @end
362 #endif
363
364 namespace WebCore {
365
366 static NSArray *assetMetadataKeyNames();
367 static NSArray *itemKVOProperties();
368 static NSArray *assetTrackMetadataKeyNames();
369 static NSArray *playerKVOProperties();
370 static AVAssetTrack* firstEnabledTrack(NSArray* tracks);
371
#if !LOG_DISABLED
// Render a bool as a human-readable C string for LOG() output.
static const char *boolString(bool value)
{
    if (value)
        return "true";
    return "false";
}
#endif
378
379 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
380 static dispatch_queue_t globalLoaderDelegateQueue()
381 {
382     static dispatch_queue_t globalQueue;
383     static dispatch_once_t onceToken;
384     dispatch_once(&onceToken, ^{
385         globalQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
386     });
387     return globalQueue;
388 }
389 #endif
390
391 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
392 static dispatch_queue_t globalPullDelegateQueue()
393 {
394     static dispatch_queue_t globalQueue;
395     static dispatch_once_t onceToken;
396     dispatch_once(&onceToken, ^{
397         globalQueue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
398     });
399     return globalQueue;
400 }
401 #endif
402
// Adapts WebCore's AuthenticationClient interface to an
// NSURLAuthenticationChallenge: each AuthenticationClient callback is
// forwarded to the challenge's sender using the corresponding
// NSURLAuthenticationChallengeSender message. Ref-counting for the abstract
// client interface is satisfied by RefCounted.
403 #if USE(CFNETWORK)
404 class WebCoreNSURLAuthenticationChallengeClient : public RefCounted<WebCoreNSURLAuthenticationChallengeClient>, public AuthenticationClient {
405 public:
406     static RefPtr<WebCoreNSURLAuthenticationChallengeClient> create(NSURLAuthenticationChallenge *challenge)
407     {
408         return adoptRef(new WebCoreNSURLAuthenticationChallengeClient(challenge));
409     }
410
411     using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::ref;
412     using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::deref;
413
414 private:
415     WebCoreNSURLAuthenticationChallengeClient(NSURLAuthenticationChallenge *challenge)
416         : m_challenge(challenge)
417     {
418         ASSERT(m_challenge);
419     }
420
421     virtual void refAuthenticationClient() override { ref(); }
422     virtual void derefAuthenticationClient() override { deref(); }
423
424     virtual void receivedCredential(const AuthenticationChallenge&, const Credential& credential) override
425     {
426         [[m_challenge sender] useCredential:credential.nsCredential() forAuthenticationChallenge:m_challenge.get()];
427     }
428
429     virtual void receivedRequestToContinueWithoutCredential(const AuthenticationChallenge&) override
430     {
431         [[m_challenge sender] continueWithoutCredentialForAuthenticationChallenge:m_challenge.get()];
432     }
433
434     virtual void receivedCancellation(const AuthenticationChallenge&) override
435     {
436         [[m_challenge sender] cancelAuthenticationChallenge:m_challenge.get()];
437     }
438
439     // These two sender messages are optional protocol additions, so probe with
440     // respondsToSelector: before sending.
441     virtual void receivedRequestToPerformDefaultHandling(const AuthenticationChallenge&) override
442     {
443         if ([[m_challenge sender] respondsToSelector:@selector(performDefaultHandlingForAuthenticationChallenge:)])
444             [[m_challenge sender] performDefaultHandlingForAuthenticationChallenge:m_challenge.get()];
445     }
446
447     virtual void receivedChallengeRejection(const AuthenticationChallenge&) override
448     {
449         if ([[m_challenge sender] respondsToSelector:@selector(rejectProtectionSpaceAndContinueWithChallenge:)])
450             [[m_challenge sender] rejectProtectionSpaceAndContinueWithChallenge:m_challenge.get()];
451     }
452
453     RetainPtr<NSURLAuthenticationChallenge> m_challenge;
454 };
455 #endif
454
// Registers this engine with the MediaPlayer engine registry, but only when
// the soft-linked AVFoundation framework is actually available at runtime.
// The lambda is the factory used to construct per-player instances.
455 void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
456 {
457     if (isAvailable())
458         registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player); },
459             getSupportedTypes, supportsType, 0, 0, 0, supportsKeySystem);
460 }
461
// Constructor: builds the Objective-C helper objects (KVO observer, video
// output pull delegate, resource loader delegate) that call back into this
// instance, and zero-initializes the cached AVPlayer/AVPlayerItem state that
// is later maintained via KVO. The member-initializer order mirrors the
// declaration order in the header.
462 MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
463     : MediaPlayerPrivateAVFoundation(player)
464     , m_weakPtrFactory(this)
465 #if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
466     , m_videoFullscreenLayerManager(VideoFullscreenLayerManager::create())
467     , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
468 #endif
469     , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
470     , m_videoFrameHasDrawn(false)
471     , m_haveCheckedPlayability(false)
472 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
473     , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
474     , m_videoOutputSemaphore(nullptr)
475 #endif
476 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
477     , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
478 #endif
479     , m_currentTextTrack(0)
480     , m_cachedRate(0)
481     , m_cachedTotalBytes(0)
482     , m_pendingStatusChanges(0)
483     , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
484     , m_cachedLikelyToKeepUp(false)
485     , m_cachedBufferEmpty(false)
486     , m_cachedBufferFull(false)
487     , m_cachedHasEnabledAudio(false)
488     , m_shouldBufferData(true)
489     , m_cachedIsReadyForDisplay(false)
490     , m_haveBeenAskedToCreateLayer(false)
491 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
492     , m_allowsWirelessVideoPlayback(true)
493 #endif
494 {
495 }
496
// Destructor: detaches the Objective-C delegates first (setCallback:0) so any
// in-flight callbacks become no-ops against a dead C++ object, invalidates
// outstanding resource loaders, tears down the video layer, and finally runs
// the full cancelLoad() teardown (KVO deregistration, cached-state reset).
497 MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
498 {
499 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
500     [m_loaderDelegate.get() setCallback:0];
501     [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];
502
503     for (auto& pair : m_resourceLoaderMap)
504         pair.value->invalidate();
505 #endif
506 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
507     [m_videoOutputDelegate setCallback:0];
508     [m_videoOutput setDelegate:nil queue:0];
509     // The semaphore is a manually-managed dispatch object; release it here.
510     if (m_videoOutputSemaphore)
511         dispatch_release(m_videoOutputSemaphore);
512 #endif
513
514     if (m_videoLayer)
515         destroyVideoLayer();
516
517     cancelLoad();
518 }
518
// Cancels any in-progress load and tears down all AVFoundation state: video
// rendering, notification/KVO observers, the asset, the player item, and the
// player itself, then resets every cached property derived from them. Load
// state changes are suppressed for the duration so completion handlers fired
// by the cancellation do not re-enter the player.
519 void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
520 {
521     LOG(Media, "MediaPlayerPrivateAVFoundationObjC::cancelLoad(%p)", this);
522     tearDownVideoRendering();
523
524     [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
525     [m_objcObserver.get() disconnect];
526
527     // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
528     setIgnoreLoadStateChanges(true);
529     if (m_avAsset) {
530         [m_avAsset.get() cancelLoading];
531         m_avAsset = nil;
532     }
533
534     clearTextTracks();
535
536 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
537     if (m_legibleOutput) {
538         if (m_avPlayerItem)
539             [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
540         m_legibleOutput = nil;
541     }
542 #endif
543
544     // Deregister every KVO key path that was registered on the item and the
545     // player; the key lists must match those used at observer-registration time.
546     if (m_avPlayerItem) {
547         for (NSString *keyName in itemKVOProperties())
548             [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
549         
550         m_avPlayerItem = nil;
551     }
552     if (m_avPlayer) {
553         if (m_timeObserver)
554             [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
555         m_timeObserver = nil;
556
557         for (NSString *keyName in playerKVOProperties())
558             [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
559         m_avPlayer = nil;
560     }
561
562     // Reset cached properties
563     m_pendingStatusChanges = 0;
564     m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
565     m_cachedSeekableRanges = nullptr;
566     m_cachedLoadedRanges = nullptr;
567     m_cachedHasEnabledAudio = false;
568     m_cachedPresentationSize = FloatSize();
569     m_cachedDuration = MediaTime::zeroTime();
570
571     for (AVPlayerItemTrack *track in m_cachedTracks.get())
572         [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
573     m_cachedTracks = nullptr;
574
575 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
576     if (m_provider) {
577         m_provider->setPlayerItem(nullptr);
578         m_provider->setAudioTrack(nullptr);
579     }
580 #endif
581
582     setIgnoreLoadStateChanges(false);
583 }
582
// A layer renderer is considered present once layer creation has been
// requested (see createVideoLayer()), even before the layer actually exists.
583 bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
584 {
585     return m_haveBeenAskedToCreateLayer;
586 }
587
// True when either a video output (preferred, when available) or an image
// generator exists to paint frames into a graphics context.
588 bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
589 {
590 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
591     if (m_videoOutput)
592         return true;
593 #endif
594     return m_imageGenerator;
595 }
596
// Creates whichever context renderer the build supports: an
// AVPlayerItemVideoOutput when available, otherwise an AVAssetImageGenerator.
597 void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
598 {
599 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
600     createVideoOutput();
601 #else
602     createImageGenerator();
603 #endif
604 }
605
// Lazily creates the AVAssetImageGenerator used as the fallback context
// renderer. No-op without an asset or if a generator already exists. Zero
// time tolerances request frames at the exact time asked for, at the cost of
// slower generation.
606 void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
607 {
608     LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p)", this);
609
610     if (!m_avAsset || m_imageGenerator)
611         return;
612
613     m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];
614
615     [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
616     [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
617     [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
618     [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];
619
620     LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
621 }
622
// Destroys every kind of context renderer this player may have created
// (video output, OpenGL video output, image generator).
623 void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
624 {
625 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
626     destroyVideoOutput();
627     destroyOpenGLVideoOutput();
628 #endif
629     destroyImageGenerator();
630 }
631
// Releases the image generator, if any; the RetainPtr assignment drops the
// underlying Objective-C object.
632 void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
633 {
634     if (!m_imageGenerator)
635         return;
636
637     LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator(%p) - destroying  %p", this, m_imageGenerator.get());
638
639     m_imageGenerator = 0;
640 }
641
// Requests creation of the AVPlayerLayer. The actual work is bounced to the
// main thread; the preconditions are re-checked inside the callback because
// state may have changed (or another request completed) before it runs.
// m_haveBeenAskedToCreateLayer is only set once the main-thread block commits
// to creating the layer.
642 void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
643 {
644     if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
645         return;
646
647     // Weak pointer guards against this player being destroyed before the
648     // main-thread callback executes.
649     auto weakThis = createWeakPtr();
650     callOnMainThread([this, weakThis] {
651         if (!weakThis)
652             return;
653
654         if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
655             return;
656         m_haveBeenAskedToCreateLayer = true;
657
658         if (!m_videoLayer)
659             createAVPlayerLayer();
660
661 #if USE(VIDEOTOOLBOX)
662         if (!m_videoOutput)
663             createVideoOutput();
664 #endif
665
666         player()->client().mediaPlayerRenderingModeChanged(player());
667     });
668 }
667
// Creates and configures the AVPlayerLayer: attaches it to the player,
// observes its readyForDisplay key via KVO, applies gravity/scale, and sizes
// it to the media element's content box. On platforms with a fullscreen layer
// manager the layer is handed off to it; otherwise it is framed directly.
668 void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
669 {
670     if (!m_avPlayer)
671         return;
672
673     m_videoLayer = adoptNS([allocAVPlayerLayerInstance() init]);
674     [m_videoLayer setPlayer:m_avPlayer.get()];
675     [m_videoLayer setBackgroundColor:cachedCGColor(Color::black)];
676 #ifndef NDEBUG
677     [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
678 #endif
679     [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
680     updateVideoLayerGravity();
681     [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
682     IntSize defaultSize = snappedIntRect(player()->client().mediaPlayerContentBoxRect()).size();
683     LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoLayer(%p) - returning %p", this, m_videoLayer.get());
684
685 #if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
686     m_videoFullscreenLayerManager->setVideoLayer(m_videoLayer.get(), defaultSize);
687
688 #if PLATFORM(IOS)
689     // setPIPModeEnabled: is SPI that may not exist on all OS versions, so probe first.
690     if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
691         [m_videoLayer setPIPModeEnabled:(player()->fullscreenMode() & MediaPlayer::VideoFullscreenModePictureInPicture)];
692 #endif
693 #else
694     [m_videoLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
695 #endif
696 }
696
// Tears down the inline video layer: removes the readyForDisplay KVO observer,
// detaches the layer from the player, and (where present) tells the fullscreen
// layer manager before dropping our reference.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer(%p) - destroying %p", this, m_videoLayer.get());

    // Balances the addObserver in createAVPlayerLayer().
    [m_videoLayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer.get() setPlayer:nil];

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    m_videoFullscreenLayerManager->didDestroyVideoLayer();
#endif

    m_videoLayer = nil;
}
713
// Returns the wall-clock date (milliseconds since the epoch) corresponding to
// media time zero, or an invalid MediaTime when the media carries no date.
MediaTime MediaPlayerPrivateAVFoundationObjC::getStartDate() const
{
    // The item's currentDate advances with the playback position, so subtract
    // the current playback offset to recover the start date.
    double dateOffsetMilliseconds = [[m_avPlayerItem currentDate] timeIntervalSince1970] * 1000;

    // AVFoundation reports 0 when the media file has no start date; no live
    // streams were made during the epoch (1970), so 0 means "no date".
    if (!dateOffsetMilliseconds)
        return MediaTime::invalidTime();

    double playbackOffsetMilliseconds = CMTimeGetSeconds([m_avPlayerItem currentTime]) * 1000;

    // Round away the sub-millisecond error introduced by the subtraction.
    return MediaTime::createWithDouble(round(dateOffsetMilliseconds - playbackOffsetMilliseconds));
}
728
// True when there is something to show: the layer's readyForDisplay state when
// rendering to a layer, otherwise whether a frame was ever painted to a context.
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    bool renderingToLayer = currentRenderingMode() == MediaRenderingToLayer;
    return renderingToLayer ? m_cachedIsReadyForDisplay : m_videoFrameHasDrawn;
}
736
737 #if ENABLE(AVF_CAPTIONS)
// Maps a WebCore text track kind to the media-characteristic array used when
// registering out-of-band tracks with AVURLAsset.
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    // With the 2015 caption behavior enabled, every out-of-band track is
    // exposed as auxiliary content regardless of kind.
    static bool manualSelectionMode = MTEnableCaption2015BehaviorPtr() && MTEnableCaption2015BehaviorPtr()();
    if (manualSelectionMode)
        return @[ AVMediaCharacteristicIsAuxiliaryContent ];

    // FIXME: Match these to correct types:
    switch (kind) {
    case PlatformTextTrack::Description:
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, nil];
    case PlatformTextTrack::Forced:
        return [NSArray arrayWithObjects: AVMediaCharacteristicContainsOnlyForcedSubtitles, nil];
    case PlatformTextTrack::Caption:
    case PlatformTextTrack::Subtitle:
    default:
        // Captions, subtitles, and unknown kinds all share this characteristic.
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];
    }
}
759     
// Out-of-band caption hook: forwards a platform track-mode change to the
// shared trackModeChanged() handling.
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
764     
// Reconciles the mode (showing/hidden/disabled) of each out-of-band text track
// with the mode recorded in the page's PlatformTextTrack sources, matching
// tracks by the unique identifier embedded in the AVMediaSelectionOption.
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();

    for (auto& textTrack : m_textTracks) {
        // Only out-of-band tracks are driven by the page's track sources.
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;

        RefPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> currentOption = trackPrivate->mediaSelectionOption();

        for (auto& track : outOfBandTrackSources) {
            // The outOfBandIdentifier was populated from the source's uniqueId
            // when the asset was created (see createAVAssetForURL).
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();

            if (![[currentOption.get() outOfBandIdentifier] isEqual: reinterpret_cast<const NSString*>(uniqueID.get())])
                continue;

            // Default to Hidden for any unrecognized platform mode.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;

            textTrack->setMode(mode);
            break;
        }
    }
}
795 #endif
796
797
// Runs |url| through NSURLProtocol canonicalization so loads initiated by
// AVFoundation match the form WebKit's loader would use; falls back to the
// as-parsed URL whenever canonicalization is not possible.
static NSURL *canonicalURL(const String& url)
{
    NSURL *cocoaURL = URL(ParsedURLString, url);
    if (url.isEmpty())
        return cocoaURL;

    RetainPtr<NSURLRequest> urlRequest = adoptNS([[NSURLRequest alloc] initWithURL:cocoaURL]);
    if (!urlRequest)
        return cocoaURL;

    NSURLRequest *canonicalRequest = [NSURLProtocol canonicalRequestForRequest:urlRequest.get()];
    return canonicalRequest ? [canonicalRequest URL] : cocoaURL;
}
814
815 #if PLATFORM(IOS)
// Converts a WebCore Cookie into the NSHTTPCookie form AVFoundation expects in
// AVURLAssetHTTPCookiesKey. cookie.expires is in milliseconds; NSDate wants seconds.
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    NSMutableDictionary *properties = [NSMutableDictionary dictionaryWithDictionary:@{
        NSHTTPCookieName: cookie.name,
        NSHTTPCookieValue: cookie.value,
        NSHTTPCookieDomain: cookie.domain,
        NSHTTPCookiePath: cookie.path,
        NSHTTPCookieExpires: [NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)],
    }];
    // Optional attributes are only added when set.
    if (cookie.secure)
        properties[NSHTTPCookieSecure] = @YES;
    if (cookie.session)
        properties[NSHTTPCookieDiscard] = @YES;

    return [NSHTTPCookie cookieWithProperties:properties];
}
833 #endif
834
// Builds the AVURLAsset for |url|, threading WebKit state into the asset
// options: reference restrictions, Referer/User-Agent headers, out-of-band
// caption tracks, and (on iOS) client bundle id, bound network interface, and
// cookies. Callbacks are delayed for the duration of the setup.
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
{
    if (m_avAsset)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(%p) - url = %s", this, url.utf8().data());

    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);    

    // Never allow the media to reference across the local/remote boundary.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    // NOTE(review): the header-fields option key is spelled as a literal string
    // here rather than a declared constant — appears to be SPI.
    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject: [NSNumber numberWithBool: TRUE] forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

#if PLATFORM(IOS)
    // FIXME: rdar://problem/20354688
    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty() && AVURLAssetClientBundleIdentifierKey)
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];
#endif

#if ENABLE(AVF_CAPTIONS)
    // Describe each out-of-band caption track so AVFoundation exposes it as a
    // media selection option; the uniqueId round-trips back to us through the
    // option's outOfBandIdentifier (see synchronizeTextTrackState).
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: reinterpret_cast<const NSString*>(label.get()),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: reinterpret_cast<const NSString*>(language.get()),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: reinterpret_cast<const NSString*>(uniqueID.get()),
                AVOutOfBandAlternateTrackSourceKey: reinterpret_cast<const NSString*>(url.get()),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS)
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS)
    // Pass the page's cookies for this URL along to AVFoundation's loader.
    Vector<Cookie> cookies;
    if (player()->getRawCookies(URL(ParsedURLString, url), cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([allocAVURLAssetInstance() initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Route key/resource requests through our loader delegate on its own queue.
    [[m_avAsset.get() resourceLoader] setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];
#endif

    // A new asset means playability must be re-checked (see checkPlayability).
    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
922
// Installs |item| as the player's current item. Replacement is funneled to the
// main thread (presumably an AVPlayer threading requirement); the dispatched
// lambda holds strong references so both objects survive until the swap runs.
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItemType *item)
{
    if (!m_avPlayer)
        return;

    if (pthread_main_np()) {
        [m_avPlayer replaceCurrentItemWithPlayerItem:item];
        return;
    }

    RetainPtr<AVPlayerType> protectedPlayer = m_avPlayer.get();
    RetainPtr<AVPlayerItemType> protectedItem = item;
    dispatch_async(dispatch_get_main_queue(), [protectedPlayer, protectedItem] {
        [protectedPlayer replaceCurrentItemWithPlayerItem:protectedItem.get()];
    });
}
939
// Creates and configures the AVPlayer: installs KVO for all player properties,
// turns off automatic media selection, applies external-playback state, and
// attaches any already-created layer and player item.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayer(%p)", this);

    setDelayCallbacks(true);

    m_avPlayer = adoptNS([allocAVPlayerInstance() init]);
    for (NSString *keyName in playerKVOProperties())
        [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Media selection (e.g. caption tracks) is driven explicitly, not by AVFoundation.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET) && !PLATFORM(IOS)
    // Re-apply a playback target that was chosen before the player existed.
    if (m_shouldPlayToPlaybackTarget)
        setShouldPlayToPlaybackTarget(true);
#endif

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
975
// Creates the AVPlayerItem for the current asset and wires up everything that
// hangs off it: the end-of-playback notification, KVO for item properties, the
// pitch algorithm, the legible (caption) output, and the Web Audio provider.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem(%p)", this);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([allocAVPlayerItemInstance() initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // Prior notifications are requested in addition to new values for item properties.
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Deliver WebVTT cues to m_objcObserver on the main queue, slightly ahead
    // of their display time, without letting AVFoundation render them itself.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([allocAVPlayerItemLegibleOutputInstance() initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Point the Web Audio source provider at the new item's audio track.
    if (m_provider) {
        m_provider->setPlayerItem(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }
#endif

    setDelayCallbacks(false);
}
1021
// Asynchronously loads the asset's "playable" property (at most once per asset)
// and posts AssetPlayabilityKnown back on the main thread when it resolves.
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::checkPlayability(%p)", this);
    auto weakThis = createWeakPtr();

    // The completion handler may run on any queue; the weak pointer protects
    // against this object being destroyed before the hop to the main thread.
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"playable"] completionHandler:^{
        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
1038
// Kicks off asynchronous loading of the asset-level metadata keys and, once the
// track list is known, the per-track metadata keys. A dispatch group counts the
// outstanding loads; when it drains, m_objcObserver is told metadata is loaded.
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);

    OSObjectPtr<dispatch_group_t> metadataLoadingGroup = adoptOSObject(dispatch_group_create());
    dispatch_group_enter(metadataLoadingGroup.get());
    auto weakThis = createWeakPtr();
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            // Only fan out to the tracks if we still exist and the track list loaded.
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup.get());
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup.get());
                    }];
                }
            }
            // Balances the enter before the asset-level load above.
            dispatch_group_leave(metadataLoadingGroup.get());
        });
    }];

    dispatch_group_notify(metadataLoadingGroup.get(), dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });
    });
}
1068
// Translates the KVO-cached AVPlayerItem state into the engine-neutral
// ItemStatus enumeration, checking conditions in priority order.
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    switch (m_cachedItemStatus) {
    case AVPlayerItemStatusUnknown:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    case AVPlayerItemStatusFailed:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    default:
        break;
    }

    // Buffering state refines a ready item's status.
    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1087
// Exposes the underlying AVPlayer to clients that need the native object.
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformMedia(%p)", this);
    PlatformMedia result;
    result.type = PlatformMedia::AVFoundationMediaPlayerType;
    result.media.avfMediaPlayer = m_avPlayer.get();
    return result;
}
1096
// The layer the compositor should use for inline video, or null until layer
// creation has been requested (see createVideoLayer).
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    if (!m_haveBeenAskedToCreateLayer)
        return nullptr;

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    // The fullscreen layer manager owns the inline container on these platforms.
    return m_videoFullscreenLayerManager->videoInlineLayer();
#else
    return m_videoLayer.get();
#endif
}
1105
1106 #if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
// Moves the video layer into (or out of) the given fullscreen container layer,
// reparenting the text track representation with it, and re-evaluates whether
// external playback should be disabled for the new configuration.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer)
{
    if (m_videoFullscreenLayerManager->videoFullscreenLayer() == videoFullscreenLayer)
        return;

    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer);

    // Keep the caption representation attached to whichever layer hosts the video.
    if (m_videoFullscreenLayerManager->videoFullscreenLayer() && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayerManager->videoFullscreenLayer() addSublayer:m_textTrackRepresentationLayer.get()];
    }

    updateDisableExternalPlayback();
}
1121
// Propagates the fullscreen frame to the layer manager and keeps the caption
// layer's bounds in sync with the new video geometry.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
    syncTextTrackBounds();
}
1127
// Applies the requested scaling behavior to the video layer, mapping WebCore's
// gravity enumeration onto the corresponding AVLayerVideoGravity constant.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;
    if (!m_videoLayer)
        return;

    NSString *videoGravity;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        videoGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        videoGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    }

    // Pointer comparison, matching how these constant strings are compared elsewhere.
    if ([m_videoLayer videoGravity] == videoGravity)
        return;

    [m_videoLayer setVideoGravity:videoGravity];
    syncTextTrackBounds();
}
1150
// Reacts to fullscreen-mode changes. On iOS this toggles the layer's PiP mode
// and recomputes external-playback availability — PiP and external playback
// are mutually exclusive (see updateDisableExternalPlayback).
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenMode(MediaPlayer::VideoFullscreenMode mode)
{
#if PLATFORM(IOS)
    // setPIPModeEnabled: is SPI, so guard with respondsToSelector.
    if (m_videoLayer && [m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(mode & MediaPlayer::VideoFullscreenModePictureInPicture)];
    updateDisableExternalPlayback();
#else
    UNUSED_PARAM(mode);
#endif
}
1161
1162 #endif // PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
1163
1164 #if PLATFORM(IOS)
// Most recent timed metadata payload, or nil when none has arrived.
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    // RetainPtr::get() already yields nil for a null pointer, so no explicit
    // null check is needed.
    return m_currentMetaData.get();
}
1171
// Returns AVFoundation's access (playback statistics) log as a string, or the
// empty string when there is no player item or no log is available yet.
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemAccessLog *log = [m_avPlayerItem.get() accessLog];
    // Guard against a nil log: messaging nil would hand -initWithData:encoding:
    // nil data and an invalid (0) encoding.
    if (!log)
        return emptyString();

    RetainPtr<NSString> logString = adoptNS([[NSString alloc] initWithData:[log extendedLogData] encoding:[log extendedLogDataStringEncoding]]);
    return logString.get();
}
1182
// Returns AVFoundation's error log as a string, or the empty string when there
// is no player item or no log is available yet.
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *log = [m_avPlayerItem.get() errorLog];
    // Guard against a nil log: messaging nil would hand -initWithData:encoding:
    // nil data and an invalid (0) encoding.
    if (!log)
        return emptyString();

    RetainPtr<NSString> logString = adoptNS([[NSString alloc] initWithData:[log extendedLogData] encoding:[log extendedLogDataStringEncoding]]);
    return logString.get();
}
1193 #endif
1194
// Shows or hides the video layer inside a CATransaction with implicit
// animations disabled, so visibility changes take effect immediately.
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    if (m_videoLayer)
        [m_videoLayer.get() setHidden:!isVisible];
    [CATransaction commit];
}
1203     
// Starts playback by setting the player's rate to the requested rate.
// m_cachedRate is updated eagerly so rate() reflects the new value immediately.
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPlay(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = requestedRate();
    [m_avPlayer.get() setRate:requestedRate()];
    setDelayCallbacks(false);
}
1215
// Pauses playback by setting the player's rate to 0.
// m_cachedRate is updated eagerly so rate() reflects the pause immediately.
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPause(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = 0;
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
1227
// Reports the media duration without forcing a synchronous property load.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Asking the asset for its duration before loading completes would fetch
    // the answer synchronously, so bail until the asset reports loaded.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();

    // Prefer the player item's duration once it is ready to play; some assets
    // never report a duration of their own.
    CMTime cmDuration = (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        ? [m_avPlayerItem.get() duration]
        : [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return toMediaTime(cmDuration);

    // Indefinite durations map to positive infinity.
    if (CMTIME_IS_INDEFINITE(cmDuration))
        return MediaTime::positiveInfiniteTime();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %s", this, toString(MediaTime::invalidTime()).utf8().data());
    return MediaTime::invalidTime();
}
1252
// Current playback position; zero before metadata/item exist, and clamped so a
// non-numeric or negative reported time never surfaces.
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime itemTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(itemTime))
        return MediaTime::zeroTime();

    // Clamp in case the reported time is negative.
    return std::max(toMediaTime(itemTime), MediaTime::zeroTime());
}
1264
// Seeks the player item to |time| within the given tolerances; completion is
// reported asynchronously via seekCompleted() on the main thread.
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // Partially-delivered metadata cues would straddle the seek; flush them.
    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = toCMTime(time);
    CMTime cmBefore = toCMTime(negativeTolerance);
    CMTime cmAfter = toCMTime(positiveTolerance);

    auto weakThis = createWeakPtr();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::seekToTime(%p) - calling seekToTime", this);

    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            // |finished| is NO when the seek did not complete, e.g. it was
            // superseded by another seek.
            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1293
// Sets the player volume. Intentionally a no-op on iOS (volume is presumably
// controlled by the system there — note the early return).
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS)
    UNUSED_PARAM(volume);
    return;
#else
    if (!metaDataAvailable())
        return;

    [m_avPlayer.get() setVolume:volume];
#endif
}
1306
// NOTE(review): intentionally does nothing beyond logging — caption visibility
// appears to be handled elsewhere (via the track/media-selection machinery).
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - set to %s", this, boolString(closedCaptionsVisible));
}
1316
// Applies an explicit playback rate, mirroring it into m_cachedRate first so
// rate() answers correctly before any observer callback arrives.
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    setDelayCallbacks(true);
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1324
// The last rate pushed to the player (cached by setRateDouble / platformPlay /
// platformPause), or 0 before metadata is available.
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    return metaDataAvailable() ? m_cachedRate : 0;
}
1332
// Chooses the time-pitch algorithm matching the preserves-pitch setting; a
// no-op until a player item exists (createAVPlayerItem applies it on creation).
void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
{
    if (!m_avPlayerItem)
        return;

    NSString *pitchAlgorithm = preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed;
    [m_avPlayerItem setAudioTimePitchAlgorithm:pitchAlgorithm];
}
1338
// Builds the buffered time ranges from the loadedTimeRanges values cached via KVO.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto bufferedRanges = std::make_unique<PlatformTimeRanges>();

    if (!m_avPlayerItem)
        return bufferedRanges;

    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        // Skip invalid or empty ranges rather than adding degenerate entries.
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;
        bufferedRanges->add(toMediaTime(range.start), toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return bufferedRanges;
}
1353
// Earliest seekable position across all valid seekable ranges; zero when the
// cached ranges are missing, empty, or contain no valid range.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    MediaTime earliest = MediaTime::positiveInfiniteTime();
    bool foundValidRange = false;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundValidRange = true;
        MediaTime rangeStart = toMediaTime(range.start);
        if (rangeStart < earliest)
            earliest = rangeStart;
    }
    return foundValidRange ? earliest : MediaTime::zeroTime();
}
1373
// Latest seekable position across all valid seekable ranges. Lazily pulls the
// ranges from the player item when the KVO cache has not been populated yet.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    MediaTime latest;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        MediaTime rangeEnd = toMediaTime(CMTimeRangeGetEnd(range));
        if (rangeEnd > latest)
            latest = rangeEnd;
    }
    return latest;
}
1391
// Latest loaded position across all valid buffered ranges; zero when no ranges
// have been cached yet.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    MediaTime latest;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        MediaTime rangeEnd = toMediaTime(CMTimeRangeGetEnd(range));
        if (rangeEnd > latest)
            latest = rangeEnd;
    }

    return latest;
}
1410
// Sums the total sample data length of every cached track, memoizing the
// result in m_cachedTotalBytes so the summation runs at most once per cache.
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    if (!m_cachedTotalBytes) {
        for (AVPlayerItemTrack *track in m_cachedTracks.get())
            m_cachedTotalBytes += [[track assetTrack] totalSampleDataLength];
    }

    return m_cachedTotalBytes;
}
1424
// Replaces the current AVAsset reference; no other state is reset here.
void MediaPlayerPrivateAVFoundationObjC::setAsset(RetainPtr<id> asset)
{
    m_avAsset = asset;
}
1429
// Derives an aggregate loading status for the asset from the per-key load
// status of every metadata key we requested: any pending key means Loading,
// any failed/cancelled key short-circuits, and a fully loaded asset is further
// upgraded to Playable when AVFoundation says so.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];
#if !LOG_DISABLED
        if (error)
            LOG(Media, "MediaPlayerPrivateAVFoundation::assetStatus - statusOfValueForKey failed for %s, error = %s", [keyName UTF8String], [[error localizedDescription] UTF8String]);
#endif

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading;// At least one key is not loaded yet.
        
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue])
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1458
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    // No asset means there is no loading error to report.
    if (!m_avAsset)
        return 0;

    // Ask the asset for the error (if any) it encountered loading the "playable" key.
    NSError *playabilityError = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&playabilityError];

    // Messaging nil returns 0, so a missing error yields error code 0.
    return [playabilityError code];
}
1468
// Draws the current video frame into a 2D graphics context, preferring the
// AVPlayerItemVideoOutput path and falling back to the AVAssetImageGenerator path.
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    // Delay callbacks so an AVFoundation notification delivered while inside the
    // ObjC exception block cannot reenter this object mid-paint.
    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    m_videoFrameHasDrawn = true;
}
1489
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    // Nothing to do before metadata arrives or when painting is disabled.
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    // When already rendering to a layer the compositor displays the video for us,
    // and paint() is best effort: only draw when a context renderer (image
    // generator or video output) already exists.
    bool renderingToLayer = currentRenderingMode() == MediaRenderingToLayer;
    if (renderingToLayer || !hasContextRenderer())
        return;

    paintCurrentFrameInContext(context, rect);
}
1505
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext& context, const FloatRect& rect)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(%p)", this);

    RetainPtr<CGImageRef> image = createImageForTimeInRect(currentTime(), rect);
    if (!image)
        return;

    // CoreGraphics uses a flipped y-axis relative to WebCore, so move the origin to
    // the bottom of the destination rect and flip vertically before drawing.
    GraphicsContextStateSaver stateSaver(context);
    context.translate(rect.x(), rect.y() + rect.height());
    context.scale(FloatSize(1.0f, -1.0f));
    context.setImageInterpolationQuality(InterpolationLow);
    IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context.platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
}
1520
static const HashSet<String, ASCIICaseInsensitiveHash>& avfMIMETypes()
{
    // Build the set of MIME types AVFoundation advertises exactly once, on first
    // use, and keep it alive for the lifetime of the process.
    static NeverDestroyed<HashSet<String, ASCIICaseInsensitiveHash>> mimeTypeCache = []() {
        HashSet<String, ASCIICaseInsensitiveHash> supportedTypes;
        for (NSString *mimeType in [AVURLAsset audiovisualMIMETypes])
            supportedTypes.add(mimeType);
        return supportedTypes;
    }();

    return mimeTypeCache;
}
1532
// Synchronously snapshots the media at |time| via AVAssetImageGenerator and converts
// the result to the sRGB color space. Potentially slow; used by the image-generator
// paint path when no video output frame is available.
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    // Cap the generated image at the paint rect so the generator can decode small.
    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), sRGBColorSpaceRef()));

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return image;
}
1554
// Copies the set of MIME types this engine can play into |supportedTypes|.
// NOTE: the closing brace was missing in the original, leaving the function
// unterminated; restored here.
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& supportedTypes)
{
    supportedTypes = avfMIMETypes();
}
1559
1560 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// FairPlay Streaming (both accepted spellings) and W3C Clear Key are the only
// key systems this engine recognizes.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringASCIICase(keySystem, "com.apple.fps")
        || equalIgnoringASCIICase(keySystem, "com.apple.fps.1_0")
        || equalIgnoringASCIICase(keySystem, "org.w3c.clearkey");
}
1567 #endif
1568
// Engine-selection entry point: reports whether AVFoundation can (probably/maybe)
// play the given container/codec/key-system combination.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(ENCRYPTED_MEDIA)
    // From: <http://dvcs.w3.org/hg/html-media/raw-file/eme-v0.1b/encrypted-media/encrypted-media.html#dom-canplaytype>
    // In addition to the steps in the current specification, this method must run the following steps:

    // 1. Check whether the Key System is supported with the specified container and codec type(s) by following the steps for the first matching condition from the following list:
    //    If keySystem is null, continue to the next step.
    if (!parameters.keySystem.isNull() && !parameters.keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        if (equalIgnoringASCIICase(parameters.keySystem, "org.w3c.clearkey") && !parameters.type.isEmpty() && !equalIgnoringASCIICase(parameters.type, "application/x-mpegurl"))
            return MediaPlayer::IsNotSupported;

        // If keySystem contains an unrecognized or unsupported Key System, return the empty string
        if (!keySystemIsSupported(parameters.keySystem))
            return MediaPlayer::IsNotSupported;

        // If the Key System specified by keySystem does not support decrypting the container and/or codec specified in the rest of the type string.
        // (AVFoundation does not provide an API which would allow us to determine this, so this is a no-op)
    }

    // 2. Return "maybe" or "probably" as appropriate per the existing specification of canPlayType().
#endif

#if ENABLE(MEDIA_SOURCE)
    // MSE and MediaStream content are handled by dedicated engines, not this one.
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif
#if ENABLE(MEDIA_STREAM)
    if (parameters.isMediaStream)
        return MediaPlayer::IsNotSupported;
#endif
    if (isUnsupportedMIMEType(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The container type must either be in the static allow list or advertised by AVFoundation.
    if (!staticMIMETypeList().contains(parameters.type) && !avfMIMETypes().contains(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    // With codecs present, let AVFoundation judge the full extended MIME type.
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type, (NSString *)parameters.codecs];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1615
// Returns whether this engine supports |keySystem|, optionally constrained to
// |mimeType|. Always false when encrypted media support is compiled out.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (!keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        // (The original returned MediaPlayer::IsNotSupported — a SupportsType
        // enumerator — from this bool function; it only behaved correctly because
        // that enumerator happens to convert to false.)
        if (equalIgnoringASCIICase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringASCIICase(mimeType, "application/x-mpegurl"))
            return false;

        if (!keySystemIsSupported(keySystem))
            return false;

        if (!mimeType.isEmpty() && isUnsupportedMIMEType(mimeType))
            return false;

        // Reject MIME types AVFoundation does not advertise unless they are in the static list.
        if (!mimeType.isEmpty() && !staticMIMETypeList().contains(mimeType) && !avfMIMETypes().contains(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1641
1642 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1643 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Completes an AVAssetResourceLoadingRequest from raw key bytes: fills in the
// content information and responds with the requested byte range of |keyData|.
static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
{
    if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
        [infoRequest setContentLength:keyData->byteLength()];
        [infoRequest setByteRangeAccessSupported:YES];
    }

    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
        long long start = [dataRequest currentOffset];
        // Clamp the end of the requested range to the size of the key data.
        long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);

        // A range starting at or beyond the end of the data cannot be satisfied.
        if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
            [request finishLoadingWithError:nil];
            return;
        }

        ASSERT(start <= std::numeric_limits<int>::max());
        ASSERT(end <= std::numeric_limits<int>::max());
        RefPtr<ArrayBuffer> requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
        RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
        [dataRequest respondWithData:nsData.get()];
    }

    [request finishLoading];
}
1669 #endif
1670
// AVAssetResourceLoader delegate entry point. Returns true when WebCore will
// service the request asynchronously, false when AVFoundation should fail it.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    // "skd" URLs are FairPlay Streaming key requests.
    if (scheme == "skd") {
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
        // Size prefix is stored little-endian.
        initDataView->set<uint32_t>(0, keyURISize, true);

        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, 4, keyURI.length());
        // NOTE(review): length is divided by sizeof(unsigned char) (i.e. 1), which is a
        // no-op; presumably keyURI.length() UTF-16 units are intended — confirm.
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

#if ENABLE(ENCRYPTED_MEDIA)
        if (!player()->keyNeeded("com.apple.lskd", emptyString(), static_cast<const unsigned char*>(initDataBuffer->data()), initDataBuffer->byteLength()))
#elif ENABLE(ENCRYPTED_MEDIA_V2)
        RefPtr<Uint8Array> initData = Uint8Array::create(initDataBuffer, 0, initDataBuffer->byteLength());
        if (!player()->keyNeeded(initData.get()))
#endif
            return false;

        // Hold on to the request until the key arrives through the CDM.
        m_keyURIToRequestMap.set(keyURI, avRequest);
        return true;
#if ENABLE(ENCRYPTED_MEDIA_V2)
    } else if (scheme == "clearkey") {
        String keyID = [[[avRequest request] URL] resourceSpecifier];
        StringView keyIDView(keyID);
        CString utf8EncodedKeyId = UTF8Encoding().encode(keyIDView, URLEncodedEntitiesForUnencodables);

        RefPtr<Uint8Array> initData = Uint8Array::create(utf8EncodedKeyId.length());
        initData->setRange((JSC::Uint8Adaptor::Type*)utf8EncodedKeyId.data(), utf8EncodedKeyId.length(), 0);

        // Satisfy the request immediately when the key is already cached.
        auto keyData = player()->cachedKeyForKeyId(keyID);
        if (keyData) {
            fulfillRequestWithKeyData(avRequest, keyData.get());
            return false;
        }

        if (!player()->keyNeeded(initData.get()))
            return false;

        m_keyURIToRequestMap.set(keyID, avRequest);
        return true;
#endif
    }
#endif

    // All other requests are routed through a WebCore resource loader.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1727
// Bridges an NSURLAuthenticationChallenge from AVFoundation into a WebCore
// AuthenticationChallenge and lets the MediaPlayer client decide how to respond.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForResponseToAuthenticationChallenge(NSURLAuthenticationChallenge* nsChallenge)
{
#if USE(CFNETWORK)
    // On CFNetwork builds the challenge must travel through its CF representation.
    RefPtr<WebCoreNSURLAuthenticationChallengeClient> client = WebCoreNSURLAuthenticationChallengeClient::create(nsChallenge);
    RetainPtr<CFURLAuthChallengeRef> cfChallenge = adoptCF([nsChallenge _createCFAuthChallenge]);
    AuthenticationChallenge challenge(cfChallenge.get(), client.get());
#else
    AuthenticationChallenge challenge(nsChallenge);
#endif

    return player()->shouldWaitForResponseToAuthenticationChallenge(challenge);
}
1740
// Called when AVFoundation cancels an in-flight resource loading request; stops
// the corresponding WebCore loader if one is still registered.
// (The original also built an unused String from the request URL's scheme on
// every call; that dead local has been removed.)
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest))
        resourceLoader->stopLoading();
}
1750
// Called when loading of a request finishes or is stopped; drops its association
// with the WebCore loader (releasing the loader if this was the last reference).
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
1755 #endif
1756
// This engine is usable only when both the AVFoundation and CoreMedia frameworks
// can be soft-linked at runtime.
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    return AVFoundationLibrary() && isCoreMediaFrameworkAvailable();
}
1761
// Would snap a requested time to a presentable media time; currently returns the
// input unchanged in all cases (see FIXME below).
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1770
// How long (in seconds) a cached currentTime value may be reused before
// re-querying AVFoundation. Disabled (0) on iOS and OS X 10.10+, where
// time queries are presumably cheap enough — older OS X caches for 5 seconds.
double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 1010
    return 0;
#else
    return 5;
#endif
}
1779
// Applies the aspect-ratio policy to the AVPlayerLayer's video gravity.
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayerManager->videoFullscreenLayer())
        return;
#endif

    // Disable implicit actions so the gravity change is not animated.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    NSString* gravity = shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize;
    [m_videoLayer.get() setVideoGravity:gravity];
    [CATransaction commit];
}
1798
// Returns the first track in |tracks| whose "enabled" flag is set, or nil when
// no track qualifies.
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    NSUInteger matchIndex = [tracks indexOfObjectPassingTest:^(id obj, NSUInteger, BOOL *) {
        return [static_cast<AVAssetTrack*>(obj) isEnabled];
    }];

    return matchIndex == NSNotFound ? nil : [tracks objectAtIndex:matchIndex];
}
1808
// Recomputes hasVideo/hasAudio/hasClosedCaptions and the audio/video track lists
// whenever the AVAsset or AVPlayerItem track collection changes.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the previous primary audio language so we can detect a change below.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    // Batch the characteristic-change notifications fired by the setters below.
    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif

        // The natural size must account for the track's preferredTransform (e.g. rotation).
        presentationSizeDidChange(firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize());
    } else {
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        // Classify each enabled player item track by its asset track's media type.
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // Media-selection options can provide audio/video even without an enabled item track.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
#endif
#endif

        // Always say we have video if the AVPlayerLayer is ready for display to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

    // Without legible output support, fall back to legacy closed-caption track handling.
#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    LOG(Media, "MediaPlayerPrivateAVFoundation:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s",
        this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    if (m_provider)
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
#endif

    setDelayCharacteristicsChangedNotification(false);
}
1914
1915 #if ENABLE(VIDEO_TRACK)
// Diffs the current AVPlayerItemTracks of |trackType| against |oldItems|, rebuilds
// |oldItems| in place, and notifies the MediaPlayer about removed and added tracks.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Only consider tracks of the requested media type.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Partition the old items into those whose underlying track went away and those to keep.
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the client only after oldItems already reflects the new state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
1959
1960 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Media-selection flavor of the track diffing above: updates |group|'s options for
// the given characteristics, diffs them against |oldItems|, and notifies the player.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, const Vector<String>& characteristics, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions(characteristics);

    // Only add selection options which do not have an associated persistent track.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOptionType* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        // -track is probed dynamically; presumably not declared in all SDK versions.
        if (![avOption respondsToSelector:@selector(track)] || ![avOption performSelector:@selector(track)])
            newSelectionOptions.add(option);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Keep items whose selection option survived; collect the rest for removal.
    for (auto& oldItem : oldItems) {
        if (oldItem->mediaSelectionOption() && removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);
    
    // Notify the client only after oldItems already reflects the new state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);
    
    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
2020 #endif
2021
// Syncs m_audioTracks with the item's audio AVPlayerItemTracks and (where
// available) the audible media-selection group, then refreshes track properties.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Also account for audio delivered via media-selection options.
    Vector<String> characteristics = player()->preferredAudioCharacteristics();
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, characteristics);
    }

    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, characteristics, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
#endif

    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateAudioTracks(%p) - audio track count was %lu, is %lu", this, count, m_audioTracks.size());
#endif
}
2048
// Syncs m_videoTracks with the item's video AVPlayerItemTracks and (where
// available) the visual media-selection group, then refreshes track properties.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (!m_visualGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, Vector<String>());
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, Vector<String>(), &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
#endif

    // Refresh the video tracks' cached properties. (The original iterated
    // m_audioTracks here — a copy/paste error from updateAudioTracks() that left
    // video-track properties stale.)
    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateVideoTracks(%p) - video track count was %lu, is %lu", this, count, m_videoTracks.size());
#endif
}
2074
// A separate text-track representation layer is only needed while video is in
// fullscreen, where captions must be rendered into the fullscreen layer hierarchy.
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    if (m_videoFullscreenLayerManager->videoFullscreenLayer())
        return true;
#endif
    return false;
}
2083
// Keeps the caption layer's frame aligned with the displayed video rect while fullscreen.
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    if (!m_videoFullscreenLayerManager->videoFullscreenLayer() || !m_textTrackRepresentationLayer)
        return;

    FloatRect videoFullscreenFrame = m_videoFullscreenLayerManager->videoFullscreenFrame();
    // Prefer the video layer's actual video rect; fall back to the fullscreen frame size.
    CGRect textFrame = m_videoLayer ? [m_videoLayer videoRect] : CGRectMake(0, 0, videoFullscreenFrame.width(), videoFullscreenFrame.height());
    [m_textTrackRepresentationLayer setFrame:textFrame];
#endif
}
2095
// Installs (or removes, when |representation| is null) the platform layer used to
// render captions while video is in fullscreen.
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    PlatformLayer* representationLayer = representation ? representation->platformLayer() : nil;
    if (representationLayer == m_textTrackRepresentationLayer) {
        // Same layer as before; just make sure its bounds are up to date.
        syncTextTrackBounds();
        return;
    }

    if (m_textTrackRepresentationLayer)
        [m_textTrackRepresentationLayer removeFromSuperlayer];

    m_textTrackRepresentationLayer = representationLayer;

    // Attach the new layer only when a fullscreen layer exists to host it.
    if (m_videoFullscreenLayerManager->videoFullscreenLayer() && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayerManager->videoFullscreenLayer() addSublayer:m_textTrackRepresentationLayer.get()];
    }

#else
    UNUSED_PARAM(representation);
#endif
}
2119 #endif // ENABLE(VIDEO_TRACK)
2120
2121 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
// Lazily creates the provider that feeds this player's audio into Web Audio,
// seeding it with the first enabled audible asset track.
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    if (!m_provider) {
        m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }

    return m_provider.get();
}
2131 #endif
2132
// Pushes the cached presentation size to the cross-platform natural-size machinery.
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (!m_avAsset)
        return;

    setNaturalSize(m_cachedPresentationSize);
}
2140     
// True when the URL the asset finally resolved to (after any redirects) has the
// same scheme/host/port as the originally requested URL. Conservatively false
// until the asset's "resolvedURL" key has loaded.
bool MediaPlayerPrivateAVFoundationObjC::hasSingleSecurityOrigin() const 
{
    if (!m_avAsset || [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] != AVKeyValueStatusLoaded)
        return false;
    
    Ref<SecurityOrigin> resolvedOrigin(SecurityOrigin::create(resolvedURL()));
    Ref<SecurityOrigin> requestedOrigin(SecurityOrigin::createFromString(assetURL()));
    return resolvedOrigin.get().isSameSchemeHostPort(&requestedOrigin.get());
}
2150
2151 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Creates the AVPlayerItemVideoOutput used to pull decoded frames, and attaches
// it to the player item. No-op without an item or when an output already exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // With VideoToolbox available, accept the decoder's native format and convert later.
    NSDictionary* attributes = nil;
#else
    NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                nil];
#endif
    m_videoOutput = adoptNS([allocAVPlayerItemVideoOutputInstance() initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    // New-frame notifications are delivered to the delegate on the shared pull queue.
    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p) - returning %p", this, m_videoOutput.get());
}
2174
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    // Detach from the player item (if it still exists) before releasing our reference.
    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput(%p) - destroying  %p", this, m_videoOutput.get());

    m_videoOutput = nullptr;
}
2186
// Pulls the pixel buffer for the item's current time from the video output,
// converting it to 32BGRA via VideoToolbox when that path is compiled in.
// Returns null when no new frame is available or conversion fails.
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::createPixelBuffer()
{
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return 0;

    RetainPtr<CVPixelBufferRef> buffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
    if (!buffer)
        return 0;

#if USE(VIDEOTOOLBOX)
    // Create a VTPixelTransferSession, if necessary, as we cannot guarantee timely delivery of ARGB pixels.
    if (!m_pixelTransferSession) {
        VTPixelTransferSessionRef session = nullptr;
        VTPixelTransferSessionCreate(kCFAllocatorDefault, &session);
        m_pixelTransferSession = adoptCF(session);
    }

    // Convert the delivered buffer to 32BGRA. Initialize outputBuffer and check
    // the creation result: previously a failed CVPixelBufferCreate() left
    // outputBuffer uninitialized and we adopted an indeterminate pointer.
    CVPixelBufferRef outputBuffer = nullptr;
    if (CVPixelBufferCreate(kCFAllocatorDefault, CVPixelBufferGetWidth(buffer.get()), CVPixelBufferGetHeight(buffer.get()), kCVPixelFormatType_32BGRA, 0, &outputBuffer) != kCVReturnSuccess)
        return 0;
    VTPixelTransferSessionTransferImage(m_pixelTransferSession.get(), buffer.get(), outputBuffer);
    buffer = adoptCF(outputBuffer);
#endif

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createPixelBuffer(%p) - creating buffer took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return buffer;
}
2227
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    // Without a player item there can be no frames at all.
    if (!m_avPlayerItem)
        return false;

    // An image captured earlier still counts as an available frame.
    if (m_lastImage)
        return true;

    // Lazily create the output, then ask whether a frame is pending for "now".
    if (!m_videoOutput)
        createVideoOutput();

    return [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]];
}
2241
// CGDataProvider "get byte pointer" callback: info carries the CVPixelBufferRef
// retained in createImageFromPixelBuffer(). Locks the base address read-only;
// the matching unlock happens in CVPixelBufferReleaseBytePointerCallback().
static const void* CVPixelBufferGetBytePointerCallback(void* info)
{
    auto pixelBuffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
    return CVPixelBufferGetBaseAddress(pixelBuffer);
}
2248
// Balances the read-only lock taken in CVPixelBufferGetBytePointerCallback().
static void CVPixelBufferReleaseBytePointerCallback(void* info, const void*)
{
    auto pixelBuffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
}
2254
// Called when the CGDataProvider is destroyed; balances the CFRetain performed
// in createImageFromPixelBuffer().
static void CVPixelBufferReleaseInfoCallback(void* info)
{
    auto pixelBuffer = static_cast<CVPixelBufferRef>(info);
    CFRelease(pixelBuffer);
}
2260
// Wraps a 32BGRA CVPixelBuffer in a CGImage without copying the pixels: a
// direct-access CGDataProvider reads straight out of the (locked) buffer.
static RetainPtr<CGImageRef> createImageFromPixelBuffer(CVPixelBufferRef pixelBuffer)
{
    // pixelBuffer will be of type kCVPixelFormatType_32BGRA; the bitmap info
    // below (little-endian, alpha first) describes exactly that layout.
    ASSERT(CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_32BGRA);

    size_t imageWidth = CVPixelBufferGetWidth(pixelBuffer);
    size_t imageHeight = CVPixelBufferGetHeight(pixelBuffer);
    size_t rowBytes = CVPixelBufferGetBytesPerRow(pixelBuffer);
    size_t totalBytes = CVPixelBufferGetDataSize(pixelBuffer);
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaFirst;

    CFRetain(pixelBuffer); // Balanced by CVPixelBufferReleaseInfoCallback in providerCallbacks.
    CGDataProviderDirectCallbacks providerCallbacks = { 0, CVPixelBufferGetBytePointerCallback, CVPixelBufferReleaseBytePointerCallback, 0, CVPixelBufferReleaseInfoCallback };
    RetainPtr<CGDataProviderRef> dataProvider = adoptCF(CGDataProviderCreateDirect(pixelBuffer, totalBytes, &providerCallbacks));

    return adoptCF(CGImageCreate(imageWidth, imageHeight, 8, 32, rowBytes, sRGBColorSpaceRef(), bitmapInfo, dataProvider.get(), NULL, false, kCGRenderingIntentDefault));
}
2278
void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
{
    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if
    // the pixel buffer for the requested time has already been retrieved. In
    // that case keep showing the last valid image (if any).
    if (RetainPtr<CVPixelBufferRef> pixelBuffer = createPixelBuffer())
        m_lastImage = createImageFromPixelBuffer(pixelBuffer.get());
}
2289
// Paints the current video frame into |context| at |outputRect| using the
// AVPlayerItemVideoOutput path (as opposed to the AVAssetImageGenerator fallback).
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext& context, const FloatRect& outputRect)
{
    // If the output exists but has never produced a frame, block (for up to one
    // second; see waitForVideoOutputMediaDataWillChange) so we have pixels to paint.
    if (m_videoOutput && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();

    updateLastImage();

    if (!m_lastImage)
        return;

    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(%p)", this);

    GraphicsContextStateSaver stateSaver(context);
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
    // Drawing happens under the track's preferred transform; mapping the
    // destination rect through the inverse (identity if non-invertible) makes
    // the result land in outputRect after concatCTM below.
    FloatRect transformedOutputRect = videoTransform.inverse().valueOr(AffineTransform()).mapRect(outputRect);

    context.concatCTM(videoTransform);
    context.drawNativeImage(m_lastImage.get(), imageRect.size(), transformedOutputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();

}
2320
// Lazily creates the AVPlayerItemVideoOutput used by the GPU texture-copy path
// and attaches it to the current player item. No-op without an item or if the
// output already exists.
void MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_openGLVideoOutput)
        return;

#if PLATFORM(IOS)
    // Request IOSurface-backed buffers compatible with OpenGL ES framebuffer binding.
    NSDictionary* attributes = @{(NSString *)kCVPixelBufferIOSurfaceOpenGLESFBOCompatibilityKey: @YES};
#else
    // Request IOSurface-backed buffers compatible with OpenGL framebuffer binding.
    NSDictionary* attributes = @{(NSString *)kCVPixelBufferIOSurfaceOpenGLFBOCompatibilityKey: @YES};
#endif
    m_openGLVideoOutput = adoptNS([allocAVPlayerItemVideoOutputInstance() initWithPixelBufferAttributes:attributes]);
    ASSERT(m_openGLVideoOutput);

    [m_avPlayerItem.get() addOutput:m_openGLVideoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput(%p) - returning %p", this, m_openGLVideoOutput.get());
}
2340
// Detaches and releases the OpenGL video output.
void MediaPlayerPrivateAVFoundationObjC::destroyOpenGLVideoOutput()
{
    if (!m_openGLVideoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_openGLVideoOutput.get()];
    // Log the OpenGL output being destroyed; this previously logged
    // m_videoOutput by copy/paste mistake.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyOpenGLVideoOutput(%p) - destroying  %p", this, m_openGLVideoOutput.get());

    m_openGLVideoOutput = 0;
}
2352
void MediaPlayerPrivateAVFoundationObjC::updateLastOpenGLImage()
{
    if (!m_openGLVideoOutput)
        return;

    // Sample at the item time corresponding to "now"; keep the previous image
    // when no newer frame is available.
    CMTime itemTime = [m_openGLVideoOutput itemTimeForHostTime:CACurrentMediaTime()];
    if (![m_openGLVideoOutput hasNewPixelBufferForItemTime:itemTime])
        return;

    m_lastOpenGLImage = adoptCF([m_openGLVideoOutput copyPixelBufferForItemTime:itemTime itemTimeForDisplay:nil]);
}
2364
2365 #if !LOG_DISABLED
2366
#define STRINGIFY_PAIR(e) e, #e
// Returns a lazily-built map from GL enum values to their textual names, used
// only for logging in copyVideoTextureToPlatformTexture(). std::map::emplace
// ignores keys that are already present, so the handful of repeated entries
// (GL_RGB, GL_RGBA, ...) below are harmless no-ops.
// (Removed an unused local `std::map<uint32_t, const char*> stringMap` that
// was declared but never read or written.)
static std::map<uint32_t, const char*>& enumToStringMap()
{
    static NeverDestroyed<std::map<uint32_t, const char*>> map;
    if (map.get().empty()) {
        map.get().emplace(STRINGIFY_PAIR(GL_RGB));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA));
        map.get().emplace(STRINGIFY_PAIR(GL_LUMINANCE_ALPHA));
        map.get().emplace(STRINGIFY_PAIR(GL_LUMINANCE));
        map.get().emplace(STRINGIFY_PAIR(GL_ALPHA));
        map.get().emplace(STRINGIFY_PAIR(GL_R8));
        map.get().emplace(STRINGIFY_PAIR(GL_R16F));
        map.get().emplace(STRINGIFY_PAIR(GL_R32F));
        map.get().emplace(STRINGIFY_PAIR(GL_R8UI));
        map.get().emplace(STRINGIFY_PAIR(GL_R8I));
        map.get().emplace(STRINGIFY_PAIR(GL_R16UI));
        map.get().emplace(STRINGIFY_PAIR(GL_R16I));
        map.get().emplace(STRINGIFY_PAIR(GL_R32UI));
        map.get().emplace(STRINGIFY_PAIR(GL_R32I));
        map.get().emplace(STRINGIFY_PAIR(GL_RG8));
        map.get().emplace(STRINGIFY_PAIR(GL_RG16F));
        map.get().emplace(STRINGIFY_PAIR(GL_RG32F));
        map.get().emplace(STRINGIFY_PAIR(GL_RG8UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RG8I));
        map.get().emplace(STRINGIFY_PAIR(GL_RG16UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RG16I));
        map.get().emplace(STRINGIFY_PAIR(GL_RG32UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RG32I));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB8));
        map.get().emplace(STRINGIFY_PAIR(GL_SRGB8));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA8));
        map.get().emplace(STRINGIFY_PAIR(GL_SRGB8_ALPHA8));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA4));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB10_A2));
        map.get().emplace(STRINGIFY_PAIR(GL_DEPTH_COMPONENT16));
        map.get().emplace(STRINGIFY_PAIR(GL_DEPTH_COMPONENT24));
        map.get().emplace(STRINGIFY_PAIR(GL_DEPTH_COMPONENT32F));
        map.get().emplace(STRINGIFY_PAIR(GL_DEPTH24_STENCIL8));
        map.get().emplace(STRINGIFY_PAIR(GL_DEPTH32F_STENCIL8));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA));
        map.get().emplace(STRINGIFY_PAIR(GL_LUMINANCE_ALPHA));
        map.get().emplace(STRINGIFY_PAIR(GL_LUMINANCE));
        map.get().emplace(STRINGIFY_PAIR(GL_ALPHA));
        map.get().emplace(STRINGIFY_PAIR(GL_RED));
        map.get().emplace(STRINGIFY_PAIR(GL_RG_INTEGER));
        map.get().emplace(STRINGIFY_PAIR(GL_DEPTH_STENCIL));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_BYTE));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_SHORT_5_6_5));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_SHORT_4_4_4_4));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_SHORT_5_5_5_1));
        map.get().emplace(STRINGIFY_PAIR(GL_BYTE));
        map.get().emplace(STRINGIFY_PAIR(GL_HALF_FLOAT));
        map.get().emplace(STRINGIFY_PAIR(GL_FLOAT));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_SHORT));
        map.get().emplace(STRINGIFY_PAIR(GL_SHORT));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_INT));
        map.get().emplace(STRINGIFY_PAIR(GL_INT));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_INT_2_10_10_10_REV));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_INT_24_8));
        map.get().emplace(STRINGIFY_PAIR(GL_FLOAT_32_UNSIGNED_INT_24_8_REV));

#if PLATFORM(IOS)
        // These enums are only declared in the iOS GL headers.
        map.get().emplace(STRINGIFY_PAIR(GL_RED_INTEGER));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB_INTEGER));
        map.get().emplace(STRINGIFY_PAIR(GL_RG8_SNORM));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB565));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB8_SNORM));
        map.get().emplace(STRINGIFY_PAIR(GL_R11F_G11F_B10F));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB9_E5));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB16F));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB32F));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB8UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB8I));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB16UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB16I));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB32UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB32I));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA8_SNORM));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA16F));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA32F));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA8UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA8I));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB10_A2UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA16UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA16I));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA32I));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA32UI));
        map.get().emplace(STRINGIFY_PAIR(GL_RGB5_A1));
        map.get().emplace(STRINGIFY_PAIR(GL_RG));
        map.get().emplace(STRINGIFY_PAIR(GL_RGBA_INTEGER));
        map.get().emplace(STRINGIFY_PAIR(GL_DEPTH_COMPONENT));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_INT_10F_11F_11F_REV));
        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_INT_5_9_9_9_REV));
#endif
    }
    return map.get();
}
2466
2467 #endif // !LOG_DISABLED
2468
// Copies the latest video frame into |outputTexture| by wrapping the frame's
// CVPixelBuffer in a CV OpenGL(ES) texture, attaching it to a temporary read
// framebuffer, and performing copyTexImage2D into the output texture.
// Returns false when the copy cannot be performed (unsupported options, no
// frame yet, texture-cache failure, or incomplete framebuffer).
bool MediaPlayerPrivateAVFoundationObjC::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    // This path performs a plain texture copy; flipping and alpha
    // premultiplication are not implemented.
    if (flipY || premultiplyAlpha)
        return false;

    ASSERT(context);

    if (!m_openGLVideoOutput)
        createOpenGLVideoOutput();

    updateLastOpenGLImage();

    if (!m_lastOpenGLImage)
        return false;

    // Lazily create the CoreVideo texture cache bound to this GL context.
    if (!m_openGLTextureCache) {
#if PLATFORM(IOS)
        CVOpenGLESTextureCacheRef cache = nullptr;
        CVReturn error = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, nullptr, context->platformGraphicsContext3D(), nullptr, &cache);
#else
        CVOpenGLTextureCacheRef cache = nullptr;
        CVReturn error = CVOpenGLTextureCacheCreate(kCFAllocatorDefault, nullptr, context->platformGraphicsContext3D(), CGLGetPixelFormat(context->platformGraphicsContext3D()), nullptr, &cache);
#endif
        if (error != kCVReturnSuccess)
            return false;
        m_openGLTextureCache = adoptCF(cache);
    }

    size_t width = CVPixelBufferGetWidth(m_lastOpenGLImage.get());
    size_t height = CVPixelBufferGetHeight(m_lastOpenGLImage.get());

    // Wrap the pixel buffer in a GL texture via the cache.
#if PLATFORM(IOS)
    CVOpenGLESTextureRef bareVideoTexture = nullptr;
    if (kCVReturnSuccess != CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, m_openGLTextureCache.get(), m_lastOpenGLImage.get(), nullptr, outputTarget, internalFormat, width, height, format, type, level, &bareVideoTexture))
        return false;
    RetainPtr<CVOpenGLESTextureRef> videoTexture = adoptCF(bareVideoTexture);
    Platform3DObject videoTextureName = CVOpenGLESTextureGetName(videoTexture.get());
    GC3Denum videoTextureTarget = CVOpenGLESTextureGetTarget(videoTexture.get());
#else
    CVOpenGLTextureRef bareVideoTexture = nullptr;
    if (kCVReturnSuccess != CVOpenGLTextureCacheCreateTextureFromImage(kCFAllocatorDefault, m_openGLTextureCache.get(), m_lastOpenGLImage.get(), nullptr, &bareVideoTexture))
        return false;
    RetainPtr<CVOpenGLTextureRef> videoTexture = adoptCF(bareVideoTexture);
    Platform3DObject videoTextureName = CVOpenGLTextureGetName(videoTexture.get());
    GC3Denum videoTextureTarget = CVOpenGLTextureGetTarget(videoTexture.get());
#endif

    // Flush the texture cache on the main queue so released textures are recycled.
    auto weakThis = createWeakPtr();
    dispatch_async(dispatch_get_main_queue(), [weakThis] {
        if (!weakThis)
            return;

        if (auto cache = weakThis->m_openGLTextureCache.get())
#if PLATFORM(IOS)
            CVOpenGLESTextureCacheFlush(cache, 0);
#else
            CVOpenGLTextureCacheFlush(cache, 0);
#endif
    });

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::copyVideoTextureToPlatformTexture(%p) - internalFormat: %s, format: %s, type: %s", this, enumToStringMap()[internalFormat], enumToStringMap()[format], enumToStringMap()[type]);

    // Save the original bound texture & framebuffer names so we can re-bind them after copying the video texture.
    GC3Dint boundTexture = 0;
    GC3Dint boundReadFramebuffer = 0;
    context->getIntegerv(GraphicsContext3D::TEXTURE_BINDING_2D, &boundTexture);
    context->getIntegerv(GraphicsContext3D::READ_FRAMEBUFFER_BINDING, &boundReadFramebuffer);

    context->bindTexture(videoTextureTarget, videoTextureName);
    context->texParameteri(GraphicsContext3D::TEXTURE_2D, GraphicsContext3D::TEXTURE_MIN_FILTER, GraphicsContext3D::LINEAR);
    context->texParameterf(GraphicsContext3D::TEXTURE_2D, GraphicsContext3D::TEXTURE_WRAP_S, GraphicsContext3D::CLAMP_TO_EDGE);
    context->texParameterf(GraphicsContext3D::TEXTURE_2D, GraphicsContext3D::TEXTURE_WRAP_T, GraphicsContext3D::CLAMP_TO_EDGE);

    // Create a framebuffer object to represent the video texture's memory.
    Platform3DObject readFramebuffer = context->createFramebuffer();

    // Make that framebuffer the read source from which drawing commands will read voxels.
    context->bindFramebuffer(GraphicsContext3D::READ_FRAMEBUFFER, readFramebuffer);

    // Allocate uninitialized memory for the output texture.
    context->bindTexture(outputTarget, outputTexture);
    context->texParameteri(GraphicsContext3D::TEXTURE_2D, GraphicsContext3D::TEXTURE_MIN_FILTER, GraphicsContext3D::LINEAR);
    context->texParameterf(GraphicsContext3D::TEXTURE_2D, GraphicsContext3D::TEXTURE_WRAP_S, GraphicsContext3D::CLAMP_TO_EDGE);
    context->texParameterf(GraphicsContext3D::TEXTURE_2D, GraphicsContext3D::TEXTURE_WRAP_T, GraphicsContext3D::CLAMP_TO_EDGE);
    context->texImage2DDirect(outputTarget, level, internalFormat, width, height, 0, format, type, nullptr);

    // Attach the video texture to the framebuffer.
    context->framebufferTexture2D(GraphicsContext3D::READ_FRAMEBUFFER, GraphicsContext3D::COLOR_ATTACHMENT0, videoTextureTarget, videoTextureName, level);

    // Copy texture from the read framebuffer (and thus the video texture) to the
    // output texture, but only when the framebuffer is complete. Previously an
    // incomplete framebuffer returned early, leaking readFramebuffer and leaving
    // the caller's texture/framebuffer bindings clobbered; now cleanup always runs.
    GC3Denum status = context->checkFramebufferStatus(GraphicsContext3D::READ_FRAMEBUFFER);
    bool framebufferComplete = status == GraphicsContext3D::FRAMEBUFFER_COMPLETE;
    if (framebufferComplete)
        context->copyTexImage2D(outputTarget, level, internalFormat, 0, 0, width, height, 0);

    // Restore the previous texture and framebuffer bindings.
    context->bindTexture(outputTarget, boundTexture);
    context->bindFramebuffer(GraphicsContext3D::READ_FRAMEBUFFER, boundReadFramebuffer);

    // Clean up after ourselves.
    context->deleteFramebuffer(readFramebuffer);

    return framebufferComplete && !context->getError();
}
2574
// Refreshes m_lastImage from the video output and returns it; may be null if
// no frame has ever been produced.
PassNativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage.get();
}
2580
// Blocks the calling thread until the video output reports that new media data
// is available (signalled from outputMediaDataWillChange) or one second passes.
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    // Lazily create the semaphore that outputMediaDataWillChange() signals.
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    // Ask for an immediate notification once media data changes.
    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for 1 second. dispatch_semaphore_wait returns non-zero on timeout.
    long result = dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(0, 1 * NSEC_PER_SEC));

    if (result)
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange(%p) timed out", this);
}
2594
// Video-output delegate callback: wakes any thread blocked in
// waitForVideoOutputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutput*)
{
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
2599 #endif
2600
2601 #if ENABLE(ENCRYPTED_MEDIA)
// Legacy EME: extracts the key URI/ID and certificate from |initData|, asks
// AVFoundation for a streaming content key request for the pending resource
// load registered under that key URI, and forwards the request bytes to the
// page via keyMessage(). Key-request failures are reported through keyError()
// and still return NoError, matching the spec's asynchronous error model.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::generateKeyRequest(const String& keySystem, const unsigned char* initDataPtr, unsigned initDataLength)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    RefPtr<Uint8Array> initData = Uint8Array::create(initDataPtr, initDataLength);
    String keyURI;
    String keyID;
    RefPtr<Uint8Array> certificate;
    if (!extractKeyURIKeyIDAndCertificateFromInitData(initData.get(), keyURI, keyID, certificate))
        return MediaPlayer::InvalidPlayerState;

    // Only key URIs for which the resource loader delegate parked a request are valid.
    if (!m_keyURIToRequestMap.contains(keyURI))
        return MediaPlayer::InvalidPlayerState;

    String sessionID = createCanonicalUUIDString();

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_keyURIToRequestMap.get(keyURI);

    RetainPtr<NSData> certificateData = adoptNS([[NSData alloc] initWithBytes:certificate->baseAddress() length:certificate->byteLength()]);
    // WTF::String converts implicitly to NSString* here; the key ID is then
    // passed to AVFoundation as UTF-8 bytes.
    NSString* assetStr = keyID;
    RetainPtr<NSData> assetID = [NSData dataWithBytes: [assetStr cStringUsingEncoding:NSUTF8StringEncoding] length:[assetStr lengthOfBytesUsingEncoding:NSUTF8StringEncoding]];
    NSError* error = 0;
    RetainPtr<NSData> keyRequest = [avRequest.get() streamingContentKeyRequestDataForApp:certificateData.get() contentIdentifier:assetID.get() options:nil error:&error];

    if (!keyRequest) {
        // Surface the underlying error code to the page; messaging a nil
        // underlyingError yields 0, which is acceptable here.
        NSError* underlyingError = [[error userInfo] objectForKey:NSUnderlyingErrorKey];
        player()->keyError(keySystem, sessionID, MediaPlayerClient::DomainError, [underlyingError code]);
        return MediaPlayer::NoError;
    }

    RefPtr<ArrayBuffer> keyRequestBuffer = ArrayBuffer::create([keyRequest.get() bytes], [keyRequest.get() length]);
    RefPtr<Uint8Array> keyRequestArray = Uint8Array::create(keyRequestBuffer, 0, keyRequestBuffer->byteLength());
    player()->keyMessage(keySystem, sessionID, keyRequestArray->data(), keyRequestArray->byteLength(), URL());

    // Move ownership of the AVAssetResourceLoadingRequest from the keyURIToRequestMap to the sessionIDToRequestMap:
    m_sessionIDToRequestMap.set(sessionID, avRequest);
    m_keyURIToRequestMap.remove(keyURI);

    return MediaPlayer::NoError;
}
2643
// Legacy EME: delivers the key bytes to the AVAssetResourceLoadingRequest
// parked under |sessionID| and completes that load.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::addKey(const String& keySystem, const unsigned char* keyPtr, unsigned keyLength, const unsigned char* initDataPtr, unsigned initDataLength, const String& sessionID)
{
    UNUSED_PARAM(initDataPtr);
    UNUSED_PARAM(initDataLength);

    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    // Hand the key data to the pending request and finish it; the session's
    // stored request is no longer needed afterwards.
    RetainPtr<AVAssetResourceLoadingRequest> pendingRequest = m_sessionIDToRequestMap.get(sessionID);
    RetainPtr<NSData> keyData = adoptNS([[NSData alloc] initWithBytes:keyPtr length:keyLength]);
    [[pendingRequest.get() dataRequest] respondWithData:keyData.get()];
    [pendingRequest.get() finishLoading];
    m_sessionIDToRequestMap.remove(sessionID);

    player()->keyAdded(keySystem, sessionID);

    return MediaPlayer::NoError;
}
2664
// Legacy EME: abandons the outstanding key exchange for |sessionID| by
// dropping its stored resource-loading request.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::cancelKeyRequest(const String& keySystem, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    m_sessionIDToRequestMap.remove(sessionID);
    return MediaPlayer::NoError;
}
2676 #endif
2677
2678 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Removes and returns the pending resource-loading request registered for
// |keyURI|, or a null RetainPtr if none is outstanding.
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
2683
// Fulfills any parked key requests for which the player now has cached key
// data, then forgets those requests.
void MediaPlayerPrivateAVFoundationObjC::keyAdded()
{
    // Collect satisfied key IDs first so m_keyURIToRequestMap is not mutated
    // while being iterated.
    Vector<String> satisfiedKeyIds;

    for (auto& entry : m_keyURIToRequestMap) {
        auto keyData = player()->cachedKeyForKeyId(entry.key);
        if (!keyData)
            continue;

        fulfillRequestWithKeyData(entry.value.get(), keyData.get());
        satisfiedKeyIds.append(entry.key);
    }

    for (auto& keyId : satisfiedKeyIds)
        m_keyURIToRequestMap.remove(keyId);
}
2703
// Creates a CDM session for |keySystem|, or null for unsupported systems.
std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem, CDMSessionClient* client)
{
    return keySystemIsSupported(keySystem) ? std::make_unique<CDMSessionAVFoundationObjC>(this, client) : nullptr;
}
2711 #endif
2712
2713 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
// Reconciles m_textTracks with the player item's current legacy closed-caption
// tracks: existing tracks still present are kept, vanished ones are removed,
// and new ones are created.
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    // Start by assuming every existing text track went away; any track matched
    // to a current AVPlayerItemTrack below is rescued from this list.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            // Index into removedTextTracks (not m_textTracks): once an entry has
            // been removed below, the two vectors no longer line up, so indexing
            // m_textTracks here could compare against the wrong track. This also
            // matches how processMediaSelectionOptions() walks its list.
            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(removedTextTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;

        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2748 #endif
2749
NSArray* MediaPlayerPrivateAVFoundationObjC::safeAVAssetTracksForAudibleMedia()
{
    // The track list is only safe to query once AVFoundation reports the
    // "tracks" key as loaded; otherwise return nil rather than blocking.
    if (!m_avAsset || [m_avAsset.get() statusOfValueForKey:@"tracks" error:NULL] != AVKeyValueStatusLoaded)
        return nil;

    return [m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible];
}
2760
2761 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    // Media selection groups are usable only after the asset has finished
    // loading its "availableMediaCharacteristicsWithMediaSelectionOptions" key.
    return m_avAsset && [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2772
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    // nil until the selection-group metadata has finished loading.
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible] : nil;
}
2780
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    // nil until the selection-group metadata has finished loading.
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible] : nil;
}
2788
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    // nil until the selection-group metadata has finished loading.
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual] : nil;
}
2796
// Reconciles m_textTracks with the asset's legible media-selection options:
// tracks whose option is still present are kept, vanished ones are removed,
// and new options become new tracks (out-of-band or generic in-band).
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Assume every existing track was removed; matches found below rescue them.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        // Walk backwards so remove(i - 1) cannot skip elements.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy closed-caption tracks have no selection option to compare.
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            // Fetch the selection option the existing track was created from.
            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2852
// Lazily creates the single in-band metadata text track and registers it with the
// MediaPlayer. Safe to call repeatedly; subsequent calls are no-ops.
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    if (m_metadataTrack)
        return;

    m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
    // The dispatch type tags these cues as Apple HTTP Live Streaming timed metadata.
    m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
    player()->addTextTrack(m_metadataTrack);
}
2862
// Forwards a batch of legible-output cues to the currently selected text track.
// Dropped on the floor when no track is selected.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    ASSERT(time >= MediaTime::zeroTime());

    if (m_currentTextTrack)
        m_currentTextTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), reinterpret_cast<CFArrayRef>(nativeSamples), time);
}
2872
// Discards any partially-delivered cue state on the active text track (e.g. after a seek).
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::flushCues(%p)", this);

    if (m_currentTextTrack)
        m_currentTextTrack->resetCueValues();
}
2882 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2883
// Switches the active text track. Legacy closed captions are toggled on the AVPlayer
// itself; all other categories are activated by selecting the matching option in the
// legible AVMediaSelectionGroup. Passing null deselects everything.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");
        
    m_currentTextTrack = track;

    if (track) {
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        // Out-of-band tracks were handed to AVFoundation as pseudo media-selection
        // options, so they are selected through the same group as in-band tracks.
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // Selecting a null option clears the current selection in the group.
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
    }

}
2912
// Returns the language of the primary audio track, caching the answer in the mutable
// member m_languageOfPrimaryAudioTrack (a null string means "not yet computed").
// Preference order: the selected option of the audible media-selection group, then the
// language of a sole audio asset track; otherwise the empty string.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        // Zero or multiple audio tracks: there is no single "primary" language.
        m_languageOfPrimaryAudioTrack = emptyString();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - %lu audio tracks, returning emptyString()", this, static_cast<unsigned long>(tracks ? [tracks count] : 0));
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

#if !LOG_DISABLED
    if (m_languageOfPrimaryAudioTrack == emptyString())
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
    else
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
#endif

    return m_languageOfPrimaryAudioTrack;
}
2954
2955 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// Returns whether playback is currently routed to a wireless target. On iOS this is
// simply AVPlayer's externalPlaybackActive; elsewhere it depends on the kind of
// MediaPlaybackTarget the client handed us (AVFoundation targets defer to the player,
// Mock targets to our own should-play flag plus route state).
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    bool wirelessTarget = false;

#if !PLATFORM(IOS)
    if (m_playbackTarget) {
        if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation)
            wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
        else
            wirelessTarget = m_shouldPlayToPlaybackTarget && m_playbackTarget->hasActiveRoute();
    }
#else
    wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
#endif

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless(%p) - returning %s", this, boolString(wirelessTarget));

    return wirelessTarget;
}
2975
// Classifies the current external playback route. On iOS the WebKit system interface
// distinguishes AirPlay from wired TV-out; on other platforms the only wireless route
// we support is AirPlay.
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

#if PLATFORM(IOS)
    switch (wkExernalDeviceTypeForPlayer(m_avPlayer.get())) {
    case wkExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case wkExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case wkExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    // The switch above is exhaustive over the wk enum; reaching here is a logic error.
    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;

#else
    return MediaPlayer::TargetTypeAirPlay;
#endif
}
2998
// Returns the user-visible name of the current wireless playback device, or the empty
// string when there is no player (or, off-iOS, no client-provided target).
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();

    String wirelessTargetName;
#if !PLATFORM(IOS)
    if (m_playbackTarget)
        wirelessTargetName = m_playbackTarget->deviceName();
#else
    wirelessTargetName = wkExernalDeviceDisplayNameForPlayer(m_avPlayer.get());
#endif
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName(%p) - returning %s", this, wirelessTargetName.utf8().data());

    return wirelessTargetName;
}
3015
// Reports whether wireless (external) video playback is disabled. When a player
// exists we refresh the cached flag from AVPlayer.allowsExternalPlayback; without a
// player we answer from the cache alone. Note the cache is written in a const method,
// so m_allowsWirelessVideoPlayback must be a mutable member.
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;

    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled(%p) - returning %s", this, boolString(!m_allowsWirelessVideoPlayback));

    return !m_allowsWirelessVideoPlayback;
}
3026
// Enables or disables wireless (external) video playback, mirroring the value into
// AVPlayer.allowsExternalPlayback when a player exists. The cached flag is updated
// even before a player is created so it can be applied later.
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(%p) - %s", this, boolString(disabled));
    bool allowsPlayback = !disabled;
    m_allowsWirelessVideoPlayback = allowsPlayback;
    if (!m_avPlayer)
        return;

    // Suppress re-entrant observer callbacks while poking the AVPlayer.
    setDelayCallbacks(true);
    [m_avPlayer.get() setAllowsExternalPlayback:allowsPlayback];
    setDelayCallbacks(false);
}
3038
3039 #if !PLATFORM(IOS)
// Adopts a new playback target chosen by the client. AVFoundation targets carry an
// AVOutputContext we cache for later routing; other target kinds (e.g. Mock) have
// none. If the new target has no active route, stop any playback-to-target in flight.
void MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
{
    m_playbackTarget = WTFMove(target);

    m_outputContext = m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation ? toMediaPlaybackTargetMac(m_playbackTarget.get())->outputContext() : nullptr;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(%p) - target = %p, device name = %s", this, m_outputContext.get(), m_playbackTarget->deviceName().utf8().data());

    if (!m_playbackTarget->hasActiveRoute())
        setShouldPlayToPlaybackTarget(false);
}
3051
// Starts or stops routing playback to the previously-set target. For AVFoundation
// targets this attaches/detaches the cached AVOutputContext on the AVPlayer; for the
// Mock target (used by tests) we only fire the "wireless state changed" notification
// asynchronously on the main thread.
void MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(bool shouldPlay)
{
    if (m_shouldPlayToPlaybackTarget == shouldPlay)
        return;

    // Record the intent first; isCurrentPlaybackTargetWireless() reads this flag.
    m_shouldPlayToPlaybackTarget = shouldPlay;

    if (!m_playbackTarget)
        return;

    if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation) {
        AVOutputContext *newContext = shouldPlay ? m_outputContext.get() : nil;

        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(%p) - target = %p, shouldPlay = %s", this, newContext, boolString(shouldPlay));

        if (!m_avPlayer)
            return;

        // Avoid touching the player if the effective context would not change.
        RetainPtr<AVOutputContext> currentContext = m_avPlayer.get().outputContext;
        if ((!newContext && !currentContext.get()) || [currentContext.get() isEqual:newContext])
            return;

        setDelayCallbacks(true);
        m_avPlayer.get().outputContext = newContext;
        setDelayCallbacks(false);

        return;
    }

    ASSERT(m_playbackTarget->targetType() == MediaPlaybackTarget::Mock);

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(%p) - target = {Mock}, shouldPlay = %s", this, boolString(shouldPlay));

    // The weak pointer guards against this object being destroyed before the
    // scheduled notification runs on the main thread.
    setDelayCallbacks(true);
    auto weakThis = createWeakPtr();
    scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis] {
        if (!weakThis)
            return;
        weakThis->playbackTargetIsWirelessDidChange();
    }));
    setDelayCallbacks(false);
}
3094 #endif // !PLATFORM(IOS)
3095
// Keeps AVPlayer's "use external playback while an external screen is active" flag in
// sync with the element's fullscreen mode. PiP and external playback are mutually
// exclusive, so external playback is only allowed when the mode is exactly standard
// fullscreen — the previous bitwise test (mode & VideoFullscreenModeStandard) also
// matched combined modes that include picture-in-picture.
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
    if (!m_avPlayer)
        return;

#if PLATFORM(IOS)
    [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:player()->fullscreenMode() == MediaPlayer::VideoFullscreenModeStandard];
#endif
}
3105 #endif
3106
// KVO callback for AVPlayerItem's "status": cache the raw value and re-derive the
// player's ready/network states from the cached snapshot.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;
    updateStates();
}
3113
// "Prior" KVO notification for playbackLikelyToKeepUp: record that a status change is
// in flight so the matching DidChange can defer state recomputation until all settle.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    ++m_pendingStatusChanges;
}
3118
// Completion of a playbackLikelyToKeepUp change: cache the new value and only
// recompute player state once every in-flight status change has landed.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    ASSERT(m_pendingStatusChanges);
    --m_pendingStatusChanges;
    if (!m_pendingStatusChanges)
        updateStates();
}
3127
// "Prior" KVO notification for playbackBufferEmpty: count the pending change so the
// DidChange handler can batch the state update.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    ++m_pendingStatusChanges;
}
3132
// Completion of a playbackBufferEmpty change: cache it and update state only when the
// last outstanding status change resolves.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    ASSERT(m_pendingStatusChanges);
    --m_pendingStatusChanges;
    if (!m_pendingStatusChanges)
        updateStates();
}
3141
// "Prior" KVO notification for playbackBufferFull: count the pending change so the
// DidChange handler can batch the state update.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    ++m_pendingStatusChanges;
}
3146
// Completion of a playbackBufferFull change: cache it and update state only when the
// last outstanding status change resolves.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    ASSERT(m_pendingStatusChanges);
    --m_pendingStatusChanges;
    if (!m_pendingStatusChanges)
        updateStates();
}
3155
// KVO callback for AVPlayerItem's "seekableTimeRanges": take ownership of the
// snapshot, then notify listeners and refresh derived state.
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray> seekableRanges)
{
    m_cachedSeekableRanges = WTFMove(seekableRanges);
    seekableTimeRangesChanged();
    updateStates();
}
3163
// KVO callback for AVPlayerItem's "loadedTimeRanges": take ownership of the snapshot,
// then notify listeners and refresh derived state.
void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray> loadedRanges)
{
    m_cachedLoadedRanges = WTFMove(loadedRanges);
    loadedTimeRangesChanged();
    updateStates();
}
3171
// Callback for the layer's readyForDisplay change: cache it, and if we become ready
// before any video track was reported, force a track re-scan so hasVideo() is correct.
void MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange(bool isReady)
{
    m_cachedIsReadyForDisplay = isReady;
    if (!hasVideo() && isReady)
        tracksChanged();
    updateStates();
}
3179
// KVO callback for an AVPlayerItemTrack's "enabled" flag. The new value is unused
// directly; we simply re-derive the track set and player state.
void MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange(bool)
{
    tracksChanged();
    updateStates();
}
3185
// Enables or disables AVFoundation buffering by attaching or detaching the
// AVPlayerItem from the AVPlayer (detaching is how buffering is stopped).
void MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(bool shouldBuffer)
{
    // Fixed the LOG message to match the method's actual name.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(%p) - %s", this, boolString(shouldBuffer));
    if (m_shouldBufferData == shouldBuffer)
        return;

    m_shouldBufferData = shouldBuffer;
    
    if (!m_avPlayer)
        return;

    setAVPlayerItem(shouldBuffer ? m_avPlayerItem.get() : nil);
}
3199
3200 #if ENABLE(DATACUE_VALUE)
// Maps an AVFoundation metadata key space to the DataCue "type" string exposed to the
// page. Unrecognized key spaces map to emptyAtom.
static const AtomicString& metadataType(NSString *avMetadataKeySpace)
{
    static NeverDestroyed<const AtomicString> quickTimeUserData("com.apple.quicktime.udta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> isoUserData("org.mp4ra", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> quickTimeMetadata("com.apple.quicktime.mdta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> iTunesMetadata("com.apple.itunes", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> id3Metadata("org.id3", AtomicString::ConstructFromLiteral);

    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeUserData])
        return quickTimeUserData;
    // AVMetadataKeySpaceISOUserData is null-checked first — presumably the constant is
    // weak-linked and may be unavailable at runtime on older OS versions; confirm.
    if (AVMetadataKeySpaceISOUserData && [avMetadataKeySpace isEqualToString:AVMetadataKeySpaceISOUserData])
        return isoUserData;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeMetadata])
        return quickTimeMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceiTunes])
        return iTunesMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceID3])
        return id3Metadata;

    return emptyAtom;
}
3222 #endif
3223
// KVO callback for AVPlayerItem's "timedMetadata". Caches the new batch (NSNull means
// "no metadata") and, when DATACUE_VALUE is enabled, converts each AVMetadataItem into
// a data cue on the metadata text track. Items without a duration are left open-ended
// and closed when the next batch arrives.
void MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(RetainPtr<NSArray> metadata, const MediaTime& mediaTime)
{
    m_currentMetaData = metadata && ![metadata isKindOfClass:[NSNull class]] ? metadata : nil;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(%p) - adding %i cues at time %s", this, m_currentMetaData ? static_cast<int>([m_currentMetaData.get() count]) : 0, toString(mediaTime).utf8().data());

#if ENABLE(DATACUE_VALUE)
    // Metadata delivered during a seek would create cues at stale times; skip it.
    if (seeking())
        return;

    // The guard is redundant (processMetadataTrack() is itself a no-op when the track
    // exists) but harmless.
    if (!m_metadataTrack)
        processMetadataTrack();

    if (!metadata || [metadata isKindOfClass:[NSNull class]]) {
        // Empty batch: close out any still-open cues at the current time.
        m_metadataTrack->updatePendingCueEndTimes(mediaTime);
        return;
    }

    // Set the duration of all incomplete cues before adding new ones.
    MediaTime earliestStartTime = MediaTime::positiveInfiniteTime();
    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = std::max(toMediaTime(item.time), MediaTime::zeroTime());
        if (start < earliestStartTime)
            earliestStartTime = start;
    }
    m_metadataTrack->updatePendingCueEndTimes(earliestStartTime);

    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = std::max(toMediaTime(item.time), MediaTime::zeroTime());
        // An invalid duration leaves the cue open until a later batch closes it.
        MediaTime end = MediaTime::positiveInfiniteTime();
        if (CMTIME_IS_VALID(item.duration))
            end = start + toMediaTime(item.duration);

        AtomicString type = nullAtom;
        if (item.keySpace)
            type = metadataType(item.keySpace);

        m_metadataTrack->addDataCue(start, end, SerializedPlatformRepresentationMac::create(item), type);
    }
#endif
}
3265
// KVO callback for AVPlayerItem's "tracks". Re-registers the "enabled" observer on
// the new track set, filtering out streaming tracks that are already represented by a
// media-selection group (those are surfaced via the selection APIs instead), then
// re-derives track and player state.
void MediaPlayerPrivateAVFoundationObjC::tracksDidChange(RetainPtr<NSArray> tracks)
{
    // Stop observing the old set before replacing it; observers must be balanced.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];

    NSArray *assetTracks = [m_avAsset tracks];

    m_cachedTracks = [tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id obj, NSUInteger, BOOL*) {
        AVAssetTrack* assetTrack = [obj assetTrack];

        // Tracks backed by the asset itself are always kept.
        if ([assetTracks containsObject:assetTrack])
            return YES;

        // Track is a streaming track. Omit if it belongs to a valid AVMediaSelectionGroup.
        if (!hasLoadedMediaSelectionGroups())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicAudible] && safeMediaSelectionGroupForAudibleMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicVisual] && safeMediaSelectionGroupForVisualMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible] && safeMediaSelectionGroupForLegibleMedia())
            return NO;

        return YES;
    }]];

    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track addObserver:m_objcObserver.get() forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayerItemTrack];

    // The byte-size estimate depends on the track set; force recomputation.
    m_cachedTotalBytes = 0;

    tracksChanged();
    updateStates();
}
3303
// KVO callback for AVPlayerItem's "hasEnabledAudio": cache the value and re-derive
// track and player state from the cached snapshot.
void MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange(bool hasEnabledAudio)
{
    m_cachedHasEnabledAudio = hasEnabledAudio;

    tracksChanged();
    updateStates();
}
3311
// KVO callback for AVPlayerItem's "presentationSize": cache the new size, propagate
// the size change, and re-derive player state.
void MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange(FloatSize size)
{
    m_cachedPresentationSize = size;

    sizeChanged();
    updateStates();
}
3319
// KVO callback for AVPlayerItem's "duration": cache the new value and invalidate the
// base class's cached duration so the next query picks it up.
void MediaPlayerPrivateAVFoundationObjC::durationDidChange(const MediaTime& duration)
{
    m_cachedDuration = duration;

    invalidateCachedDuration();
}
3326
// KVO callback for AVPlayer's "rate": cache the new rate, re-derive player state,
// then notify about the rate change.
void MediaPlayerPrivateAVFoundationObjC::rateDidChange(double rate)
{
    m_cachedRate = rate;

    updateStates();
    rateChanged();
}
3334     
3335 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// KVO callback for AVPlayer's "externalPlaybackActive"; forwards to the base class's
// wireless-state-changed handling.
void MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange()
{
    playbackTargetIsWirelessChanged();
}
3340 #endif
3341
// KVO callback for AVPlayerItem's "canPlayFastForward"; cached for synchronous
// queries (no state recomputation is needed).
void MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange(bool newValue)
{
    m_cachedCanPlayFastForward = newValue;
}
3346
// KVO callback for AVPlayerItem's "canPlayFastReverse"; cached for synchronous
// queries (no state recomputation is needed).
void MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange(bool newValue)
{
    m_cachedCanPlayFastReverse = newValue;
}
3351
// Returns the URL AVFoundation actually resolved for the asset, falling back to the
// base class's URL until the asynchronous "resolvedURL" key has finished loading.
URL MediaPlayerPrivateAVFoundationObjC::resolvedURL() const
{
    if (!m_avAsset || [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] != AVKeyValueStatusLoaded)
        return MediaPlayerPrivateAVFoundation::resolvedURL();

    return URL([m_avAsset resolvedURL]);
}
3359
// Keys asynchronously loaded on the AVAsset before it is considered usable.
NSArray* assetMetadataKeyNames()
{
    // Function-local static initialization is thread-safe in C++ ("magic statics",
    // this file is Objective-C++); this also matches the style of
    // assetTrackMetadataKeyNames() and playerKVOProperties() below, whereas the old
    // "static p; if (!p)" pattern was racy if called from two threads.
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"duration",
                @"naturalSize",
                @"preferredTransform",
                @"preferredVolume",
                @"preferredRate",
                @"playable",
                @"resolvedURL",
                @"tracks",
                @"availableMediaCharacteristicsWithMediaSelectionOptions",
               nil];
    return keys;
}
3377
// Key paths observed via KVO on the AVPlayerItem (cf. playerKVOProperties() for the
// AVPlayer itself).
NSArray* itemKVOProperties()
{
    // Thread-safe C++ function-local static initialization ("magic statics"),
    // consistent with assetTrackMetadataKeyNames() below; the previous
    // "static p; if (!p)" pattern was racy if called from two threads.
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"presentationSize",
            @"status",
            @"asset",
            @"tracks",
            @"seekableTimeRanges",
            @"loadedTimeRanges",
            @"playbackLikelyToKeepUp",
            @"playbackBufferFull",
            @"playbackBufferEmpty",
            @"duration",
            @"hasEnabledAudio",
            @"timedMetadata",
            @"canPlayFastForward",
            @"canPlayFastReverse",
            nil];
    return keys;
}
3400
// Keys asynchronously loaded on each AVAssetTrack before track data is consulted.
NSArray* assetTrackMetadataKeyNames()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"totalSampleDataLength", @"mediaType", @"enabled", @"preferredTransform", @"naturalSize", nil];
    return keys;
}
3406
// Key paths observed via KVO on the AVPlayer itself (cf. itemKVOProperties() for the
// AVPlayerItem). The external-playback keys are only observed when wireless playback
// support is compiled in.
NSArray* playerKVOProperties()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"rate",
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
                            @"externalPlaybackActive", @"allowsExternalPlayback",
#endif
                            nil];
    return keys;
}
3416 } // namespace WebCore
3417
3418 @implementation WebCoreAVFMovieObserver
3419
// Designated initializer: stores the owning player so observer callbacks can be
// forwarded to it. Cleared again via -disconnect before the owner is destroyed.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (self)
        m_callback = callback;
    return self;
}
3428
// Detaches the observer from its owner: cancels any queued performSelector:
// requests and nulls the back-pointer so later callbacks become no-ops.
- (void)disconnect
{
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    m_callback = nullptr;
}
3434
// Forwards asset-metadata-loaded to the owner on the main thread, if still attached.
- (void)metadataLoaded
{
    if (m_callback)
        m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
}
3442
// NSNotification handler for AVPlayerItemDidPlayToEndTime-style end-of-playback;
// forwards to the owner on the main thread, if still attached.
- (void)didEnd:(NSNotification *)unusedNotification
{
    UNUSED_PARAM(unusedNotification);
    if (m_callback)
        m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
}
3450
3451 - (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context
3452 {
3453     UNUSED_PARAM(object);
3454     id newValue = [change valueForKey:NSKeyValueChangeNewKey];
3455
3456     if (!m_callback)
3457         return;
3458
3459     bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];