WeakPtr breaks vtables when upcasting to base classes
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2018 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27 #import "MediaPlayerPrivateAVFoundationObjC.h"
28
29 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
30
31 #import "AVAssetMIMETypeCache.h"
32 #import "AVAssetTrackUtilities.h"
33 #import "AVTrackPrivateAVFObjCImpl.h"
34 #import "AudioSourceProviderAVFObjC.h"
35 #import "AudioTrackPrivateAVFObjC.h"
36 #import "AuthenticationChallenge.h"
37 #import "CDMInstanceFairPlayStreamingAVFObjC.h"
38 #import "CDMSessionAVFoundationObjC.h"
39 #import "Cookie.h"
40 #import "DeprecatedGlobalSettings.h"
41 #import "Extensions3D.h"
42 #import "FloatConversion.h"
43 #import "GraphicsContext.h"
44 #import "GraphicsContext3D.h"
45 #import "GraphicsContextCG.h"
46 #import "InbandMetadataTextTrackPrivateAVF.h"
47 #import "InbandTextTrackPrivateAVFObjC.h"
48 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
49 #import "Logging.h"
50 #import "MediaPlaybackTargetMac.h"
51 #import "MediaPlaybackTargetMock.h"
52 #import "MediaSelectionGroupAVFObjC.h"
53 #import "OutOfBandTextTrackPrivateAVF.h"
54 #import "PixelBufferConformerCV.h"
55 #import "PlatformTimeRanges.h"
56 #import "SecurityOrigin.h"
57 #import "SerializedPlatformRepresentationMac.h"
58 #import "SharedBuffer.h"
59 #import "TextEncoding.h"
60 #import "TextTrackRepresentation.h"
61 #import "TextureCacheCV.h"
62 #import "VideoFullscreenLayerManagerObjC.h"
63 #import "VideoTextureCopierCV.h"
64 #import "VideoTrackPrivateAVFObjC.h"
65 #import "WebCoreAVFResourceLoader.h"
66 #import "WebCoreCALayerExtras.h"
67 #import "WebCoreNSURLSession.h"
68 #import <JavaScriptCore/DataView.h>
69 #import <JavaScriptCore/JSCInlines.h>
70 #import <JavaScriptCore/TypedArrayInlines.h>
71 #import <JavaScriptCore/Uint16Array.h>
72 #import <JavaScriptCore/Uint32Array.h>
73 #import <JavaScriptCore/Uint8Array.h>
74 #import <functional>
75 #import <objc/runtime.h>
76 #import <pal/avfoundation/MediaTimeAVFoundation.h>
77 #import <pal/spi/cocoa/QuartzCoreSPI.h>
78 #import <pal/spi/mac/AVFoundationSPI.h>
79 #import <wtf/BlockObjCExceptions.h>
80 #import <wtf/ListHashSet.h>
81 #import <wtf/NeverDestroyed.h>
82 #import <wtf/OSObjectPtr.h>
83 #import <wtf/URL.h>
84 #import <wtf/text/CString.h>
85
86 #if ENABLE(AVF_CAPTIONS)
87 #include "TextTrack.h"
88 #endif
89
90 #import <AVFoundation/AVAssetImageGenerator.h>
91 #import <AVFoundation/AVAssetTrack.h>
92 #import <AVFoundation/AVMediaSelectionGroup.h>
93 #import <AVFoundation/AVMetadataItem.h>
94 #import <AVFoundation/AVPlayer.h>
95 #import <AVFoundation/AVPlayerItem.h>
96 #import <AVFoundation/AVPlayerItemOutput.h>
97 #import <AVFoundation/AVPlayerItemTrack.h>
98 #import <AVFoundation/AVPlayerLayer.h>
99 #import <AVFoundation/AVTime.h>
100
101 #if PLATFORM(IOS_FAMILY)
102 #import "WAKAppKitStubs.h"
103 #import <CoreImage/CoreImage.h>
104 #import <UIKit/UIDevice.h>
105 #import <mach/mach_port.h>
106 #import <pal/ios/UIKitSoftLink.h>
107 #else
108 #import <Foundation/NSGeometry.h>
109 #import <QuartzCore/CoreImage.h>
110 #endif
111
112 #if USE(VIDEOTOOLBOX)
113 #import <CoreVideo/CoreVideo.h>
114 #import <VideoToolbox/VideoToolbox.h>
115 #endif
116
117 #import "CoreVideoSoftLink.h"
118 #import "MediaRemoteSoftLink.h"
119
namespace std {
// Minimal iterator_traits specialization for HashSet's iterator so that
// std algorithms used with MediaSelectionOptionAVFObjC sets can deduce the
// element type. Only value_type is supplied; the remaining traits are
// intentionally omitted (WTF's HashSet iterator does not model a full
// standard iterator).
template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
    typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
};
}
125
#if ENABLE(AVF_CAPTIONS)
// Note: This must be defined before our SOFT_LINK macros:
// Exposes the private out-of-band caption properties that AVFoundation sets on
// selection options created from AVURLAssetOutOfBandAlternateTracksKey entries.
@class AVMediaSelectionOption;
@interface AVMediaSelectionOption (OutOfBandExtensions)
// Source URL string of the out-of-band track.
@property (nonatomic, readonly) NSString* outOfBandSource;
// Identifier matching the AVOutOfBandAlternateTrackIdentifierKey we supplied.
@property (nonatomic, readonly) NSString* outOfBandIdentifier;
@end
#endif
134
// Exposes AVURLAsset's resolved (post-redirect) URL; used when checking which
// URL the asset actually loaded from.
@interface AVURLAsset (WebKitExtensions)
@property (nonatomic, readonly) NSURL *resolvedURL;
@end
138
139 #import <pal/cf/CoreMediaSoftLink.h>
140 #import <pal/cocoa/AVFoundationSoftLink.h>
141
142 SOFT_LINK_FRAMEWORK(MediaToolbox)
143 SOFT_LINK_OPTIONAL(MediaToolbox, MTEnableCaption2015Behavior, Boolean, (), ())
144
145 #if PLATFORM(IOS_FAMILY)
146
147 #if HAVE(CELESTIAL)
148 SOFT_LINK_PRIVATE_FRAMEWORK(Celestial)
149 SOFT_LINK_CONSTANT(Celestial, AVController_RouteDescriptionKey_RouteCurrentlyPicked, NSString *)
150 SOFT_LINK_CONSTANT(Celestial, AVController_RouteDescriptionKey_RouteName, NSString *)
151 SOFT_LINK_CONSTANT(Celestial, AVController_RouteDescriptionKey_AVAudioRouteName, NSString *)
152 #define AVController_RouteDescriptionKey_RouteCurrentlyPicked getAVController_RouteDescriptionKey_RouteCurrentlyPicked()
153 #define AVController_RouteDescriptionKey_RouteName getAVController_RouteDescriptionKey_RouteName()
154 #define AVController_RouteDescriptionKey_AVAudioRouteName getAVController_RouteDescriptionKey_AVAudioRouteName()
155 #endif // HAVE(CELESTIAL)
156
157 #endif // PLATFORM(IOS_FAMILY)
158
159 using namespace WebCore;
160
// KVO context values passed to addObserver:forKeyPath:options:context: so that
// observeValueForKeyPath: can tell which kind of object produced a change
// (player item, item track, player, or player layer).
enum MediaPlayerAVFoundationObservationContext {
    MediaPlayerAVFoundationObservationContextPlayerItem,
    MediaPlayerAVFoundationObservationContextPlayerItemTrack,
    MediaPlayerAVFoundationObservationContextPlayer,
    MediaPlayerAVFoundationObservationContextAVPlayerLayer,
};
167
// Observes AVPlayer/AVPlayerItem KVO changes, notifications, and legible
// (caption) output callbacks, forwarding them to the owning
// MediaPlayerPrivateAVFoundationObjC through a WeakPtr so callbacks arriving
// after the player's destruction are safely dropped.
// NOTE(review): m_taskQueue presumably re-dispatches work onto the main
// thread — confirm in the @implementation (outside this view).
@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
{
    WeakPtr<MediaPlayerPrivateAVFoundationObjC> m_player;
    GenericTaskQueue<Timer, std::atomic<unsigned>> m_taskQueue;
    int m_delayCallbacks;
}
-(id)initWithPlayer:(WeakPtr<MediaPlayerPrivateAVFoundationObjC>&&)callback;
-(void)disconnect;
-(void)metadataLoaded;
-(void)didEnd:(NSNotification *)notification;
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;
@end
182
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Receives AVAssetResourceLoader callbacks (delivered on the shared loader
// delegate queue) and forwards them to the owning player through a WeakPtr.
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    WeakPtr<MediaPlayerPrivateAVFoundationObjC> m_player;
    GenericTaskQueue<Timer, std::atomic<unsigned>> m_taskQueue;
}
- (id)initWithPlayer:(WeakPtr<MediaPlayerPrivateAVFoundationObjC>&&)player;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
@end
#endif
192
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// AVPlayerItemVideoOutput pull delegate: notified when new pixel buffers are
// (or will become) available. Holds only a WeakPtr to the player so a late
// callback cannot use a destroyed player.
@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    WeakPtr<MediaPlayerPrivateAVFoundationObjC> m_player;
}
- (id)initWithPlayer:(WeakPtr<MediaPlayerPrivateAVFoundationObjC>&&)player;
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
@end
#endif
202
namespace WebCore {
// Maps an AVPlayerTimeControlStatus value to a human-readable name for logging.
// The static_asserts below pin the enumerator values that the table indexing
// relies on; if AVFoundation ever renumbers them this fails to compile.
static String convertEnumerationToString(AVPlayerTimeControlStatus enumerationValue)
{
    static const NeverDestroyed<String> values[] = {
        MAKE_STATIC_STRING_IMPL("AVPlayerTimeControlStatusPaused"),
        MAKE_STATIC_STRING_IMPL("AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate"),
        MAKE_STATIC_STRING_IMPL("AVPlayerTimeControlStatusPlaying"),
    };
    static_assert(!static_cast<size_t>(AVPlayerTimeControlStatusPaused), "AVPlayerTimeControlStatusPaused is not 0 as expected");
    static_assert(static_cast<size_t>(AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate) == 1, "AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate is not 1 as expected");
    static_assert(static_cast<size_t>(AVPlayerTimeControlStatusPlaying) == 2, "AVPlayerTimeControlStatusPlaying is not 2 as expected");
    ASSERT(static_cast<size_t>(enumerationValue) < WTF_ARRAY_LENGTH(values));
    return values[static_cast<size_t>(enumerationValue)];
}
}
218
namespace WTF {
template<typename Type>
struct LogArgument;

// Allows AVPlayerTimeControlStatus values to be used directly in WTF logging
// macros by converting them to their enumerator names.
template <>
struct LogArgument<AVPlayerTimeControlStatus> {
    static String toString(const AVPlayerTimeControlStatus status)
    {
        return convertEnumerationToString(status);
    }
};
} // namespace WTF
231
232 namespace WebCore {
233 using namespace PAL;
234
235 static NSArray *assetMetadataKeyNames();
236 static NSArray *itemKVOProperties();
237 static NSArray *assetTrackMetadataKeyNames();
238 static NSArray *playerKVOProperties();
239 static AVAssetTrack* firstEnabledTrack(NSArray* tracks);
240
241 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Returns the serial dispatch queue shared by all WebCoreAVFLoaderDelegate
// instances. Created lazily, exactly once, and never destroyed.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_once_t createQueueOnce;
    static dispatch_queue_t loaderQueue;
    dispatch_once(&createQueueOnce, ^{
        loaderQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return loaderQueue;
}
251 #endif
252
253 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Returns the serial dispatch queue shared by all WebCoreAVFPullDelegate
// instances. Created lazily, exactly once, and never destroyed.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_once_t createQueueOnce;
    static dispatch_queue_t pullQueue;
    dispatch_once(&createQueueOnce, ^{
        pullQueue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return pullQueue;
}
263 #endif
264
// Registers this media engine with WebCore, supplying a factory callback plus
// the static capability- and cache-management hooks. Does nothing when
// AVFoundation is unavailable at runtime.
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player); },
            getSupportedTypes, supportsType, originsInMediaCache, clearMediaCache, clearMediaCacheForOrigins, supportsKeySystem);
    ASSERT(AVAssetMIMETypeCache::singleton().isAvailable());
}
274
// Returns the AVAssetCache rooted at `path`, falling back to a "MediaCache"
// directory under the temporary directory when no path is provided.
static AVAssetCache *assetCacheForPath(const String& path)
{
    NSURL *cacheDirectoryURL = path.isEmpty()
        ? [[NSURL fileURLWithPath:NSTemporaryDirectory()] URLByAppendingPathComponent:@"MediaCache" isDirectory:YES]
        : [NSURL fileURLWithPath:path isDirectory:YES];

    return [PAL::getAVAssetCacheClass() assetCacheWithURL:cacheDirectoryURL];
}
286
// Reports the set of security origins that have entries in the media cache at
// `path`. Cache keys are origin URLs; invalid keys are skipped.
HashSet<RefPtr<SecurityOrigin>> MediaPlayerPrivateAVFoundationObjC::originsInMediaCache(const String& path)
{
    HashSet<RefPtr<SecurityOrigin>> cachedOrigins;
    for (NSString *cacheKey in [assetCacheForPath(path) allKeys]) {
        URL originURL(URL(), cacheKey);
        if (!originURL.isValid())
            continue;
        cachedOrigins.add(SecurityOrigin::create(originURL));
    }
    return cachedOrigins;
}
297
// Converts an NSDate (seconds since the Unix epoch) into a WTF::WallTime.
static WallTime toSystemClockTime(NSDate *date)
{
    ASSERT(date);
    return WallTime::fromRawSeconds([date timeIntervalSince1970]);
}
303
// Purges AVFoundation's media cache at `path`, removing entries modified after
// `modifiedSince`. A non-positive cutoff removes the entire cache directory.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCache(const String& path, WallTime modifiedSince)
{
    AVAssetCache* assetCache = assetCacheForPath(path);

    // First drop the entries AVAssetCache itself knows about.
    for (NSString *key in [assetCache allKeys]) {
        if (toSystemClockTime([assetCache lastModifiedDateOfEntryForKey:key]) > modifiedSince)
            [assetCache removeEntryForKey:key];
    }

    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSURL *baseURL = [assetCache URL];

    if (modifiedSince <= WallTime::fromRawSeconds(0)) {
        [fileManager removeItemAtURL:baseURL error:nil];
        return;
    }

    // Also sweep the cache directory for "CachedMedia-" files newer than the
    // cutoff. NOTE(review): presumably these are artifacts not reachable via
    // -allKeys — confirm against AVAssetCache behavior.
    NSArray *propertyKeys = @[NSURLNameKey, NSURLContentModificationDateKey, NSURLIsRegularFileKey];
    NSDirectoryEnumerator *enumerator = [fileManager enumeratorAtURL:baseURL includingPropertiesForKeys:
        propertyKeys options:NSDirectoryEnumerationSkipsSubdirectoryDescendants
        errorHandler:nil];

    // Collect first, delete after: mutating the directory while enumerating it
    // is unsafe. Use ObjC fast enumeration (for … in) for consistency with the
    // rest of this file.
    RetainPtr<NSMutableArray> urlsToDelete = adoptNS([[NSMutableArray alloc] init]);
    for (NSURL *fileURL in enumerator) {
        NSDictionary *fileAttributes = [fileURL resourceValuesForKeys:propertyKeys error:nil];

        if (![fileAttributes[NSURLNameKey] hasPrefix:@"CachedMedia-"])
            continue;

        if (![fileAttributes[NSURLIsRegularFileKey] boolValue])
            continue;

        if (toSystemClockTime(fileAttributes[NSURLContentModificationDateKey]) <= modifiedSince)
            continue;

        [urlsToDelete addObject:fileURL];
    }

    for (NSURL *fileURL in urlsToDelete.get())
        [fileManager removeItemAtURL:fileURL error:nil];
}
345
// Removes every media-cache entry at `path` whose key parses to a security
// origin contained in `origins`.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCacheForOrigins(const String& path, const HashSet<RefPtr<SecurityOrigin>>& origins)
{
    AVAssetCache* assetCache = assetCacheForPath(path);
    for (NSString *key in [assetCache allKeys]) {
        URL keyAsURL(URL(), key);
        if (keyAsURL.isValid() && origins.contains(SecurityOrigin::create(keyAsURL)))
            [assetCache removeEntryForKey:key];
    }
}
357
// Constructs the AVFoundation-backed player. The ObjC helper objects are each
// handed a WeakPtr to this player so their asynchronous callbacks become no-ops
// once this object (and its WeakPtrFactory) is destroyed — see the destructor's
// revokeAll() call.
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_videoFullscreenLayerManager(std::make_unique<VideoFullscreenLayerManagerObjC>())
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
    // KVO/notification observer for the AVPlayer and AVPlayerItem.
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithPlayer:makeWeakPtr(*this)]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Pull delegate for the AVPlayerItemVideoOutput pixel-buffer pipeline.
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithPlayer:makeWeakPtr(*this)]))
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Resource loader delegate for custom (WebKit-mediated) media loading.
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithPlayer:makeWeakPtr(*this)]))
#endif
    , m_currentTextTrack(0)
    , m_cachedRate(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_cachedIsReadyForDisplay(false)
    , m_haveBeenAskedToCreateLayer(false)
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
}
387
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
    // Invalidate all outstanding WeakPtrs first so the helper delegate objects
    // cannot call back into this player while it is being torn down.
    weakPtrFactory().revokeAll();

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Detach the resource loader delegate and cancel any in-flight loads.
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];

    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    [m_videoOutput setDelegate:nil queue:0];
#endif

    if (m_videoLayer)
        destroyVideoLayer();

    // Releases the AVPlayer/AVPlayerItem and all observers — see cancelLoad().
    cancelLoad();
}
407
// Cancels any in-progress load and tears down the entire AVFoundation object
// graph (asset, player item, player, outputs) in dependency order, carefully
// unregistering every observer before releasing the observed object.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    ALWAYS_LOG(LOGIDENTIFIER);
    tearDownVideoRendering();

    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

    // Detach the legible (caption) output from the item before dropping it.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }

    // Remove every KVO registration made on the item before releasing it.
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        
        m_avPlayerItem = nil;
    }
    if (m_avPlayerItem) {
    }
    if (m_avPlayer) {
        // Periodic time observer must be removed explicitly.
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;

        for (NSString *keyName in playerKVOProperties())
            [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];

        setShouldObserveTimeControlStatus(false);

        [m_avPlayer replaceCurrentItemWithPlayerItem:nil];
#if !PLATFORM(IOS_FAMILY)
        [m_avPlayer setOutputContext:nil];
#endif
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = MediaTime::zeroTime();

    // Each cached track was observed for "enabled"; balance those registrations.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    if (m_provider) {
        m_provider->setPlayerItem(nullptr);
        m_provider->setAudioTrack(nullptr);
    }
#endif

    setIgnoreLoadStateChanges(false);
}
476
// Layer-backed rendering is considered available as soon as layer creation has
// been requested (the flag is set in createVideoLayer()'s main-thread task).
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_haveBeenAskedToCreateLayer;
}
481
// Context (non-layer) rendering is available through either the video output
// (when supported) or the AVAssetImageGenerator fallback.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (m_videoOutput)
        return true;
#endif
    return !!m_imageGenerator;
}
490
// Creates whichever context renderer this build supports: the pixel-buffer
// video output when available, otherwise the image-generator fallback.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
499
// Lazily creates the AVAssetImageGenerator used for context rendering.
// Requires a loaded asset; does nothing if a generator already exists.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    using namespace PAL;
    INFO_LOG(LOGIDENTIFIER);

    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [PAL::getAVAssetImageGeneratorClass() assetImageGeneratorWithAsset:m_avAsset.get()];

    // Generate frames cropped to the clean aperture, honoring the track's
    // preferred transform, at exact (zero-tolerance) requested times.
    AVAssetImageGenerator *generator = m_imageGenerator.get();
    [generator setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [generator setAppliesPreferredTrackTransform:YES];
    [generator setRequestedTimeToleranceBefore:kCMTimeZero];
    [generator setRequestedTimeToleranceAfter:kCMTimeZero];
}
515
// Tears down the context renderer(s). destroyImageGenerator() is a no-op when
// no generator exists, so calling both unconditionally is safe.
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
#endif
    destroyImageGenerator();
}
523
// Releases the image generator created by createImageGenerator(), if any.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (!m_imageGenerator)
        return;

    INFO_LOG(LOGIDENTIFIER);

    // nil (not 0) for consistency with the other RetainPtr members cleared in
    // this file (m_videoLayer, m_avAsset, m_legibleOutput, ...).
    m_imageGenerator = nil;
}
533
// Schedules creation of the AVPlayerLayer on the main thread. The WeakPtr
// capture guards against the player being destroyed before the task runs.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
        return;

    callOnMainThread([this, weakThis = makeWeakPtr(*this)] {
        if (!weakThis)
            return;

        // Re-check: state may have changed between scheduling and execution.
        if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
            return;
        m_haveBeenAskedToCreateLayer = true;

        if (!m_videoLayer)
            createAVPlayerLayer();

#if USE(VIDEOTOOLBOX) && HAVE(AVFOUNDATION_VIDEO_OUTPUT)
        // Also keep a video output around so frames can be read back as pixel buffers.
        if (!m_videoOutput)
            createVideoOutput();
#endif

        player()->client().mediaPlayerRenderingModeChanged(player());
    });
}
558
// Creates the AVPlayerLayer, attaches it to the AVPlayer, and hands it to the
// fullscreen layer manager. Must only run when an AVPlayer exists.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
{
    if (!m_avPlayer)
        return;

    m_videoLayer = adoptNS([PAL::allocAVPlayerLayerInstance() init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];

#ifndef NDEBUG
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
    // Observe readyForDisplay so we learn when the layer can show its first
    // frame (balanced by the removeObserver: call in destroyVideoLayer()).
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
    IntSize defaultSize = snappedIntRect(player()->client().mediaPlayerContentBoxRect()).size();
    ALWAYS_LOG(LOGIDENTIFIER);

    m_videoFullscreenLayerManager->setVideoLayer(m_videoLayer.get(), defaultSize);

#if PLATFORM(IOS_FAMILY) && !PLATFORM(WATCHOS)
    // Keep the layer's picture-in-picture flag in sync with the player's
    // current fullscreen mode, when the (private) selector is available.
    if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(player()->fullscreenMode() & MediaPlayer::VideoFullscreenModePictureInPicture)];
#endif
}
583
// Destroys the AVPlayerLayer: unregisters the readyForDisplay observation added
// in createAVPlayerLayer(), detaches the layer from the player, and notifies
// the fullscreen layer manager before releasing it.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    ALWAYS_LOG(LOGIDENTIFIER);

    [m_videoLayer removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer setPlayer:nil];
    m_videoFullscreenLayerManager->didDestroyVideoLayer();

    m_videoLayer = nil;
}
597
// Computes the stream's start date in milliseconds since the epoch. The item's
// current date advances with playback, so subtracting the current playback
// offset recovers the start.
MediaTime MediaPlayerPrivateAVFoundationObjC::getStartDate() const
{
    double dateMilliseconds = [[m_avPlayerItem currentDate] timeIntervalSince1970] * 1000;

    // AVFoundation reports 0 when the media has no start date; no live streams
    // were made during the 1970 epoch, so 0 unambiguously means "no date".
    if (!dateMilliseconds)
        return MediaTime::invalidTime();

    double playbackOffsetMilliseconds = CMTimeGetSeconds([m_avPlayerItem currentTime]) * 1000;

    // Round to absorb the sub-second error introduced by the subtraction.
    return MediaTime::createWithDouble(round(dateMilliseconds - playbackOffsetMilliseconds));
}
612
// Returns whether a video frame can currently be presented or painted.
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    // When rendering through the AVPlayerLayer, rely on its readyForDisplay
    // state (cached via KVO into m_cachedIsReadyForDisplay).
    if (currentRenderingMode() == MediaRenderingToLayer)
        return m_cachedIsReadyForDisplay;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Otherwise a frame exists if we already pulled a pixel buffer, or the
    // video output has a new one for the current item time.
    if (m_videoOutput && (m_lastPixelBuffer || [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]]))
        return true;
#endif

    return m_videoFrameHasDrawn;
}
625
626 #if ENABLE(AVF_CAPTIONS)
// Maps a text-track kind to the AVMediaCharacteristic array used when
// registering the out-of-band track with AVFoundation.
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    // Under the 2015 caption behavior the user selects tracks manually, so
    // every out-of-band track is marked as auxiliary content.
    static bool manualSelectionMode = MTEnableCaption2015BehaviorPtr() && MTEnableCaption2015BehaviorPtr()();
    if (manualSelectionMode)
        return @[ AVMediaCharacteristicIsAuxiliaryContent ];

    // FIXME: Match these to correct types:
    switch (kind) {
    case PlatformTextTrack::Description:
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility ];
    case PlatformTextTrack::Forced:
        return @[ AVMediaCharacteristicContainsOnlyForcedSubtitles ];
    case PlatformTextTrack::Caption:
    case PlatformTextTrack::Subtitle:
    default:
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];
    }
}
648     
// Called when an out-of-band text track's mode changes; forwards to
// trackModeChanged() (declared elsewhere) so selection state can be resynced.
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
653     
// Pushes the page's out-of-band text track modes onto the corresponding
// platform (AVFoundation) tracks, matched by the unique identifier we embedded
// in each AVMediaSelectionOption's outOfBandIdentifier.
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    
    for (auto& textTrack : m_textTracks) {
        // Only out-of-band tracks carry a selection option to synchronize.
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;
        
        RefPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOption> currentOption = trackPrivate->mediaSelectionOption();
        
        for (auto& track : outOfBandTrackSources) {
            // Match the source to this option by the uniqueId we registered.
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();
            
            if (![[currentOption.get() outOfBandIdentifier] isEqual:(__bridge NSString *)uniqueID.get()])
                continue;
            
            // Translate the platform mode; unknown modes fall back to Hidden.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;
            
            textTrack->setMode(mode);
            break;
        }
    }
}
684 #endif
685
686
// Runs the URL through NSURLProtocol's request canonicalization, falling back
// to the plain Cocoa URL whenever any step cannot produce a result.
static NSURL *canonicalURL(const URL& url)
{
    NSURL *cocoaURL = url;
    if (url.isEmpty())
        return cocoaURL;

    RetainPtr<NSURLRequest> request = adoptNS([[NSURLRequest alloc] initWithURL:cocoaURL]);
    if (!request)
        return cocoaURL;

    NSURLRequest *canonicalRequest = [NSURLProtocol canonicalRequestForRequest:request.get()];
    return canonicalRequest ? [canonicalRequest URL] : cocoaURL;
}
703
704 #if PLATFORM(IOS_FAMILY)
// Translates a WebCore Cookie into its Cocoa equivalent. Note cookie.expires is
// in milliseconds since the epoch, while NSDate wants seconds.
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    RetainPtr<NSMutableDictionary> properties = adoptNS([[NSMutableDictionary alloc] init]);
    [properties setObject:(NSString *)cookie.name forKey:NSHTTPCookieName];
    [properties setObject:(NSString *)cookie.value forKey:NSHTTPCookieValue];
    [properties setObject:(NSString *)cookie.domain forKey:NSHTTPCookieDomain];
    [properties setObject:(NSString *)cookie.path forKey:NSHTTPCookiePath];
    [properties setObject:[NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)] forKey:NSHTTPCookieExpires];

    // Optional flags: only present in the dictionary when set.
    if (cookie.secure)
        [properties setObject:@YES forKey:NSHTTPCookieSecure];
    if (cookie.session)
        [properties setObject:@YES forKey:NSHTTPCookieDiscard];

    return [NSHTTPCookie cookieWithProperties:properties.get()];
}
722 #endif
723
724 void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const URL& url)
725 {
726     if (m_avAsset)
727         return;
728
729     ALWAYS_LOG(LOGIDENTIFIER);
730
731     setDelayCallbacks(true);
732
733     RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);    
734
735     [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];
736
737     RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);
738
739     String referrer = player()->referrer();
740     if (!referrer.isEmpty())
741         [headerFields.get() setObject:referrer forKey:@"Referer"];
742
743     String userAgent = player()->userAgent();
744     if (!userAgent.isEmpty())
745         [headerFields.get() setObject:userAgent forKey:@"User-Agent"];
746
747     if ([headerFields.get() count])
748         [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];
749
750     if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
751         [options.get() setObject:@YES forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];
752
753     if (PAL::canLoad_AVFoundation_AVURLAssetUseClientURLLoadingExclusively())
754         [options setObject:@YES forKey:AVURLAssetUseClientURLLoadingExclusively];
755 #if PLATFORM(IOS_FAMILY)
756     else if (PAL::canLoad_AVFoundation_AVURLAssetRequiresCustomURLLoadingKey())
757         [options setObject:@YES forKey:AVURLAssetRequiresCustomURLLoadingKey];
758     // FIXME: rdar://problem/20354688
759     String identifier = player()->sourceApplicationIdentifier();
760     if (!identifier.isEmpty())
761         [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];
762 #endif
763
764     auto type = player()->contentMIMEType();
765     if (PAL::canLoad_AVFoundation_AVURLAssetOutOfBandMIMETypeKey() && !type.isEmpty() && !player()->contentMIMETypeWasInferredFromExtension()) {
766         auto codecs = player()->contentTypeCodecs();
767         if (!codecs.isEmpty()) {
768             NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)type, (NSString *)codecs];
769             [options setObject:typeString forKey:AVURLAssetOutOfBandMIMETypeKey];
770         } else
771             [options setObject:(NSString *)type forKey:AVURLAssetOutOfBandMIMETypeKey];
772     }
773
774 #if ENABLE(AVF_CAPTIONS)
775     const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
776     if (!outOfBandTrackSources.isEmpty()) {
777         RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
778         for (auto& trackSource : outOfBandTrackSources) {
779             RetainPtr<CFStringRef> label = trackSource->label().createCFString();
780             RetainPtr<CFStringRef> language = trackSource->language().createCFString();
781             RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
782             RetainPtr<CFStringRef> url = trackSource->url().createCFString();
783             [outOfBandTracks.get() addObject:@{
784                 AVOutOfBandAlternateTrackDisplayNameKey: (__bridge NSString *)label.get(),
785                 AVOutOfBandAlternateTrackExtendedLanguageTagKey: (__bridge NSString *)language.get(),
786                 AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
787                 AVOutOfBandAlternateTrackIdentifierKey: (__bridge NSString *)uniqueID.get(),
788                 AVOutOfBandAlternateTrackSourceKey: (__bridge NSString *)url.get(),
789                 AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
790             }];
791         }
792
793         [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
794     }
795 #endif
796
797 #if PLATFORM(IOS_FAMILY)
798     String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
799     if (!networkInterfaceName.isEmpty())
800         [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
801 #endif
802
803 #if PLATFORM(IOS_FAMILY)
804     Vector<Cookie> cookies;
805     if (player()->getRawCookies(url, cookies)) {
806         RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
807         for (auto& cookie : cookies)
808             [nsCookies addObject:toNSHTTPCookie(cookie)];
809
810         if (PAL::canLoad_AVFoundation_AVURLAssetHTTPCookiesKey())
811             [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
812     }
813 #endif
814
815     bool usePersistentCache = player()->client().mediaPlayerShouldUsePersistentCache();
816     [options setObject:@(!usePersistentCache) forKey:AVURLAssetUsesNoPersistentCacheKey];
817     
818     if (usePersistentCache)
819         [options setObject:assetCacheForPath(player()->client().mediaPlayerMediaCacheDirectory()) forKey:AVURLAssetCacheKey];
820
821     NSURL *cocoaURL = canonicalURL(url);
822     m_avAsset = adoptNS([PAL::allocAVURLAssetInstance() initWithURL:cocoaURL options:options.get()]);
823
824 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
825     AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
826     [resourceLoader setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];
827
828     if (DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled()
829         && [resourceLoader respondsToSelector:@selector(setURLSession:)]
830         && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegate)]
831         && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegateQueue)]) {
832         RefPtr<PlatformMediaResourceLoader> mediaResourceLoader = player()->createResourceLoader();
833         if (mediaResourceLoader)
834             resourceLoader.URLSession = (NSURLSession *)[[[WebCoreNSURLSession alloc] initWithResourceLoader:*mediaResourceLoader delegate:resourceLoader.URLSessionDataDelegate delegateQueue:resourceLoader.URLSessionDataDelegateQueue] autorelease];
835     }
836
837 #endif
838
839     m_haveCheckedPlayability = false;
840
841     setDelayCallbacks(false);
842 }
843
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItem *item)
{
    // Swap the AVPlayer's current item; the replacement is always performed on
    // the main thread.
    if (!m_avPlayer)
        return;

    if (pthread_main_np()) {
        [m_avPlayer replaceCurrentItemWithPlayerItem:item];
        return;
    }

    // Off the main thread: retain both objects so they stay alive until the
    // asynchronous replacement runs.
    RetainPtr<AVPlayer> protectedPlayer = m_avPlayer.get();
    RetainPtr<AVPlayerItem> protectedItem = item;
    dispatch_async(dispatch_get_main_queue(), [protectedPlayer, protectedItem] {
        [protectedPlayer replaceCurrentItemWithPlayerItem:protectedItem.get()];
    });
}
860
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    // Lazily create the AVPlayer, register KVO observation, and re-apply any
    // state (playback target, muted, current item) requested before the player
    // existed.
    if (m_avPlayer)
        return;

    ALWAYS_LOG(LOGIDENTIFIER);

    setDelayCallbacks(true);

    m_avPlayer = adoptNS([PAL::allocAVPlayerInstance() init]);
    for (NSString *keyName in playerKVOProperties())
        [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];

    setShouldObserveTimeControlStatus(true);

    // WebKit performs its own media selection; don't let AVFoundation auto-select tracks.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET) && !PLATFORM(IOS_FAMILY)
    if (m_shouldPlayToPlaybackTarget) {
        // Clear m_shouldPlayToPlaybackTarget so setShouldPlayToPlaybackTarget()
        // doesn't return without doing anything.
        m_shouldPlayToPlaybackTarget = false;
        setShouldPlayToPlaybackTarget(true);
    }
#endif

#if PLATFORM(IOS_FAMILY) && !PLATFORM(IOS_FAMILY_SIMULATOR) && !PLATFORM(IOSMAC)
    setShouldDisableSleep(player()->shouldDisableSleep());
#endif

    if (m_muted) {
        // Clear m_muted so setMuted doesn't return without doing anything, then
        // re-apply the muted state to the freshly created player. (The previous
        // code called [m_avPlayer setMuted:m_muted] after clearing the flag,
        // which unmuted the new player and dropped the cached muted state.)
        m_muted = false;
        setMuted(true);
    }

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
909
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    // Lazily create the AVPlayerItem from the current asset, register for
    // end-of-playback and KVO notifications, and attach the legible (caption)
    // output. Safe to call repeatedly; does nothing once the item exists.
    if (m_avPlayerItem)
        return;

    ALWAYS_LOG(LOGIDENTIFIER);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([PAL::allocAVPlayerItemInstance() initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // Observe with "prior" notifications so caches can be updated on both sides of a change.
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

    // Configure a legible output that delivers WebVTT samples to the observer
    // on the main queue, without letting the player render captions itself.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([PAL::allocAVPlayerItemLegibleOutputInstance() initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio provider pointed at the new item and its first enabled audible track.
    if (m_provider) {
        m_provider->setPlayerItem(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }
#endif

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#endif

    setDelayCallbacks(false);
}
957
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    // Ask the asset (asynchronously) whether it is playable; only once per load.
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    INFO_LOG(LOGIDENTIFIER);
    auto weakThis = makeWeakPtr(*this);

    NSArray *keys = @[@"playable", @"tracks"];
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:keys completionHandler:^{
        // The completion handler may run on an arbitrary queue; hop to the main
        // thread and bail if the player was destroyed in the meantime.
        callOnMainThread([weakThis] {
            if (!weakThis)
                return;
            weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
974
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    INFO_LOG(LOGIDENTIFIER);

    // A dispatch group fans out the asynchronous loads: one enter for the
    // asset-level metadata keys plus one enter per track for track-level keys.
    // The observer's -metadataLoaded is invoked only after all of them finish.
    OSObjectPtr<dispatch_group_t> metadataLoadingGroup = adoptOSObject(dispatch_group_create());
    dispatch_group_enter(metadataLoadingGroup.get());
    auto weakThis = makeWeakPtr(*this);
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            // Only fan out to per-track loads if the player still exists and the "tracks" key loaded.
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup.get());
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup.get());
                    }];
                }
            }
            // Balance the enter performed before the asset-level load started.
            dispatch_group_leave(metadataLoadingGroup.get());
        });
    }];

    dispatch_group_notify(metadataLoadingGroup.get(), dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });
    });
}
1004
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    // Translate the KVO-cached AVPlayerItem state into the engine-neutral status.
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    switch (m_cachedItemStatus) {
    case AVPlayerItemStatusUnknown:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    case AVPlayerItemStatusFailed:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    default:
        break;
    }

    // Item is usable; refine using the cached buffering flags, in priority order.
    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1023
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    // The inline video layer is owned by the fullscreen layer manager.
    return m_videoFullscreenLayerManager->videoInlineLayer();
}
1028
void MediaPlayerPrivateAVFoundationObjC::updateVideoFullscreenInlineImage()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Grab the latest decoded frame synchronously and hand it to the layer
    // manager for the inline placeholder image.
    updateLastImage(UpdateType::UpdateSynchronously);
    m_videoFullscreenLayerManager->updateVideoFullscreenInlineImage(m_lastImage);
#endif
}
1036
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer, Function<void()>&& completionHandler)
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Snapshot the current frame synchronously so the fullscreen transition can
    // display it while the video layer is re-parented.
    updateLastImage(UpdateType::UpdateSynchronously);
    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler), m_lastImage);
#else
    // No video output available; transition without a placeholder image.
    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler), nil);
#endif
    updateDisableExternalPlayback();
}
1047
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    // Forward the fullscreen bounds to the layer manager.
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
}
1052
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;

    if (!m_videoLayer)
        return;

    // Map the cross-platform gravity onto the AVFoundation layer-gravity string.
    NSString *videoGravity = AVLayerVideoGravityResizeAspect;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        videoGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        videoGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
    }

    // These gravity values are global NSString constants, so pointer comparison suffices.
    if ([m_videoLayer videoGravity] == videoGravity)
        return;

    [m_videoLayer setVideoGravity:videoGravity];
    syncTextTrackBounds();
}
1076
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenMode(MediaPlayer::VideoFullscreenMode mode)
{
#if PLATFORM(IOS_FAMILY) && !PLATFORM(WATCHOS)
    // -setPIPModeEnabled: is not available in every SDK; probe before calling.
    if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(mode & MediaPlayer::VideoFullscreenModePictureInPicture)];
    updateDisableExternalPlayback();
#else
    UNUSED_PARAM(mode);
#endif
}
1087     
void MediaPlayerPrivateAVFoundationObjC::videoFullscreenStandbyChanged()
{
#if PLATFORM(IOS_FAMILY) && !PLATFORM(WATCHOS)
    // Standby state can affect whether external playback should be disabled.
    updateDisableExternalPlayback();
#endif
}
1094
1095 #if PLATFORM(IOS_FAMILY)
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    // Return the most recently observed timed metadata, if any.
    return m_currentMetaData ? m_currentMetaData.get() : nil;
}
1102
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    // The access log only exists once a player item has been created.
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemAccessLog *accessLog = [m_avPlayerItem.get() accessLog];
    auto logText = adoptNS([[NSString alloc] initWithData:[accessLog extendedLogData] encoding:[accessLog extendedLogDataStringEncoding]]);

    return logText.get();
}
1113
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    // The error log only exists once a player item has been created.
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *errorLog = [m_avPlayerItem.get() errorLog];
    auto logText = adoptNS([[NSString alloc] initWithData:[errorLog extendedLogData] encoding:[errorLog extendedLogDataStringEncoding]]);

    return logText.get();
}
1124 #endif
1125
void MediaPlayerPrivateAVFoundationObjC::didEnd()
{
    // Playback reached the end; clear the pending play request before notifying the base class.
    m_requestedPlaying = false;
    MediaPlayerPrivateAVFoundation::didEnd();
}
1131
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    // Toggle layer visibility inside a transaction with implicit animations
    // disabled so the change takes effect immediately.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    if (m_videoLayer)
        [m_videoLayer.get() setHidden:!isVisible];
    [CATransaction commit];
}
1140     
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    if (!metaDataAvailable())
        return;

    ALWAYS_LOG(LOGIDENTIFIER);

    // Record the intent to play; the AVPlayer rate change happens in setPlayerRate().
    m_requestedPlaying = true;
    setPlayerRate(m_requestedRate);
}
1151
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    if (!metaDataAvailable())
        return;

    ALWAYS_LOG(LOGIDENTIFIER);

    // Clear the play request and stop the player by driving its rate to zero.
    m_requestedPlaying = false;
    setPlayerRate(0);
}
1162
bool MediaPlayerPrivateAVFoundationObjC::platformPaused() const
{
    // Uses the KVO-cached time control status rather than querying the player.
    return m_cachedTimeControlStatus == AVPlayerTimeControlStatusPaused;
}
1167
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will
    // compute the answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();

    // Prefer the player item's duration when it is ready to play; some assets
    // never report a duration of their own.
    CMTime cmDuration = (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        ? [m_avPlayerItem.get() duration]
        : [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return PAL::toMediaTime(cmDuration);

    // An indefinite duration maps to positive infinity (e.g. for live content).
    if (CMTIME_IS_INDEFINITE(cmDuration))
        return MediaTime::positiveInfiniteTime();

    INFO_LOG(LOGIDENTIFIER, "returning invalid time");
    return MediaTime::invalidTime();
}
1192
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime playerTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(playerTime))
        return MediaTime::zeroTime();

    // Clamp to zero; the reported time can be slightly negative near the start.
    return std::max(PAL::toMediaTime(playerTime), MediaTime::zeroTime());
}
1204
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // Partial in-band metadata cues must not span a seek; flush them first.
    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = PAL::toCMTime(time);
    CMTime cmBefore = PAL::toCMTime(negativeTolerance);
    CMTime cmAfter = PAL::toCMTime(positiveTolerance);

    // [AVPlayerItem seekToTime] will throw an exception if toleranceBefore is negative.
    if (CMTimeCompare(cmBefore, kCMTimeZero) < 0)
        cmBefore = kCMTimeZero;
    
    auto weakThis = makeWeakPtr(*this);

    // Suspend timeControlStatus observation for the duration of the seek; it is
    // restored on the main thread once the seek completes.
    setShouldObserveTimeControlStatus(false);
    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->setShouldObserveTimeControlStatus(true);
            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1237
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS_FAMILY)
    // Volume is not controlled per-player on iOS-family platforms; ignore the request.
    UNUSED_PARAM(volume);
    return;
#else

    if (!m_avPlayer)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, volume);

    [m_avPlayer.get() setVolume:volume];
#endif
}
1253
void MediaPlayerPrivateAVFoundationObjC::setMuted(bool muted)
{
    // Cache the muted state even when no AVPlayer exists yet; createAVPlayer()
    // re-applies it once the player is created.
    if (m_muted == muted)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, muted);

    m_muted = muted;

    if (m_avPlayer)
        [m_avPlayer.get() setMuted:m_muted];
}
1268
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    // NOTE(review): intentionally does nothing beyond logging here; caption
    // visibility appears to be handled elsewhere — confirm before changing.
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    ALWAYS_LOG(LOGIDENTIFIER, closedCaptionsVisible);
}
1278
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    // Remember the requested rate; only push it to the player while playback
    // has actually been requested.
    m_requestedRate = rate;
    if (m_requestedPlaying)
        setPlayerRate(rate);
}
1285
void MediaPlayerPrivateAVFoundationObjC::setPlayerRate(double rate)
{
    setDelayCallbacks(true);
    m_cachedRate = rate;
    // Disable observation while changing the rate so our own change does not
    // feed back through the timeControlStatus observer; re-cache the status manually.
    setShouldObserveTimeControlStatus(false);
    [m_avPlayer setRate:rate];
    m_cachedTimeControlStatus = [m_avPlayer timeControlStatus];
    setShouldObserveTimeControlStatus(true);
    setDelayCallbacks(false);
}
1296
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    if (!metaDataAvailable())
        return 0;

    // Return the cached rate rather than querying the AVPlayer.
    return m_cachedRate;
}
1304
double MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesLastModifiedTime() const
{
    // Only available on macOS 10.13+ / iOS 11+ SDKs; messaging a nil item returns 0.
#if (PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101300) || (PLATFORM(IOS_FAMILY) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000)
    return [m_avPlayerItem seekableTimeRangesLastModifiedTime];
#else
    return 0;
#endif
}
1313
double MediaPlayerPrivateAVFoundationObjC::liveUpdateInterval() const
{
    // Only available on macOS 10.13+ / iOS 11+ SDKs; messaging a nil item returns 0.
#if (PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101300) || (PLATFORM(IOS_FAMILY) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000)
    return [m_avPlayerItem liveUpdateInterval];
#else
    return 0;
#endif
}
1322
void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
{
    // Select the time-pitch algorithm corresponding to the pitch-preservation
    // setting; there is nothing to do until a player item exists.
    if (!m_avPlayerItem)
        return;

    NSString *algorithm = preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed;
    [m_avPlayerItem setAudioTimePitchAlgorithm:algorithm];
}
1328
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    // Convert the KVO-cached loaded ranges into engine-neutral time ranges.
    auto bufferedRanges = std::make_unique<PlatformTimeRanges>();

    if (!m_avPlayerItem)
        return bufferedRanges;

    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;
        bufferedRanges->add(PAL::toMediaTime(range.start), PAL::toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return bufferedRanges;
}
1343
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    // Find the earliest start among the valid, non-empty seekable ranges.
    bool foundValidRange = false;
    MediaTime earliestStart = MediaTime::positiveInfiniteTime();
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundValidRange = true;
        earliestStart = std::min(earliestStart, PAL::toMediaTime(range.start));
    }
    return foundValidRange ? earliestStart : MediaTime::zeroTime();
}
1363
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    // Lazily populate the cache; messaging a nil player item yields nil, which
    // simply leaves the cache empty.
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    // Find the latest end among the valid, non-empty seekable ranges.
    MediaTime latestEnd;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latestEnd = std::max(latestEnd, PAL::toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return latestEnd;
}
1381
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    // Find the latest end among the valid, non-empty loaded ranges.
    MediaTime latestEnd;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latestEnd = std::max(latestEnd, PAL::toMediaTime(CMTimeRangeGetEnd(range)));
    }

    return latestEnd;
}
1400
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    // Sum each track's sample data length once, then reuse the cached total.
    if (!m_cachedTotalBytes) {
        for (AVPlayerItemTrack *track in m_cachedTracks.get())
            m_cachedTotalBytes += [[track assetTrack] totalSampleDataLength];
    }

    return m_cachedTotalBytes;
}
1414
void MediaPlayerPrivateAVFoundationObjC::setAsset(RetainPtr<id>&& asset)
{
    // Takes ownership of the platform asset.
    m_avAsset = WTFMove(asset);
}
1419
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    // Aggregate the load status of every metadata key into a single engine status.
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];

        if (error)
            ERROR_LOG(LOGIDENTIFIER, "failed for ", [keyName UTF8String], ", error = ", [[error localizedDescription] UTF8String]);

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading;// At least one key is not loaded yet.
        
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    // Hardware decode requirements are checked at most once per asset; the
    // result is cached in m_tracksArePlayable (mutable, hence usable here).
    if (!player()->shouldCheckHardwareSupport())
        m_tracksArePlayable = true;

    if (!m_tracksArePlayable) {
        m_tracksArePlayable = true;
        for (AVAssetTrack *track in [m_avAsset tracks]) {
            if (!assetTrackMeetsHardwareDecodeRequirements(track, player()->mediaContentTypesRequiringHardwareSupport())) {
                m_tracksArePlayable = false;
                break;
            }
        }
    }

    // Playable only when AVFoundation says so AND every track meets the hardware requirements.
    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue] && m_tracksArePlayable.value())
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1460
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    // Surface the error code (if any) recorded while loading the "playable" key.
    if (!m_avAsset)
        return 0;

    NSError *loadError = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&loadError];
    // Messaging a nil NSError returns 0, so no error means no code.
    return [loadError code];
}
1470
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

    // Prefer the video output when it has a decoded frame; otherwise fall back
    // to the image generator. Note the dangling "else" pairs with the #if body.
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    m_videoFrameHasDrawn = true;
}
1491
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    // Already compositing into a layer; a software paint would be redundant.
    if (currentRenderingMode() == MediaRenderingToLayer)
        return;

    // paint() is best effort: draw only if a context renderer (image generator
    // or video output) already exists.
    if (!hasContextRenderer())
        return;

    paintCurrentFrameInContext(context, rect);
}
1507
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext& context, const FloatRect& rect)
{
    INFO_LOG(LOGIDENTIFIER);

    RetainPtr<CGImageRef> frameImage = createImageForTimeInRect(currentTime(), rect);
    if (!frameImage)
        return;

    // CG images draw bottom-up relative to the context; flip vertically first.
    GraphicsContextStateSaver stateSaver(context);
    context.translate(rect.x(), rect.y() + rect.height());
    context.scale(FloatSize(1.0f, -1.0f));
    context.setImageInterpolationQuality(InterpolationLow);
    IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context.platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), frameImage.get());
}
1522
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
{
    // Synchronously generate a frame image at the given time, bounded to the
    // paint rect and copied into the sRGB color space for drawing.
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !RELEASE_LOG_DISABLED
    MonotonicTime start = MonotonicTime::now();
#endif

    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    // Timescale 600 for the requested CMTime.
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), sRGBColorSpaceRef()));

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "creating image took ", (MonotonicTime::now() - start).seconds());
#endif

    return image;
}
1543
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& supportedTypes)
{
    // Supported MIME types come from the shared AVAsset-backed cache.
    supportedTypes = AVAssetMIMETypeCache::singleton().types();
}
1548
1549 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// Returns whether the given EME key system identifier is one this engine can
// service: Apple FairPlay Streaming (either identifier spelling) or Clear Key.
// Simplified from the redundant `if (cond) return true; return false;` form.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringASCIICase(keySystem, "com.apple.fps")
        || equalIgnoringASCIICase(keySystem, "com.apple.fps.1_0")
        || equalIgnoringASCIICase(keySystem, "org.w3c.clearkey");
}
1556 #endif
1557
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
    // This engine handles neither MSE nor MediaStream sources.
#if ENABLE(MEDIA_SOURCE)
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif
#if ENABLE(MEDIA_STREAM)
    if (parameters.isMediaStream)
        return MediaPlayer::IsNotSupported;
#endif

    auto containerType = parameters.type.containerType();
    if (isUnsupportedMIMEType(containerType))
        return MediaPlayer::IsNotSupported;

    if (!staticMIMETypeList().contains(containerType) && !AVAssetMIMETypeCache::singleton().canDecodeType(containerType))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.type.codecs().isEmpty())
        return MediaPlayer::MayBeSupported;

    if (!contentTypeMeetsHardwareDecodeRequirements(parameters.type, parameters.contentTypesRequiringHardwareSupport))
        return MediaPlayer::IsNotSupported;

    // Ask AVFoundation about the full "type; codecs=..." string for the final answer.
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)containerType, (NSString *)parameters.type.parameter(ContentType::codecsParameter())];
    return [PAL::getAVURLAssetClass() isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1587
// Returns whether this engine can service the given key system (optionally
// constrained by MIME type). Fix: the Clear Key branch previously returned the
// enum value MediaPlayer::IsNotSupported from this bool-returning function,
// which only behaved correctly because that enumerator happens to be 0.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (!keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        if (equalIgnoringASCIICase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringASCIICase(mimeType, "application/x-mpegurl"))
            return false;

        if (!keySystemIsSupported(keySystem))
            return false;

        if (!mimeType.isEmpty() && isUnsupportedMIMEType(mimeType))
            return false;

        if (!mimeType.isEmpty() && !staticMIMETypeList().contains(mimeType) && !AVAssetMIMETypeCache::singleton().canDecodeType(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1613
1614 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1615 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// Completes an AVAssetResourceLoadingRequest using already-available key data:
// fills in the content-information request (if any) and serves the byte range
// the data request asked for, clamped to the key's length.
static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
{
    if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
        [infoRequest setContentLength:keyData->byteLength()];
        [infoRequest setByteRangeAccessSupported:YES];
    }

    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
        long long start = [dataRequest currentOffset];
        long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);

        // A range that is negative or starts beyond the available data cannot
        // be satisfied; finish the request without an explicit error object.
        if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
            [request finishLoadingWithError:nil];
            return;
        }

        // ArrayBuffer::slice() takes int offsets, so the 64-bit range must fit.
        ASSERT(start <= std::numeric_limits<int>::max());
        ASSERT(end <= std::numeric_limits<int>::max());
        auto requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
        RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
        [dataRequest respondWithData:nsData.get()];
    }

    [request finishLoading];
}
1641 #endif
1642
// AVAssetResourceLoader delegate hook: decides whether WebKit will service the
// given loading request. Key requests ("skd" / "clearkey" schemes) are routed
// through the encrypted-media machinery; everything else is handed to a
// WebCoreAVFResourceLoader. Returns true when the request will be handled
// asynchronously, false to let it fail.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
    if (scheme == "skd") {
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        auto initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        unsigned byteLength = initDataBuffer->byteLength();
        auto initDataView = JSC::DataView::create(initDataBuffer.copyRef(), 0, byteLength);
        initDataView->set<uint32_t>(0, keyURISize, true);

        auto keyURIArray = Uint16Array::create(initDataBuffer.copyRef(), 4, keyURI.length());
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

        auto initData = Uint8Array::create(WTFMove(initDataBuffer), 0, byteLength);
        // Give the page a chance to provide the key; if it declines, fail.
        if (!player()->keyNeeded(initData.ptr()))
            return false;
#endif

#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
        // With a modern CDM instance attached, AVContentKeySession handles key
        // delivery; just mark the request as a key request and finish it.
        if (m_cdmInstance) {
            avRequest.contentInformationRequest.contentType = AVStreamingKeyDeliveryContentKeyType;
            [avRequest finishLoading];
            return true;
        }

        RetainPtr<NSData> keyURIData = [keyURI dataUsingEncoding:NSUTF8StringEncoding allowLossyConversion:YES];
        m_keyID = SharedBuffer::create(keyURIData.get());
        player()->initializationDataEncountered("skd"_s, m_keyID->tryCreateArrayBuffer());
        setWaitingForKey(true);
#endif
        // Park the request until a key for this URI arrives (see keyAdded()).
        m_keyURIToRequestMap.set(keyURI, avRequest);

        return true;
    }

#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (scheme == "clearkey") {
        String keyID = [[[avRequest request] URL] resourceSpecifier];
        auto encodedKeyId = UTF8Encoding().encode(keyID, UnencodableHandling::URLEncodedEntities);

        auto initData = Uint8Array::create(encodedKeyId.size());
        initData->setRange(encodedKeyId.data(), encodedKeyId.size(), 0);

        // If the key is already cached, answer the request immediately.
        auto keyData = player()->cachedKeyForKeyId(keyID);
        if (keyData) {
            fulfillRequestWithKeyData(avRequest, keyData.get());
            return false;
        }

        if (!player()->keyNeeded(initData.ptr()))
            return false;

        m_keyURIToRequestMap.set(keyID, avRequest);
        return true;
    }
#endif
#endif

    // Not a key request: spin up a resource loader, keyed by the request
    // object itself, and start fetching.
    auto resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add((__bridge CFTypeRef)avRequest, resourceLoader.copyRef());
    resourceLoader->startLoading();
    return true;
}
1712
// AVAssetResourceLoader delegate hook: stops the loader servicing a cancelled
// request. The loader stays in the map until didStopLoadingRequest() removes
// it. (Fixed: removed an unused local that computed the request URL's scheme.)
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    if (auto* resourceLoader = m_resourceLoaderMap.get((__bridge CFTypeRef)avRequest))
        resourceLoader->stopLoading();
}
1722
// AVAssetResourceLoader delegate hook: drops the resource loader servicing
// this request; releasing the map entry releases the loader itself.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *request)
{
    m_resourceLoaderMap.remove((__bridge CFTypeRef)request);
}
1727 #endif
1728
// This engine can only run when both AVFoundation and CoreMedia are present.
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    if (!PAL::isAVFoundationFrameworkAvailable())
        return false;
    return isCoreMediaFrameworkAvailable();
}
1733
// Intended to map a time value onto the media timeline; no such mapping is
// currently possible, so the value is returned unchanged on both paths.
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1742
// Never serve cached media times; always re-query the current time.
double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
{
    return 0;
}
1747
// Applies the appropriate AVLayerVideoGravity to the video layer, without
// implicit animations.
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayerManager->videoFullscreenLayer())
        return;

    NSString *gravity = AVLayerVideoGravityResize;
    if (shouldMaintainAspectRatio())
        gravity = AVLayerVideoGravityResizeAspect;

    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    [m_videoLayer.get() setVideoGravity:gravity];
    [CATransaction commit];
}
1764
// Returns the first track in the array whose "enabled" flag is set, or nil
// when no track is enabled.
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    for (AVAssetTrack *track in tracks) {
        if ([track isEnabled])
            return track;
    }
    return nil;
}
1774
// Recomputes cached media characteristics (hasVideo/hasAudio/captions,
// presentation size, track lists) whenever the asset's or player item's track
// collection changes.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the old primary audio language so a change can be detected at
    // the end and reported via characteristicsChanged().
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    // Batch the notifications triggered below into a single change event.
    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
        auto size = firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize();
        // For videos with rotation tag set, the transformation above might return a CGSize instance with negative width or height.
        // See https://bugs.webkit.org/show_bug.cgi?id=172648.
        if (size.width() < 0)
            size.setWidth(-size.width());
        if (size.height() < 0)
            size.setHeight(-size.height());
        presentationSizeDidChange(size);
    } else {
        // With a player item, classify each enabled AVPlayerItemTrack by its
        // media type.
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

        updateAudioTracks();
        updateVideoTracks();

        // Media-selection groups can contribute tracks beyond the enabled
        // AVPlayerItemTracks inspected above.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());

        // Always says we have video if the AVPlayerLayer is ready for display to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

    AVMediaSelectionGroup *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[PAL::getAVMediaSelectionGroupClass() playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }

    setHasClosedCaptions(hasCaptions);

    INFO_LOG(LOGIDENTIFIER, "has video = ", hasVideo(), ", has audio = ", hasAudio(), ", has captions = ", hasClosedCaptions());

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio source provider pointed at the current audible track.
    if (m_provider)
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
#endif

    setDelayCharacteristicsChangedNotification(false);
}
1865
1866 #if ENABLE(VIDEO_TRACK)
1867
// Diffs the current AVPlayerItemTracks of a given media type against the
// previously known track items, updating oldItems in place and notifying the
// MediaPlayer of each removal and addition.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Current tracks of the requested media type.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    // Tracks backing the previously known items.
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    // Partition the old items into survivors and removals, then create fresh
    // items for the added tracks.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the player only after oldItems already reflects the new state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(*removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(*addedItem);
}
1911
// Diffs the options of a media-selection group against the previously known
// items, updating oldItems in place and notifying the MediaPlayer of each
// removal and addition. Used for selection-group-backed (audible/visual)
// tracks.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, const Vector<String>& characteristics, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions(characteristics);

    // Current options that are backed by a live AVMediaSelectionOption.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOption* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        newSelectionOptions.add(option);
    }

    // Options backing the previously known items.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    // Partition old items into survivors and removals; an item whose selection
    // option has gone away entirely is also treated as removed.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (!oldItem->mediaSelectionOption())
            removedItems.append(oldItem);
        else if (removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the player only after oldItems already reflects the new state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(*removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(*addedItem);
}
1971
// Rebuilds the audio track list, preferring the audible media-selection group
// when one is available and otherwise falling back to the cached
// AVPlayerItemTracks. Notifies the MediaPlayer of additions and removals.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !RELEASE_LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

    Vector<String> characteristics = player()->preferredAudioCharacteristics();
    // Lazily create the audible selection-group wrapper.
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroup *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, characteristics);
    }

    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, characteristics, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
    else
        determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

    // Refresh per-track properties (label, language, enabled state, ...).
    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "track count was ", count, ", is ", m_audioTracks.size());
#endif
}
1996
// Rebuilds the video track list from the cached AVPlayerItemTracks and, when
// available, from the visual media-selection group. Notifies the MediaPlayer
// of additions and removals.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !RELEASE_LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

    // Lazily create the visual selection-group wrapper.
    if (!m_visualGroup) {
        if (AVMediaSelectionGroup *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, Vector<String>());
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, Vector<String>(), &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

    // Refresh per-track properties of the *video* tracks. (Fixed: this loop
    // previously iterated m_audioTracks, a copy/paste slip from
    // updateAudioTracks().)
    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "track count was ", count, ", is ", m_videoTracks.size());
#endif
}
2020
// Delegates to the fullscreen layer manager: text tracks need a separate
// representation when video is rendered into a fullscreen layer.
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
    return m_videoFullscreenLayerManager->requiresTextTrackRepresentation();
}
2025
// Delegates to the fullscreen layer manager to keep the text track layer's
// bounds in sync with the video layer.
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
    m_videoFullscreenLayerManager->syncTextTrackBounds();
}
2030
// Delegates installation of the text track representation layer to the
// fullscreen layer manager.
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
    m_videoFullscreenLayerManager->setTextTrackRepresentation(representation);
}
2035
2036 #endif // ENABLE(VIDEO_TRACK)
2037
2038 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
2039
// Lazily creates the Web Audio source provider for the current player item,
// wiring it to the first enabled audible track of the asset.
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    if (m_provider)
        return m_provider.get();

    m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
    m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    return m_provider.get();
}
2048
2049 #endif
2050
// Pushes the cached presentation size as the natural size; without an asset
// there is nothing meaningful to report.
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (m_avAsset)
        setNaturalSize(m_cachedPresentationSize);
}
2058
// Propagates the asset's resolved URL, falling back to an empty URL when no
// asset exists.
void MediaPlayerPrivateAVFoundationObjC::resolvedURLChanged()
{
    if (m_avAsset)
        setResolvedURL(URL([m_avAsset resolvedURL]));
    else
        setResolvedURL(URL());
}
2063
// Reports whether every load issued through the asset's WebCore-backed URL
// session passed CORS checks. Conservatively false when the NSURLSession path
// is disabled or unavailable.
bool MediaPlayerPrivateAVFoundationObjC::didPassCORSAccessCheck() const
{
    if (!DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled())
        return false;

    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    if (![resourceLoader respondsToSelector:@selector(URLSession)])
        return false;

    WebCoreNSURLSession *session = (WebCoreNSURLSession *)resourceLoader.URLSession;
    if (![session isKindOfClass:[WebCoreNSURLSession class]])
        return false;

    return session.didPassCORSAccessChecks;
}
2077
// Asks the asset's WebCore-backed URL session whether its loads would taint
// the given origin. Returns false when the session path is unavailable and
// nullopt when the session is not a WebCoreNSURLSession (answer unknown).
Optional<bool> MediaPlayerPrivateAVFoundationObjC::wouldTaintOrigin(const SecurityOrigin& origin) const
{
    if (!DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled())
        return false;

    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    if (![resourceLoader respondsToSelector:@selector(URLSession)])
        return false;

    WebCoreNSURLSession *session = (WebCoreNSURLSession *)resourceLoader.URLSession;
    if (![session isKindOfClass:[WebCoreNSURLSession class]])
        return WTF::nullopt;

    return [session wouldTaintOrigin:origin];
}
2091
2092
2093 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
2094
// Lazily creates the AVPlayerItemVideoOutput used to pull decoded pixel
// buffers, registers the pull delegate on the global pull queue, and attaches
// the output to the current player item. No-op when there is no player item
// or an output already exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    INFO_LOG(LOGIDENTIFIER);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // Pixel-format conversion happens later via the pixel buffer conformer, so
    // no specific format is requested here.
    NSDictionary* attributes = nil;
#else
    NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey, nil];
#endif
    m_videoOutput = adoptNS([PAL::allocAVPlayerItemVideoOutputInstance() initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];
}
2114
// Detaches and releases the AVPlayerItemVideoOutput created by
// createVideoOutput(). Safe to call when no output exists.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];

    INFO_LOG(LOGIDENTIFIER);

    // Clear the RetainPtr with nullptr rather than the literal 0.
    m_videoOutput = nullptr;
}
2127
// Pulls the pixel buffer for the player item's current time from the video
// output into m_lastPixelBuffer. Returns true when a new buffer was fetched
// (which also invalidates the cached m_lastImage), false when nothing new is
// available.
bool MediaPlayerPrivateAVFoundationObjC::updateLastPixelBuffer()
{
    if (!m_avPlayerItem)
        return false;

    // Create the output on demand.
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return false;

    m_lastPixelBuffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
    // The cached image no longer matches the new pixel buffer.
    m_lastImage = nullptr;
    return true;
}
2146
// Returns whether a frame can be produced right now: either a previously
// converted image is cached, or the video output has a fresh pixel buffer for
// the current item time. Creates the video output on demand.
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;

    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    return [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]];
}
2160
// Converts the most recent pixel buffer into m_lastImage. With
// UpdateSynchronously, blocks (up to one second) until the video output has a
// frame if none is available yet.
void MediaPlayerPrivateAVFoundationObjC::updateLastImage(UpdateType type)
{
#if HAVE(CORE_VIDEO)
    if (!m_avPlayerItem)
        return;

    if (type == UpdateType::UpdateSynchronously && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();

    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
    // for the requested time has already been retrieved. In this case, the last valid image (if any)
    // should be displayed.
    if (!updateLastPixelBuffer() && (m_lastImage || !m_lastPixelBuffer))
        return;

    // Lazily create the conformer that converts CVPixelBuffers into images.
    if (!m_pixelBufferConformer) {
#if USE(VIDEOTOOLBOX)
        NSDictionary *attributes = @{ (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#else
        NSDictionary *attributes = nil;
#endif
        m_pixelBufferConformer = std::make_unique<PixelBufferConformerCV>((__bridge CFDictionaryRef)attributes);
    }

#if !RELEASE_LOG_DISABLED
    MonotonicTime start = MonotonicTime::now();
#endif

    m_lastImage = m_pixelBufferConformer->createImageFromPixelBuffer(m_lastPixelBuffer.get());

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "creating buffer took ", (MonotonicTime::now() - start).seconds());
#endif
#endif // HAVE(CORE_VIDEO)
}
2196
// Paints the current video frame into the graphics context, applying the
// track's preferred transform (e.g. rotation metadata). Forces a synchronous
// image update so the painted frame matches the current time.
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext& context, const FloatRect& outputRect)
{
    updateLastImage(UpdateType::UpdateSynchronously);
    if (!m_lastImage)
        return;

    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    INFO_LOG(LOGIDENTIFIER);

    GraphicsContextStateSaver stateSaver(context);
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
    // Map the destination rect back through the inverse transform so the
    // concatenated CTM lands the image in outputRect.
    FloatRect transformedOutputRect = videoTransform.inverse().valueOr(AffineTransform()).mapRect(outputRect);

    context.concatCTM(videoTransform);
    context.drawNativeImage(m_lastImage.get(), imageRect.size(), transformedOutputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();

}
2223
// Uploads the current video frame into the caller-provided GL texture.
// Returns false when no pixel buffer is available. The texture copier is
// created lazily and reused across calls.
bool MediaPlayerPrivateAVFoundationObjC::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    ASSERT(context);

    updateLastPixelBuffer();
    if (!m_lastPixelBuffer)
        return false;

    size_t width = CVPixelBufferGetWidth(m_lastPixelBuffer.get());
    size_t height = CVPixelBufferGetHeight(m_lastPixelBuffer.get());

    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierCV>(*context);

    return m_videoTextureCopier->copyImageToPlatformTexture(m_lastPixelBuffer.get(), width, height, outputTexture, outputTarget, level, internalFormat, format, type, premultiplyAlpha, flipY);
}
2240
// Returns the current frame as a native image, refreshing the cached image
// first (asynchronous update; may return the previous frame).
NativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage;
}
2246
// Asks the video output to notify us as soon as media data becomes available,
// then blocks on the semaphore signalled by outputMediaDataWillChange(),
// giving up after one second.
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    if (!m_videoOutputSemaphore.waitFor(1_s))
        ERROR_LOG(LOGIDENTIFIER, "timed out");
}
2256
// Pull-delegate callback (arrives on the global pull queue): wakes any thread
// blocked in waitForVideoOutputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutput *)
{
    m_videoOutputSemaphore.signal();
}
2261
2262 #endif
2263
2264 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
2265
// Removes and returns the parked loading request for the given key URI, or a
// null RetainPtr when none is pending.
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
2270
// Satisfies any pending loading requests for which the player now has cached
// key data, then drops them from the pending map. The fulfilled keys are
// collected first because the map must not be mutated while iterating it.
void MediaPlayerPrivateAVFoundationObjC::keyAdded()
{
    Vector<String> fulfilledKeyIds;

    for (auto& pair : m_keyURIToRequestMap) {
        auto keyData = player()->cachedKeyForKeyId(pair.key);
        if (!keyData)
            continue;

        fulfillRequestWithKeyData(pair.value.get(), keyData.get());
        fulfilledKeyIds.append(pair.key);
    }

    for (auto& keyId : fulfilledKeyIds)
        m_keyURIToRequestMap.remove(keyId);
}
2290
// Legacy EME: clears the weak session pointer when the session is torn down;
// asserts the caller is destroying the session we actually hold.
void MediaPlayerPrivateAVFoundationObjC::removeSession(LegacyCDMSession& session)
{
    ASSERT_UNUSED(session, &session == m_session);
    m_session = nullptr;
}
2296
// Legacy EME: creates a CDM session for a supported key system. The player
// keeps only a weak reference; ownership transfers to the caller.
std::unique_ptr<LegacyCDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem, LegacyCDMSessionClient* client)
{
    if (!keySystemIsSupported(keySystem))
        return nullptr;
    auto session = std::make_unique<CDMSessionAVFoundationObjC>(this, client);
    m_session = makeWeakPtr(*session);
    return WTFMove(session);
}
2305 #endif
2306
2307 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(LEGACY_ENCRYPTED_MEDIA)
// Called when output protection (e.g. HDCP) state changes. Reports the
// obscured state to the legacy session as an error and/or forwards it to the
// modern CDM instance, depending on which EME flavors are compiled in.
void MediaPlayerPrivateAVFoundationObjC::outputObscuredDueToInsufficientExternalProtectionChanged(bool newValue)
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (m_session && newValue)
        m_session->playerDidReceiveError([NSError errorWithDomain:@"com.apple.WebKit" code:'HDCP' userInfo:nil]);
#endif

#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    if (m_cdmInstance)
        m_cdmInstance->outputObscuredDueToInsufficientExternalProtectionChanged(newValue);
#elif !ENABLE(LEGACY_ENCRYPTED_MEDIA)
    UNUSED_PARAM(newValue);
#endif
}
2322 #endif
2323
2324 #if ENABLE(ENCRYPTED_MEDIA)
// Modern EME: attaches a CDM instance. Only FairPlay Streaming instances are
// supported; any previously attached instance is detached first.
void MediaPlayerPrivateAVFoundationObjC::cdmInstanceAttached(CDMInstance& instance)
{
#if HAVE(AVCONTENTKEYSESSION)
    if (!is<CDMInstanceFairPlayStreamingAVFObjC>(instance))
        return;

    auto& fpsInstance = downcast<CDMInstanceFairPlayStreamingAVFObjC>(instance);
    // Re-attaching the already-attached instance is a no-op.
    if (&fpsInstance == m_cdmInstance)
        return;

    if (m_cdmInstance)
        cdmInstanceDetached(*m_cdmInstance);

    m_cdmInstance = &fpsInstance;
#else
    UNUSED_PARAM(instance);
#endif
}
2343
// Modern EME: detaches the current CDM instance; asserts it matches the one
// being detached.
void MediaPlayerPrivateAVFoundationObjC::cdmInstanceDetached(CDMInstance& instance)
{
#if HAVE(AVCONTENTKEYSESSION)
    ASSERT_UNUSED(instance, m_cdmInstance && m_cdmInstance == &instance);
    m_cdmInstance = nullptr;
#else
    UNUSED_PARAM(instance);
#endif
}
2353
// Modern EME: with both a pending key ID and a CDM instance available,
// attaches the matching AVContentKeySession to the asset and finishes any
// parked key requests, clearing the waiting-for-key state.
void MediaPlayerPrivateAVFoundationObjC::attemptToDecryptWithInstance(CDMInstance&)
{
#if HAVE(AVCONTENTKEYSESSION)
    if (!m_keyID || !m_cdmInstance)
        return;

    auto instanceSession = m_cdmInstance->sessionForKeyIDs(Vector<Ref<SharedBuffer>>::from(*m_keyID));
    if (!instanceSession)
        return;

    [instanceSession->contentKeySession() addContentKeyRecipient:m_avAsset.get()];

    // Mark each parked request as a key request and complete it. The map is
    // moved out first so completions cannot mutate it mid-iteration.
    auto keyURIToRequestMap = WTFMove(m_keyURIToRequestMap);
    for (auto& request : keyURIToRequestMap.values()) {
        if (auto *infoRequest = request.get().contentInformationRequest)
            infoRequest.contentType = AVStreamingKeyDeliveryContentKeyType;
        [request finishLoading];
    }
    setWaitingForKey(false);
#endif
}
2375
// Records the waiting-for-key state and notifies the player, but only on an
// actual state transition.
void MediaPlayerPrivateAVFoundationObjC::setWaitingForKey(bool waitingForKey)
{
    if (waitingForKey == m_waitingForKey)
        return;

    m_waitingForKey = waitingForKey;
    player()->waitingForKeyChanged();
}
2384 #endif
2385
// Returns the asset's audible tracks, but only once the asset's "tracks" key
// has finished loading; querying earlier could block or raise.
NSArray* MediaPlayerPrivateAVFoundationObjC::safeAVAssetTracksForAudibleMedia()
{
    if (!m_avAsset || [m_avAsset.get() statusOfValueForKey:@"tracks" error:NULL] != AVKeyValueStatusLoaded)
        return nil;

    return [m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible];
}
2396
// Returns true when the asset's media-selection metadata has finished loading,
// i.e. it is safe to query its AVMediaSelectionGroups.
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    return m_avAsset && [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2407
// Returns the legible (caption/subtitle) selection group, or nil if the
// asset's selection metadata is not yet loaded.
AVMediaSelectionGroup* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible] : nil;
}
2415
// Returns the audible (audio) selection group, or nil if the asset's selection
// metadata is not yet loaded.
AVMediaSelectionGroup* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible] : nil;
}
2423
// Returns the visual (video) selection group, or nil if the asset's selection
// metadata is not yet loaded.
AVMediaSelectionGroup* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual] : nil;
}
2431
// Reconciles m_textTracks with the asset's current legible media-selection
// options: existing tracks that still have a matching option are kept, options
// without a track create one, and tracks whose option disappeared are removed
// via processNewAndRemovedTextTracks().
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroup *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        INFO_LOG(LOGIDENTIFIER, "no mediaSelectionGroup");
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Start by assuming every existing track was removed; matches found below
    // are taken back out of this list.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [PAL::getAVMediaSelectionGroupClass() playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOption *option in legibleOptions) {
        bool newTrack = true;
        // Iterate backwards so remove() does not disturb unvisited indices.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy closed-caption tracks are not backed by selection options.
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            RetainPtr<AVMediaSelectionOption> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            // Option already has a track: keep it (drop from the removal list).
            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2487
// Lazily creates the single in-band metadata text track (for timed metadata
// cues) and reports it to the client. Subsequent calls are no-ops.
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    if (!m_metadataTrack) {
        m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
        m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
        player()->addTextTrack(*m_metadataTrack);
    }
}
2497
// Forwards a batch of cue payloads (attributed strings and native samples) at
// the given media time to the currently selected text track, if there is one.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    ASSERT(time >= MediaTime::zeroTime());

    if (m_currentTextTrack)
        m_currentTextTrack->processCue((__bridge CFArrayRef)attributedStrings, (__bridge CFArrayRef)nativeSamples, time);
}
2507
// Discards any partially delivered cue state on the currently selected text
// track (e.g. after a seek or flush of the sample pipeline).
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    INFO_LOG(LOGIDENTIFIER);

    if (m_currentTextTrack)
        m_currentTextTrack->resetCueValues();
}
2517
// Selects the given text track (or deselects all when track is null), routing
// the choice either through the deprecated closed-caption API (for legacy CC
// tracks) or through the legible media-selection group.
// Fixes: use `nil` rather than `0` for the ObjC selectMediaOption: argument,
// matching the other call sites in this file, and brace the pragma-wrapped
// `if` body so the statement grouping is explicit across the #if boundary.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    INFO_LOG(LOGIDENTIFIER, "selecting track with language ", track ? track->language() : "");

    m_currentTextTrack = track;

    if (track) {
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption) {
            ALLOW_DEPRECATED_DECLARATIONS_BEGIN
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
            ALLOW_DEPRECATED_DECLARATIONS_END
        }
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
    } else {
        // No track selected: clear the legible selection and disable legacy CC display.
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
        ALLOW_DEPRECATED_DECLARATIONS_BEGIN
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
        ALLOW_DEPRECATED_DECLARATIONS_END
    }
}
2546
// Returns the locale identifier of the primary audio track, caching the
// result in the mutable member m_languageOfPrimaryAudioTrack (reset elsewhere
// when tracks change). Returns the empty string when it cannot be determined.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    // Return the cached answer if we already computed one.
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroup *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    ALLOW_DEPRECATED_DECLARATIONS_BEGIN
    AVMediaSelectionOption *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    ALLOW_DEPRECATED_DECLARATIONS_END
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        INFO_LOG(LOGIDENTIFIER, "language of selected audible option ", m_languageOfPrimaryAudioTrack);

        return m_languageOfPrimaryAudioTrack;
    }

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        // Zero or multiple audio tracks: there is no single "primary" language.
        m_languageOfPrimaryAudioTrack = emptyString();
        INFO_LOG(LOGIDENTIFIER, tracks ? [tracks count] : 0, " audio tracks, returning empty");
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

    INFO_LOG(LOGIDENTIFIER, "single audio track has language \"", m_languageOfPrimaryAudioTrack, "\"");

    return m_languageOfPrimaryAudioTrack;
}
2583
2584 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// Reports whether playback is currently routed to a wireless target. On macOS
// this consults the explicit playback target (AVFoundation targets defer to
// the player's externalPlaybackActive flag; mock targets to the active-route
// flag); on iOS-family platforms only the player's flag is available.
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    bool wirelessTarget = false;

#if !PLATFORM(IOS_FAMILY)
    if (m_playbackTarget) {
        if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation)
            wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
        else
            wirelessTarget = m_shouldPlayToPlaybackTarget && m_playbackTarget->hasActiveRoute();
    }
#else
    wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
#endif

    INFO_LOG(LOGIDENTIFIER, wirelessTarget);

    return wirelessTarget;
}
2604
// Maps the AVPlayer's external playback type to the engine-neutral
// MediaPlayer target type. On non-iOS platforms external playback is
// AirPlay by definition (when a player exists).
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

#if PLATFORM(IOS_FAMILY)
    if (!PAL::isAVFoundationFrameworkAvailable())
        return MediaPlayer::TargetTypeNone;

    switch ([m_avPlayer externalPlaybackType]) {
    case AVPlayerExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case AVPlayerExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case AVPlayerExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    // All enum values handled above; reachable only for an unexpected value.
    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;

#else
    return MediaPlayer::TargetTypeAirPlay;
#endif
}
2630     
2631 #if PLATFORM(IOS_FAMILY)
// Returns a human-readable name for the external (AirPlay) device the player
// is routed to, or nil when unavailable. Prefers the shared AVOutputContext;
// falls back to MediaRemote's pickable-route list.
// NOTE(review): "exernal" is a long-standing typo for "external"; renaming
// would require updating the caller, so it is left as-is here.
static NSString *exernalDeviceDisplayNameForPlayer(AVPlayer *player)
{
#if HAVE(CELESTIAL)
    if (!PAL::isAVFoundationFrameworkAvailable())
        return nil;

    // Preferred path: ask the shared audio-presentation output context.
    if ([PAL::getAVOutputContextClass() respondsToSelector:@selector(sharedAudioPresentationOutputContext)]) {
        AVOutputContext *outputContext = [PAL::getAVOutputContextClass() sharedAudioPresentationOutputContext];

        // Older contexts expose only a single device name.
        if (![outputContext respondsToSelector:@selector(supportsMultipleOutputDevices)]
            || ![outputContext supportsMultipleOutputDevices]
            || ![outputContext respondsToSelector:@selector(outputDevices)])
            return [outputContext deviceName];

        // Multiple simultaneous output devices: join their names.
        auto outputDeviceNames = adoptNS([[NSMutableArray alloc] init]);
        for (AVOutputDevice *outputDevice in [outputContext outputDevices]) {
ALLOW_DEPRECATED_DECLARATIONS_BEGIN
            auto outputDeviceName = adoptNS([[outputDevice name] copy]);
ALLOW_DEPRECATED_DECLARATIONS_END
            [outputDeviceNames addObject:outputDeviceName.get()];
        }

        return [outputDeviceNames componentsJoinedByString:@" + "];
    }

    // Fallback path: only meaningful while AirPlay external playback is active.
    if (player.externalPlaybackType != AVPlayerExternalPlaybackTypeAirPlay)
        return nil;

    NSArray *pickableRoutes = CFBridgingRelease(MRMediaRemoteCopyPickableRoutes());
    if (!pickableRoutes.count)
        return nil;

    NSString *displayName = nil;
    for (NSDictionary *pickableRoute in pickableRoutes) {
        if (![pickableRoute[AVController_RouteDescriptionKey_RouteCurrentlyPicked] boolValue])
            continue;

        displayName = pickableRoute[AVController_RouteDescriptionKey_RouteName];

        NSString *routeName = pickableRoute[AVController_RouteDescriptionKey_AVAudioRouteName];
        if (![routeName isEqualToString:@"Speaker"] && ![routeName isEqualToString:@"HDMIOutput"])
            break;

        // The route is a speaker or HDMI out, override the name to be the localized device model.
        NSString *localizedDeviceModel = [[PAL::getUIDeviceClass() currentDevice] localizedModel];

        // In cases where a route with that name already exists, prefix the name with the model.
        BOOL includeLocalizedDeviceModelName = NO;
        for (NSDictionary *otherRoute in pickableRoutes) {
            if (otherRoute == pickableRoute)
                continue;

            if ([otherRoute[AVController_RouteDescriptionKey_RouteName] rangeOfString:displayName].location != NSNotFound) {
                includeLocalizedDeviceModelName = YES;
                break;
            }
        }

        if (includeLocalizedDeviceModelName)
            displayName =  [NSString stringWithFormat:@"%@ %@", localizedDeviceModel, displayName];
        else
            displayName = localizedDeviceModel;

        break;
    }

    return displayName;
#else
    UNUSED_PARAM(player);
    return nil;
#endif
}
2704 #endif
2705
// Returns the display name of the current wireless playback target: the
// explicit target's device name on macOS, or the routed external device name
// on iOS-family platforms. Empty when there is no player.
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();

    String wirelessTargetName;
#if !PLATFORM(IOS_FAMILY)
    if (m_playbackTarget)
        wirelessTargetName = m_playbackTarget->deviceName();
#else
    wirelessTargetName = exernalDeviceDisplayNameForPlayer(m_avPlayer.get());
#endif

    return wirelessTargetName;
}
2721
// Returns whether wireless (external) video playback is disabled. When a
// player exists, refreshes the cached m_allowsWirelessVideoPlayback (mutable,
// hence assignable from this const method) from AVPlayer before answering.
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;

    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    INFO_LOG(LOGIDENTIFIER, !m_allowsWirelessVideoPlayback);

    return !m_allowsWirelessVideoPlayback;
}
2732
// Records whether wireless video playback is disabled and, when a player
// exists, pushes the corresponding allowsExternalPlayback value to it
// (with KVO callbacks deferred around the mutation).
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    INFO_LOG(LOGIDENTIFIER, disabled);
    m_allowsWirelessVideoPlayback = !disabled;

    if (m_avPlayer) {
        setDelayCallbacks(true);
        [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
        setDelayCallbacks(false);
    }
}
2744
2745 #if !PLATFORM(IOS_FAMILY)
2746
// Adopts a new wireless playback target. Caches the target's AVOutputContext
// (only AVFoundation-backed targets have one) and, if the target has no active
// route, stops playing to it.
void MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
{
    m_playbackTarget = WTFMove(target);

    m_outputContext = m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation ? toMediaPlaybackTargetMac(m_playbackTarget.get())->outputContext() : nullptr;

    INFO_LOG(LOGIDENTIFIER);

    if (!m_playbackTarget->hasActiveRoute())
        setShouldPlayToPlaybackTarget(false);
}
2758
// Turns routing to the current playback target on or off. For AVFoundation
// targets this swaps the AVPlayer's outputContext; for mock targets it
// schedules an asynchronous "wireless state changed" notification instead.
void MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(bool shouldPlay)
{
    if (m_shouldPlayToPlaybackTarget == shouldPlay)
        return;

    m_shouldPlayToPlaybackTarget = shouldPlay;

    if (!m_playbackTarget)
        return;

    INFO_LOG(LOGIDENTIFIER, shouldPlay);

    if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation) {
        AVOutputContext *newContext = shouldPlay ? m_outputContext.get() : nil;

        if (!m_avPlayer)
            return;

        // Avoid redundant context churn: bail if the player is already using
        // the requested context (including the both-nil case).
        RetainPtr<AVOutputContext> currentContext = m_avPlayer.get().outputContext;
        if ((!newContext && !currentContext.get()) || [currentContext.get() isEqual:newContext])
            return;

        setDelayCallbacks(true);
        m_avPlayer.get().outputContext = newContext;
        setDelayCallbacks(false);

        return;
    }

    ASSERT(m_playbackTarget->targetType() == MediaPlaybackTarget::Mock);

    // Mock target: notify asynchronously on the main thread; the weak pointer
    // guards against this object being destroyed before the notification runs.
    setDelayCallbacks(true);
    auto weakThis = makeWeakPtr(*this);
    scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis] {
        if (!weakThis)
            return;
        weakThis->playbackTargetIsWirelessDidChange();
    }));
    setDelayCallbacks(false);
}
2799
2800 #endif // !PLATFORM(IOS_FAMILY)
2801
// iOS-family only: keeps AVPlayer's "use external playback while an external
// screen is active" flag in sync with whether the element is in (or standing
// by for) standard fullscreen. No-op elsewhere.
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
#if PLATFORM(IOS_FAMILY)
    if (!m_avPlayer)
        return;

    // Selector check guards against older AVFoundation builds lacking the SPI.
    if ([m_avPlayer respondsToSelector:@selector(setUsesExternalPlaybackWhileExternalScreenIsActive:)])
        [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:(player()->fullscreenMode() == MediaPlayer::VideoFullscreenModeStandard) || player()->isVideoFullscreenStandby()];
#endif
}
2812
2813 #endif
2814
// KVO callback: caches the AVPlayerItem status and re-evaluates engine state.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;
    updateStates();
}
2821
// Notes that a playbackLikelyToKeepUp change is in flight; the matching
// DidChange callback decrements the counter.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    ++m_pendingStatusChanges;
}
2826
// Caches the new likely-to-keep-up value; state re-evaluation is deferred
// until every pending status-change notification has been delivered.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    ASSERT(m_pendingStatusChanges);
    --m_pendingStatusChanges;
    if (!m_pendingStatusChanges)
        updateStates();
}
2835
// Notes that a playbackBufferEmpty change is in flight; the matching
// DidChange callback decrements the counter.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    ++m_pendingStatusChanges;
}
2840
// Caches the new buffer-empty value; state re-evaluation is deferred until
// every pending status-change notification has been delivered.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    ASSERT(m_pendingStatusChanges);
    --m_pendingStatusChanges;
    if (!m_pendingStatusChanges)
        updateStates();
}
2849
// Notes that a playbackBufferFull change is in flight; the matching
// DidChange callback decrements the counter.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    ++m_pendingStatusChanges;
}
2854
// Caches the new buffer-full value; state re-evaluation is deferred until
// every pending status-change notification has been delivered.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    ASSERT(m_pendingStatusChanges);
    --m_pendingStatusChanges;
    if (!m_pendingStatusChanges)
        updateStates();
}
2863
// KVO callback: takes ownership of the updated seekable ranges, then
// propagates the change and re-evaluates engine state.
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray>&& seekableRanges)
{
    m_cachedSeekableRanges = WTFMove(seekableRanges);
    seekableTimeRangesChanged();
    updateStates();
}
2871
// KVO callback: takes ownership of the updated loaded ranges, then propagates
// the change and re-evaluates engine state.
void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray>&& loadedRanges)
{
    m_cachedLoadedRanges = WTFMove(loadedRanges);
    loadedTimeRangesChanged();
    updateStates();
}
2879
// KVO callback for isReadyForDisplay. Becoming ready may reveal a video track
// that has not been reported yet, so re-run track processing in that case.
void MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange(bool isReady)
{
    m_cachedIsReadyForDisplay = isReady;
    if (!hasVideo() && isReady)
        tracksChanged();
    updateStates();
}
2887
// KVO callback: some track's "enabled" flag flipped; recompute track lists
// and re-evaluate engine state.
void MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange(bool)
{
    tracksChanged();
    updateStates();
}
2893
// Applies a buffering policy. Prefers AVPlayer's resource-conservation SPI
// when present (policy values are static_asserted to match the SPI enum);
// otherwise emulates the policy by detaching/reattaching the AVPlayerItem.
void MediaPlayerPrivateAVFoundationObjC::setBufferingPolicy(MediaPlayer::BufferingPolicy policy)
{
    ALWAYS_LOG(LOGIDENTIFIER, policy);

    if (m_bufferingPolicy == policy)
        return;

    m_bufferingPolicy = policy;
    
    if (!m_avPlayer)
        return;

#if HAVE(AVPLAYER_RESOURCE_CONSERVATION_LEVEL)
    // The cast below relies on the two enums having identical values.
    static_assert(static_cast<size_t>(MediaPlayer::BufferingPolicy::Default) == AVPlayerResourceConservationLevelNone, "MediaPlayer::BufferingPolicy::Default is not AVPlayerResourceConservationLevelNone as expected");
    static_assert(static_cast<size_t>(MediaPlayer::BufferingPolicy::LimitReadAhead) == AVPlayerResourceConservationLevelReduceReadAhead, "MediaPlayer::BufferingPolicy::LimitReadAhead is not AVPlayerResourceConservationLevelReduceReadAhead as expected");
    static_assert(static_cast<size_t>(MediaPlayer::BufferingPolicy::MakeResourcesPurgeable) == AVPlayerResourceConservationLevelReuseActivePlayerResources, "MediaPlayer::BufferingPolicy::MakeResourcesPurgeable is not AVPlayerResourceConservationLevelReuseActivePlayerResources as expected");
    static_assert(static_cast<size_t>(MediaPlayer::BufferingPolicy::PurgeResources) == AVPlayerResourceConservationLevelRecycleBuffer, "MediaPlayer::BufferingPolicy::PurgeResources is not AVPlayerResourceConservationLevelRecycleBuffer as expected");

    if ([m_avPlayer respondsToSelector:@selector(setResourceConservationLevelWhilePaused:)]) {
        m_avPlayer.get().resourceConservationLevelWhilePaused = static_cast<AVPlayerResourceConservationLevel>(policy);
        updateStates();
        return;
    }
#endif

    // Fallback: emulate the policy by manipulating the player's current item.
    switch (policy) {
    case MediaPlayer::BufferingPolicy::Default:
        setAVPlayerItem(m_avPlayerItem.get());
        break;
    case MediaPlayer::BufferingPolicy::LimitReadAhead:
    case MediaPlayer::BufferingPolicy::MakeResourcesPurgeable:
        setAVPlayerItem(nil);
        break;
    case MediaPlayer::BufferingPolicy::PurgeResources:
        // Detach then reattach to force AVFoundation to drop its buffers.
        setAVPlayerItem(nil);
        setAVPlayerItem(m_avPlayerItem.get());
        break;
    }

    updateStates();
}
2935
2936 #if ENABLE(DATACUE_VALUE)
2937
// Maps an AVMetadataItem key space to the WebVTT-style metadata cue type
// string used for DataCue values. Returns the empty atom for unknown spaces.
static const AtomicString& metadataType(NSString *avMetadataKeySpace)
{
    static NeverDestroyed<const AtomicString> quickTimeUserData("com.apple.quicktime.udta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> isoUserData("org.mp4ra", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> quickTimeMetadata("com.apple.quicktime.mdta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> iTunesMetadata("com.apple.itunes", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> id3Metadata("org.id3", AtomicString::ConstructFromLiteral);

    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeUserData])
        return quickTimeUserData;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceISOUserData])
        return isoUserData;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeMetadata])
        return quickTimeMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceiTunes])
        return iTunesMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceID3])
        return id3Metadata;

    return emptyAtom();
}
2959
2960 #endif
2961
// Handles a batch of timed metadata from AVFoundation: closes out any
// still-open cues on the metadata track, then adds one data cue per
// AVMetadataItem. NSNull batches act as end-of-metadata markers.
void MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(const RetainPtr<NSArray>& metadata, const MediaTime& mediaTime)
{
    m_currentMetaData = metadata && ![metadata isKindOfClass:[NSNull class]] ? metadata : nil;

    INFO_LOG(LOGIDENTIFIER, "adding ", m_currentMetaData ? [m_currentMetaData.get() count] : 0, " at time ", mediaTime);

#if ENABLE(DATACUE_VALUE)
    // Cue times are unreliable mid-seek; drop this batch.
    if (seeking())
        return;

    if (!m_metadataTrack)
        processMetadataTrack();

    // An empty/NSNull batch just terminates any open cues at the current time.
    if (!metadata || [metadata isKindOfClass:[NSNull class]]) {
        m_metadataTrack->updatePendingCueEndTimes(mediaTime);
        return;
    }

    // Set the duration of all incomplete cues before adding new ones.
    MediaTime earliestStartTime = MediaTime::positiveInfiniteTime();
    for (AVMetadataItem *item in m_currentMetaData.get()) {
        MediaTime start = std::max(PAL::toMediaTime(item.time), MediaTime::zeroTime());
        if (start < earliestStartTime)
            earliestStartTime = start;
    }
    m_metadataTrack->updatePendingCueEndTimes(earliestStartTime);

    for (AVMetadataItem *item in m_currentMetaData.get()) {
        MediaTime start = std::max(PAL::toMediaTime(item.time), MediaTime::zeroTime());
        // Items without a valid duration stay open until a later batch closes them.
        MediaTime end = MediaTime::positiveInfiniteTime();
        if (CMTIME_IS_VALID(item.duration))
            end = start + PAL::toMediaTime(item.duration);

        AtomicString type = nullAtom();
        if (item.keySpace)
            type = metadataType(item.keySpace);

        m_metadataTrack->addDataCue(start, end, SerializedPlatformRepresentationMac::create(item), type);
    }
#endif
}
3003
// KVO callback for the item's tracks array. Rebuilds m_cachedTracks, keeping
// asset-backed tracks plus streaming tracks that are NOT represented by a
// media-selection group (those are surfaced via selection options instead).
// Re-registers the "enabled" observer on the new track set.
void MediaPlayerPrivateAVFoundationObjC::tracksDidChange(const RetainPtr<NSArray>& tracks)
{
    // Unobserve the old track set before replacing it; re-added below for the
    // filtered set so add/remove calls stay balanced.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];

    NSArray *assetTracks = [m_avAsset tracks];

    m_cachedTracks = [tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id obj, NSUInteger, BOOL*) {
        AVAssetTrack* assetTrack = [obj assetTrack];

        if ([assetTracks containsObject:assetTrack])
            return YES;

        // Track is a streaming track. Omit if it belongs to a valid AVMediaSelectionGroup.
        if (!hasLoadedMediaSelectionGroups())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicAudible] && safeMediaSelectionGroupForAudibleMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicVisual] && safeMediaSelectionGroupForVisualMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible] && safeMediaSelectionGroupForLegibleMedia())
            return NO;

        return YES;
    }]];

    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track addObserver:m_objcObserver.get() forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayerItemTrack];

    // Track set changed, so the cached byte-size estimate is stale.
    m_cachedTotalBytes = 0;

    tracksChanged();
    updateStates();
}
3041
// KVO callback: caches whether any audio track is enabled, then recomputes
// track lists and engine state.
void MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange(bool hasEnabledAudio)
{
    m_cachedHasEnabledAudio = hasEnabledAudio;
    tracksChanged();
    updateStates();
}
3049
// KVO callback: caches the item's presentation size, then propagates the size
// change and re-evaluates engine state.
void MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange(FloatSize size)
{
    m_cachedPresentationSize = size;
    sizeChanged();
    updateStates();
}
3057
3058 void MediaPlayerPrivateAVFoundationObjC::durationDidChange(const MediaTime& duration)
3059 {
3060     m_cachedDuration = duration;
3061
3062     invalidateCachedDuration();