[JSC] Int52Rep(DoubleRepAnyIntUse) should not call operation function
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2018 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27 #import "MediaPlayerPrivateAVFoundationObjC.h"
28
29 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
30
31 #import "AVAssetMIMETypeCache.h"
32 #import "AVAssetTrackUtilities.h"
33 #import "AVTrackPrivateAVFObjCImpl.h"
34 #import "AudioSourceProviderAVFObjC.h"
35 #import "AudioTrackPrivateAVFObjC.h"
36 #import "AuthenticationChallenge.h"
37 #import "CDMInstanceFairPlayStreamingAVFObjC.h"
38 #import "CDMSessionAVFoundationObjC.h"
39 #import "Cookie.h"
40 #import "DeprecatedGlobalSettings.h"
41 #import "Extensions3D.h"
42 #import "FloatConversion.h"
43 #import "GraphicsContext.h"
44 #import "GraphicsContext3D.h"
45 #import "GraphicsContextCG.h"
46 #import "ImageRotationSessionVT.h"
47 #import "InbandMetadataTextTrackPrivateAVF.h"
48 #import "InbandTextTrackPrivateAVFObjC.h"
49 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
50 #import "Logging.h"
51 #import "MediaPlaybackTargetMac.h"
52 #import "MediaPlaybackTargetMock.h"
53 #import "MediaSelectionGroupAVFObjC.h"
54 #import "OutOfBandTextTrackPrivateAVF.h"
55 #import "PixelBufferConformerCV.h"
56 #import "PlatformTimeRanges.h"
57 #import "SecurityOrigin.h"
58 #import "SerializedPlatformRepresentationMac.h"
59 #import "SharedBuffer.h"
60 #import "TextEncoding.h"
61 #import "TextTrackRepresentation.h"
62 #import "TextureCacheCV.h"
63 #import "VideoFullscreenLayerManagerObjC.h"
64 #import "VideoTextureCopierCV.h"
65 #import "VideoTrackPrivateAVFObjC.h"
66 #import "WebCoreAVFResourceLoader.h"
67 #import "WebCoreCALayerExtras.h"
68 #import "WebCoreNSURLSession.h"
69 #import <JavaScriptCore/DataView.h>
70 #import <JavaScriptCore/JSCInlines.h>
71 #import <JavaScriptCore/TypedArrayInlines.h>
72 #import <JavaScriptCore/Uint16Array.h>
73 #import <JavaScriptCore/Uint32Array.h>
74 #import <JavaScriptCore/Uint8Array.h>
75 #import <functional>
76 #import <objc/runtime.h>
77 #import <pal/avfoundation/MediaTimeAVFoundation.h>
78 #import <pal/spi/cocoa/QuartzCoreSPI.h>
79 #import <pal/spi/mac/AVFoundationSPI.h>
80 #import <wtf/BlockObjCExceptions.h>
81 #import <wtf/ListHashSet.h>
82 #import <wtf/NeverDestroyed.h>
83 #import <wtf/OSObjectPtr.h>
84 #import <wtf/URL.h>
85 #import <wtf/text/CString.h>
86
87 #if ENABLE(AVF_CAPTIONS)
88 #include "TextTrack.h"
89 #endif
90
91 #import <AVFoundation/AVAssetImageGenerator.h>
92 #import <AVFoundation/AVAssetTrack.h>
93 #import <AVFoundation/AVMediaSelectionGroup.h>
94 #import <AVFoundation/AVMetadataItem.h>
95 #import <AVFoundation/AVPlayer.h>
96 #import <AVFoundation/AVPlayerItem.h>
97 #import <AVFoundation/AVPlayerItemOutput.h>
98 #import <AVFoundation/AVPlayerItemTrack.h>
99 #import <AVFoundation/AVPlayerLayer.h>
100 #import <AVFoundation/AVTime.h>
101
102 #if PLATFORM(IOS_FAMILY)
103 #import "WAKAppKitStubs.h"
104 #import <CoreImage/CoreImage.h>
105 #import <UIKit/UIDevice.h>
106 #import <mach/mach_port.h>
107 #import <pal/ios/UIKitSoftLink.h>
108 #else
109 #import <Foundation/NSGeometry.h>
110 #import <QuartzCore/CoreImage.h>
111 #endif
112
113 #if USE(VIDEOTOOLBOX)
114 #import <CoreVideo/CoreVideo.h>
115 #import <VideoToolbox/VideoToolbox.h>
116 #endif
117
118 #import "CoreVideoSoftLink.h"
119 #import "MediaRemoteSoftLink.h"
120
namespace std {
// WebCore::HashSet iterators do not provide the standard iterator typedefs,
// so std algorithms cannot deduce value_type for them. This specialization
// supplies just enough traits for the uses in this file.
template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
    typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
};
}
126
#if ENABLE(AVF_CAPTIONS)
// Note: This must be defined before our SOFT_LINK macros:
// SPI category declaring the out-of-band identification properties that
// synchronizeTextTrackState() reads from AVMediaSelectionOption below.
@class AVMediaSelectionOption;
@interface AVMediaSelectionOption (OutOfBandExtensions)
@property (nonatomic, readonly) NSString* outOfBandSource;
@property (nonatomic, readonly) NSString* outOfBandIdentifier;
@end
#endif
135
// SPI category exposing the URL AVFoundation actually resolved for the asset.
// NOTE(review): presumably this may differ from the request URL after
// redirects — confirm against the callers outside this chunk.
@interface AVURLAsset (WebKitExtensions)
@property (nonatomic, readonly) NSURL *resolvedURL;
@end
139
140 #import <pal/cf/CoreMediaSoftLink.h>
141 #import <pal/cocoa/AVFoundationSoftLink.h>
142
143 SOFT_LINK_FRAMEWORK(MediaToolbox)
144 SOFT_LINK_OPTIONAL(MediaToolbox, MTEnableCaption2015Behavior, Boolean, (), ())
145
146 #if PLATFORM(IOS_FAMILY)
147
148 #if HAVE(CELESTIAL)
149 SOFT_LINK_PRIVATE_FRAMEWORK(Celestial)
150 SOFT_LINK_CONSTANT(Celestial, AVController_RouteDescriptionKey_RouteCurrentlyPicked, NSString *)
151 SOFT_LINK_CONSTANT(Celestial, AVController_RouteDescriptionKey_RouteName, NSString *)
152 SOFT_LINK_CONSTANT(Celestial, AVController_RouteDescriptionKey_AVAudioRouteName, NSString *)
153 #define AVController_RouteDescriptionKey_RouteCurrentlyPicked getAVController_RouteDescriptionKey_RouteCurrentlyPicked()
154 #define AVController_RouteDescriptionKey_RouteName getAVController_RouteDescriptionKey_RouteName()
155 #define AVController_RouteDescriptionKey_AVAudioRouteName getAVController_RouteDescriptionKey_AVAudioRouteName()
156 #endif // HAVE(CELESTIAL)
157
158 #endif // PLATFORM(IOS_FAMILY)
159
160 using namespace WebCore;
161
// KVO context values used to route observeValueForKeyPath: callbacks to the
// kind of object (player item, item track, player, or player layer) that was
// registered as the observed object.
enum MediaPlayerAVFoundationObservationContext {
    MediaPlayerAVFoundationObservationContextPlayerItem,
    MediaPlayerAVFoundationObservationContextPlayerItemTrack,
    MediaPlayerAVFoundationObservationContextPlayer,
    MediaPlayerAVFoundationObservationContextAVPlayerLayer,
};
168
// Runs the given function synchronously when already on the main thread;
// otherwise queues it for later execution on the main thread.
static void ensureOnMainThread(Function<void()>&& f)
{
    if (!isMainThread()) {
        callOnMainThread(WTFMove(f));
        return;
    }
    f();
}
176
// Objective-C adapter that receives AVFoundation notifications, KVO changes,
// and legible (caption) output on behalf of a MediaPlayerPrivateAVFoundationObjC.
// It holds only a WeakPtr back to the player so callbacks that arrive after
// the player is destroyed are safely dropped.
@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
{
    WeakPtr<MediaPlayerPrivateAVFoundationObjC> m_player;
    GenericTaskQueue<Timer> m_taskQueue;
    int m_delayCallbacks;
}
-(id)initWithPlayer:(WeakPtr<MediaPlayerPrivateAVFoundationObjC>&&)callback;
-(void)disconnect;
-(void)metadataLoaded;
-(void)didEnd:(NSNotification *)notification;
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;
@end
191
192 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// AVAssetResourceLoader delegate that routes media resource-loading requests
// back into WebKit (see the resourceLoader:shouldWaitForLoadingOfRequestedResource:
// handling later in this file). Holds the player weakly to tolerate teardown.
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    WeakPtr<MediaPlayerPrivateAVFoundationObjC> m_player;
    GenericTaskQueue<Timer> m_taskQueue;
}
- (id)initWithPlayer:(WeakPtr<MediaPlayerPrivateAVFoundationObjC>&&)player;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
@end
200 #endif
201
202 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// AVPlayerItemOutput pull delegate: notified when new video pixel buffers
// become available (or the output sequence is flushed). Holds the player
// weakly so late notifications after destruction are ignored.
@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    WeakPtr<MediaPlayerPrivateAVFoundationObjC> m_player;
}
- (id)initWithPlayer:(WeakPtr<MediaPlayerPrivateAVFoundationObjC>&&)player;
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
@end
210 #endif
211
212 namespace WebCore {
// Maps an AVPlayerTimeControlStatus value to a human-readable name for
// logging. The lookup table is indexed by the enum's numeric value, so the
// static_asserts below pin those values; update both together if
// AVFoundation ever adds or renumbers states.
static String convertEnumerationToString(AVPlayerTimeControlStatus enumerationValue)
{
    static const NeverDestroyed<String> values[] = {
        MAKE_STATIC_STRING_IMPL("AVPlayerTimeControlStatusPaused"),
        MAKE_STATIC_STRING_IMPL("AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate"),
        MAKE_STATIC_STRING_IMPL("AVPlayerTimeControlStatusPlaying"),
    };
    static_assert(!static_cast<size_t>(AVPlayerTimeControlStatusPaused), "AVPlayerTimeControlStatusPaused is not 0 as expected");
    static_assert(static_cast<size_t>(AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate) == 1, "AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate is not 1 as expected");
    static_assert(static_cast<size_t>(AVPlayerTimeControlStatusPlaying) == 2, "AVPlayerTimeControlStatusPlaying is not 2 as expected");
    ASSERT(static_cast<size_t>(enumerationValue) < WTF_ARRAY_LENGTH(values));
    return values[static_cast<size_t>(enumerationValue)];
}
226 }
227
namespace WTF {
template<typename Type>
struct LogArgument;

// Lets AVPlayerTimeControlStatus values be passed directly to WebKit logging
// macros by converting them through convertEnumerationToString() above.
template <>
struct LogArgument<AVPlayerTimeControlStatus> {
    static String toString(const AVPlayerTimeControlStatus status)
    {
        return convertEnumerationToString(status);
    }
};
}; // namespace WTF
240
241 namespace WebCore {
242 using namespace PAL;
243
244 static NSArray *assetMetadataKeyNames();
245 static NSArray *itemKVOProperties();
246 static NSArray *assetTrackMetadataKeyNames();
247 static NSArray *playerKVOProperties();
248 static AVAssetTrack* firstEnabledTrack(NSArray* tracks);
249
250 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Lazily-created shared serial queue on which all WebCoreAVFLoaderDelegate
// callbacks are delivered.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_once_t token;
    static dispatch_queue_t queue;
    dispatch_once(&token, ^{
        queue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return queue;
}
260 #endif
261
262 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Lazily-created shared serial queue on which all WebCoreAVFPullDelegate
// callbacks are delivered.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_once_t token;
    static dispatch_queue_t queue;
    dispatch_once(&token, ^{
        queue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return queue;
}
272 #endif
273
// Registers this engine's factory and its static capability callbacks
// (supported types, cache management, key systems) with the MediaPlayer
// engine registry. No-op when AVFoundation is unavailable on this system.
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    registrar([](MediaPlayer* player) { return makeUnique<MediaPlayerPrivateAVFoundationObjC>(player); },
            getSupportedTypes, supportsType, originsInMediaCache, clearMediaCache, clearMediaCacheForOrigins, supportsKeySystem);
    ASSERT(AVAssetMIMETypeCache::singleton().isAvailable());
}
283
// Returns the AVAssetCache rooted at the given path. An empty path selects a
// "MediaCache" directory under the system temporary directory.
static AVAssetCache *assetCacheForPath(const String& path)
{
    NSURL *assetCacheURL = path.isEmpty()
        ? [[NSURL fileURLWithPath:NSTemporaryDirectory()] URLByAppendingPathComponent:@"MediaCache" isDirectory:YES]
        : [NSURL fileURLWithPath:path isDirectory:YES];

    return [PAL::getAVAssetCacheClass() assetCacheWithURL:assetCacheURL];
}
295
// Collects the set of security origins that have entries in the media cache
// at the given path. Cache keys that do not parse as valid URLs are skipped.
HashSet<RefPtr<SecurityOrigin>> MediaPlayerPrivateAVFoundationObjC::originsInMediaCache(const String& path)
{
    HashSet<RefPtr<SecurityOrigin>> origins;
    for (NSString *key in [assetCacheForPath(path) allKeys]) {
        URL cacheKeyURL = URL(URL(), key);
        if (!cacheKeyURL.isValid())
            continue;
        origins.add(SecurityOrigin::create(cacheKeyURL));
    }
    return origins;
}
306
// Converts an NSDate into a WallTime (seconds since the Unix epoch).
static WallTime toSystemClockTime(NSDate *date)
{
    ASSERT(date);
    return WallTime::fromRawSeconds([date timeIntervalSince1970]);
}
312
// Removes media cache data under |path| modified after |modifiedSince|.
// Two phases: first drop matching AVAssetCache entries, then delete the loose
// "CachedMedia-" files found next to the cache. A modifiedSince at or before
// the epoch means "clear everything", which deletes the whole directory.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCache(const String& path, WallTime modifiedSince)
{
    AVAssetCache* assetCache = assetCacheForPath(path);
    
    for (NSString *key in [assetCache allKeys]) {
        if (toSystemClockTime([assetCache lastModifiedDateOfEntryForKey:key]) > modifiedSince)
            [assetCache removeEntryForKey:key];
    }

    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSURL *baseURL = [assetCache URL];

    if (modifiedSince <= WallTime::fromRawSeconds(0)) {
        // Clearing everything: remove the entire cache directory in one call.
        [fileManager removeItemAtURL:baseURL error:nil];
        return;
    }
    
    NSArray *propertyKeys = @[NSURLNameKey, NSURLContentModificationDateKey, NSURLIsRegularFileKey];
    NSDirectoryEnumerator *enumerator = [fileManager enumeratorAtURL:baseURL includingPropertiesForKeys:
        propertyKeys options:NSDirectoryEnumerationSkipsSubdirectoryDescendants
        errorHandler:nil];
    
    // Collect candidates first; deleting while the directory is being
    // enumerated would be unsafe.
    RetainPtr<NSMutableArray> urlsToDelete = adoptNS([[NSMutableArray alloc] init]);
    for (NSURL *fileURL : enumerator) {
        NSDictionary *fileAttributes = [fileURL resourceValuesForKeys:propertyKeys error:nil];
    
        // Only regular files with the media-cache naming prefix are ours.
        if (![fileAttributes[NSURLNameKey] hasPrefix:@"CachedMedia-"])
            continue;
        
        if (![fileAttributes[NSURLIsRegularFileKey] boolValue])
            continue;
        
        if (toSystemClockTime(fileAttributes[NSURLContentModificationDateKey]) <= modifiedSince)
            continue;
        
        [urlsToDelete addObject:fileURL];
    }
    
    for (NSURL *fileURL in urlsToDelete.get())
        [fileManager removeItemAtURL:fileURL error:nil];
}
354
// Removes media cache entries at |path| whose keys parse to an origin
// contained in |origins|; keys that are not valid URLs are left alone.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCacheForOrigins(const String& path, const HashSet<RefPtr<SecurityOrigin>>& origins)
{
    AVAssetCache* assetCache = assetCacheForPath(path);
    for (NSString *key in [assetCache allKeys]) {
        URL keyAsURL = URL(URL(), key);
        if (!keyAsURL.isValid())
            continue;
        if (origins.contains(SecurityOrigin::create(keyAsURL)))
            [assetCache removeEntryForKey:key];
    }
}
366
// Constructs the player with default cached state; the AVPlayer/AVPlayerItem
// themselves are created lazily once a load begins. The Objective-C
// observer/delegate helpers hold only WeakPtrs back to this object so any
// callbacks arriving after destruction are ignored.
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_videoFullscreenLayerManager(makeUnique<VideoFullscreenLayerManagerObjC>())
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithPlayer:makeWeakPtr(*this)]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithPlayer:makeWeakPtr(*this)]))
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithPlayer:makeWeakPtr(*this)]))
#endif
    , m_currentTextTrack(0)
    , m_cachedRate(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_cachedIsReadyForDisplay(false)
    , m_haveBeenAskedToCreateLayer(false)
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
}
396
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
    // Invalidate outstanding WeakPtrs first so no delegate or observer
    // callback can re-enter this object while it is being torn down.
    weakPtrFactory().revokeAll();

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];

    // Abort any in-flight custom resource loads.
    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    [m_videoOutput setDelegate:nil queue:0];
#endif

    if (m_videoLayer)
        destroyVideoLayer();

    // Releases the AVPlayer/AVPlayerItem and all remaining observers.
    cancelLoad();
}
416
// Tears down all AVFoundation objects, notification and KVO registrations,
// and cached state, returning the player to its unloaded state. Observer
// removal must happen before the observed objects are released.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    ALWAYS_LOG(LOGIDENTIFIER);
    tearDownVideoRendering();

    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

    // Detach the legible (caption) output from the item before dropping it.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }

    // Unregister every KVO key path observed on the item before releasing it.
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        
        m_avPlayerItem = nil;
    }
    if (m_avPlayer) {
        // Periodic time observer must be removed from the same player it was
        // added to.
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;

        for (NSString *keyName in playerKVOProperties())
            [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];

        setShouldObserveTimeControlStatus(false);

        [m_avPlayer replaceCurrentItemWithPlayerItem:nil];
#if !PLATFORM(IOS_FAMILY)
        [m_avPlayer setOutputContext:nil];
#endif
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = MediaTime::zeroTime();

    // Each cached track had an "enabled" observer added when cached.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Disconnect the Web Audio source provider from the objects being freed.
    if (m_provider) {
        m_provider->setPlayerItem(nullptr);
        m_provider->setAudioTrack(nullptr);
    }
#endif

    setIgnoreLoadStateChanges(false);
}
485
// True once createVideoLayer() has been asked to set up layer-backed
// rendering (the layer itself may still be created asynchronously).
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_haveBeenAskedToCreateLayer;
}
490
// True when a context-painting path exists: either the pixel-buffer video
// output (when available) or the image-generator fallback.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (m_videoOutput)
        return true;
#endif
    return m_imageGenerator;
}
499
// Creates the context-rendering path, preferring AVPlayerItemVideoOutput
// (pixel buffers) and falling back to AVAssetImageGenerator where the
// video-output API is unavailable.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
508
// Lazily creates the AVAssetImageGenerator used for context painting when no
// video output is available. Exact-time tolerances keep generated frames in
// sync with the reported currentTime.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    using namespace PAL;
    INFO_LOG(LOGIDENTIFIER);

    if (m_imageGenerator || !m_avAsset)
        return;

    m_imageGenerator = [PAL::getAVAssetImageGeneratorClass() assetImageGeneratorWithAsset:m_avAsset.get()];

    [m_imageGenerator setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator setAppliesPreferredTrackTransform:YES];
    [m_imageGenerator setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator setRequestedTimeToleranceAfter:kCMTimeZero];
}
524
// Destroys whichever context-rendering objects exist (video output and/or
// image generator); each destroy function is a no-op when unused.
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
#endif
    destroyImageGenerator();
}
532
// Releases the image generator, if one was created.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (!m_imageGenerator)
        return;

    INFO_LOG(LOGIDENTIFIER);

    // Clear with nil rather than 0, matching how every other Objective-C
    // RetainPtr member is cleared in this file (e.g. m_avAsset = nil).
    m_imageGenerator = nil;
}
542
// Requests creation of the AVPlayerLayer. The work always runs on the main
// thread; the guard is re-checked there (and m_haveBeenAskedToCreateLayer set
// there) so that concurrent/repeated requests are coalesced and requests that
// arrive after teardown are dropped.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
        return;

    callOnMainThread([this, weakThis = makeWeakPtr(*this)] {
        // Bail if the player was destroyed before this task ran.
        if (!weakThis)
            return;

        if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
            return;
        m_haveBeenAskedToCreateLayer = true;

        if (!m_videoLayer)
            createAVPlayerLayer();

#if USE(VIDEOTOOLBOX) && HAVE(AVFOUNDATION_VIDEO_OUTPUT)
        if (!m_videoOutput)
            createVideoOutput();
#endif

        player()->client().mediaPlayerRenderingModeChanged(player());
    });
}
567
// Creates the AVPlayerLayer, attaches it to the AVPlayer, observes its
// readyForDisplay property via KVO, and hands the layer to the fullscreen
// layer manager sized to the current content box.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
{
    if (!m_avPlayer)
        return;

    m_videoLayer = adoptNS([PAL::allocAVPlayerLayerInstance() init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];

#ifndef NDEBUG
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
    // readyForDisplay drives hasAvailableVideoFrame() for the layer path.
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
    IntSize defaultSize = snappedIntRect(player()->client().mediaPlayerContentBoxRect()).size();
    ALWAYS_LOG(LOGIDENTIFIER);

    m_videoFullscreenLayerManager->setVideoLayer(m_videoLayer.get(), defaultSize);

#if PLATFORM(IOS_FAMILY) && !PLATFORM(WATCHOS)
    // Keep the layer's picture-in-picture flag in sync with the player's
    // current fullscreen mode.
    if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(player()->fullscreenMode() & MediaPlayer::VideoFullscreenModePictureInPicture)];
#endif
}
592
// Destroys the AVPlayerLayer: the KVO observer added in createAVPlayerLayer()
// is removed first, then the layer is detached from the player and released.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    ALWAYS_LOG(LOGIDENTIFIER);

    [m_videoLayer removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer setPlayer:nil];
    m_videoFullscreenLayerManager->didDestroyVideoLayer();

    m_videoLayer = nil;
}
606
// Returns the stream's start date (milliseconds since the epoch) as a
// MediaTime, or invalidTime() when the media has no start date.
MediaTime MediaPlayerPrivateAVFoundationObjC::getStartDate() const
{
    // The item's currentDate advances along with playback, so subtracting the
    // current playback offset recovers the date playback began at.
    double dateInMilliseconds = [[m_avPlayerItem currentDate] timeIntervalSince1970] * 1000;

    // AVFoundation reports 0 when the file has no start date; no live stream
    // was made during the 1970 epoch, so 0 is unambiguous here.
    if (!dateInMilliseconds)
        return MediaTime::invalidTime();

    double playbackOffsetInMilliseconds = CMTimeGetSeconds([m_avPlayerItem currentTime]) * 1000;

    // Round away the sub-second error the subtraction can introduce.
    return MediaTime::createWithDouble(round(dateInMilliseconds - playbackOffsetInMilliseconds));
}
621
// Returns whether at least one video frame is available to display via the
// active rendering path (layer readiness, a pulled/pending pixel buffer, or
// a frame already painted through the context path).
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    if (currentRenderingMode() == MediaRenderingToLayer)
        return m_cachedIsReadyForDisplay;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Either a buffer we already pulled, or a new one waiting in the output.
    if (m_videoOutput && (m_lastPixelBuffer || [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]]))
        return true;
#endif

    return m_videoFrameHasDrawn;
}
634
635 #if ENABLE(AVF_CAPTIONS)
// Maps a platform text-track kind to the AVFoundation media-characteristic
// array used when registering out-of-band tracks. Under the 2015 manual
// caption-selection behavior every track is tagged as auxiliary content.
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    static bool manualSelectionMode = MTEnableCaption2015BehaviorPtr() && MTEnableCaption2015BehaviorPtr()();
    if (manualSelectionMode)
        return @[ AVMediaCharacteristicIsAuxiliaryContent ];

    // FIXME: Match these to correct types:
    switch (kind) {
    case PlatformTextTrack::Caption:
    case PlatformTextTrack::Subtitle:
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];
    case PlatformTextTrack::Description:
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility ];
    case PlatformTextTrack::Forced:
        return @[ AVMediaCharacteristicContainsOnlyForcedSubtitles ];
    default:
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];
    }
}
657     
// Forwards an out-of-band caption track mode change to the shared
// trackModeChanged() machinery.
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
662     
// Pushes the mode (showing/hidden/disabled) of each out-of-band platform text
// track source onto the matching AVF out-of-band track private. Tracks are
// matched by the unique identifier embedded in the AVMediaSelectionOption.
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    
    for (auto& textTrack : m_textTracks) {
        // Only out-of-band tracks are driven from platform track sources.
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;
        
        RefPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOption> currentOption = trackPrivate->mediaSelectionOption();
        
        for (auto& track : outOfBandTrackSources) {
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();
            
            if (![[currentOption.get() outOfBandIdentifier] isEqual:(__bridge NSString *)uniqueID.get()])
                continue;
            
            // Default to Hidden for any unrecognized platform mode.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;
            
            textTrack->setMode(mode);
            break;
        }
    }
}
693 #endif
694
695
// Returns the canonical form of |url| as computed by the registered
// NSURLProtocol handlers, falling back to the original URL for empty URLs or
// whenever canonicalization fails.
static NSURL *canonicalURL(const URL& url)
{
    NSURL *originalURL = url;
    if (url.isEmpty())
        return originalURL;

    RetainPtr<NSURLRequest> request = adoptNS([[NSURLRequest alloc] initWithURL:originalURL]);
    if (!request)
        return originalURL;

    NSURLRequest *canonicalRequest = [NSURLProtocol canonicalRequestForRequest:request.get()];
    return canonicalRequest ? [canonicalRequest URL] : originalURL;
}
712
713 #if PLATFORM(IOS_FAMILY)
// Builds an NSHTTPCookie from a WebCore Cookie. cookie.expires is in
// milliseconds since the epoch; NSDate expects seconds.
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    NSMutableDictionary *properties = [NSMutableDictionary dictionaryWithDictionary:@{
        NSHTTPCookieName: cookie.name,
        NSHTTPCookieValue: cookie.value,
        NSHTTPCookieDomain: cookie.domain,
        NSHTTPCookiePath: cookie.path,
        NSHTTPCookieExpires: [NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)],
    }];
    if (cookie.secure)
        properties[NSHTTPCookieSecure] = @YES;
    if (cookie.session)
        properties[NSHTTPCookieDiscard] = @YES;

    return [NSHTTPCookie cookieWithProperties:properties];
}
731 #endif
732
733 void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const URL& url)
734 {
735     if (m_avAsset)
736         return;
737
738     ALWAYS_LOG(LOGIDENTIFIER);
739
740     setDelayCallbacks(true);
741
742     RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);    
743
744     [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];
745
746     RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);
747
748     String referrer = player()->referrer();
749     if (!referrer.isEmpty())
750         [headerFields.get() setObject:referrer forKey:@"Referer"];
751
752     String userAgent = player()->userAgent();
753     if (!userAgent.isEmpty())
754         [headerFields.get() setObject:userAgent forKey:@"User-Agent"];
755
756     if ([headerFields.get() count])
757         [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];
758
759     if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
760         [options.get() setObject:@YES forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];
761
762     if (PAL::canLoad_AVFoundation_AVURLAssetUseClientURLLoadingExclusively())
763         [options setObject:@YES forKey:AVURLAssetUseClientURLLoadingExclusively];
764 #if PLATFORM(IOS_FAMILY)
765     else if (PAL::canLoad_AVFoundation_AVURLAssetRequiresCustomURLLoadingKey())
766         [options setObject:@YES forKey:AVURLAssetRequiresCustomURLLoadingKey];
767     // FIXME: rdar://problem/20354688
768     String identifier = player()->sourceApplicationIdentifier();
769     if (!identifier.isEmpty())
770         [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];
771 #endif
772
773     auto type = player()->contentMIMEType();
774     if (PAL::canLoad_AVFoundation_AVURLAssetOutOfBandMIMETypeKey() && !type.isEmpty() && !player()->contentMIMETypeWasInferredFromExtension()) {
775         auto codecs = player()->contentTypeCodecs();
776         if (!codecs.isEmpty()) {
777             NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)type, (NSString *)codecs];
778             [options setObject:typeString forKey:AVURLAssetOutOfBandMIMETypeKey];
779         } else
780             [options setObject:(NSString *)type forKey:AVURLAssetOutOfBandMIMETypeKey];
781     }
782
783 #if ENABLE(AVF_CAPTIONS)
784     const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
785     if (!outOfBandTrackSources.isEmpty()) {
786         RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
787         for (auto& trackSource : outOfBandTrackSources) {
788             RetainPtr<CFStringRef> label = trackSource->label().createCFString();
789             RetainPtr<CFStringRef> language = trackSource->language().createCFString();
790             RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
791             RetainPtr<CFStringRef> url = trackSource->url().createCFString();
792             [outOfBandTracks.get() addObject:@{
793                 AVOutOfBandAlternateTrackDisplayNameKey: (__bridge NSString *)label.get(),
794                 AVOutOfBandAlternateTrackExtendedLanguageTagKey: (__bridge NSString *)language.get(),
795                 AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
796                 AVOutOfBandAlternateTrackIdentifierKey: (__bridge NSString *)uniqueID.get(),
797                 AVOutOfBandAlternateTrackSourceKey: (__bridge NSString *)url.get(),
798                 AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
799             }];
800         }
801
802         [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
803     }
804 #endif
805
806 #if PLATFORM(IOS_FAMILY)
807     String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
808     if (!networkInterfaceName.isEmpty())
809         [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
810 #endif
811
812 #if PLATFORM(IOS_FAMILY)
813     Vector<Cookie> cookies;
814     if (player()->getRawCookies(url, cookies)) {
815         RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
816         for (auto& cookie : cookies)
817             [nsCookies addObject:toNSHTTPCookie(cookie)];
818
819         if (PAL::canLoad_AVFoundation_AVURLAssetHTTPCookiesKey())
820             [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
821     }
822 #endif
823
824     bool usePersistentCache = player()->client().mediaPlayerShouldUsePersistentCache();
825     [options setObject:@(!usePersistentCache) forKey:AVURLAssetUsesNoPersistentCacheKey];
826     
827     if (usePersistentCache)
828         [options setObject:assetCacheForPath(player()->client().mediaPlayerMediaCacheDirectory()) forKey:AVURLAssetCacheKey];
829
830     NSURL *cocoaURL = canonicalURL(url);
831     m_avAsset = adoptNS([PAL::allocAVURLAssetInstance() initWithURL:cocoaURL options:options.get()]);
832
833 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
834     AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
835     [resourceLoader setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];
836
837     if (DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled()
838         && [resourceLoader respondsToSelector:@selector(setURLSession:)]
839         && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegate)]
840         && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegateQueue)]) {
841         RefPtr<PlatformMediaResourceLoader> mediaResourceLoader = player()->createResourceLoader();
842         if (mediaResourceLoader)
843             resourceLoader.URLSession = (NSURLSession *)[[[WebCoreNSURLSession alloc] initWithResourceLoader:*mediaResourceLoader delegate:resourceLoader.URLSessionDataDelegate delegateQueue:resourceLoader.URLSessionDataDelegateQueue] autorelease];
844     }
845
846 #endif
847
848     m_haveCheckedPlayability = false;
849
850     setDelayCallbacks(false);
851 }
852
// Swap the player's current item for |item|. AVPlayer mutation is performed on
// the main thread; off-main-thread callers are bounced via dispatch_async.
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItem *item)
{
    if (!m_avPlayer)
        return;

    if (!pthread_main_np()) {
        // Keep both objects alive until the asynchronous swap runs.
        RetainPtr<AVPlayer> protectedPlayer = m_avPlayer.get();
        RetainPtr<AVPlayerItem> protectedItem = item;
        dispatch_async(dispatch_get_main_queue(), [protectedPlayer, protectedItem] {
            [protectedPlayer replaceCurrentItemWithPlayerItem:protectedItem.get()];
        });
        return;
    }

    [m_avPlayer replaceCurrentItemWithPlayerItem:item];
}
869
// Lazily creates and configures the AVPlayer, registering KVO observers and
// re-applying any state (playback target, mute) requested before it existed.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    ALWAYS_LOG(LOGIDENTIFIER);

    // Batch the KVO callbacks generated by the configuration below.
    setDelayCallbacks(true);

    m_avPlayer = adoptNS([PAL::allocAVPlayerInstance() init]);
    for (NSString *keyName in playerKVOProperties())
        [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];

    setShouldObserveTimeControlStatus(true);

    // Disable AVFoundation's automatic media selection.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET) && !PLATFORM(IOS_FAMILY)
    if (m_shouldPlayToPlaybackTarget) {
        // Clear m_shouldPlayToPlaybackTarget so setShouldPlayToPlaybackTarget doesn't return without doing anything.
        m_shouldPlayToPlaybackTarget = false;
        setShouldPlayToPlaybackTarget(true);
    }
#endif

#if PLATFORM(IOS_FAMILY) && !PLATFORM(IOS_FAMILY_SIMULATOR) && !PLATFORM(MACCATALYST)
    setShouldDisableSleep(player()->shouldDisableSleep());
#endif

    if (m_muted) {
        // FIX: previously this cleared m_muted and then pushed the now-false
        // value straight to the AVPlayer, dropping a mute requested before the
        // player existed. Mirror the m_shouldPlayToPlaybackTarget pattern above:
        // clear the flag so setMuted() doesn't early-return, then re-apply it.
        m_muted = false;
        setMuted(true);
    }

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
918
// Lazily creates the AVPlayerItem for the current asset, wiring up the
// end-of-playback notification, KVO observers, the legible (caption) output,
// and optional audio/video taps.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    ALWAYS_LOG(LOGIDENTIFIER);

    // Batch the KVO callbacks generated by the configuration below.
    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([PAL::allocAVPlayerItemInstance() initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // Request "prior" notifications too, so the observer also hears
    // will-change callbacks for each observed key.
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

    // The legible output delivers WebVTT cues on the main queue, invoked this
    // many seconds ahead of their presentation time.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([PAL::allocAVPlayerItemLegibleOutputInstance() initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    // WebCore renders cues itself; keep AVFoundation from drawing them.
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Reconnect an existing audio source provider to the new item.
    if (m_provider) {
        m_provider->setPlayerItem(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }
#endif

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#endif

    setDelayCallbacks(false);
}
966
// Kicks off a one-time asynchronous load of the asset's "playable" and
// "tracks" keys; posts AssetPlayabilityKnown on the main thread when done.
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    // Only ask once per asset.
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    INFO_LOG(LOGIDENTIFIER);
    auto weakThis = makeWeakPtr(*this);

    // The completion handler may run off the main thread, so bounce to the
    // main thread (and re-check liveness via weakThis) before touching |this|.
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObjects:@"playable", @"tracks", nil] completionHandler:^{
        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
983
// Asynchronously loads asset-level metadata, then per-track metadata, and
// notifies the observer once everything has finished. A dispatch group holds
// one entry for the asset load plus one entry per track load, so the notify
// block fires only after all of them have completed.
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    INFO_LOG(LOGIDENTIFIER);

    OSObjectPtr<dispatch_group_t> metadataLoadingGroup = adoptOSObject(dispatch_group_create());
    // Entered here; left at the end of the callOnMainThread lambda below.
    dispatch_group_enter(metadataLoadingGroup.get());
    auto weakThis = makeWeakPtr(*this);
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            // Once the track list is known, start a metadata load per track,
            // each balanced by a leave in its own completion handler.
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup.get());
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup.get());
                    }];
                }
            }
            // Balance the enter taken before the asset-level load started.
            dispatch_group_leave(metadataLoadingGroup.get());
        });
    }];

    dispatch_group_notify(metadataLoadingGroup.get(), dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });
    });
}
1013
// Map the KVO-cached AVPlayerItem state to the engine-level ItemStatus.
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    switch (m_cachedItemStatus) {
    case AVPlayerItemStatusUnknown:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    case AVPlayerItemStatusFailed:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    default:
        break;
    }

    // The item is usable; refine the answer with the cached buffering flags,
    // most-specific first.
    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1032
// The inline video layer is owned by the fullscreen layer manager.
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    auto* inlineLayer = m_videoFullscreenLayerManager->videoInlineLayer();
    return inlineLayer;
}
1037
// Refreshes the still image shown inline while the video plays fullscreen.
// Requires the video-output path; without it this is a no-op.
void MediaPlayerPrivateAVFoundationObjC::updateVideoFullscreenInlineImage()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Grab the current frame synchronously so the inline image is up to date.
    updateLastImage(UpdateType::UpdateSynchronously);
    m_videoFullscreenLayerManager->updateVideoFullscreenInlineImage(m_lastImage);
#endif
}
1045
// Hands the fullscreen layer to the layer manager, seeding it with the most
// recent frame when the video-output path is available (nil otherwise).
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer, Function<void()>&& completionHandler)
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Capture the current frame synchronously so the layer has content to show
    // during the transition.
    updateLastImage(UpdateType::UpdateSynchronously);
    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler), m_lastImage);
#else
    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler), nil);
#endif
    // Entering/leaving fullscreen can change external-playback eligibility.
    updateDisableExternalPlayback();
}
1056
// Forwards the fullscreen geometry change to the layer manager.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
}
1061
// Translate the engine gravity to the AVLayer gravity constant and apply it
// to the video layer, keeping the text-track bounds in sync.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;

    if (!m_videoLayer)
        return;

    NSString *videoGravity;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        videoGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        videoGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    }

    // Pointer comparison is intentional: both sides are the same AVFoundation
    // string constants.
    if ([m_videoLayer videoGravity] == videoGravity)
        return;

    [m_videoLayer setVideoGravity:videoGravity];
    syncTextTrackBounds();
}
1085
// Propagates picture-in-picture state to the video layer on iOS-family
// platforms (excluding watchOS); a no-op elsewhere.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenMode(MediaPlayer::VideoFullscreenMode mode)
{
#if PLATFORM(IOS_FAMILY) && !PLATFORM(WATCHOS)
    // setPIPModeEnabled: may not exist on older layer implementations.
    if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(mode & MediaPlayer::VideoFullscreenModePictureInPicture)];
    updateDisableExternalPlayback();
#else
    UNUSED_PARAM(mode);
#endif
}
1096     
// Fullscreen standby affects external-playback eligibility on iOS-family
// platforms (excluding watchOS); elsewhere this is a no-op.
void MediaPlayerPrivateAVFoundationObjC::videoFullscreenStandbyChanged()
{
#if PLATFORM(IOS_FAMILY) && !PLATFORM(WATCHOS)
    updateDisableExternalPlayback();
#endif
}
1103
1104 #if PLATFORM(IOS_FAMILY)
// Returns the most recently cached timed metadata, or nil when none exists.
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    return m_currentMetaData ? m_currentMetaData.get() : nil;
}
1111
// Returns the player item's access log rendered as a string, or the empty
// string when no item exists yet.
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemAccessLog *itemLog = [m_avPlayerItem.get() accessLog];
    auto logText = adoptNS([[NSString alloc] initWithData:[itemLog extendedLogData] encoding:[itemLog extendedLogDataStringEncoding]]);
    return logText.get();
}
1122
// Returns the player item's error log rendered as a string, or the empty
// string when no item exists yet.
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *itemLog = [m_avPlayerItem.get() errorLog];
    auto logText = adoptNS([[NSString alloc] initWithData:[itemLog extendedLogData] encoding:[itemLog extendedLogDataStringEncoding]]);
    return logText.get();
}
1133 #endif
1134
// Playback reached the end: clear the requested-playing flag before letting
// the base class process the end-of-playback state.
void MediaPlayerPrivateAVFoundationObjC::didEnd()
{
    m_requestedPlaying = false;
    MediaPlayerPrivateAVFoundation::didEnd();
}
1140
// Shows or hides the video layer inside a CATransaction with implicit
// animations disabled, so the change takes effect without a fade.
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    if (m_videoLayer)
        [m_videoLayer.get() setHidden:!isVisible];
    [CATransaction commit];
}
1149     
// Starts playback by recording the intent to play and pushing the most
// recently requested rate to the player.
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    if (!metaDataAvailable())
        return;

    ALWAYS_LOG(LOGIDENTIFIER);

    m_requestedPlaying = true;
    setPlayerRate(m_requestedRate);
}
1160
// Pauses playback by clearing the intent to play and setting the rate to 0.
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    if (!metaDataAvailable())
        return;

    ALWAYS_LOG(LOGIDENTIFIER);

    m_requestedPlaying = false;
    setPlayerRate(0);
}
1171
// Paused is derived from the KVO-cached time control status, not a live query.
bool MediaPlayerPrivateAVFoundationObjC::platformPaused() const
{
    auto controlStatus = m_cachedTimeControlStatus;
    return controlStatus == AVPlayerTimeControlStatusPaused;
}
1176
// Reports the media duration, preferring the player item's value once it is
// ready to play (some assets never report a duration of their own).
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will
    // fetch the answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();

    CMTime cmDuration = (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        ? [m_avPlayerItem.get() duration]
        : [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return PAL::toMediaTime(cmDuration);

    // Indefinite duration (e.g. a live stream) maps to +infinity.
    if (CMTIME_IS_INDEFINITE(cmDuration))
        return MediaTime::positiveInfiniteTime();

    INFO_LOG(LOGIDENTIFIER, "returning invalid time");
    return MediaTime::invalidTime();
}
1201
// Current playback position, clamped so a negative or non-numeric platform
// time is never surfaced to callers.
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime itemTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(itemTime))
        return MediaTime::zeroTime();

    return std::max(PAL::toMediaTime(itemTime), MediaTime::zeroTime());
}
1213
// Seeks the player item to |time| within the given tolerances, suspending
// timeControlStatus observation for the duration of the seek and reporting
// completion back on the main thread.
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // Drop any partially accumulated metadata cues before moving the playhead.
    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = PAL::toCMTime(time);
    CMTime cmBefore = PAL::toCMTime(negativeTolerance);
    CMTime cmAfter = PAL::toCMTime(positiveTolerance);

    // [AVPlayerItem seekToTime] will throw an exception if toleranceBefore is negative.
    if (CMTimeCompare(cmBefore, kCMTimeZero) < 0)
        cmBefore = kCMTimeZero;

    auto weakThis = makeWeakPtr(*this);

    // Observation is restored in the completion handler, on the main thread,
    // only if |this| is still alive.
    setShouldObserveTimeControlStatus(false);
    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->setShouldObserveTimeControlStatus(true);
            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1246
// Applies the requested volume to the AVPlayer. Intentionally a no-op on
// iOS-family platforms (presumably volume is system-controlled there —
// NOTE(review): confirm against MediaPlayer callers).
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS_FAMILY)
    UNUSED_PARAM(volume);
    return;
#else

    if (!m_avPlayer)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, volume);

    [m_avPlayer.get() setVolume:volume];
#endif
}
1262
// Records the requested mute state and applies it to the player if one
// exists; createAVPlayer() consults m_muted when the player is created later.
void MediaPlayerPrivateAVFoundationObjC::setMuted(bool muted)
{
    if (muted == m_muted)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, muted);

    m_muted = muted;

    if (m_avPlayer)
        [m_avPlayer.get() setMuted:m_muted];
}
1277
// Apart from logging, this override is deliberately a no-op — presumably
// caption visibility is handled through track selection elsewhere
// (NOTE(review): confirm with callers).
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    ALWAYS_LOG(LOGIDENTIFIER, closedCaptionsVisible);
}
1287
// Remembers the requested rate; it only reaches the player while playback
// has been requested (platformPlay pushes it otherwise).
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    m_requestedRate = rate;
    if (!m_requestedPlaying)
        return;
    setPlayerRate(rate);
}
1294
// Pushes |rate| to the AVPlayer with timeControlStatus observation suspended,
// snapshotting the resulting status manually so the change does not echo back
// through KVO.
void MediaPlayerPrivateAVFoundationObjC::setPlayerRate(double rate)
{
    setDelayCallbacks(true);
    m_cachedRate = rate;
    setShouldObserveTimeControlStatus(false);
    [m_avPlayer setRate:rate];
    m_cachedTimeControlStatus = [m_avPlayer timeControlStatus];
    setShouldObserveTimeControlStatus(true);
    setDelayCallbacks(false);
}
1305
// The cached rate is only meaningful once metadata is available.
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    return metaDataAvailable() ? m_cachedRate : 0;
}
1313
// Last modification time of the seekable ranges; only available on macOS and
// iOS 11+ SDKs, 0 elsewhere.
double MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesLastModifiedTime() const
{
#if PLATFORM(MAC) || (PLATFORM(IOS_FAMILY) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000)
    return [m_avPlayerItem seekableTimeRangesLastModifiedTime];
#else
    return 0;
#endif
}
1322
// Live-stream update interval reported by the item; only available on macOS
// and iOS 11+ SDKs, 0 elsewhere.
double MediaPlayerPrivateAVFoundationObjC::liveUpdateInterval() const
{
#if PLATFORM(MAC) || (PLATFORM(IOS_FAMILY) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000)
    return [m_avPlayerItem liveUpdateInterval];
#else
    return 0;
#endif
}
1331
// Select the time-pitch algorithm matching the preserves-pitch setting.
void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
{
    if (!m_avPlayerItem)
        return;

    NSString *algorithm = preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed;
    [m_avPlayerItem setAudioTimePitchAlgorithm:algorithm];
}
1337
// Converts the KVO-cached loaded CMTimeRanges into PlatformTimeRanges,
// skipping invalid or empty entries.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto ranges = makeUnique<PlatformTimeRanges>();

    if (!m_avPlayerItem)
        return ranges;

    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;
        ranges->add(PAL::toMediaTime(range.start), PAL::toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return ranges;
}
1352
// Earliest seekable time across the cached seekable ranges; zero when no
// valid range exists.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    bool foundRange = false;
    MediaTime earliest = MediaTime::positiveInfiniteTime();
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundRange = true;
        earliest = std::min(earliest, PAL::toMediaTime(range.start));
    }
    return foundRange ? earliest : MediaTime::zeroTime();
}
1372
// Latest seekable time across the seekable ranges, refreshing the cache from
// the player item when it is empty.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    MediaTime latest;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latest = std::max(latest, PAL::toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return latest;
}
1390
// Latest buffered time across the cached loaded ranges; zero when nothing is
// cached yet.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    MediaTime latest;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latest = std::max(latest, PAL::toMediaTime(CMTimeRangeGetEnd(range)));
    }

    return latest;
}
1409
// Total sample-data size across all tracks, computed once and then served
// from the mutable cache member.
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    if (!m_cachedTotalBytes) {
        for (AVPlayerItemTrack *itemTrack in m_cachedTracks.get())
            m_cachedTotalBytes += [[itemTrack assetTrack] totalSampleDataLength];
    }

    return m_cachedTotalBytes;
}
1423
// Takes ownership of the new asset, releasing any previously held one.
void MediaPlayerPrivateAVFoundationObjC::setAsset(RetainPtr<id>&& asset)
{
    m_avAsset = WTFMove(asset);
}
1428
// Aggregates the load status of every metadata key (and, once loaded, a
// hardware-decode check of each track) into a single engine-level status.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    // The overall status is the worst status among the individual keys.
    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];

        if (error)
            ERROR_LOG(LOGIDENTIFIER, "failed for ", [keyName UTF8String], ", error = ", [[error localizedDescription] UTF8String]);

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading; // At least one key is not loaded yet.

        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    // Skip the per-track hardware check entirely when the client doesn't
    // require it.
    if (!player()->shouldCheckHardwareSupport())
        m_tracksArePlayable = true;

    // Evaluate (once) whether every track meets the hardware decode
    // requirements; any failing track makes the whole asset unplayable.
    if (!m_tracksArePlayable) {
        m_tracksArePlayable = true;
        for (AVAssetTrack *track in [m_avAsset tracks]) {
            if (!assetTrackMeetsHardwareDecodeRequirements(track, player()->mediaContentTypesRequiringHardwareSupport())) {
                m_tracksArePlayable = false;
                break;
            }
        }
    }

    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue] && m_tracksArePlayable.value())
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1469
// Error code reported while loading the asset's "playable" key; 0 when there
// is no asset or no error.
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    if (!m_avAsset)
        return 0;

    NSError *loadError = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&loadError];
    return loadError ? [loadError code] : 0;
}
1479
// Paints the current video frame into |context|, preferring the video-output
// path when it has a frame available and falling back to the image generator.
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    // Remember that at least one frame has been rendered.
    m_videoFrameHasDrawn = true;
}
1500
// Best-effort paint entry point: skips when metadata is missing, painting is
// disabled, we are already rendering to a layer, or no renderer exists yet.
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    // Ignore the request when compositing to a layer (nothing to paint here),
    // or when neither an image generator nor a video output exists yet.
    if (currentRenderingMode() == MediaRenderingToLayer || !hasContextRenderer())
        return;

    paintCurrentFrameInContext(context, rect);
}
1516
// Renders a generated snapshot of the current time into the context, flipping
// vertically to match CoreGraphics' image orientation.
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext& context, const FloatRect& rect)
{
    INFO_LOG(LOGIDENTIFIER);

    RetainPtr<CGImageRef> snapshot = createImageForTimeInRect(currentTime(), rect);
    if (!snapshot)
        return;

    GraphicsContextStateSaver stateSaver(context);
    // Flip the coordinate system so the CGImage draws right side up.
    context.translate(rect.x(), rect.y() + rect.height());
    context.scale(FloatSize(1.0f, -1.0f));
    context.setImageInterpolationQuality(InterpolationLow);
    IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context.platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), snapshot.get());
}
1531
// Synchronously generates a CGImage of the frame at |time|, constrained to
// |rect|'s size, converted to the sRGB color space.
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !RELEASE_LOG_DISABLED
    MonotonicTime start = MonotonicTime::now();
#endif

    // The 600 timescale matches CMTimeMakeWithSeconds' common media timescale.
    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), sRGBColorSpaceRef()));

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "creating image took ", (MonotonicTime::now() - start).seconds());
#endif

    return image;
}
1552
// Copies the process-wide AVAsset MIME type cache into the caller's set.
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& supportedTypes)
{
    auto& mimeTypeCache = AVAssetMIMETypeCache::singleton();
    supportedTypes = mimeTypeCache.types();
}
1557
1558 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// FairPlay Streaming (both identifiers) and Clear Key are the only key
// systems this engine supports.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringASCIICase(keySystem, "com.apple.fps")
        || equalIgnoringASCIICase(keySystem, "com.apple.fps.1_0")
        || equalIgnoringASCIICase(keySystem, "org.w3c.clearkey");
}
1565 #endif
1566
// Answers whether this engine can play the given content type. MSE and
// MediaStream sources are handled by other engines; otherwise the container
// type is checked against the MIME cache and, when codecs are specified,
// against AVURLAsset's extended-MIME support.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(MEDIA_SOURCE)
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif
#if ENABLE(MEDIA_STREAM)
    if (parameters.isMediaStream)
        return MediaPlayer::IsNotSupported;
#endif

    auto containerType = parameters.type.containerType();
    if (isUnsupportedMIMEType(containerType))
        return MediaPlayer::IsNotSupported;

    if (!staticMIMETypeList().contains(containerType) && !AVAssetMIMETypeCache::singleton().canDecodeType(containerType))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.type.codecs().isEmpty())
        return MediaPlayer::MayBeSupported;

    if (!contentTypeMeetsHardwareDecodeRequirements(parameters.type, parameters.contentTypesRequiringHardwareSupport))
        return MediaPlayer::IsNotSupported;

    // With explicit codecs, ask AVFoundation about the full extended MIME type.
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)containerType, (NSString *)parameters.type.parameter(ContentType::codecsParameter())];
    return [PAL::getAVURLAssetClass() isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1596
// Answers whether the given legacy-EME key system (optionally constrained by
// |mimeType|) is supported. Returns false when legacy EME is disabled or the
// key system is empty.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (!keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        // FIX: this previously returned MediaPlayer::IsNotSupported (an enum
        // value) from a bool function — it only behaved because IsNotSupported
        // happens to convert to false. Return false explicitly.
        if (equalIgnoringASCIICase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringASCIICase(mimeType, "application/x-mpegurl"))
            return false;

        if (!keySystemIsSupported(keySystem))
            return false;

        if (!mimeType.isEmpty() && isUnsupportedMIMEType(mimeType))
            return false;

        // An explicit MIME type must also be one this engine can decode.
        if (!mimeType.isEmpty() && !staticMIMETypeList().contains(mimeType) && !AVAssetMIMETypeCache::singleton().canDecodeType(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1622
1623 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1624 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// Satisfies a pending AVAssetResourceLoadingRequest with the given key bytes,
// honoring the request's current byte-range window, then marks it finished.
static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
{
    // Advertise the full key length and byte-range support before responding.
    if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
        [infoRequest setContentLength:keyData->byteLength()];
        [infoRequest setByteRangeAccessSupported:YES];
    }

    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
        long long start = [dataRequest currentOffset];
        // Clamp the requested window to the actual key length.
        long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);

        // A range entirely outside the key data cannot be satisfied.
        if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
            [request finishLoadingWithError:nil];
            return;
        }

        // ArrayBuffer::slice takes int offsets; the range was validated above.
        ASSERT(start <= std::numeric_limits<int>::max());
        ASSERT(end <= std::numeric_limits<int>::max());
        auto requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
        RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
        [dataRequest respondWithData:nsData.get()];
    }

    [request finishLoading];
}
1650 #endif
1651
// Called by the AVAssetResourceLoader delegate for every resource request the
// media system cannot load itself. Returns true when we take responsibility
// for (eventually) answering the request.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
    // FairPlay Streaming key requests use the "skd" scheme.
    if (scheme == "skd") {
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        auto initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        unsigned byteLength = initDataBuffer->byteLength();
        auto initDataView = JSC::DataView::create(initDataBuffer.copyRef(), 0, byteLength);
        // Little-endian 4-byte length prefix.
        initDataView->set<uint32_t>(0, keyURISize, true);

        auto keyURIArray = Uint16Array::create(initDataBuffer.copyRef(), 4, keyURI.length());
        // NOTE(review): the element count `keyURI.length() / sizeof(unsigned char)` equals
        // keyURI.length() (sizeof(unsigned char) == 1); the divisor looks like a leftover —
        // confirm the intent was simply the UTF-16 code-unit count.
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

        auto initData = Uint8Array::create(WTFMove(initDataBuffer), 0, byteLength);
        // If the page has no key listener, decline the request.
        if (!player()->keyNeeded(initData.ptr()))
            return false;
#endif

#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
        // With a modern CDM instance attached, the AVContentKeySession handles
        // key delivery; just describe the content type and finish.
        if (m_cdmInstance) {
            avRequest.contentInformationRequest.contentType = AVStreamingKeyDeliveryContentKeyType;
            [avRequest finishLoading];
            return true;
        }

        // Otherwise surface the key URI to the page as initialization data and
        // stall playback until a key arrives.
        RetainPtr<NSData> keyURIData = [keyURI dataUsingEncoding:NSUTF8StringEncoding allowLossyConversion:YES];
        m_keyID = SharedBuffer::create(keyURIData.get());
        player()->initializationDataEncountered("skd"_s, m_keyID->tryCreateArrayBuffer());
        setWaitingForKey(true);
#endif
        // Park the request until keyAdded()/attemptToDecryptWithInstance() can answer it.
        m_keyURIToRequestMap.set(keyURI, avRequest);

        return true;
    }

#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    // Legacy Clear Key requests carry the key ID in the resource specifier.
    if (scheme == "clearkey") {
        String keyID = [[[avRequest request] URL] resourceSpecifier];
        auto encodedKeyId = UTF8Encoding().encode(keyID, UnencodableHandling::URLEncodedEntities);

        auto initData = Uint8Array::create(encodedKeyId.size());
        initData->setRange(encodedKeyId.data(), encodedKeyId.size(), 0);

        // Answer immediately when the key is already cached.
        auto keyData = player()->cachedKeyForKeyId(keyID);
        if (keyData) {
            fulfillRequestWithKeyData(avRequest, keyData.get());
            return false;
        }

        if (!player()->keyNeeded(initData.ptr()))
            return false;

        m_keyURIToRequestMap.set(keyID, avRequest);
        return true;
    }
#endif
#endif

    // All other schemes go through the generic media resource loader.
    auto resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add((__bridge CFTypeRef)avRequest, resourceLoader.copyRef());
    resourceLoader->startLoading();
    return true;
}
1721
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    // AVFoundation cancelled this pending resource request; stop any in-flight
    // load we started for it in shouldWaitForLoadingOfResource().
    // (The previous version computed the request's URL scheme into an unused
    // local; that dead code has been removed.)
    if (auto* resourceLoader = m_resourceLoaderMap.get((__bridge CFTypeRef)avRequest))
        resourceLoader->stopLoading();
}
1731
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    // The loader for this request has finished; drop our bookkeeping entry.
    auto requestKey = (__bridge CFTypeRef)avRequest;
    m_resourceLoaderMap.remove(requestKey);
}
1736 #endif
1737
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    // This engine requires both AVFoundation and CoreMedia to be loadable.
    if (!PAL::isAVFoundationFrameworkAvailable())
        return false;
    return isCoreMediaFrameworkAvailable();
}
1742
// Maps a requested time value to the nearest exact media time. Currently an
// identity mapping in all cases (see FIXME below); the metaDataAvailable()
// check is kept for the eventual real implementation.
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1751
double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
{
    // Never serve cached current-time values; always query AVFoundation.
    constexpr double noCaching = 0;
    return noCaching;
}
1756
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayerManager->videoFullscreenLayer())
        return;

    NSString *gravity = AVLayerVideoGravityResize;
    if (shouldMaintainAspectRatio())
        gravity = AVLayerVideoGravityResizeAspect;

    // Apply the new gravity without triggering implicit CA animations.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    [m_videoLayer.get() setVideoGravity:gravity];
    [CATransaction commit];
}
1773
// Returns the first track in the array whose isEnabled flag is set, or nil
// when no track is enabled.
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    for (AVAssetTrack *track in tracks) {
        if ([track isEnabled])
            return track;
    }
    return nil;
}
1783
// Re-derives hasVideo/hasAudio/hasClosedCaptions and the track lists whenever
// AVFoundation's tracks collection changes.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the previous primary audio language so we can detect a change
    // and fire characteristicsChanged() at the bottom of this method.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    // Batch characteristic-change notifications until this method completes.
    setDelayCharacteristicsChangedNotification(true);

    // NOTE(review): haveCCTrack is written below but never read in this
    // configuration — confirm whether it can be removed.
    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
        auto size = firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize();
        // For videos with rotation tag set, the transformation above might return a CGSize instance with negative width or height.
        // See https://bugs.webkit.org/show_bug.cgi?id=172648.
        if (size.width() < 0)
            size.setWidth(-size.width());
        if (size.height() < 0)
            size.setHeight(-size.height());
        presentationSizeDidChange(size);
    } else {
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        // Classify every enabled AVPlayerItemTrack by its media type.
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

        updateAudioTracks();
        updateVideoTracks();

        // Media selection groups can contribute audible/visual options beyond
        // the raw AVPlayerItemTrack list.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());

        // HLS streams will occasionally recreate all their tracks; during seek and after
        // buffering policy changes. "debounce" notifications which result in no enabled
        // audio tracks by also taking AVPlayerItem.hasEnabledAudio into account when determining
        // whether there is any audio present.
        hasAudio |= m_cachedHasEnabledAudio;

        // Always say we have video if the AVPlayerLayer is ready for display to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

    // Caption availability comes from the legible media selection group.
    AVMediaSelectionGroup *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[PAL::getAVMediaSelectionGroupClass() playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }

    setHasClosedCaptions(hasCaptions);

    INFO_LOG(LOGIDENTIFIER, "has video = ", hasVideo(), ", has audio = ", hasAudio(), ", has captions = ", hasClosedCaptions());

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio provider pointed at the current audible track.
    if (m_provider)
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
#endif

    setDelayCharacteristicsChangedNotification(false);
}
1880
void MediaPlayerPrivateAVFoundationObjC::updateRotationSession()
{
    // Combine the asset-level and track-level preferred transforms to decide
    // whether decoded frames must be rotated before display.
    AffineTransform combinedTransform = m_avAsset.get().preferredTransform;
    FloatSize trackNaturalSize;
    if (auto* videoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual])) {
        trackNaturalSize = FloatSize(videoTrack.naturalSize);
        combinedTransform *= videoTrack.preferredTransform;
    }

    // No rotation required: discard any existing session.
    if (combinedTransform.isIdentity()) {
        m_imageRotationSession = nullptr;
        return;
    }

    // Reuse the current session when it already matches this transform and size.
    bool sessionIsCurrent = m_imageRotationSession
        && m_imageRotationSession->transform()
        && m_imageRotationSession->transform().value() == combinedTransform
        && m_imageRotationSession->size() == trackNaturalSize;
    if (sessionIsCurrent)
        return;

    m_imageRotationSession = makeUnique<ImageRotationSessionVT>(WTFMove(combinedTransform), trackNaturalSize, kCVPixelFormatType_32BGRA, ImageRotationSessionVT::IsCGImageCompatible::Yes);
}
1903
1904 #if ENABLE(VIDEO_TRACK)
1905
// Diffs the AVPlayerItemTracks of the given media type in `tracks` against the
// existing private track items in `oldItems`: items whose underlying track
// disappeared are removed (and the player notified), and new tracks get fresh
// items from `itemFactory` (and the player notified). `oldItems` is updated in
// place to the new set.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Restrict the incoming track list to the requested media type.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    // Partition the old items into kept vs. removed, then append items for the
    // newly added tracks.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the player after oldItems already reflects the new state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(*removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(*addedItem);
}
1949
// Media-selection-group variant of the diff above: refreshes the group's
// options for the given characteristics, then reconciles `oldItems` against
// the group's current AVMediaSelectionOptions, notifying the player of
// removals and additions.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, const Vector<String>& characteristics, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions(characteristics);

    // Collect the group's current options that are backed by a real
    // AVMediaSelectionOption.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOption* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        newSelectionOptions.add(option);
    }

    // Collect the options our existing items refer to.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    // Partition old items into kept vs. removed (items with no backing option
    // are treated as removed), then create items for the added options.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (!oldItem->mediaSelectionOption())
            removedItems.append(oldItem);
        else if (removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the player after oldItems already reflects the new state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(*removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(*addedItem);
}
2009
// Reconciles m_audioTracks with the current audible media, preferring the
// audible media selection group when one is available.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !RELEASE_LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

    // Lazily create the audible selection group the first time one is available.
    Vector<String> characteristics = player()->preferredAudioCharacteristics();
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroup *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, characteristics);
    }

    // Diff via the selection group when present, otherwise via the raw
    // AVPlayerItemTrack list.
    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, characteristics, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
    else
        determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

    // Refresh cached per-track properties on the (possibly changed) set.
    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "track count was ", count, ", is ", m_audioTracks.size());
#endif
}
2034
// Reconciles m_videoTracks with the current visual media, mirroring
// updateAudioTracks() above.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !RELEASE_LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    // Diff against the raw AVPlayerItemTrack list first.
    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

    // Lazily create the visual selection group the first time one is available.
    if (!m_visualGroup) {
        if (AVMediaSelectionGroup *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, Vector<String>());
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, Vector<String>(), &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

    // Refresh cached per-track properties on the (possibly changed) set.
    // Fix: this previously iterated m_audioTracks — a copy/paste slip from
    // updateAudioTracks() that left video track properties stale.
    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "track count was ", count, ", is ", m_videoTracks.size());
#endif
}
2058
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
    // The fullscreen layer manager knows whether captions must be rendered
    // through a TextTrackRepresentation.
    auto& layerManager = *m_videoFullscreenLayerManager;
    return layerManager.requiresTextTrackRepresentation();
}
2063
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
    // Forward to the fullscreen layer manager, which owns the caption layer geometry.
    auto& layerManager = *m_videoFullscreenLayerManager;
    layerManager.syncTextTrackBounds();
}
2068
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
    // Hand the representation to the fullscreen layer manager, which hosts it.
    auto& layerManager = *m_videoFullscreenLayerManager;
    layerManager.setTextTrackRepresentation(representation);
}
2073
2074 #endif // ENABLE(VIDEO_TRACK)
2075
2076 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
2077
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    // Lazily create the Web Audio provider the first time it is requested,
    // pointing it at the currently enabled audible track.
    if (m_provider)
        return m_provider.get();

    m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
    m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    return m_provider.get();
}
2086
2087 #endif
2088
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    // Nothing meaningful to report without an asset.
    if (!m_avAsset)
        return;

    // Refresh the rotation session first so the reported natural size
    // reflects any new preferred transform.
    updateRotationSession();
    setNaturalSize(m_cachedPresentationSize);
}
2097
void MediaPlayerPrivateAVFoundationObjC::resolvedURLChanged()
{
    // Mirror AVAsset's resolvedURL; report an empty URL when there is no asset.
    if (m_avAsset)
        setResolvedURL(URL([m_avAsset resolvedURL]));
    else
        setResolvedURL(URL());
}
2102
bool MediaPlayerPrivateAVFoundationObjC::didPassCORSAccessCheck() const
{
    // CORS status is only tracked when loads go through our WebCoreNSURLSession;
    // in every other configuration, report failure.
    AVAssetResourceLoader *loader = m_avAsset.get().resourceLoader;
    if (!DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled())
        return false;
    if (![loader respondsToSelector:@selector(URLSession)])
        return false;

    id session = loader.URLSession;
    if (![session isKindOfClass:[WebCoreNSURLSession class]])
        return false;

    return ((WebCoreNSURLSession *)session).didPassCORSAccessChecks;
}
2116
Optional<bool> MediaPlayerPrivateAVFoundationObjC::wouldTaintOrigin(const SecurityOrigin& origin) const
{
    // Without our custom NSURLSession we cannot inspect the responses, so
    // conservatively report that the origin would be tainted.
    AVAssetResourceLoader *loader = m_avAsset.get().resourceLoader;
    if (!DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled())
        return false;
    if (![loader respondsToSelector:@selector(URLSession)])
        return false;

    id session = loader.URLSession;
    if ([session isKindOfClass:[WebCoreNSURLSession class]])
        return [(WebCoreNSURLSession *)session wouldTaintOrigin:origin];

    // Unknown session type: let the caller decide.
    return WTF::nullopt;
}
2130
2131
2132 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
2133
// Attaches an AVPlayerItemVideoOutput to the current player item so frames can
// be pulled for painting and texture upload. No-op without an item or when an
// output already exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    INFO_LOG(LOGIDENTIFIER);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // With VideoToolbox the conformer handles pixel format conversion later.
    NSDictionary* attributes = nil;
#else
    // Use the literal-dictionary form for consistency with the identical
    // attributes dictionary built in updateLastImage().
    NSDictionary* attributes = @{ (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#endif
    m_videoOutput = adoptNS([PAL::allocAVPlayerItemVideoOutputInstance() initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    // Deliver "media data will change" callbacks on the shared pull queue.
    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];
}
2153
// Detaches and releases the AVPlayerItemVideoOutput created by createVideoOutput().
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];

    INFO_LOG(LOGIDENTIFIER);

    // Clear the RetainPtr with nullptr (was `= 0`), matching the smart-pointer
    // idiom used elsewhere in this file.
    m_videoOutput = nullptr;
}
2166
// Pulls the newest pixel buffer for the current item time into
// m_lastPixelBuffer. Returns true only when a new buffer was fetched.
bool MediaPlayerPrivateAVFoundationObjC::updateLastPixelBuffer()
{
    if (!m_avPlayerItem)
        return false;

    // Lazily create the video output on first use.
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return false;

    m_lastPixelBuffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);

    // Apply any pending rotation (see updateRotationSession()) before the
    // buffer is consumed by painting or texture upload.
    if (m_imageRotationSession)
        m_lastPixelBuffer = m_imageRotationSession->rotate(m_lastPixelBuffer.get());

    // The cached CGImage no longer corresponds to the new buffer.
    m_lastImage = nullptr;
    return true;
}
2189
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;

    // A previously converted image counts as an available frame.
    if (m_lastImage)
        return true;

    // Lazily create the output so it can be queried below.
    if (!m_videoOutput)
        createVideoOutput();

    CMTime itemTime = [m_avPlayerItem currentTime];
    return [m_videoOutput hasNewPixelBufferForItemTime:itemTime];
}
2203
// Refreshes m_lastImage from the newest pixel buffer. With
// UpdateType::UpdateSynchronously, blocks (up to 1s) for a frame when none is
// immediately available.
void MediaPlayerPrivateAVFoundationObjC::updateLastImage(UpdateType type)
{
#if HAVE(CORE_VIDEO)
    if (!m_avPlayerItem)
        return;

    if (type == UpdateType::UpdateSynchronously && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();

    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
    // for the requested time has already been retrieved. In this case, the last valid image (if any)
    // should be displayed.
    if (!updateLastPixelBuffer() && (m_lastImage || !m_lastPixelBuffer))
        return;

    // Lazily create the conformer that converts CVPixelBuffers to CGImages.
    if (!m_pixelBufferConformer) {
#if USE(VIDEOTOOLBOX)
        NSDictionary *attributes = @{ (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#else
        NSDictionary *attributes = nil;
#endif
        m_pixelBufferConformer = makeUnique<PixelBufferConformerCV>((__bridge CFDictionaryRef)attributes);
    }

#if !RELEASE_LOG_DISABLED
    MonotonicTime start = MonotonicTime::now();
#endif

    m_lastImage = m_pixelBufferConformer->createImageFromPixelBuffer(m_lastPixelBuffer.get());

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "creating buffer took ", (MonotonicTime::now() - start).seconds());
#endif
#endif // HAVE(CORE_VIDEO)
}
2239
// Paints the current video frame (fetched synchronously from the video output)
// into the given graphics context.
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext& context, const FloatRect& outputRect)
{
    updateLastImage(UpdateType::UpdateSynchronously);
    if (!m_lastImage)
        return;

    // Nothing to paint when no video track is enabled.
    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    INFO_LOG(LOGIDENTIFIER);

    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    context.drawNativeImage(m_lastImage.get(), imageRect.size(), outputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();

}
2261
bool MediaPlayerPrivateAVFoundationObjC::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    ASSERT(context);

    // Fetch the newest frame; bail when no pixel buffer is available at all.
    updateLastPixelBuffer();
    if (!m_lastPixelBuffer)
        return false;

    auto* pixelBuffer = m_lastPixelBuffer.get();
    size_t bufferWidth = CVPixelBufferGetWidth(pixelBuffer);
    size_t bufferHeight = CVPixelBufferGetHeight(pixelBuffer);

    // Lazily create the copier, which is bound to this GL context.
    if (!m_videoTextureCopier)
        m_videoTextureCopier = makeUnique<VideoTextureCopierCV>(*context);

    return m_videoTextureCopier->copyImageToPlatformTexture(pixelBuffer, bufferWidth, bufferHeight, outputTexture, outputTarget, level, internalFormat, format, type, premultiplyAlpha, flipY);
}
2278
NativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    // Refresh the cached image for the current time, then hand it back
    // (may be null when no frame has been produced yet).
    updateLastImage();
    return m_lastImage;
}
2284
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    // Ask the output to notify us as soon as new media data becomes available.
    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Block for at most one second waiting for outputMediaDataWillChange()
    // to signal the semaphore.
    if (!m_videoOutputSemaphore.waitFor(1_s))
        ERROR_LOG(LOGIDENTIFIER, "timed out");
}
2294
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutput *)
{
    // Wake waitForVideoOutputMediaDataWillChange(), which blocks on this semaphore.
    m_videoOutputSemaphore.signal();
}
2299
2300 #endif
2301
2302 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
2303
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    // Remove and return the pending loading request for this key URI, if any.
    auto pendingRequest = m_keyURIToRequestMap.take(keyURI);
    return pendingRequest;
}
2308
void MediaPlayerPrivateAVFoundationObjC::keyAdded()
{
    // Satisfy every pending resource request whose key data is now cached,
    // then drop those entries from the pending map. Removal is deferred so we
    // never mutate the map while iterating it.
    Vector<String> completedKeyIds;
    for (auto& entry : m_keyURIToRequestMap) {
        auto keyData = player()->cachedKeyForKeyId(entry.key);
        if (!keyData)
            continue;

        fulfillRequestWithKeyData(entry.value.get(), keyData.get());
        completedKeyIds.append(entry.key);
    }

    for (auto& keyId : completedKeyIds)
        m_keyURIToRequestMap.remove(keyId);
}
2328
void MediaPlayerPrivateAVFoundationObjC::removeSession(LegacyCDMSession& session)
{
    // Only the session handed out by createSession() may be removed here.
    ASSERT_UNUSED(session, &session == m_session);
    m_session = nullptr;
}
2334
std::unique_ptr<LegacyCDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem, LegacyCDMSessionClient* client)
{
    // Refuse key systems this engine cannot service.
    if (!keySystemIsSupported(keySystem))
        return nullptr;

    // Hold a weak reference so removeSession() and error reporting can find it.
    auto newSession = makeUnique<CDMSessionAVFoundationObjC>(this, client);
    m_session = makeWeakPtr(*newSession);
    return newSession;
}
2343 #endif
2344
2345 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(LEGACY_ENCRYPTED_MEDIA)
// Invoked when AVFoundation reports that output became obscured because the
// external display lacks sufficient content protection (e.g. HDCP).
void MediaPlayerPrivateAVFoundationObjC::outputObscuredDueToInsufficientExternalProtectionChanged(bool newValue)
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    // Surface an HDCP error to the legacy CDM session, if any.
    if (m_session && newValue)
        m_session->playerDidReceiveError([NSError errorWithDomain:@"com.apple.WebKit" code:'HDCP' userInfo:nil]);
#endif

#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    // Forward the state change to the modern CDM instance.
    if (m_cdmInstance)
        m_cdmInstance->outputObscuredDueToInsufficientExternalProtectionChanged(newValue);
#elif !ENABLE(LEGACY_ENCRYPTED_MEDIA)
    UNUSED_PARAM(newValue);
#endif
}
2360 #endif
2361
2362 #if ENABLE(ENCRYPTED_MEDIA)
// Attaches a FairPlay Streaming CDM instance; replaces any existing one.
void MediaPlayerPrivateAVFoundationObjC::cdmInstanceAttached(CDMInstance& instance)
{
#if HAVE(AVCONTENTKEYSESSION)
    // Only FairPlay Streaming instances are usable with this engine.
    if (!is<CDMInstanceFairPlayStreamingAVFObjC>(instance))
        return;

    auto& fpsInstance = downcast<CDMInstanceFairPlayStreamingAVFObjC>(instance);
    if (&fpsInstance == m_cdmInstance)
        return;

    // Detach the previous instance before taking the new one.
    if (m_cdmInstance)
        cdmInstanceDetached(*m_cdmInstance);

    m_cdmInstance = &fpsInstance;
#else
    UNUSED_PARAM(instance);
#endif
}
2381
// Detaches the current CDM instance; `instance` must be the attached one.
void MediaPlayerPrivateAVFoundationObjC::cdmInstanceDetached(CDMInstance& instance)
{
#if HAVE(AVCONTENTKEYSESSION)
    ASSERT_UNUSED(instance, m_cdmInstance && m_cdmInstance == &instance);
    m_cdmInstance = nullptr;
#else
    UNUSED_PARAM(instance);
#endif
}
2391
// Attempts to start decryption once both a pending key ID and an attached
// CDM instance are available; completes any key requests that were parked
// waiting for the CDM.
void MediaPlayerPrivateAVFoundationObjC::attemptToDecryptWithInstance(CDMInstance&)
{
#if HAVE(AVCONTENTKEYSESSION)
    if (!m_keyID || !m_cdmInstance)
        return;

    auto instanceSession = m_cdmInstance->sessionForKeyIDs(Vector<Ref<SharedBuffer>>::from(*m_keyID));
    if (!instanceSession)
        return;

    // Let AVFoundation deliver content keys for this asset through the session.
    [instanceSession->contentKeySession() addContentKeyRecipient:m_avAsset.get()];

    // Take the request map before iterating so completion handlers cannot
    // re-enter and mutate it while we walk it.
    auto keyURIToRequestMap = WTFMove(m_keyURIToRequestMap);
    for (auto& request : keyURIToRequestMap.values()) {
        if (auto *infoRequest = request.get().contentInformationRequest)
            infoRequest.contentType = AVStreamingKeyDeliveryContentKeyType;
        [request finishLoading];
    }
    setWaitingForKey(false);
#endif
}
2413
// Records whether playback is blocked waiting for a decryption key and
// notifies the client only on actual transitions.
void MediaPlayerPrivateAVFoundationObjC::setWaitingForKey(bool waitingForKey)
{
    if (m_waitingForKey == waitingForKey)
        return;

    m_waitingForKey = waitingForKey;
    player()->waitingForKeyChanged();
}
2422 #endif
2423
// Returns the asset's audible tracks, or nil unless the asset exists and its
// "tracks" key has finished loading (querying it earlier could block).
NSArray* MediaPlayerPrivateAVFoundationObjC::safeAVAssetTracksForAudibleMedia()
{
    if (!m_avAsset || [m_avAsset.get() statusOfValueForKey:@"tracks" error:NULL] != AVKeyValueStatusLoaded)
        return nil;

    return [m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible];
}
2434
// True once the asset's available media characteristics have loaded, which
// makes the media selection group accessors below safe to call.
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    return m_avAsset && [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2445
// Returns the asset's legible (caption/subtitle) selection group, or nil if
// selection groups have not finished loading.
AVMediaSelectionGroup* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible] : nil;
}
2453
// Returns the asset's audible selection group, or nil if selection groups
// have not finished loading.
AVMediaSelectionGroup* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible] : nil;
}
2461
// Returns the asset's visual selection group, or nil if selection groups
// have not finished loading.
AVMediaSelectionGroup* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual] : nil;
}
2469
// Reconciles our InbandTextTrackPrivateAVF list with AVFoundation's current
// legible media selection options: tracks whose option disappeared are
// removed, new options become new tracks.
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroup *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        INFO_LOG(LOGIDENTIFIER, "no mediaSelectionGroup");
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Start by assuming every existing track was removed; matching options
    // are struck from this list as they are found below.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [PAL::getAVMediaSelectionGroupClass() playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOption *option in legibleOptions) {
        bool newTrack = true;
        // Iterate backwards so remove(i - 1) does not disturb unvisited entries.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            RetainPtr<AVMediaSelectionOption> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2525
// Lazily creates the single in-band timed-metadata text track and registers
// it with the client. Subsequent calls are no-ops.
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    if (m_metadataTrack)
        return;

    m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
    m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
    player()->addTextTrack(*m_metadataTrack);
}
2535
// Forwards legible-output cue data arriving at |time| to the currently
// selected text track; dropped when no track is selected.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    ASSERT(time >= MediaTime::zeroTime());

    if (m_currentTextTrack)
        m_currentTextTrack->processCue((__bridge CFArrayRef)attributedStrings, (__bridge CFArrayRef)nativeSamples, time);
}
2545
// Discards any partially-delivered cue state on the currently selected text
// track (e.g. after a seek).
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    INFO_LOG(LOGIDENTIFIER);

    if (m_currentTextTrack)
        m_currentTextTrack->resetCueValues();
}
2555
// Selects |track| as the active text track, translating the selection into
// the appropriate AVFoundation mechanism: legacy CC display for LegacyClosedCaption
// tracks, or the matching AVMediaSelectionOption otherwise. Passing nullptr
// deselects everything.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    INFO_LOG(LOGIDENTIFIER, "selecting track with language ", track ? track->language() : "");

    m_currentTextTrack = track;

    if (track) {
        // Braces are load-bearing here: the ALLOW_DEPRECATED_DECLARATIONS_*
        // macros expand to pragmas, so without braces the if-body extent was
        // easy to misread.
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption) {
            ALLOW_DEPRECATED_DECLARATIONS_BEGIN
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
            ALLOW_DEPRECATED_DECLARATIONS_END
        }
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
    } else {
        // Deselect any media option and disable legacy closed captions.
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
        ALLOW_DEPRECATED_DECLARATIONS_BEGIN
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
        ALLOW_DEPRECATED_DECLARATIONS_END
    }
}
2584
// Determines (and caches in m_languageOfPrimaryAudioTrack) the language of
// the primary audio track: first from the selected audible media option,
// then — when there is exactly one ungrouped audio track — from that track.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    // Return the cached answer until it is invalidated elsewhere.
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroup *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    ALLOW_DEPRECATED_DECLARATIONS_BEGIN
    AVMediaSelectionOption *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    ALLOW_DEPRECATED_DECLARATIONS_END
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        INFO_LOG(LOGIDENTIFIER, "language of selected audible option ", m_languageOfPrimaryAudioTrack);

        return m_languageOfPrimaryAudioTrack;
    }

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        m_languageOfPrimaryAudioTrack = emptyString();
        INFO_LOG(LOGIDENTIFIER, tracks ? [tracks count] : 0, " audio tracks, returning empty");
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

    INFO_LOG(LOGIDENTIFIER, "single audio track has language \"", m_languageOfPrimaryAudioTrack, "\"");

    return m_languageOfPrimaryAudioTrack;
}
2621
2622 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// Returns whether playback is currently routed to a wireless target. On Mac
// this consults the explicit playback target (AVFoundation or mock); on iOS
// it asks AVPlayer directly.
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    bool wirelessTarget = false;

#if !PLATFORM(IOS_FAMILY)
    if (m_playbackTarget) {
        if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation)
            wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
        else
            // Mock target path: active only when we both opted in and the
            // target reports an active route.
            wirelessTarget = m_shouldPlayToPlaybackTarget && m_playbackTarget->hasActiveRoute();
    }
#else
    wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
#endif

    INFO_LOG(LOGIDENTIFIER, wirelessTarget);

    return wirelessTarget;
}
2642
// Maps AVPlayer's external playback type to the MediaPlayer target type.
// On non-iOS platforms the only wireless target supported here is AirPlay.
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

#if PLATFORM(IOS_FAMILY)
    if (!PAL::isAVFoundationFrameworkAvailable())
        return MediaPlayer::TargetTypeNone;

    switch ([m_avPlayer externalPlaybackType]) {
    case AVPlayerExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case AVPlayerExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case AVPlayerExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;

#else
    return MediaPlayer::TargetTypeAirPlay;
#endif
}
2668     
2669 #if PLATFORM(IOS_FAMILY)
// Returns a human-readable name for the external playback device(s) the
// player is routed to, or nil when none can be determined.
// FIXME: the name misspells "external"; renaming requires also updating the
// call site in wirelessPlaybackTargetName().
static NSString *exernalDeviceDisplayNameForPlayer(AVPlayer *player)
{
#if HAVE(CELESTIAL)
    if (!PAL::isAVFoundationFrameworkAvailable())
        return nil;

    // Preferred path: the shared AVOutputContext, when the class supports it.
    if ([PAL::getAVOutputContextClass() respondsToSelector:@selector(sharedAudioPresentationOutputContext)]) {
        AVOutputContext *outputContext = [PAL::getAVOutputContextClass() sharedAudioPresentationOutputContext];

        if (![outputContext respondsToSelector:@selector(supportsMultipleOutputDevices)]
            || ![outputContext supportsMultipleOutputDevices]
            || ![outputContext respondsToSelector:@selector(outputDevices)])
            return [outputContext deviceName];

        // Multiple simultaneous output devices: join their names, e.g. "A + B".
        auto outputDeviceNames = adoptNS([[NSMutableArray alloc] init]);
        for (AVOutputDevice *outputDevice in [outputContext outputDevices]) {
ALLOW_DEPRECATED_DECLARATIONS_BEGIN
            auto outputDeviceName = adoptNS([[outputDevice name] copy]);
ALLOW_DEPRECATED_DECLARATIONS_END
            [outputDeviceNames addObject:outputDeviceName.get()];
        }

        return [outputDeviceNames componentsJoinedByString:@" + "];
    }

    // Fallback path: consult MediaRemote's pickable routes for an AirPlay route.
    if (player.externalPlaybackType != AVPlayerExternalPlaybackTypeAirPlay)
        return nil;

    NSArray *pickableRoutes = CFBridgingRelease(MRMediaRemoteCopyPickableRoutes());
    if (!pickableRoutes.count)
        return nil;

    NSString *displayName = nil;
    for (NSDictionary *pickableRoute in pickableRoutes) {
        if (![pickableRoute[AVController_RouteDescriptionKey_RouteCurrentlyPicked] boolValue])
            continue;

        displayName = pickableRoute[AVController_RouteDescriptionKey_RouteName];

        NSString *routeName = pickableRoute[AVController_RouteDescriptionKey_AVAudioRouteName];
        if (![routeName isEqualToString:@"Speaker"] && ![routeName isEqualToString:@"HDMIOutput"])
            break;

        // The route is a speaker or HDMI out, override the name to be the localized device model.
        NSString *localizedDeviceModel = [[PAL::getUIDeviceClass() currentDevice] localizedModel];

        // In cases where a route with that name already exists, prefix the name with the model.
        BOOL includeLocalizedDeviceModelName = NO;
        for (NSDictionary *otherRoute in pickableRoutes) {
            if (otherRoute == pickableRoute)
                continue;

            if ([otherRoute[AVController_RouteDescriptionKey_RouteName] rangeOfString:displayName].location != NSNotFound) {
                includeLocalizedDeviceModelName = YES;
                break;
            }
        }

        if (includeLocalizedDeviceModelName)
            displayName =  [NSString stringWithFormat:@"%@ %@", localizedDeviceModel, displayName];
        else
            displayName = localizedDeviceModel;

        break;
    }

    return displayName;
#else
    UNUSED_PARAM(player);
    return nil;
#endif
}
2742 #endif
2743
// Returns the display name of the current wireless playback target, or the
// empty string when there is no player or no name is available.
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();

    String wirelessTargetName;
#if !PLATFORM(IOS_FAMILY)
    // Mac: the explicit playback target knows its own device name.
    if (m_playbackTarget)
        wirelessTargetName = m_playbackTarget->deviceName();
#else
    wirelessTargetName = exernalDeviceDisplayNameForPlayer(m_avPlayer.get());
#endif

    return wirelessTargetName;
}
2759
// Returns true when wireless (external) video playback is disallowed. While
// a player exists, refreshes the cached flag from AVPlayer's
// allowsExternalPlayback; otherwise answers from the cache.
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;

    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    INFO_LOG(LOGIDENTIFIER, !m_allowsWirelessVideoPlayback);

    return !m_allowsWirelessVideoPlayback;
}
2770
// Enables or disables wireless (external) video playback. The cached flag is
// updated even when no AVPlayer exists yet, so it can be applied later.
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    INFO_LOG(LOGIDENTIFIER, disabled);
    m_allowsWirelessVideoPlayback = !disabled;
    if (!m_avPlayer)
        return;

    // Suppress re-entrant notifications while poking AVPlayer.
    setDelayCallbacks(true);
    [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
    setDelayCallbacks(false);
}
2782
2783 #if !PLATFORM(IOS_FAMILY)
2784
// Adopts a new wireless playback target. For AVFoundation targets we also
// capture the associated output context; otherwise the context is cleared.
void MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
{
    m_playbackTarget = WTFMove(target);

    m_outputContext = m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation ? toMediaPlaybackTargetMac(m_playbackTarget.get())->outputContext() : nullptr;

    INFO_LOG(LOGIDENTIFIER);

    // A target without an active route cannot be played to; reset the intent.
    if (!m_playbackTarget->hasActiveRoute())
        setShouldPlayToPlaybackTarget(false);
}
2796
// Starts or stops routing playback to the current target. For AVFoundation
// targets this swaps AVPlayer's output context; for mock targets it just
// notifies that the wireless state changed.
void MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(bool shouldPlay)
{
    if (m_shouldPlayToPlaybackTarget == shouldPlay)
        return;

    m_shouldPlayToPlaybackTarget = shouldPlay;

    if (!m_playbackTarget)
        return;

    INFO_LOG(LOGIDENTIFIER, shouldPlay);

    if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation) {
        AVOutputContext *newContext = shouldPlay ? m_outputContext.get() : nil;

        if (!m_avPlayer)
            return;

        // Avoid churn when the player is already using the desired context.
        RetainPtr<AVOutputContext> currentContext = m_avPlayer.get().outputContext;
        if ((!newContext && !currentContext.get()) || [currentContext.get() isEqual:newContext])
            return;

        setDelayCallbacks(true);
        m_avPlayer.get().outputContext = newContext;
        setDelayCallbacks(false);

        return;
    }

    ASSERT(m_playbackTarget->targetType() == MediaPlaybackTarget::Mock);

    // Mock target: asynchronously report the wireless-state change so the
    // notification arrives outside the caller's stack.
    setDelayCallbacks(true);
    auto weakThis = makeWeakPtr(*this);
    scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis] {
        if (!weakThis)
            return;
        weakThis->playbackTargetIsWirelessDidChange();
    }));
    setDelayCallbacks(false);
}
2837
2838 #endif // !PLATFORM(IOS_FAMILY)
2839
// iOS: keeps AVPlayer's external-playback-while-screen-active setting in sync
// with whether the element is in (or on standby for) standard fullscreen.
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
#if PLATFORM(IOS_FAMILY)
    if (!m_avPlayer)
        return;

    if ([m_avPlayer respondsToSelector:@selector(setUsesExternalPlaybackWhileExternalScreenIsActive:)])
        [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:(player()->fullscreenMode() == MediaPlayer::VideoFullscreenModeStandard) || player()->isVideoFullscreenStandby()];
#endif
}
2850
2851 #endif
2852
// Records the latest AVPlayerItem status and re-evaluates the engine state.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;

    updateStates();
}
2859
// Marks a likely-to-keep-up change as in flight; balanced by
// playbackLikelyToKeepUpDidChange(), which defers updateStates() until all
// pending status changes have landed.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    m_pendingStatusChanges++;
}
2864
// Completes a change announced by playbackLikelyToKeepUpWillChange();
// updateStates() runs only once no other status change is still in flight.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2873
// Marks a buffer-empty change as in flight; balanced by
// playbackBufferEmptyDidChange().
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    m_pendingStatusChanges++;
}
2878
// Completes a change announced by playbackBufferEmptyWillChange();
// updateStates() runs only once no other status change is still in flight.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2887
// Marks a buffer-full change as in flight; balanced by
// playbackBufferFullDidChange().
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    m_pendingStatusChanges++;
}
2892
// Completes a change announced by playbackBufferFullWillChange();
// updateStates() runs only once no other status change is still in flight.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2901
// Caches the item's new seekable ranges, then notifies and re-evaluates state.
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray>&& seekableRanges)
{
    m_cachedSeekableRanges = WTFMove(seekableRanges);

    seekableTimeRangesChanged();
    updateStates();
}
2909
// Caches the item's new loaded (buffered) ranges, then notifies and
// re-evaluates state.
void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray>&& loadedRanges)
{
    m_cachedLoadedRanges = WTFMove(loadedRanges);

    loadedTimeRangesChanged();
    updateStates();
}
2917
// Records whether the first video frame is ready for display.
void MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange(bool isReady)
{
    m_cachedIsReadyForDisplay = isReady;
    // Becoming ready for display while we believe there is no video means our
    // track information is stale; re-scan the tracks.
    if (!hasVideo() && isReady)
        tracksChanged();
    updateStates();
}
2925
// A track's "enabled" property flipped; re-scan tracks and re-evaluate state.
void MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange(bool)
{
    tracksChanged();
    updateStates();
}
2931
// Applies a buffering policy. Preferred path maps the policy directly onto
// AVPlayer's resource conservation level; the fallback simulates each policy
// by detaching and/or reattaching the player item.
void MediaPlayerPrivateAVFoundationObjC::setBufferingPolicy(MediaPlayer::BufferingPolicy policy)
{
    ALWAYS_LOG(LOGIDENTIFIER, policy);

    if (m_bufferingPolicy == policy)
        return;

    m_bufferingPolicy = policy;
    
    if (!m_avPlayer)
        return;

#if HAVE(AVPLAYER_RESOURCE_CONSERVATION_LEVEL)
    // The direct static_cast below relies on the two enumerations staying in
    // lock-step; these asserts make any divergence a build failure.
    static_assert(static_cast<size_t>(MediaPlayer::BufferingPolicy::Default) == AVPlayerResourceConservationLevelNone, "MediaPlayer::BufferingPolicy::Default is not AVPlayerResourceConservationLevelNone as expected");
    static_assert(static_cast<size_t>(MediaPlayer::BufferingPolicy::LimitReadAhead) == AVPlayerResourceConservationLevelReduceReadAhead, "MediaPlayer::BufferingPolicy::LimitReadAhead is not AVPlayerResourceConservationLevelReduceReadAhead as expected");
    static_assert(static_cast<size_t>(MediaPlayer::BufferingPolicy::MakeResourcesPurgeable) == AVPlayerResourceConservationLevelReuseActivePlayerResources, "MediaPlayer::BufferingPolicy::MakeResourcesPurgeable is not AVPlayerResourceConservationLevelReuseActivePlayerResources as expected");
    static_assert(static_cast<size_t>(MediaPlayer::BufferingPolicy::PurgeResources) == AVPlayerResourceConservationLevelRecycleBuffer, "MediaPlayer::BufferingPolicy::PurgeResources is not AVPlayerResourceConservationLevelRecycleBuffer as expected");

    if ([m_avPlayer respondsToSelector:@selector(setResourceConservationLevelWhilePaused:)]) {
        m_avPlayer.get().resourceConservationLevelWhilePaused = static_cast<AVPlayerResourceConservationLevel>(policy);
        updateStates();
        return;
    }
#endif

    // Fallback: emulate the policy with item attachment.
    switch (policy) {
    case MediaPlayer::BufferingPolicy::Default:
        setAVPlayerItem(m_avPlayerItem.get());
        break;
    case MediaPlayer::BufferingPolicy::LimitReadAhead:
    case MediaPlayer::BufferingPolicy::MakeResourcesPurgeable:
        setAVPlayerItem(nil);
        break;
    case MediaPlayer::BufferingPolicy::PurgeResources:
        // Detach then reattach to force the player to drop its buffers.
        setAVPlayerItem(nil);
        setAVPlayerItem(m_avPlayerItem.get());
        break;
    }

    updateStates();
}
2973
2974 #if ENABLE(DATACUE_VALUE)
2975
// Maps an AVFoundation metadata key space to the corresponding data-cue type
// string; unknown key spaces map to the empty atom.
static const AtomString& metadataType(NSString *avMetadataKeySpace)
{
    // NeverDestroyed: these atoms are created once and never torn down.
    static NeverDestroyed<const AtomString> quickTimeUserData("com.apple.quicktime.udta", AtomString::ConstructFromLiteral);
    static NeverDestroyed<const AtomString> isoUserData("org.mp4ra", AtomString::ConstructFromLiteral);
    static NeverDestroyed<const AtomString> quickTimeMetadata("com.apple.quicktime.mdta", AtomString::ConstructFromLiteral);
    static NeverDestroyed<const AtomString> iTunesMetadata("com.apple.itunes", AtomString::ConstructFromLiteral);
    static NeverDestroyed<const AtomString> id3Metadata("org.id3", AtomString::ConstructFromLiteral);

    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeUserData])
        return quickTimeUserData;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceISOUserData])
        return isoUserData;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeMetadata])
        return quickTimeMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceiTunes])
        return iTunesMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceID3])
        return id3Metadata;

    return emptyAtom();
}
2997
2998 #endif
2999
// Handles a batch of timed metadata from AVFoundation: closes out any
// still-open cues on the metadata track, then adds a data cue per item.
void MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(const RetainPtr<NSArray>& metadata, const MediaTime& mediaTime)
{
    // AVFoundation can deliver NSNull to signal "no metadata"; normalize to nil.
    m_currentMetaData = metadata && ![metadata isKindOfClass:[NSNull class]] ? metadata : nil;

    INFO_LOG(LOGIDENTIFIER, "adding ", m_currentMetaData ? [m_currentMetaData.get() count] : 0, " at time ", mediaTime);

#if ENABLE(DATACUE_VALUE)
    // Metadata delivered during a seek may be for the wrong time; drop it.
    if (seeking())
        return;

    if (!m_metadataTrack)
        processMetadataTrack();

    if (!metadata || [metadata isKindOfClass:[NSNull class]]) {
        m_metadataTrack->updatePendingCueEndTimes(mediaTime);
        return;
    }

    // Set the duration of all incomplete cues before adding new ones.
    MediaTime earliestStartTime = MediaTime::positiveInfiniteTime();
    for (AVMetadataItem *item in m_currentMetaData.get()) {
        MediaTime start = std::max(PAL::toMediaTime(item.time), MediaTime::zeroTime());
        if (start < earliestStartTime)
            earliestStartTime = start;
    }
    m_metadataTrack->updatePendingCueEndTimes(earliestStartTime);

    for (AVMetadataItem *item in m_currentMetaData.get()) {
        // Clamp negative item times to zero; items with no valid duration
        // stay open (end == +infinity) until a later batch closes them.
        MediaTime start = std::max(PAL::toMediaTime(item.time), MediaTime::zeroTime());
        MediaTime end = MediaTime::positiveInfiniteTime();
        if (CMTIME_IS_VALID(item.duration))
            end = start + PAL::toMediaTime(item.duration);

        AtomString type = nullAtom();
        if (item.keySpace)
            type = metadataType(item.keySpace);

        m_metadataTrack->addDataCue(start, end, SerializedPlatformRepresentationMac::create(item), type);
    }
#endif
}
3041
// Handles a change in the AVPlayerItem's track list: rebuilds m_cachedTracks
// (filtering out streaming tracks already represented by a media selection
// group) and rebalances the "enabled" KVO registrations.
void MediaPlayerPrivateAVFoundationObjC::tracksDidChange(const RetainPtr<NSArray>& tracks)
{
    // Unregister from the old track set before replacing it, so every
    // addObserver below is balanced.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];

    NSArray *assetTracks = [m_avAsset tracks];

    m_cachedTracks = [tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id obj, NSUInteger, BOOL*) {
        AVAssetTrack* assetTrack = [obj assetTrack];

        if ([assetTracks containsObject:assetTrack])
            return YES;

        // Track is a streaming track. Omit if it belongs to a valid AVMediaSelectionGroup.
        if (!hasLoadedMediaSelectionGroups())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicAudible] && safeMediaSelectionGroupForAudibleMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicVisual] && safeMediaSelectionGroupForVisualMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible] && safeMediaSelectionGroupForLegibleMedia())
            return NO;

        return YES;
    }]];

    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track addObserver:m_objcObserver.get() forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayerItemTrack];

    // Track membership changed, so any cached size estimate is stale.
    m_cachedTotalBytes = 0;

    tracksChanged();
    updateStates();
}
3079
// Caches whether any audio track is enabled, then re-scans tracks and state.
void MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange(bool hasEnabledAudio)
{
    m_cachedHasEnabledAudio = hasEnabledAudio;

    tracksChanged();
    updateStates();
}
3087
// Caches the item's new presentation size, then propagates the size change.
void MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange(FloatSize size)
{
    m_cachedPresentationSize = size;

    sizeChanged();
    updateStates();
}
3095
// Caches the item's new duration and invalidates any derived duration cache.
void MediaPlayerPrivateAVFoundationObjC::durationDidChange(const MediaTime& duration)
{
    m_cachedDuration = duration;

    invalidateCachedDuration();
}
3102
// Caches the player's new rate, then re-evaluates state and notifies.
void MediaPlayerPrivateAVFoundationObjC::rateDidChange(double rate)
{
    m_cachedRate = rate;

    updateStates();
    rateChanged();
}
3110
// Handles AVPlayer timeControlStatus KVO. Changes are ignored while
// observation is suspended (the cached value is deliberately NOT updated in
// that case). When routed to a wireless target, a paused/playing transition
// initiated on the target side is reflected back to the client.
void MediaPlayerPrivateAVFoundationObjC::timeControlStatusDidChange(int timeControlStatus)
{
    if (m_cachedTimeControlStatus == timeControlStatus)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, static_cast<AVPlayerTimeControlStatus>(timeControlStatus), ", observing = ", m_shouldObserveTimeControlStatus);

    if (!m_shouldObserveTimeControlStatus)
        return;

    m_cachedTimeControlStatus = timeControlStatus;

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    if (!isCurrentPlaybackTargetWireless())
        return;

    bool playerIsPlaying = m_cachedTimeControlStatus != AVPlayerTimeControlStatusPaused;
    if (playerIsPlaying != m_requestedPlaying) {
        m_requestedPlaying = playerIsPlaying;
        player()->playbackStateChanged();
    }
#endif
}
3134
3135 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
3136
// Forwards a wireless playback-target change to the shared base-class handler.
void MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange()
{
    playbackTargetIsWirelessChanged();
}
3141
3142 #endif
3143
// Caches whether the item supports fast-forward playback.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange(bool newValue)
{
    m_cachedCanPlayFastForward = newValue;
}
3148
// Caches whether the item supports fast-reverse playback.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange(bool newValue)
{
    m_cachedCanPlayFastReverse = newValue;
}
3153
// On iOS-family hardware (not simulator or Catalyst), toggles AVPlayer's SPI
// that keeps the device awake during video playback; a no-op elsewhere.
void MediaPlayerPrivateAVFoundationObjC::setShouldDisableSleep(bool flag)
{
#if PLATFORM(IOS_FAMILY) && !PLATFORM(IOS_FAMILY_SIMULATOR) && !PLATFORM(MACCATALYST)
    ALLOW_DEPRECATED_DECLARATIONS_BEGIN
    [m_avPlayer _setPreventsSleepDuringVideoPlayback:flag];
    ALLOW_DEPRECATED_DECLARATIONS_END
#else
    UNUSED_PARAM(flag);
#endif
}
3164
// Collects video playback quality metrics from the video layer, or nullopt
// when the layer/API is unavailable (always nullopt on watchOS).
Optional<VideoPlaybackQualityMetrics> MediaPlayerPrivateAVFoundationObjC::videoPlaybackQualityMetrics()
{
    if (![m_videoLayer respondsToSelector:@selector(videoPerformanceMetrics)])
        return WTF::nullopt;

#if PLATFORM(WATCHOS)
    return WTF::nullopt;
#else
    ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN

    auto metrics = [m_videoLayer videoPerformanceMetrics];
    if (!metrics)
        return WTF::nullopt;

    // numberOfDisplayCompositedVideoFrames is newer API; default to 0 when absent.
    uint32_t displayCompositedFrames = 0;
    if ([metrics respondsToSelector:@selector(numberOfDisplayCompositedVideoFrames)])
        displayCompositedFrames = [metrics numberOfDisplayCompositedVideoFrames];

    return VideoPlaybackQualityMetrics {
        static_cast<uint32_t>([metrics totalNumberOfVideoFrames]),
        static_cast<uint32_t>([metrics numberOfDroppedVideoFrames]),
        static_cast<uint32_t>([metrics numberOfCorruptedVideoFrames]),
        [metrics totalFrameDelay],
        displayCompositedFrames,
    };

    ALLOW_NEW_API_WITHOUT_GUARDS_END
#endif
}
3194
// Schedules `task` to run (on the main queue) when playback reaches `time`.
// Returns false when no AVPlayer exists to attach the observer to. Only one
// pending task is supported: scheduling a new one replaces any earlier one.
bool MediaPlayerPrivateAVFoundationObjC::performTaskAtMediaTime(WTF::Function<void()>&& task, MediaTime time)
{
    if (!m_avPlayer)
        return false;

    // The block below outlives this stack frame, so move the task into
    // __block storage that the block can own and invoke later.
    __block WTF::Function<void()> pendingTask = WTFMove(task);

    // Replace any previously registered boundary observer.
    if (m_timeObserver)
        [m_avPlayer removeTimeObserver:m_timeObserver.get()];

    NSArray *boundaryTimes = @[[NSValue valueWithCMTime:toCMTime(time)]];
    m_timeObserver = [m_avPlayer addBoundaryTimeObserverForTimes:boundaryTimes queue:dispatch_get_main_queue() usingBlock:^{
        pendingTask();
    }];
    return true;
}
3210
void MediaPlayerPrivateAVFoundationObjC::setShouldObserveTimeControlStatus(bool shouldObserve)
{
    // Idempotent: adding or removing the same KVO observer twice would throw.
    if (shouldObserve == m_shouldObserveTimeControlStatus)
        return;

    m_shouldObserveTimeControlStatus = shouldObserve;
    if (shouldObserve) {
        [m_avPlayer addObserver:m_objcObserver.get() forKeyPath:@"timeControlStatus" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
        // KVO only reports future changes, so synchronize with the current
        // value immediately after registering.
        timeControlStatusDidChange(m_avPlayer.get().timeControlStatus);
    } else {
        // Guard against the Objective-C exception -removeObserver: can raise
        // if the observation is no longer registered.
BEGIN_BLOCK_OBJC_EXCEPTIONS
        [m_avPlayer removeObserver:m_objcObserver.get() forKeyPath:@"timeControlStatus"];
END_BLOCK_OBJC_EXCEPTIONS
    }
}
3226
// AVAsset key paths loaded before the asset is considered ready for playback.
// The array is allocated once and intentionally never released (process lifetime).
NSArray* assetMetadataKeyNames()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:
        @"duration",
        @"naturalSize",
        @"preferredTransform",
        @"preferredVolume",
        @"preferredRate",
        @"playable",
        @"resolvedURL",
        @"tracks",
        @"availableMediaCharacteristicsWithMediaSelectionOptions",
    nil];
    return keys;
}
3242
// AVPlayerItem key paths observed via KVO; each has a matching branch in
// WebCoreAVFMovieObserver's -observeValueForKeyPath:. Allocated once and
// intentionally never released (process lifetime).
NSArray* itemKVOProperties()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:
        @"presentationSize",
        @"status",
        @"asset",
        @"tracks",
        @"seekableTimeRanges",
        @"loadedTimeRanges",
        @"playbackLikelyToKeepUp",
        @"playbackBufferFull",
        @"playbackBufferEmpty",
        @"duration",
        @"hasEnabledAudio",
        @"timedMetadata",
        @"canPlayFastForward",
        @"canPlayFastReverse",
    nil];
    return keys;
}
3263
// AVAssetTrack key paths to load for each track. Allocated once and
// intentionally never released (process lifetime).
NSArray* assetTrackMetadataKeyNames()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"totalSampleDataLength", @"mediaType", @"enabled", @"preferredTransform", @"naturalSize", nil];
    return keys;
}
3269
// AVPlayer key paths observed via KVO; the conditional entries match the
// feature guards in -observeValueForKeyPath:. Allocated once and intentionally
// never released (process lifetime).
NSArray* playerKVOProperties()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:
        @"rate",
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
        @"externalPlaybackActive",
        @"allowsExternalPlayback",
#endif
#if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
        @"outputObscuredDueToInsufficientExternalProtection",
#endif
    nil];
    return keys;
}
3284 } // namespace WebCore
3285
@implementation WebCoreAVFMovieObserver

// Observer bridging AVFoundation notifications/KVO into the C++ player.
// Holds only a WeakPtr back-reference so it never extends the player's lifetime.
- (id)initWithPlayer:(WeakPtr<MediaPlayerPrivateAVFoundationObjC>&&)player
{
    self = [super init];
    if (!self)
        return nil;
    m_player = WTFMove(player);
    return self;
}

// Called by the player before it is destroyed; subsequent callbacks become no-ops.
- (void)disconnect
{
    m_player = nullptr;
}

- (void)metadataLoaded
{
    // Hop to the main thread, then serialize through the task queue; the player
    // may have been destroyed by the time the task runs, hence the null check.
    ensureOnMainThread([self, strongSelf = retainPtr(self)] {
        m_taskQueue.enqueueTask([player = m_player] {
            if (player)
                player->metadataLoaded();
        });
    });
}

- (void)didEnd:(NSNotification *)unusedNotification
{
    UNUSED_PARAM(unusedNotification);
    ensureOnMainThread([self, strongSelf = retainPtr(self)] {
        m_taskQueue.enqueueTask([player = m_player] {
            if (player)
                player->didEnd();
        });
    });
}

// Central KVO dispatcher. `context` identifies which object kind (layer, track,
// item, player) the key path belongs to; NSKeyValueChangeNotificationIsPriorKey
// distinguishes "will change" from "did change" callbacks.
- (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context
{
    // KVO can fire on any thread; retain everything, hop to the main thread,
    // and serialize through the task queue before touching the player.
    ensureOnMainThread([self, strongSelf = retainPtr(self), keyPath = retainPtr(keyPath), change = retainPtr(change), object = retainPtr(object), context]() mutable {
        m_taskQueue.enqueueTask([player = m_player, keyPath = WTFMove(keyPath), change = WTFMove(change), object = WTFMove(object), context] {
            if (!player)
                return;
            id newValue = [change valueForKey:NSKeyValueChangeNewKey];
            bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];
            bool shouldLogValue = !willChange;

            if (context == MediaPlayerAVFoundationObservationContextAVPlayerLayer) {
                if ([keyPath isEqualToString:@"readyForDisplay"])
                    player->firstFrameAvailableDidChange([newValue boolValue]);
            }

            if (context == MediaPlayerAVFoundationObservationContextPlayerItemTrack) {
                if ([keyPath isEqualToString:@"enabled"])
                    player->trackEnabledDidChange([newValue boolValue]);
            }

            // "Will change" notifications for the buffering-related item keys.
            if (context == MediaPlayerAVFoundationObservationContextPlayerItem && willChange) {
                if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
                    player->playbackLikelyToKeepUpWillChange();
                else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
                    player->playbackBufferEmptyWillChange();
                else if ([keyPath isEqualToString:@"playbackBufferFull"])
                    player->playbackBufferFullWillChange();
            }

            if (context == MediaPlayerAVFoundationObservationContextPlayerItem && !willChange) {
                // A value changed for an AVPlayerItem
                if ([keyPath isEqualToString:@"status"])
                    player->playerItemStatusDidChange([newValue intValue]);
                else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
                    player->playbackLikelyToKeepUpDidChange([newValue boolValue]);
                else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
                    player->playbackBufferEmptyDidChange([newValue boolValue]);
                else if ([keyPath isEqualToString:@"playbackBufferFull"])
                    player->playbackBufferFullDidChange([newValue boolValue]);
                else if ([keyPath isEqualToString:@"asset"]) {
                    player->setAsset(RetainPtr<id>(newValue));
                    // Assets and track lists are too verbose to log as values.
                    shouldLogValue = false;
                } else if ([keyPath isEqualToString:@"loadedTimeRanges"])
                    player->loadedTimeRangesDidChange(RetainPtr<NSArray>(newValue));
                else if ([keyPath isEqualToString:@"seekableTimeRanges"])
                    player->seekableTimeRangesDidChange(RetainPtr<NSArray>(newValue));
                else if ([keyPath isEqualToString:@"tracks"]) {
                    player->tracksDidChange(RetainPtr<NSArray>(newValue));
                    shouldLogValue = false;
                } else if ([keyPath isEqualToString:@"hasEnabledAudio"])
                    player->hasEnabledAudioDidChange([newValue boolValue]);
                else if ([keyPath isEqualToString:@"presentationSize"])
                    player->presentationSizeDidChange(FloatSize([newValue sizeValue]));
                else if ([keyPath isEqualToString:@"duration"])
                    player->durationDidChange(PAL::toMediaTime([newValue CMTimeValue]));
                else if ([keyPath isEqualToString:@"timedMetadata"] && newValue) {
                    // Pair the metadata with the item's current time, clamped to
                    // zero; a non-numeric CMTime leaves `now` at the invalid default.
                    MediaTime now;
                    CMTime itemTime = [(AVPlayerItem *)object.get() currentTime];
                    if (CMTIME_IS_NUMERIC(itemTime))
                        now = std::max(PAL::toMediaTime(itemTime), MediaTime::zeroTime());
                    player->metadataDidArrive(RetainPtr<NSArray>(newValue), now);
                    shouldLogValue = false;
                } else if ([keyPath isEqualToString:@"canPlayFastReverse"])
                    player->canPlayFastReverseDidChange([newValue boolValue]);
                else if ([keyPath isEqualToString:@"canPlayFastForward"])
                    player->canPlayFastForwardDidChange([newValue boolValue]);
            }

            if (context == MediaPlayerAVFoundationObservationContextPlayer && !willChange) {
                // A value changed for an AVPlayer.
                if ([keyPath isEqualToString:@"rate"])
                    player->rateDidChange([newValue doubleValue]);
                else if ([keyPath isEqualToString:@"timeControlStatus"])
                    player->timeControlStatusDidChange([newValue intValue]);
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
                else if ([keyPath isEqualToString:@"externalPlaybackActive"] || [keyPath isEqualToString:@"allowsExternalPlayback"])
                    player->playbackTargetIsWirelessDidChange();
#endif
#if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
                else if ([keyPath isEqualToString:@"outputObscuredDueToInsufficientExternalProtection"])
                    player->outputObscuredDueToInsufficientExternalProtectionChanged([newValue boolValue]);
#endif
            }

#if !RELEASE_LOG_DISABLED
            // Debug-level logging; `player` is known non-null here because of
            // the early return above. The time-range keys fire too often to log.
            if (player->logger().willLog(player->logChannel(), WTFLogLevel::Debug) && !([keyPath isEqualToString:@"loadedTimeRanges"] || [keyPath isEqualToString:@"seekableTimeRanges"])) {
                auto identifier = Logger::LogSiteIdentifier("MediaPlayerPrivateAVFoundation", "observeValueForKeyPath", player->logIdentifier());

                if (shouldLogValue) {
                    if ([keyPath isEqualToString:@"duration"])
                        player->logger().debug(player->logChannel(), identifier, "did change '", [keyPath UTF8String], "' to ", PAL::toMediaTime([newValue CMTimeValue]));
                    else {
                        RetainPtr<NSString> valueString = adoptNS([[NSString alloc] initWithFormat:@"%@", newValue]);
                        player->logger().debug(player->logChannel(), identifier, "did change '", [keyPath UTF8String], "' to ", [valueString.get() UTF8String]);
                    }
                } else
                    player->logger().debug(player->logChannel(), identifier, willChange ? "will" : "did", " change '", [keyPath UTF8String], "'");
            }
#endif
        });
    });
}

// AVPlayerItemLegibleOutput callback delivering in-band text cues.
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime
{
    UNUSED_PARAM(output);

    ensureOnMainThread([self, strongSelf = retainPtr(self), strings = retainPtr(strings), nativeSamples = retainPtr(nativeSamples), itemTime]() mutable {
        m_taskQueue.enqueueTask([player = m_player, strings = WTFMove(strings), nativeSamples = WTFMove(nativeSamples), itemTime] {
            if (!player)
                return;
            // Clamp the cue time to zero so cues never carry a negative time.
            MediaTime time = std::max(PAL::toMediaTime(itemTime), MediaTime::zeroTime());
            player->processCue(strings.get(), nativeSamples.get(), time);
        });
    });
}

// Legible output was flushed (e.g. after a seek); drop any queued cues.
- (void)outputSequenceWasFlushed:(id)output
{
    UNUSED_PARAM(output);

    ensureOnMainThread([self, strongSelf = retainPtr(self)] {
        m_taskQueue.enqueueTask([player = m_player] {
            if (player)
                player->flushCues();
        });
    });
}

@end
3453
3454 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
3455
@implementation WebCoreAVFLoaderDelegate

// AVAssetResourceLoader delegate bridging custom resource loading back to the
// C++ player. Holds only a WeakPtr so it never extends the player's lifetime.
- (id)initWithPlayer:(WeakPtr<MediaPlayerPrivateAVFoundationObjC>&&)player
{
    self = [super init];
    if (!self)
        return nil;
    m_player = WTFMove(player);
    return self;
}

- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    // NOTE(review): this delegate method is not guaranteed to run on the main
    // thread; the WeakPtr is only tested (not dereferenced) here — confirm
    // that this early read is intended.
    if (!m_player)
        return NO;

    ensureOnMainThread([self, strongSelf = retainPtr(self), loadingRequest = retainPtr(loadingRequest)]() mutable {
        m_taskQueue.enqueueTask([player = m_player, loadingRequest = WTFMove(loadingRequest)] {
            if (!player) {
                // Player died before the request could be serviced; finish it
                // so AVFoundation does not wait indefinitely.
                [loadingRequest finishLoadingWithError:nil];
                return;
            }

            if (!player->shouldWaitForLoadingOfResource(loadingRequest.get()))
                [loadingRequest finishLoadingWithError:nil];
        });
    });

    // Always claim the request; the asynchronous task above resolves it.
    return YES;
}

- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForResponseToAuthenticationChallenge:(NSURLAuthenticationChallenge *)challenge
{
    UNUSED_PARAM(resourceLoader);
    UNUSED_PARAM(challenge);
    // Authentication challenges are not expected to reach this delegate;
    // hitting this path indicates a logic error elsewhere.
    ASSERT_NOT_REACHED();
    return NO;
}

- (void)resourceLoader:(AVAssetResourceLoader *)resourceLoader didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    ensureOnMainThread([self, strongSelf = retainPtr(self), loadingRequest = retainPtr(loadingRequest)]() mutable {
        m_taskQueue.enqueueTask([player = m_player, loadingRequest = WTFMove(loadingRequest)] {
            if (player)
                player->didCancelLoadingRequest(loadingRequest.get());
        });
    });
}

@end
3508
3509 #endif
3510
3511 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
3512
@implementation WebCoreAVFPullDelegate

// Video-output pull delegate; holds a WeakPtr back to the owning player so it
// never extends the player's lifetime.
- (id)initWithPlayer:(WeakPtr<MediaPlayerPrivateAVFoundationObjC>&&)player
{
    self = [super init];
    if (!self)
        return nil;
    m_player = WTFMove(player);
    return self;
}

- (void)outputMediaDataWillChange:(AVPlayerItemVideoOutput *)output
{
    // Forward only while the owning player is still alive.
    if (m_player)
        m_player->outputMediaDataWillChange(output);
}

- (void)outputSequenceWasFlushed:(AVPlayerItemVideoOutput *)output
{
    // Flushes require no handling here; intentionally ignored.
    UNUSED_PARAM(output);
}

@end
3536
3537 #endif
3538
3539 #endif