 * Copyright (C) 2011-2018 Apple Inc. All rights reserved.
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 #import "MediaPlayerPrivateAVFoundationObjC.h"
29 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
31 #import "AVAssetTrackUtilities.h"
32 #import "AVFoundationMIMETypeCache.h"
33 #import "AVTrackPrivateAVFObjCImpl.h"
34 #import "AudioSourceProviderAVFObjC.h"
35 #import "AudioTrackPrivateAVFObjC.h"
36 #import "AuthenticationChallenge.h"
37 #import "CDMInstanceFairPlayStreamingAVFObjC.h"
38 #import "CDMSessionAVFoundationObjC.h"
40 #import "DeprecatedGlobalSettings.h"
41 #import "Extensions3D.h"
42 #import "FloatConversion.h"
43 #import "GraphicsContext.h"
44 #import "GraphicsContext3D.h"
45 #import "GraphicsContextCG.h"
46 #import "InbandMetadataTextTrackPrivateAVF.h"
47 #import "InbandTextTrackPrivateAVFObjC.h"
48 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
50 #import "MediaPlaybackTargetMac.h"
51 #import "MediaPlaybackTargetMock.h"
52 #import "MediaSelectionGroupAVFObjC.h"
53 #import "OutOfBandTextTrackPrivateAVF.h"
54 #import "PixelBufferConformerCV.h"
55 #import "PlatformTimeRanges.h"
56 #import "SecurityOrigin.h"
57 #import "SerializedPlatformRepresentationMac.h"
58 #import "SharedBuffer.h"
59 #import "TextEncoding.h"
60 #import "TextTrackRepresentation.h"
61 #import "TextureCacheCV.h"
62 #import "VideoFullscreenLayerManagerObjC.h"
63 #import "VideoTextureCopierCV.h"
64 #import "VideoTrackPrivateAVFObjC.h"
65 #import "WebCoreAVFResourceLoader.h"
66 #import "WebCoreCALayerExtras.h"
67 #import "WebCoreNSURLSession.h"
68 #import <JavaScriptCore/DataView.h>
69 #import <JavaScriptCore/JSCInlines.h>
70 #import <JavaScriptCore/TypedArrayInlines.h>
71 #import <JavaScriptCore/Uint16Array.h>
72 #import <JavaScriptCore/Uint32Array.h>
73 #import <JavaScriptCore/Uint8Array.h>
75 #import <objc/runtime.h>
76 #import <pal/avfoundation/MediaTimeAVFoundation.h>
77 #import <pal/spi/cocoa/QuartzCoreSPI.h>
78 #import <pal/spi/mac/AVFoundationSPI.h>
79 #import <wtf/BlockObjCExceptions.h>
80 #import <wtf/ListHashSet.h>
81 #import <wtf/NeverDestroyed.h>
82 #import <wtf/OSObjectPtr.h>
84 #import <wtf/text/CString.h>
#if ENABLE(AVF_CAPTIONS)
#include "TextTrack.h"
#endif

#import <AVFoundation/AVAssetImageGenerator.h>
#import <AVFoundation/AVAssetTrack.h>
#import <AVFoundation/AVMediaSelectionGroup.h>
#import <AVFoundation/AVMetadataItem.h>
#import <AVFoundation/AVPlayer.h>
#import <AVFoundation/AVPlayerItem.h>
#import <AVFoundation/AVPlayerItemOutput.h>
#import <AVFoundation/AVPlayerItemTrack.h>
#import <AVFoundation/AVPlayerLayer.h>
#import <AVFoundation/AVTime.h>

#if PLATFORM(IOS_FAMILY)
#import "WAKAppKitStubs.h"
#import <CoreImage/CoreImage.h>
#import <UIKit/UIDevice.h>
#import <mach/mach_port.h>
#import <pal/ios/UIKitSoftLink.h>
#else
#import <Foundation/NSGeometry.h>
#import <QuartzCore/CoreImage.h>
#endif

#if USE(VIDEOTOOLBOX)
#import <CoreVideo/CoreVideo.h>
#import <VideoToolbox/VideoToolbox.h>
#endif

#import "CoreVideoSoftLink.h"
#import "MediaRemoteSoftLink.h"
template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
    typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;

#if ENABLE(AVF_CAPTIONS)
// Note: This must be defined before our SOFT_LINK macros:
@class AVMediaSelectionOption;
@interface AVMediaSelectionOption (OutOfBandExtensions)
@property (nonatomic, readonly) NSString* outOfBandSource;
@property (nonatomic, readonly) NSString* outOfBandIdentifier;

@interface AVURLAsset (WebKitExtensions)
@property (nonatomic, readonly) NSURL *resolvedURL;

typedef AVPlayer AVPlayerType;
typedef AVPlayerItem AVPlayerItemType;
typedef AVPlayerItemLegibleOutput AVPlayerItemLegibleOutputType;
typedef AVPlayerItemVideoOutput AVPlayerItemVideoOutputType;
typedef AVMetadataItem AVMetadataItemType;
typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
typedef AVMediaSelectionOption AVMediaSelectionOptionType;
typedef AVAssetCache AVAssetCacheType;

#pragma mark - Soft Linking

// Soft-linking headers must be included last since they #define functions, constants, etc.
#import <pal/cf/CoreMediaSoftLink.h>
SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)

SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayer)
SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerItem)
SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerItemVideoOutput)
SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerLayer)
SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVURLAsset)
SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVAssetImageGenerator)
SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVMetadataItem)
SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVAssetCache)

SOFT_LINK_CLASS(CoreImage, CIContext)
SOFT_LINK_CLASS(CoreImage, CIImage)

SOFT_LINK_CONSTANT(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicVisual, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicAudible, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVMediaTypeClosedCaption, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVMediaTypeVideo, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVMediaTypeAudio, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVMediaTypeMetadata, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVLayerVideoGravityResize, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVStreamingKeyDeliveryContentKeyType, NSString *)

SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVURLAssetOutOfBandMIMETypeKey, NSString *)
SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVURLAssetUseClientURLLoadingExclusively, NSString *)
#define AVPlayer initAVPlayer()
#define AVPlayerItem initAVPlayerItem()
#define AVPlayerLayer initAVPlayerLayer()
#define AVURLAsset initAVURLAsset()
#define AVAssetImageGenerator initAVAssetImageGenerator()
#define AVPlayerItemVideoOutput initAVPlayerItemVideoOutput()
#define AVMetadataItem initAVMetadataItem()
#define AVAssetCache initAVAssetCache()

#define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
#define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
#define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
#define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
#define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
#define AVMediaTypeVideo getAVMediaTypeVideo()
#define AVMediaTypeAudio getAVMediaTypeAudio()
#define AVMediaTypeMetadata getAVMediaTypeMetadata()
#define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
#define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
#define AVURLAssetOutOfBandMIMETypeKey getAVURLAssetOutOfBandMIMETypeKey()
#define AVURLAssetUseClientURLLoadingExclusively getAVURLAssetUseClientURLLoadingExclusively()
#define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
#define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
#define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
#define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
#define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
#define AVStreamingKeyDeliveryContentKeyType getAVStreamingKeyDeliveryContentKeyType()
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
typedef AVMediaSelectionOption AVMediaSelectionOptionType;

SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
SOFT_LINK_CLASS(AVFoundation, AVOutputContext)

SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicLegible, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVMediaTypeSubtitle, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)

#define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
#define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
#define AVMediaSelectionOption getAVMediaSelectionOptionClass()
#define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
#define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
#define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
#define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
#if ENABLE(AVF_CAPTIONS)

SOFT_LINK_CONSTANT(AVFoundation, AVURLAssetCacheKey, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVURLAssetUsesNoPersistentCacheKey, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicIsAuxiliaryContent, NSString *)

#define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
#define AVURLAssetCacheKey getAVURLAssetCacheKey()
#define AVURLAssetUsesNoPersistentCacheKey getAVURLAssetUsesNoPersistentCacheKey()
#define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
#define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
#define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
#define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
#define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
#define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
#define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
#define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
#define AVMediaCharacteristicIsAuxiliaryContent getAVMediaCharacteristicIsAuxiliaryContent()
#if ENABLE(DATACUE_VALUE)

SOFT_LINK_CONSTANT(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString *)
SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVMetadataKeySpaceISOUserData, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVMetadataKeySpaceiTunes, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVMetadataKeySpaceID3, NSString *)

#define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
#define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
#define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
#define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
#define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()

#if PLATFORM(IOS_FAMILY)

SOFT_LINK_CONSTANT(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVURLAssetHTTPCookiesKey, NSString *)
SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVURLAssetRequiresCustomURLLoadingKey, NSString *)

#define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
#define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
#define AVURLAssetHTTPCookiesKey getAVURLAssetHTTPCookiesKey()
#define AVURLAssetRequiresCustomURLLoadingKey getAVURLAssetRequiresCustomURLLoadingKey()

SOFT_LINK_FRAMEWORK(MediaToolbox)
SOFT_LINK_OPTIONAL(MediaToolbox, MTEnableCaption2015Behavior, Boolean, (), ())

#if PLATFORM(IOS_FAMILY)

SOFT_LINK_PRIVATE_FRAMEWORK(Celestial)
SOFT_LINK_CONSTANT(Celestial, AVController_RouteDescriptionKey_RouteCurrentlyPicked, NSString *)
SOFT_LINK_CONSTANT(Celestial, AVController_RouteDescriptionKey_RouteName, NSString *)
SOFT_LINK_CONSTANT(Celestial, AVController_RouteDescriptionKey_AVAudioRouteName, NSString *)
#define AVController_RouteDescriptionKey_RouteCurrentlyPicked getAVController_RouteDescriptionKey_RouteCurrentlyPicked()
#define AVController_RouteDescriptionKey_RouteName getAVController_RouteDescriptionKey_RouteName()
#define AVController_RouteDescriptionKey_AVAudioRouteName getAVController_RouteDescriptionKey_AVAudioRouteName()
#endif // HAVE(CELESTIAL)

#endif // PLATFORM(IOS_FAMILY)
using namespace WebCore;

enum MediaPlayerAVFoundationObservationContext {
    MediaPlayerAVFoundationObservationContextPlayerItem,
    MediaPlayerAVFoundationObservationContextPlayerItemTrack,
    MediaPlayerAVFoundationObservationContextPlayer,
    MediaPlayerAVFoundationObservationContextAVPlayerLayer,

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
#else
@interface WebCoreAVFMovieObserver : NSObject
#endif
    WeakPtr<MediaPlayerPrivateAVFoundationObjC> m_player;
    GenericTaskQueue<Timer, std::atomic<unsigned>> m_taskQueue;
    int m_delayCallbacks;
-(id)initWithPlayer:(WeakPtr<MediaPlayerPrivateAVFoundationObjC>&&)callback;
-(void)metadataLoaded;
-(void)didEnd:(NSNotification *)notification;
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    WeakPtr<MediaPlayerPrivateAVFoundationObjC> m_player;
    GenericTaskQueue<Timer, std::atomic<unsigned>> m_taskQueue;
- (id)initWithPlayer:(WeakPtr<MediaPlayerPrivateAVFoundationObjC>&&)player;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    WeakPtr<MediaPlayerPrivateAVFoundationObjC> m_player;
- (id)initWithPlayer:(WeakPtr<MediaPlayerPrivateAVFoundationObjC>&&)player;
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;

static NSArray *assetMetadataKeyNames();
static NSArray *itemKVOProperties();
static NSArray *assetTrackMetadataKeyNames();
static NSArray *playerKVOProperties();
static AVAssetTrack* firstEnabledTrack(NSArray* tracks);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
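
// Lazily-created serial dispatch queue on which the WebCoreAVFLoaderDelegate callbacks are delivered.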
static dispatch_queue_t globalLoaderDelegateQueue()
    static dispatch_queue_t globalQueue;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        globalQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
static dispatch_queue_t globalPullDelegateQueue()
    static dispatch_queue_t globalQueue;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        globalQueue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);

void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
    registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player); },
        getSupportedTypes, supportsType, originsInMediaCache, clearMediaCache, clearMediaCacheForOrigins, supportsKeySystem);
    ASSERT(AVFoundationMIMETypeCache::singleton().isAvailable());
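
// Returns the AVAssetCache used for on-disk media caching, rooted either at the caller-supplied path
// or at a "MediaCache" directory inside NSTemporaryDirectory().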
static AVAssetCacheType *assetCacheForPath(const String& path)
    NSURL *assetCacheURL;

        assetCacheURL = [[NSURL fileURLWithPath:NSTemporaryDirectory()] URLByAppendingPathComponent:@"MediaCache" isDirectory:YES];
        assetCacheURL = [NSURL fileURLWithPath:path isDirectory:YES];

    return [initAVAssetCache() assetCacheWithURL:assetCacheURL];
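
// Collects the security origins of every entry in the media cache at the given path by treating each cache key as a URL.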
424 HashSet<RefPtr<SecurityOrigin>> MediaPlayerPrivateAVFoundationObjC::originsInMediaCache(const String& path)
426 HashSet<RefPtr<SecurityOrigin>> origins;
427 for (NSString *key in [assetCacheForPath(path) allKeys]) {
428 URL keyAsURL = URL(URL(), key);
429 if (keyAsURL.isValid())
430 origins.add(SecurityOrigin::create(keyAsURL));
435 static WallTime toSystemClockTime(NSDate *date)
438 return WallTime::fromRawSeconds(date.timeIntervalSince1970);
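
// Removes asset-cache entries (and leftover "CachedMedia-" files on disk) modified after modifiedSince;
// a zero or negative modifiedSince removes the entire cache directory.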
void MediaPlayerPrivateAVFoundationObjC::clearMediaCache(const String& path, WallTime modifiedSince)
    AVAssetCacheType* assetCache = assetCacheForPath(path);

    for (NSString *key in [assetCache allKeys]) {
        if (toSystemClockTime([assetCache lastModifiedDateOfEntryForKey:key]) > modifiedSince)
            [assetCache removeEntryForKey:key];

    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSURL *baseURL = [assetCache URL];

    if (modifiedSince <= WallTime::fromRawSeconds(0)) {
        [fileManager removeItemAtURL:baseURL error:nil];

    NSArray *propertyKeys = @[NSURLNameKey, NSURLContentModificationDateKey, NSURLIsRegularFileKey];
    NSDirectoryEnumerator *enumerator = [fileManager enumeratorAtURL:baseURL includingPropertiesForKeys:
        propertyKeys options:NSDirectoryEnumerationSkipsSubdirectoryDescendants

    RetainPtr<NSMutableArray> urlsToDelete = adoptNS([[NSMutableArray alloc] init]);
    for (NSURL *fileURL : enumerator) {
        NSDictionary *fileAttributes = [fileURL resourceValuesForKeys:propertyKeys error:nil];
        if (![fileAttributes[NSURLNameKey] hasPrefix:@"CachedMedia-"])
        if (![fileAttributes[NSURLIsRegularFileKey] boolValue])
        if (toSystemClockTime(fileAttributes[NSURLContentModificationDateKey]) <= modifiedSince)
        [urlsToDelete addObject:fileURL];

    for (NSURL *fileURL in urlsToDelete.get())
        [fileManager removeItemAtURL:fileURL error:nil];
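
// Removes cache entries whose keys parse to a URL belonging to one of the given origins.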
void MediaPlayerPrivateAVFoundationObjC::clearMediaCacheForOrigins(const String& path, const HashSet<RefPtr<SecurityOrigin>>& origins)
    AVAssetCacheType* assetCache = assetCacheForPath(path);
    for (NSString *key in [assetCache allKeys]) {
        URL keyAsURL = URL(URL(), key);
        if (keyAsURL.isValid()) {
            if (origins.contains(SecurityOrigin::create(keyAsURL)))
                [assetCache removeEntryForKey:key];
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_videoFullscreenLayerManager(std::make_unique<VideoFullscreenLayerManagerObjC>())
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithPlayer:m_weakPtrFactory.createWeakPtr(*this)]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithPlayer:m_weakPtrFactory.createWeakPtr(*this)]))
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithPlayer:m_weakPtrFactory.createWeakPtr(*this)]))
    , m_currentTextTrack(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_shouldBufferData(true)
    , m_cachedIsReadyForDisplay(false)
    , m_haveBeenAskedToCreateLayer(false)
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    , m_allowsWirelessVideoPlayback(true)
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
    m_weakPtrFactory.revokeAll();

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];

    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    [m_videoOutput setDelegate:nil queue:0];
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
    INFO_LOG(LOGIDENTIFIER);
    tearDownVideoRendering();

    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    [m_avAsset.get() cancelLoading];

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (m_legibleOutput) {
        [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;

    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        m_avPlayerItem = nil;

    [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
    m_timeObserver = nil;

    for (NSString *keyName in playerKVOProperties())
        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];

    setShouldObserveTimeControlStatus(false);
    [m_avPlayer replaceCurrentItemWithPlayerItem:nil];
#if !PLATFORM(IOS_FAMILY)
    [m_avPlayer setOutputContext:nil];

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = MediaTime::zeroTime();

    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    m_provider->setPlayerItem(nullptr);
    m_provider->setAudioTrack(nullptr);

    setIgnoreLoadStateChanges(false);
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
    return m_haveBeenAskedToCreateLayer;

bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    return m_imageGenerator;

void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createImageGenerator();

void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
    INFO_LOG(LOGIDENTIFIER);

    if (!m_avAsset || m_imageGenerator)

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];

void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
    destroyImageGenerator();

void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
    if (!m_imageGenerator)

    INFO_LOG(LOGIDENTIFIER);
    m_imageGenerator = 0;
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
    if (!m_avPlayer || m_haveBeenAskedToCreateLayer)

    callOnMainThread([this, weakThis = makeWeakPtr(*this)] {
        if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
        m_haveBeenAskedToCreateLayer = true;
        createAVPlayerLayer();
#if USE(VIDEOTOOLBOX) && HAVE(AVFOUNDATION_VIDEO_OUTPUT)
        player()->client().mediaPlayerRenderingModeChanged(player());

void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
    m_videoLayer = adoptNS([[AVPlayerLayer alloc] init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
    IntSize defaultSize = snappedIntRect(player()->client().mediaPlayerContentBoxRect()).size();
    INFO_LOG(LOGIDENTIFIER);

    m_videoFullscreenLayerManager->setVideoLayer(m_videoLayer.get(), defaultSize);

#if PLATFORM(IOS_FAMILY) && !PLATFORM(WATCHOS)
    if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(player()->fullscreenMode() & MediaPlayer::VideoFullscreenModePictureInPicture)];
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
    INFO_LOG(LOGIDENTIFIER);

    [m_videoLayer removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer setPlayer:nil];
    m_videoFullscreenLayerManager->didDestroyVideoLayer();

MediaTime MediaPlayerPrivateAVFoundationObjC::getStartDate() const
    // Date changes as the track's playback position changes. Must subtract currentTime (offset in seconds) from date offset to get date beginning
    double date = [[m_avPlayerItem currentDate] timeIntervalSince1970] * 1000;

    // No live streams were made during the epoch (1970). AVFoundation returns 0 if the media file doesn't have a start date
        return MediaTime::invalidTime();

    double currentTime = CMTimeGetSeconds([m_avPlayerItem currentTime]) * 1000;

    // Rounding due to second offset error when subtracting.
    return MediaTime::createWithDouble(round(date - currentTime));

bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
    if (currentRenderingMode() == MediaRenderingToLayer)
        return m_cachedIsReadyForDisplay;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (m_videoOutput && (m_lastPixelBuffer || [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]]))

    return m_videoFrameHasDrawn;
#if ENABLE(AVF_CAPTIONS)
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
    static bool manualSelectionMode = MTEnableCaption2015BehaviorPtr() && MTEnableCaption2015BehaviorPtr()();
    if (manualSelectionMode)
        return @[ AVMediaCharacteristicIsAuxiliaryContent ];

    // FIXME: Match these to correct types:
    if (kind == PlatformTextTrack::Caption)
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];

    if (kind == PlatformTextTrack::Subtitle)
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];

    if (kind == PlatformTextTrack::Description)
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, nil];

    if (kind == PlatformTextTrack::Forced)
        return [NSArray arrayWithObjects: AVMediaCharacteristicContainsOnlyForcedSubtitles, nil];

    return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];

void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()

void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();

    for (auto& textTrack : m_textTracks) {
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)

        RefPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> currentOption = trackPrivate->mediaSelectionOption();

        for (auto& track : outOfBandTrackSources) {
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();
            if (![[currentOption.get() outOfBandIdentifier] isEqual:(__bridge NSString *)uniqueID.get()])

            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;

            textTrack->setMode(mode);
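
// Returns the canonical form of the URL, as computed by +[NSURLProtocol canonicalRequestForRequest:].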
static NSURL *canonicalURL(const URL& url)
    NSURL *cocoaURL = url;

    RetainPtr<NSURLRequest> request = adoptNS([[NSURLRequest alloc] initWithURL:cocoaURL]);

    NSURLRequest *canonicalRequest = [NSURLProtocol canonicalRequestForRequest:request.get()];
    if (!canonicalRequest)

    return [canonicalRequest URL];

#if PLATFORM(IOS_FAMILY)
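// Converts a WebCore Cookie into the NSHTTPCookie dictionary form handed to AVURLAssetHTTPCookiesKey below.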
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
    RetainPtr<NSMutableDictionary> properties = adoptNS([[NSMutableDictionary alloc] init]);
    [properties setDictionary:@{
        NSHTTPCookieName: cookie.name,
        NSHTTPCookieValue: cookie.value,
        NSHTTPCookieDomain: cookie.domain,
        NSHTTPCookiePath: cookie.path,
        NSHTTPCookieExpires: [NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)],

        [properties setObject:@YES forKey:NSHTTPCookieSecure];
        [properties setObject:@YES forKey:NSHTTPCookieDiscard];

    return [NSHTTPCookie cookieWithProperties:properties.get()];
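
// Builds the AVURLAsset for the URL, passing reference restrictions, HTTP header fields, out-of-band text
// tracks, cookies and caching preferences through the asset creation options, and wires up the resource
// loader delegate.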
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const URL& url)
    INFO_LOG(LOGIDENTIFIER);

    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);

    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject:@YES forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

    if (canLoadAVURLAssetUseClientURLLoadingExclusively())
        [options setObject:@YES forKey:AVURLAssetUseClientURLLoadingExclusively];
#if PLATFORM(IOS_FAMILY)
    else if (canLoadAVURLAssetRequiresCustomURLLoadingKey())
        [options setObject:@YES forKey:AVURLAssetRequiresCustomURLLoadingKey];
    // FIXME: rdar://problem/20354688
    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty() && canLoadAVURLAssetClientBundleIdentifierKey())
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];

    auto type = player()->contentMIMEType();
    if (canLoadAVURLAssetOutOfBandMIMETypeKey() && !type.isEmpty() && !player()->contentMIMETypeWasInferredFromExtension()) {
        auto codecs = player()->contentTypeCodecs();
        if (!codecs.isEmpty()) {
            NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)type, (NSString *)codecs];
            [options setObject:typeString forKey:AVURLAssetOutOfBandMIMETypeKey];
            [options setObject:(NSString *)type forKey:AVURLAssetOutOfBandMIMETypeKey];

#if ENABLE(AVF_CAPTIONS)
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: (__bridge NSString *)label.get(),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: (__bridge NSString *)language.get(),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: (__bridge NSString *)uniqueID.get(),
                AVOutOfBandAlternateTrackSourceKey: (__bridge NSString *)url.get(),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];

#if PLATFORM(IOS_FAMILY)
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];

#if PLATFORM(IOS_FAMILY)
    Vector<Cookie> cookies;
    if (player()->getRawCookies(url, cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        if (canLoadAVURLAssetHTTPCookiesKey())
            [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];

    bool usePersistentCache = player()->client().mediaPlayerShouldUsePersistentCache();
    [options setObject:@(!usePersistentCache) forKey:AVURLAssetUsesNoPersistentCacheKey];

    if (usePersistentCache)
        [options setObject:assetCacheForPath(player()->client().mediaPlayerMediaCacheDirectory()) forKey:AVURLAssetCacheKey];

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([[AVURLAsset alloc] initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    [resourceLoader setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];

    if (DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled()
        && [resourceLoader respondsToSelector:@selector(setURLSession:)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegate)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegateQueue)]) {
        RefPtr<PlatformMediaResourceLoader> mediaResourceLoader = player()->createResourceLoader();
        if (mediaResourceLoader)
            resourceLoader.URLSession = (NSURLSession *)[[[WebCoreNSURLSession alloc] initWithResourceLoader:*mediaResourceLoader delegate:resourceLoader.URLSessionDataDelegate delegateQueue:resourceLoader.URLSessionDataDelegateQueue] autorelease];

    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
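
// Replaces the player's current item, dispatching to the main queue when not already on the main thread.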
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItemType *item)
    if (pthread_main_np()) {
        [m_avPlayer replaceCurrentItemWithPlayerItem:item];

    RetainPtr<AVPlayerType> strongPlayer = m_avPlayer.get();
    RetainPtr<AVPlayerItemType> strongItem = item;
    dispatch_async(dispatch_get_main_queue(), [strongPlayer, strongItem] {
        [strongPlayer replaceCurrentItemWithPlayerItem:strongItem.get()];

void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
    INFO_LOG(LOGIDENTIFIER);

    setDelayCallbacks(true);

    m_avPlayer = adoptNS([[AVPlayer alloc] init]);
    for (NSString *keyName in playerKVOProperties())
        [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];

    setShouldObserveTimeControlStatus(true);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];

#if ENABLE(WIRELESS_PLAYBACK_TARGET) && !PLATFORM(IOS_FAMILY)
    if (m_shouldPlayToPlaybackTarget) {
        // Clear m_shouldPlayToPlaybackTarget so setShouldPlayToPlaybackTarget() doesn't return without doing anything.
        m_shouldPlayToPlaybackTarget = false;
        setShouldPlayToPlaybackTarget(true);

#if PLATFORM(IOS_FAMILY) && !PLATFORM(IOS_FAMILY_SIMULATOR) && !PLATFORM(IOSMAC)
    setShouldDisableSleep(player()->shouldDisableSleep());

    // Clear m_muted so setMuted doesn't return without doing anything.
    [m_avPlayer.get() setMuted:m_muted];

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);

void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
    INFO_LOG(LOGIDENTIFIER);

    setDelayCallbacks(true);

    // Create the player item so we can load media data.
    m_avPlayerItem = adoptNS([[AVPlayerItem alloc] initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    setAVPlayerItem(m_avPlayerItem.get());

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([[AVPlayerItemLegibleOutput alloc] initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    m_provider->setPlayerItem(m_avPlayerItem.get());
    m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();

    setDelayCallbacks(false);
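
// Asynchronously loads the asset's "playable" and "tracks" keys (once per asset) and notifies the main thread
// when playability is known.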
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
    if (m_haveCheckedPlayability)
    m_haveCheckedPlayability = true;

    INFO_LOG(LOGIDENTIFIER);
    auto weakThis = makeWeakPtr(*this);

    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObjects:@"playable", @"tracks", nil] completionHandler:^{
        callOnMainThread([weakThis] {
            weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);

void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
    INFO_LOG(LOGIDENTIFIER);

    OSObjectPtr<dispatch_group_t> metadataLoadingGroup = adoptOSObject(dispatch_group_create());
    dispatch_group_enter(metadataLoadingGroup.get());
    auto weakThis = makeWeakPtr(*this);
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{
        callOnMainThread([weakThis, metadataLoadingGroup] {
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup.get());
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup.get());

            dispatch_group_leave(metadataLoadingGroup.get());

    dispatch_group_notify(metadataLoadingGroup.get(), dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            [weakThis->m_objcObserver.get() metadataLoaded];
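
// Maps the cached AVPlayerItem state (status, buffering flags, likely-to-keep-up) onto
// MediaPlayerPrivateAVFoundation's ItemStatus enumeration.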
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    if (m_cachedItemStatus == AVPlayerItemStatusUnknown)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    if (m_cachedItemStatus == AVPlayerItemStatusFailed)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;

PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
    return m_videoFullscreenLayerManager->videoInlineLayer();

void MediaPlayerPrivateAVFoundationObjC::updateVideoFullscreenInlineImage()
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    updateLastImage(UpdateType::UpdateSynchronously);
    m_videoFullscreenLayerManager->updateVideoFullscreenInlineImage(m_lastImage);

void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer, Function<void()>&& completionHandler)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    updateLastImage(UpdateType::UpdateSynchronously);
    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler), m_lastImage);
    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler), nil);

    updateDisableExternalPlayback();

void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);

void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
    m_videoFullscreenGravity = gravity;

    NSString *videoGravity = AVLayerVideoGravityResizeAspect;
    if (gravity == MediaPlayer::VideoGravityResize)
        videoGravity = AVLayerVideoGravityResize;
    else if (gravity == MediaPlayer::VideoGravityResizeAspect)
        videoGravity = AVLayerVideoGravityResizeAspect;
    else if (gravity == MediaPlayer::VideoGravityResizeAspectFill)
        videoGravity = AVLayerVideoGravityResizeAspectFill;
        ASSERT_NOT_REACHED();

    if ([m_videoLayer videoGravity] == videoGravity)

    [m_videoLayer setVideoGravity:videoGravity];
    syncTextTrackBounds();

void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenMode(MediaPlayer::VideoFullscreenMode mode)
#if PLATFORM(IOS_FAMILY) && !PLATFORM(WATCHOS)
    if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(mode & MediaPlayer::VideoFullscreenModePictureInPicture)];
    updateDisableExternalPlayback();

void MediaPlayerPrivateAVFoundationObjC::videoFullscreenStandbyChanged()
#if PLATFORM(IOS_FAMILY) && !PLATFORM(WATCHOS)
    updateDisableExternalPlayback();
#if PLATFORM(IOS_FAMILY)
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
    if (m_currentMetaData)
        return m_currentMetaData.get();

String MediaPlayerPrivateAVFoundationObjC::accessLog() const
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemAccessLog *log = [m_avPlayerItem.get() accessLog];
    RetainPtr<NSString> logString = adoptNS([[NSString alloc] initWithData:[log extendedLogData] encoding:[log extendedLogDataStringEncoding]]);

    return logString.get();

String MediaPlayerPrivateAVFoundationObjC::errorLog() const
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *log = [m_avPlayerItem.get() errorLog];
    RetainPtr<NSString> logString = adoptNS([[NSString alloc] initWithData:[log extendedLogData] encoding:[log extendedLogDataStringEncoding]]);

    return logString.get();

void MediaPlayerPrivateAVFoundationObjC::didEnd()
    m_requestedPlaying = false;
    MediaPlayerPrivateAVFoundation::didEnd();

void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    [m_videoLayer.get() setHidden:!isVisible];
    [CATransaction commit];
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
    INFO_LOG(LOGIDENTIFIER);
    if (!metaDataAvailable())

    m_requestedPlaying = true;
    setPlayerRate(m_requestedRate);

void MediaPlayerPrivateAVFoundationObjC::platformPause()
    INFO_LOG(LOGIDENTIFIER);
    if (!metaDataAvailable())

    m_requestedPlaying = false;

bool MediaPlayerPrivateAVFoundationObjC::platformPaused() const
    return m_cachedTimeControlStatus == AVPlayerTimeControlStatusPaused;

MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
    // Do not ask the asset for duration before it has been loaded or it will fetch the
    // answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();

    // Check the AVItem if we have one and it has loaded duration, some assets never report duration.
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        cmDuration = [m_avPlayerItem.get() duration];
        cmDuration = [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return PAL::toMediaTime(cmDuration);

    if (CMTIME_IS_INDEFINITE(cmDuration))
        return MediaTime::positiveInfiniteTime();

    INFO_LOG(LOGIDENTIFIER, "returning invalid time");
    return MediaTime::invalidTime();

MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime itemTime = [m_avPlayerItem.get() currentTime];
    if (CMTIME_IS_NUMERIC(itemTime))
        return std::max(PAL::toMediaTime(itemTime), MediaTime::zeroTime());

    return MediaTime::zeroTime();

void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = PAL::toCMTime(time);
    CMTime cmBefore = PAL::toCMTime(negativeTolerance);
    CMTime cmAfter = PAL::toCMTime(positiveTolerance);

    // [AVPlayerItem seekToTime] will throw an exception if toleranceBefore is negative.
    if (CMTimeCompare(cmBefore, kCMTimeZero) < 0)
        cmBefore = kCMTimeZero;

    auto weakThis = makeWeakPtr(*this);

    setShouldObserveTimeControlStatus(false);
    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();

            _this->setShouldObserveTimeControlStatus(true);
            _this->seekCompleted(finished);

    setDelayCallbacks(false);
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
#if PLATFORM(IOS_FAMILY)
    if ([[PAL::getUIDeviceClass() currentDevice] userInterfaceIdiom] != UIUserInterfaceIdiomPad)

    [m_avPlayer.get() setVolume:volume];

void MediaPlayerPrivateAVFoundationObjC::setMuted(bool muted)
    if (m_muted == muted)

    INFO_LOG(LOGIDENTIFIER, "- ", muted);

    [m_avPlayer.get() setMuted:m_muted];

void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())

    INFO_LOG(LOGIDENTIFIER, "- ", closedCaptionsVisible);

void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
    m_requestedRate = rate;
    if (m_requestedPlaying)
        setPlayerRate(rate);

void MediaPlayerPrivateAVFoundationObjC::setPlayerRate(double rate)
    setDelayCallbacks(true);
    m_cachedRate = rate;
    setShouldObserveTimeControlStatus(false);
    [m_avPlayer setRate:rate];
    m_cachedTimeControlStatus = [m_avPlayer timeControlStatus];
    setShouldObserveTimeControlStatus(true);
    setDelayCallbacks(false);

double MediaPlayerPrivateAVFoundationObjC::rate() const
    if (!metaDataAvailable())

    return m_cachedRate;

double MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesLastModifiedTime() const
#if (PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101300) || (PLATFORM(IOS_FAMILY) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000)
    return [m_avPlayerItem seekableTimeRangesLastModifiedTime];

double MediaPlayerPrivateAVFoundationObjC::liveUpdateInterval() const
#if (PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101300) || (PLATFORM(IOS_FAMILY) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000)
    return [m_avPlayerItem liveUpdateInterval];

void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
    [m_avPlayerItem setAudioTimePitchAlgorithm:(preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
    auto timeRanges = std::make_unique<PlatformTimeRanges>();

    if (!m_avPlayerItem)

    for (NSValue *thisRangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (CMTIMERANGE_IS_VALID(timeRange) && !CMTIMERANGE_IS_EMPTY(timeRange))
            timeRanges->add(PAL::toMediaTime(timeRange.start), PAL::toMediaTime(CMTimeRangeGetEnd(timeRange)));

MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    MediaTime minTimeSeekable = MediaTime::positiveInfiniteTime();
    bool hasValidRange = false;
    for (NSValue *thisRangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(timeRange) || CMTIMERANGE_IS_EMPTY(timeRange))

        hasValidRange = true;
        MediaTime startOfRange = PAL::toMediaTime(timeRange.start);
        if (minTimeSeekable > startOfRange)
            minTimeSeekable = startOfRange;

    return hasValidRange ? minTimeSeekable : MediaTime::zeroTime();

MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    MediaTime maxTimeSeekable;
    for (NSValue *thisRangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(timeRange) || CMTIMERANGE_IS_EMPTY(timeRange))

        MediaTime endOfRange = PAL::toMediaTime(CMTimeRangeGetEnd(timeRange));
        if (maxTimeSeekable < endOfRange)
            maxTimeSeekable = endOfRange;

    return maxTimeSeekable;

MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    MediaTime maxTimeLoaded;
    for (NSValue *thisRangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(timeRange) || CMTIMERANGE_IS_EMPTY(timeRange))

        MediaTime endOfRange = PAL::toMediaTime(CMTimeRangeGetEnd(timeRange));
        if (maxTimeLoaded < endOfRange)
            maxTimeLoaded = endOfRange;

    return maxTimeLoaded;
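
// Sums the total sample data length of every cached track, caching the result in m_cachedTotalBytes.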
1540 unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
1542 if (!metaDataAvailable())
1545 if (m_cachedTotalBytes)
1546 return m_cachedTotalBytes;
1548 for (AVPlayerItemTrack *thisTrack in m_cachedTracks.get())
1549 m_cachedTotalBytes += [[thisTrack assetTrack] totalSampleDataLength];
1551 return m_cachedTotalBytes;
1554 void MediaPlayerPrivateAVFoundationObjC::setAsset(RetainPtr<id>&& asset)
1556 m_avAsset = WTFMove(asset);
1559 MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
1562 return MediaPlayerAVAssetStatusDoesNotExist;
1564 for (NSString *keyName in assetMetadataKeyNames()) {
1565 NSError *error = nil;
1566 AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];
1569 ERROR_LOG(LOGIDENTIFIER, "failed for ", [keyName UTF8String], ", error = ", [[error localizedDescription] UTF8String]);
1571 if (keyStatus < AVKeyValueStatusLoaded)
1572 return MediaPlayerAVAssetStatusLoading;// At least one key is not loaded yet.
1574 if (keyStatus == AVKeyValueStatusFailed)
1575 return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.
1577 if (keyStatus == AVKeyValueStatusCancelled)
1578 return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
1581 if (!player()->shouldCheckHardwareSupport())
1582 m_tracksArePlayable = true;
1584 if (!m_tracksArePlayable) {
1585 m_tracksArePlayable = true;
1586 for (AVAssetTrack *track in [m_avAsset tracks]) {
1587 if (!assetTrackMeetsHardwareDecodeRequirements(track, player()->mediaContentTypesRequiringHardwareSupport())) {
1588 m_tracksArePlayable = false;
1594 if ([[m_avAsset.get() valueForKey:@"playable"] boolValue] && m_tracksArePlayable.value())
1595 return MediaPlayerAVAssetStatusPlayable;
1597 return MediaPlayerAVAssetStatusLoaded;
1600 long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
1605 NSError *error = nil;
1606 [m_avAsset statusOfValueForKey:@"playable" error:&error];
1607 return [error code];
1610 void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& rect)
1612 if (!metaDataAvailable() || context.paintingDisabled())
1615 setDelayCallbacks(true);
1616 BEGIN_BLOCK_OBJC_EXCEPTIONS;
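// Prefer the AVPlayerItemVideoOutput path when it already has a frame for the current time; otherwise fall back to the AVAssetImageGenerator.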
1618 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
1619 if (videoOutputHasAvailableFrame())
1620 paintWithVideoOutput(context, rect);
1623 paintWithImageGenerator(context, rect);
1625 END_BLOCK_OBJC_EXCEPTIONS;
1626 setDelayCallbacks(false);
1628 m_videoFrameHasDrawn = true;
1631 void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext& context, const FloatRect& rect)
1633 if (!metaDataAvailable() || context.paintingDisabled())
1636 // We can ignore the request if we are already rendering to a layer.
1637 if (currentRenderingMode() == MediaRenderingToLayer)
1640 // paint() is best effort, so only paint if we already have an image generator or video output available.
1641 if (!hasContextRenderer())
1644 paintCurrentFrameInContext(context, rect);
1647 void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext& context, const FloatRect& rect)
1649 INFO_LOG(LOGIDENTIFIER);
1651 RetainPtr<CGImageRef> image = createImageForTimeInRect(currentTime(), rect);
1653 GraphicsContextStateSaver stateSaver(context);
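// CoreGraphics draws images with a bottom-left origin, so translate to the bottom of the destination rect and flip vertically before calling CGContextDrawImage.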
1654 context.translate(rect.x(), rect.y() + rect.height());
1655 context.scale(FloatSize(1.0f, -1.0f));
1656 context.setImageInterpolationQuality(InterpolationLow);
1657 IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
1658 CGContextDrawImage(context.platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
1662 RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
1664 if (!m_imageGenerator)
1665 createImageGenerator();
1666 ASSERT(m_imageGenerator);
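// Ask the generator for the frame nearest the requested time (expressed with a 600 units-per-second timescale) and convert it to sRGB so it matches the destination color space.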
1668 #if !RELEASE_LOG_DISABLED
1669 MonotonicTime start = MonotonicTime::now();
1672 [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
1673 RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
1674 RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), sRGBColorSpaceRef()));
1676 #if !RELEASE_LOG_DISABLED
1677 DEBUG_LOG(LOGIDENTIFIER, "creating image took ", (MonotonicTime::now() - start).seconds());
1683 void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& supportedTypes)
1685 supportedTypes = AVFoundationMIMETypeCache::singleton().types();
1688 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
1689 static bool keySystemIsSupported(const String& keySystem)
1691 if (equalIgnoringASCIICase(keySystem, "com.apple.fps") || equalIgnoringASCIICase(keySystem, "com.apple.fps.1_0") || equalIgnoringASCIICase(keySystem, "org.w3c.clearkey"))
1697 MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
1699 #if ENABLE(MEDIA_SOURCE)
1700 if (parameters.isMediaSource)
1701 return MediaPlayer::IsNotSupported;
1703 #if ENABLE(MEDIA_STREAM)
1704 if (parameters.isMediaStream)
1705 return MediaPlayer::IsNotSupported;
1708 auto containerType = parameters.type.containerType();
1709 if (isUnsupportedMIMEType(containerType))
1710 return MediaPlayer::IsNotSupported;
1712 if (!staticMIMETypeList().contains(containerType) && !AVFoundationMIMETypeCache::singleton().canDecodeType(containerType))
1713 return MediaPlayer::IsNotSupported;
1716 // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
1717 if (parameters.type.codecs().isEmpty())
1718 return MediaPlayer::MayBeSupported;
1720 if (!contentTypeMeetsHardwareDecodeRequirements(parameters.type, parameters.contentTypesRequiringHardwareSupport))
1721 return MediaPlayer::IsNotSupported;
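// Build an RFC 6381 style extended MIME type ("container; codecs=...") and let AVFoundation give the definitive answer for this container/codec combination.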
1723 NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)containerType, (NSString *)parameters.type.parameter(ContentType::codecsParameter())];
1724 return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
1727 bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
1729 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
1730 if (!keySystem.isEmpty()) {
1731 // "Clear Key" is only supported with HLS:
1732 if (equalIgnoringASCIICase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringASCIICase(mimeType, "application/x-mpegurl"))
1733 return MediaPlayer::IsNotSupported;
1735 if (!keySystemIsSupported(keySystem))
1738 if (!mimeType.isEmpty() && isUnsupportedMIMEType(mimeType))
1741 if (!mimeType.isEmpty() && !staticMIMETypeList().contains(mimeType) && !AVFoundationMIMETypeCache::singleton().canDecodeType(mimeType))
1747 UNUSED_PARAM(keySystem);
1748 UNUSED_PARAM(mimeType);
1753 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1754 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
1755 static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
1757 if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
1758 [infoRequest setContentLength:keyData->byteLength()];
1759 [infoRequest setByteRangeAccessSupported:YES];
1762 if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
1763 long long start = [dataRequest currentOffset];
1764 long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);
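// Clamp the requested byte range to the key data; a request that starts beyond the end of the data cannot be satisfied.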
1766 if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
1767 [request finishLoadingWithError:nil];
1771 ASSERT(start <= std::numeric_limits<int>::max());
1772 ASSERT(end <= std::numeric_limits<int>::max());
1773 auto requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
1774 RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
1775 [dataRequest respondWithData:nsData.get()];
1778 [request finishLoading];
1782 bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
1784 String scheme = [[[avRequest request] URL] scheme];
1785 String keyURI = [[[avRequest request] URL] absoluteString];
1787 #if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
1788 if (scheme == "skd") {
1789 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
1790 // Create an initData with the following layout:
1791 // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
1792 unsigned keyURISize = keyURI.length() * sizeof(UChar);
1793 auto initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
1794 unsigned byteLength = initDataBuffer->byteLength();
1795 auto initDataView = JSC::DataView::create(initDataBuffer.copyRef(), 0, byteLength);
1796 initDataView->set<uint32_t>(0, keyURISize, true);
1798 auto keyURIArray = Uint16Array::create(initDataBuffer.copyRef(), 4, keyURI.length());
1799 keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);
1801 auto initData = Uint8Array::create(WTFMove(initDataBuffer), 0, byteLength);
1802 if (!player()->keyNeeded(initData.ptr()))
1805 m_keyURIToRequestMap.set(keyURI, avRequest);
1806 #if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
1810 RetainPtr<NSData> keyURIData = [keyURI dataUsingEncoding:NSUTF8StringEncoding allowLossyConversion:YES];
1811 m_keyID = SharedBuffer::create(keyURIData.get());
1812 player()->initializationDataEncountered("skd"_s, m_keyID->tryCreateArrayBuffer());
1813 setWaitingForKey(true);
1818 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
1819 if (scheme == "clearkey") {
1820 String keyID = [[[avRequest request] URL] resourceSpecifier];
1821 auto encodedKeyId = UTF8Encoding().encode(keyID, UnencodableHandling::URLEncodedEntities);
1823 auto initData = Uint8Array::create(encodedKeyId.size());
1824 initData->setRange(encodedKeyId.data(), encodedKeyId.size(), 0);
1826 auto keyData = player()->cachedKeyForKeyId(keyID);
1828 fulfillRequestWithKeyData(avRequest, keyData.get());
1832 if (!player()->keyNeeded(initData.ptr()))
1835 m_keyURIToRequestMap.set(keyID, avRequest);
1841 auto resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
1842 m_resourceLoaderMap.add((__bridge CFTypeRef)avRequest, resourceLoader.copyRef());
1843 resourceLoader->startLoading();
1847 void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
1849 String scheme = [[[avRequest request] URL] scheme];
1851 WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get((__bridge CFTypeRef)avRequest);
1854 resourceLoader->stopLoading();
1857 void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
1859 m_resourceLoaderMap.remove((__bridge CFTypeRef)avRequest);
1863 bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
1865 return AVFoundationLibrary() && isCoreMediaFrameworkAvailable();
1868 MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
1870 if (!metaDataAvailable())
1873 // FIXME - impossible to implement until rdar://8721510 is fixed.
1877 double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
1882 void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
1887 // Do not attempt to change the video gravity while in full screen mode.
1888 // See setVideoFullscreenGravity().
1889 if (m_videoFullscreenLayerManager->videoFullscreenLayer())
1892 [CATransaction begin];
1893 [CATransaction setDisableActions:YES];
1894 NSString* gravity = shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize;
1895 [m_videoLayer.get() setVideoGravity:gravity];
1896 [CATransaction commit];
1899 static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
1901 NSUInteger index = [tracks indexOfObjectPassingTest:^(id obj, NSUInteger, BOOL *) {
1902 return [static_cast<AVAssetTrack*>(obj) isEnabled];
1904 if (index == NSNotFound)
1906 return [tracks objectAtIndex:index];
1909 void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
1911 String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
1912 m_languageOfPrimaryAudioTrack = String();
1917 setDelayCharacteristicsChangedNotification(true);
1919 bool haveCCTrack = false;
1920 bool hasCaptions = false;
1922 // This is called whenever the tracks collection changes, so cache hasVideo and hasAudio since we are
1923 // asked about those fairly frequently.
1924 if (!m_avPlayerItem) {
1925 // We don't have a player item yet, so check with the asset because some assets support inspection
1926 // prior to becoming ready to play.
1927 AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
1928 setHasVideo(firstEnabledVideoTrack);
1929 setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
1930 #if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1931 hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
1933 auto size = firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize();
1934 // For videos with rotation tag set, the transformation above might return a CGSize instance with negative width or height.
1935 // See https://bugs.webkit.org/show_bug.cgi?id=172648.
1936 if (size.width() < 0)
1937 size.setWidth(-size.width());
1938 if (size.height() < 0)
1939 size.setHeight(-size.height());
1940 presentationSizeDidChange(size);
1942 bool hasVideo = false;
1943 bool hasAudio = false;
1944 bool hasMetaData = false;
1945 for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
1946 if ([track isEnabled]) {
1947 AVAssetTrack *assetTrack = [track assetTrack];
1948 NSString *mediaType = [assetTrack mediaType];
1949 if ([mediaType isEqualToString:AVMediaTypeVideo])
1951 else if ([mediaType isEqualToString:AVMediaTypeAudio])
1953 else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
1954 #if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1958 } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
1964 #if ENABLE(VIDEO_TRACK)
1965 updateAudioTracks();
1966 updateVideoTracks();
1968 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1969 hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
1970 hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
1974 // Always report that we have video if the AVPlayerLayer is ready for display, to work around
1975 // an AVFoundation bug which causes it to sometimes claim a track is disabled even
1977 setHasVideo(hasVideo || m_cachedIsReadyForDisplay);
1979 setHasAudio(hasAudio);
1980 #if ENABLE(DATACUE_VALUE)
1982 processMetadataTrack();
1986 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1987 AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
1988 if (legibleGroup && m_cachedTracks) {
1989 hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
1991 processMediaSelectionOptions();
1995 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1996 if (!hasCaptions && haveCCTrack)
1997 processLegacyClosedCaptionsTracks();
1998 #elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
2000 processLegacyClosedCaptionsTracks();
2003 setHasClosedCaptions(hasCaptions);
2005 INFO_LOG(LOGIDENTIFIER, "has video = ", hasVideo(), ", has audio = ", hasAudio(), ", has captions = ", hasClosedCaptions());
2009 if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
2010 characteristicsChanged();
2012 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
2014 m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
2017 setDelayCharacteristicsChangedNotification(false);
2020 #if ENABLE(VIDEO_TRACK)
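// Generic diffing helper: compare the AVPlayerItemTracks of the given media type against the existing wrapper objects, compute the added and removed sets, and notify the MediaPlayer through the supplied member function pointers.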
2022 template <typename RefT, typename PassRefT>
2023 void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
2025 RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
2026 return [[[track assetTrack] mediaType] isEqualToString:trackType];
2028 RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);
2030 for (auto& oldItem : oldItems) {
2031 if (oldItem->playerItemTrack())
2032 [oldTracks addObject:oldItem->playerItemTrack()];
2035 // Find the added & removed AVPlayerItemTracks:
2036 RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
2037 [removedTracks minusSet:newTracks.get()];
2039 RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
2040 [addedTracks minusSet:oldTracks.get()];
2042 typedef Vector<RefT> ItemVector;
2043 ItemVector replacementItems;
2044 ItemVector addedItems;
2045 ItemVector removedItems;
2046 for (auto& oldItem : oldItems) {
2047 if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
2048 removedItems.append(oldItem);
2050 replacementItems.append(oldItem);
2053 for (AVPlayerItemTrack* track in addedTracks.get())
2054 addedItems.append(itemFactory(track));
2056 replacementItems.appendVector(addedItems);
2057 oldItems.swap(replacementItems);
2059 for (auto& removedItem : removedItems)
2060 (player->*removedFunction)(*removedItem);
2062 for (auto& addedItem : addedItems)
2063 (player->*addedFunction)(*addedItem);
2066 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
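// Overload used when media selection groups are available: the same diff is computed over AVMediaSelectionOptions instead of player item tracks.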
2068 template <typename RefT, typename PassRefT>
2069 void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, const Vector<String>& characteristics, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
2071 group->updateOptions(characteristics);
2073 ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
2074 for (auto& option : group->options()) {
2077 AVMediaSelectionOptionType* avOption = option->avMediaSelectionOption();
2080 newSelectionOptions.add(option);
2083 ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
2084 for (auto& oldItem : oldItems) {
2085 if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
2086 oldSelectionOptions.add(option);
2089 // Find the added & removed AVMediaSelectionOptions:
2090 ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
2091 for (auto& oldOption : oldSelectionOptions) {
2092 if (!newSelectionOptions.contains(oldOption))
2093 removedSelectionOptions.add(oldOption);
2096 ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
2097 for (auto& newOption : newSelectionOptions) {
2098 if (!oldSelectionOptions.contains(newOption))
2099 addedSelectionOptions.add(newOption);
2102 typedef Vector<RefT> ItemVector;
2103 ItemVector replacementItems;
2104 ItemVector addedItems;
2105 ItemVector removedItems;
2106 for (auto& oldItem : oldItems) {
2107 if (!oldItem->mediaSelectionOption())
2108 removedItems.append(oldItem);
2109 else if (removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
2110 removedItems.append(oldItem);
2112 replacementItems.append(oldItem);
2115 for (auto& option : addedSelectionOptions)
2116 addedItems.append(itemFactory(*option.get()));
2118 replacementItems.appendVector(addedItems);
2119 oldItems.swap(replacementItems);
2121 for (auto& removedItem : removedItems)
2122 (player->*removedFunction)(*removedItem);
2124 for (auto& addedItem : addedItems)
2125 (player->*addedFunction)(*addedItem);
2130 void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
2132 #if !RELEASE_LOG_DISABLED
2133 size_t count = m_audioTracks.size();
2136 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2137 Vector<String> characteristics = player()->preferredAudioCharacteristics();
2138 if (!m_audibleGroup) {
2139 if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
2140 m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, characteristics);
2144 determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, characteristics, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
2147 determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
2149 for (auto& track : m_audioTracks)
2150 track->resetPropertiesFromTrack();
2152 #if !RELEASE_LOG_DISABLED
2153 INFO_LOG(LOGIDENTIFIER, "track count was ", count, ", is ", m_audioTracks.size());
2157 void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
2159 #if !RELEASE_LOG_DISABLED
2160 size_t count = m_videoTracks.size();
2163 determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
2165 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2166 if (!m_visualGroup) {
2167 if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
2168 m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, Vector<String>());
2172 determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, Vector<String>(), &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
2175 for (auto& track : m_videoTracks)
2176 track->resetPropertiesFromTrack();
2178 #if !RELEASE_LOG_DISABLED
2179 INFO_LOG(LOGIDENTIFIER, "track count was ", count, ", is ", m_videoTracks.size());
2183 bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
2185 return m_videoFullscreenLayerManager->requiresTextTrackRepresentation();
2188 void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
2190 m_videoFullscreenLayerManager->syncTextTrackBounds();
2193 void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
2195 m_videoFullscreenLayerManager->setTextTrackRepresentation(representation);
2198 #endif // ENABLE(VIDEO_TRACK)
2200 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
2202 AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
2205 m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
2206 m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
2208 return m_provider.get();
2213 void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
2218 setNaturalSize(m_cachedPresentationSize);
2221 void MediaPlayerPrivateAVFoundationObjC::resolvedURLChanged()
2223 setResolvedURL(m_avAsset ? URL([m_avAsset resolvedURL]) : URL());
2226 bool MediaPlayerPrivateAVFoundationObjC::didPassCORSAccessCheck() const
2228 AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
2229 if (!DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled()
2230 || ![resourceLoader respondsToSelector:@selector(URLSession)])
2233 WebCoreNSURLSession *session = (WebCoreNSURLSession *)resourceLoader.URLSession;
2234 if ([session isKindOfClass:[WebCoreNSURLSession class]])
2235 return session.didPassCORSAccessChecks;
2240 Optional<bool> MediaPlayerPrivateAVFoundationObjC::wouldTaintOrigin(const SecurityOrigin& origin) const
2242 AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
2243 if (!DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled()
2244 || ![resourceLoader respondsToSelector:@selector(URLSession)])
2247 WebCoreNSURLSession *session = (WebCoreNSURLSession *)resourceLoader.URLSession;
2248 if ([session isKindOfClass:[WebCoreNSURLSession class]])
2249 return [session wouldTaintOrigin:origin];
2251 return WTF::nullopt;
2255 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
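// Lazily create an AVPlayerItemVideoOutput for the current player item. With VideoToolbox available we pass nil attributes and let AVFoundation choose the pixel format (the conformer converts later); otherwise we request 32BGRA buffers up front.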
2257 void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
2259 INFO_LOG(LOGIDENTIFIER);
2261 if (!m_avPlayerItem || m_videoOutput)
2264 #if USE(VIDEOTOOLBOX)
2265 NSDictionary* attributes = nil;
2267 NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey, nil];
2269 m_videoOutput = adoptNS([[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:attributes]);
2270 ASSERT(m_videoOutput);
2272 [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];
2274 [m_avPlayerItem.get() addOutput:m_videoOutput.get()];
2277 void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
2283 [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];
2285 INFO_LOG(LOGIDENTIFIER);
2290 bool MediaPlayerPrivateAVFoundationObjC::updateLastPixelBuffer()
2292 if (!m_avPlayerItem)
2296 createVideoOutput();
2297 ASSERT(m_videoOutput);
2299 CMTime currentTime = [m_avPlayerItem.get() currentTime];
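// Only copy a pixel buffer when the output reports a new one for the current item time; otherwise keep whatever we already have.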
2301 if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
2304 m_lastPixelBuffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
2305 m_lastImage = nullptr;
2309 bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
2311 if (!m_avPlayerItem)
2318 createVideoOutput();
2320 return [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]];
2323 void MediaPlayerPrivateAVFoundationObjC::updateLastImage(UpdateType type)
2325 #if HAVE(CORE_VIDEO)
2326 if (!m_avPlayerItem)
2329 if (type == UpdateType::UpdateSynchronously && !m_lastImage && !videoOutputHasAvailableFrame())
2330 waitForVideoOutputMediaDataWillChange();
2332 // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
2333 // for the requested time has already been retrieved. In this case, the last valid image (if any)
2334 // should be displayed.
2335 if (!updateLastPixelBuffer() && (m_lastImage || !m_lastPixelBuffer))
2338 if (!m_pixelBufferConformer) {
2339 #if USE(VIDEOTOOLBOX)
2340 NSDictionary *attributes = @{ (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
2342 NSDictionary *attributes = nil;
2344 m_pixelBufferConformer = std::make_unique<PixelBufferConformerCV>((__bridge CFDictionaryRef)attributes);
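// The conformer turns the CVPixelBuffer into a CGImage (converting the pixel format if necessary) so the frame can be painted with CoreGraphics.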
2347 #if !RELEASE_LOG_DISABLED
2348 MonotonicTime start = MonotonicTime::now();
2351 m_lastImage = m_pixelBufferConformer->createImageFromPixelBuffer(m_lastPixelBuffer.get());
2353 #if !RELEASE_LOG_DISABLED
2354 DEBUG_LOG(LOGIDENTIFIER, "creating buffer took ", (MonotonicTime::now() - start).seconds());
2356 #endif // HAVE(CORE_VIDEO)
2359 void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext& context, const FloatRect& outputRect)
2361 updateLastImage(UpdateType::UpdateSynchronously);
2365 AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
2366 if (!firstEnabledVideoTrack)
2369 INFO_LOG(LOGIDENTIFIER);
2371 GraphicsContextStateSaver stateSaver(context);
2372 FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
2373 AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
2374 FloatRect transformedOutputRect = videoTransform.inverse().valueOr(AffineTransform()).mapRect(outputRect);
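// Apply the track's preferred transform, and map the destination rect through its inverse so rotated content ends up upright in the output.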
2376 context.concatCTM(videoTransform);
2377 context.drawNativeImage(m_lastImage.get(), imageRect.size(), transformedOutputRect, imageRect);
2379 // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
2380 // video frame, destroy it now that it is no longer needed.
2381 if (m_imageGenerator)
2382 destroyImageGenerator();
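// copyVideoTextureToPlatformTexture() uploads the most recent pixel buffer into the caller's texture via VideoTextureCopierCV, honoring the requested format, flip and premultiply options.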
2386 bool MediaPlayerPrivateAVFoundationObjC::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
2390 updateLastPixelBuffer();
2391 if (!m_lastPixelBuffer)
2394 size_t width = CVPixelBufferGetWidth(m_lastPixelBuffer.get());
2395 size_t height = CVPixelBufferGetHeight(m_lastPixelBuffer.get());
2397 if (!m_videoTextureCopier)
2398 m_videoTextureCopier = std::make_unique<VideoTextureCopierCV>(*context);
2400 return m_videoTextureCopier->copyImageToPlatformTexture(m_lastPixelBuffer.get(), width, height, outputTexture, outputTarget, level, internalFormat, format, type, premultiplyAlpha, flipY);
2403 NativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
2409 void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
2411 [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];
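// outputMediaDataWillChange() will signal the semaphore from the video output's pull delegate queue once a new frame is available.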
2413 // Wait for 1 second.
2414 bool satisfied = m_videoOutputSemaphore.waitFor(1_s);
2416 ERROR_LOG(LOGIDENTIFIER, "timed out");
2419 void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutputType *)
2421 m_videoOutputSemaphore.signal();
2426 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
2428 RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
2430 return m_keyURIToRequestMap.take(keyURI);
2433 void MediaPlayerPrivateAVFoundationObjC::keyAdded()
2435 Vector<String> fulfilledKeyIds;
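// Fulfill every pending loading request whose key is now cached; collect the fulfilled ids first so the map is not modified while it is being iterated.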
2437 for (auto& pair : m_keyURIToRequestMap) {
2438 const String& keyId = pair.key;
2439 const RetainPtr<AVAssetResourceLoadingRequest>& request = pair.value;
2441 auto keyData = player()->cachedKeyForKeyId(keyId);
2445 fulfillRequestWithKeyData(request.get(), keyData.get());
2446 fulfilledKeyIds.append(keyId);
2449 for (auto& keyId : fulfilledKeyIds)
2450 m_keyURIToRequestMap.remove(keyId);
2453 void MediaPlayerPrivateAVFoundationObjC::removeSession(LegacyCDMSession& session)
2455 ASSERT_UNUSED(session, &session == m_session);
2456 m_session = nullptr;
2459 std::unique_ptr<LegacyCDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem, LegacyCDMSessionClient* client)
2461 if (!keySystemIsSupported(keySystem))
2463 auto session = std::make_unique<CDMSessionAVFoundationObjC>(this, client);
2464 m_session = makeWeakPtr(*session);
2465 return WTFMove(session);
2469 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(LEGACY_ENCRYPTED_MEDIA)
2470 void MediaPlayerPrivateAVFoundationObjC::outputObscuredDueToInsufficientExternalProtectionChanged(bool newValue)
2472 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
2473 if (m_session && newValue)
2474 m_session->playerDidReceiveError([NSError errorWithDomain:@"com.apple.WebKit" code:'HDCP' userInfo:nil]);
2477 #if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
2479 m_cdmInstance->outputObscuredDueToInsufficientExternalProtectionChanged(newValue);
2480 #elif !ENABLE(LEGACY_ENCRYPTED_MEDIA)
2481 UNUSED_PARAM(newValue);
2486 #if ENABLE(ENCRYPTED_MEDIA)
2487 void MediaPlayerPrivateAVFoundationObjC::cdmInstanceAttached(CDMInstance& instance)
2489 #if HAVE(AVCONTENTKEYSESSION)
2490 if (!is<CDMInstanceFairPlayStreamingAVFObjC>(instance))
2493 auto& fpsInstance = downcast<CDMInstanceFairPlayStreamingAVFObjC>(instance);
2494 if (&fpsInstance == m_cdmInstance)
2498 cdmInstanceDetached(*m_cdmInstance);
2500 m_cdmInstance = &fpsInstance;
2502 UNUSED_PARAM(instance);
2506 void MediaPlayerPrivateAVFoundationObjC::cdmInstanceDetached(CDMInstance& instance)
2508 #if HAVE(AVCONTENTKEYSESSION)
2509 ASSERT_UNUSED(instance, m_cdmInstance && m_cdmInstance == &instance);
2510 m_cdmInstance = nullptr;
2512 UNUSED_PARAM(instance);
2516 void MediaPlayerPrivateAVFoundationObjC::attemptToDecryptWithInstance(CDMInstance&)
2518 #if HAVE(AVCONTENTKEYSESSION)
2519 if (!m_keyID || !m_cdmInstance)
2522 auto instanceSession = m_cdmInstance->sessionForKeyIDs(Vector<Ref<SharedBuffer>>::from(*m_keyID));
2523 if (!instanceSession)
2526 [instanceSession->contentKeySession() addContentKeyRecipient:m_avAsset.get()];
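// With the asset registered as a content key recipient, complete the loading requests that were deferred while waiting for a key, marking each response as a streaming content key delivery.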
2528 auto keyURIToRequestMap = WTFMove(m_keyURIToRequestMap);
2529 for (auto& request : keyURIToRequestMap.values()) {
2530 if (auto *infoRequest = request.get().contentInformationRequest)
2531 infoRequest.contentType = AVStreamingKeyDeliveryContentKeyType;
2532 [request finishLoading];
2534 setWaitingForKey(false);
2538 void MediaPlayerPrivateAVFoundationObjC::setWaitingForKey(bool waitingForKey)
2540 if (m_waitingForKey == waitingForKey)
2543 m_waitingForKey = waitingForKey;
2544 player()->waitingForKeyChanged();
2548 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
2550 void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
2552 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2553 [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
2556 Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
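// Start by assuming every existing text track was removed; each one matched to a current closed-caption player item track is taken back off the list, and brand-new tracks are created for the rest.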
2557 for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {
2559 AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
2560 if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
2563 bool newCCTrack = true;
2564 for (unsigned i = removedTextTracks.size(); i > 0; --i) {
2565 if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
2568 RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(m_textTracks[i - 1].get());
2569 if (track->avPlayerItemTrack() == playerItemTrack) {
2570 removedTextTracks.remove(i - 1);
2579 m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
2582 processNewAndRemovedTextTracks(removedTextTracks);
2587 NSArray* MediaPlayerPrivateAVFoundationObjC::safeAVAssetTracksForAudibleMedia()
2592 if ([m_avAsset.get() statusOfValueForKey:@"tracks" error:NULL] != AVKeyValueStatusLoaded)
2595 return [m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible];
2598 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2600 bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
2605 if ([m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] != AVKeyValueStatusLoaded)
2611 AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
2613 if (!hasLoadedMediaSelectionGroups())
2616 return [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible];
2619 AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
2621 if (!hasLoadedMediaSelectionGroups())
2624 return [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
2627 AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
2629 if (!hasLoadedMediaSelectionGroups())
2632 return [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual];
2635 void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
2637 AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
2638 if (!legibleGroup) {
2639 INFO_LOG(LOGIDENTIFIER, "no mediaSelectionGroup");
2643 // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
2644 // but set the selected legible track to nil so text tracks will not be automatically configured.
2645 if (!m_textTracks.size())
2646 [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
2648 Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
2649 NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
2650 for (AVMediaSelectionOptionType *option in legibleOptions) {
2651 bool newTrack = true;
2652 for (unsigned i = removedTextTracks.size(); i > 0; --i) {
2653 if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
2656 RetainPtr<AVMediaSelectionOptionType> currentOption;
2657 #if ENABLE(AVF_CAPTIONS)
2658 if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
2659 RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
2660 currentOption = track->mediaSelectionOption();
2664 RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
2665 currentOption = track->mediaSelectionOption();
2668 if ([currentOption.get() isEqual:option]) {
2669 removedTextTracks.remove(i - 1);
2677 #if ENABLE(AVF_CAPTIONS)
2678 if ([option outOfBandSource]) {
2679 m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
2680 m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
2685 m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
2688 processNewAndRemovedTextTracks(removedTextTracks);
2691 void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
2693 if (m_metadataTrack)
2696 m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
2697 m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
2698 player()->addTextTrack(*m_metadataTrack);
2701 void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
2703 ASSERT(time >= MediaTime::zeroTime());
2705 if (!m_currentTextTrack)
2708 m_currentTextTrack->processCue((__bridge CFArrayRef)attributedStrings, (__bridge CFArrayRef)nativeSamples, time);
2711 void MediaPlayerPrivateAVFoundationObjC::flushCues()
2713 INFO_LOG(LOGIDENTIFIER);
2715 if (!m_currentTextTrack)
2718 m_currentTextTrack->resetCueValues();
2721 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2723 void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
2725 if (m_currentTextTrack == track)
2728 INFO_LOG(LOGIDENTIFIER, "selecting track with language ", track ? track->language() : "");
2730 m_currentTextTrack = track;
2733 if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
2734 ALLOW_DEPRECATED_DECLARATIONS_BEGIN
2735 [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
2736 ALLOW_DEPRECATED_DECLARATIONS_END
2737 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2738 #if ENABLE(AVF_CAPTIONS)
2739 else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
2740 [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
2743 [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
2746 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2747 [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
2749 ALLOW_DEPRECATED_DECLARATIONS_BEGIN
2750 [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
2751 ALLOW_DEPRECATED_DECLARATIONS_END
2756 String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
2758 if (!m_languageOfPrimaryAudioTrack.isNull())
2759 return m_languageOfPrimaryAudioTrack;
2761 if (!m_avPlayerItem.get())
2762 return emptyString();
2764 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2765 // If AVFoundation has an audible group, return the language of the currently selected audible option.
2766 AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
2767 ALLOW_DEPRECATED_DECLARATIONS_BEGIN
2768 AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
2769 ALLOW_DEPRECATED_DECLARATIONS_END
2770 if (currentlySelectedAudibleOption) {
2771 m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
2772 INFO_LOG(LOGIDENTIFIER, "language of selected audible option ", m_languageOfPrimaryAudioTrack);
2774 return m_languageOfPrimaryAudioTrack;
2776 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2778 // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
2779 // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track, return its language.
2780 NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
2781 if (!tracks || [tracks count] != 1) {
2782 m_languageOfPrimaryAudioTrack = emptyString();
2783 INFO_LOG(LOGIDENTIFIER, tracks ? [tracks count] : 0, " audio tracks, returning empty");
2784 return m_languageOfPrimaryAudioTrack;
2787 AVAssetTrack *track = [tracks objectAtIndex:0];
2788 m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);
2790 INFO_LOG(LOGIDENTIFIER, "single audio track has language \"", m_languageOfPrimaryAudioTrack, "\"");
2792 return m_languageOfPrimaryAudioTrack;
2795 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
2796 bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
2798 bool wirelessTarget = false;
2800 #if !PLATFORM(IOS_FAMILY)
2801 if (m_playbackTarget) {
2802 if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation)
2803 wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
2805 wirelessTarget = m_shouldPlayToPlaybackTarget && m_playbackTarget->hasActiveRoute();
2808 wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
2811 INFO_LOG(LOGIDENTIFIER, "- ", wirelessTarget);
2813 return wirelessTarget;
2816 MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
2819 return MediaPlayer::TargetTypeNone;
2821 #if PLATFORM(IOS_FAMILY)
2822 if (!AVFoundationLibrary())
2823 return MediaPlayer::TargetTypeNone;
2825 switch ([m_avPlayer externalPlaybackType]) {
2826 case AVPlayerExternalPlaybackTypeNone:
2827 return MediaPlayer::TargetTypeNone;
2828 case AVPlayerExternalPlaybackTypeAirPlay:
2829 return MediaPlayer::TargetTypeAirPlay;
2830 case AVPlayerExternalPlaybackTypeTVOut:
2831 return MediaPlayer::TargetTypeTVOut;
2834 ASSERT_NOT_REACHED();
2835 return MediaPlayer::TargetTypeNone;
2838 return MediaPlayer::TargetTypeAirPlay;
2842 #if PLATFORM(IOS_FAMILY)
2843 static NSString *externalDeviceDisplayNameForPlayer(AVPlayerType *player)
2846 if (!AVFoundationLibrary())
2849 if ([getAVOutputContextClass() respondsToSelector:@selector(sharedAudioPresentationOutputContext)]) {
2850 AVOutputContext *outputContext = [getAVOutputContextClass() sharedAudioPresentationOutputContext];
2852 if (![outputContext respondsToSelector:@selector(supportsMultipleOutputDevices)]
2853 || ![outputContext supportsMultipleOutputDevices]
2854 || ![outputContext respondsToSelector:@selector(outputDevices)])
2855 return [outputContext deviceName];
2857 auto outputDeviceNames = adoptNS([[NSMutableArray alloc] init]);
2858 for (AVOutputDevice *outputDevice in [outputContext outputDevices]) {
2859 ALLOW_DEPRECATED_DECLARATIONS_BEGIN
2860 auto outputDeviceName = adoptNS([[outputDevice name] copy]);
2861 ALLOW_DEPRECATED_DECLARATIONS_END
2862 [outputDeviceNames addObject:outputDeviceName.get()];
2865 return [outputDeviceNames componentsJoinedByString:@" + "];
2868 if (player.externalPlaybackType != AVPlayerExternalPlaybackTypeAirPlay)
2871 NSArray *pickableRoutes = CFBridgingRelease(MRMediaRemoteCopyPickableRoutes());
2872 if (!pickableRoutes.count)
2875 NSString *displayName = nil;
2876 for (NSDictionary *pickableRoute in pickableRoutes) {
2877 if (![pickableRoute[AVController_RouteDescriptionKey_RouteCurrentlyPicked] boolValue])
2880 displayName = pickableRoute[AVController_RouteDescriptionKey_RouteName];
2882 NSString *routeName = pickableRoute[AVController_RouteDescriptionKey_AVAudioRouteName];
2883 if (![routeName isEqualToString:@"Speaker"] && ![routeName isEqualToString:@"HDMIOutput"])
2886 // The route is a speaker or HDMI out; override the name to be the localized device model.
2887 NSString *localizedDeviceModel = [[PAL::getUIDeviceClass() currentDevice] localizedModel];
2889 // In cases where a route with that name already exists, prefix the name with the model.
2890 BOOL includeLocalizedDeviceModelName = NO;
2891 for (NSDictionary *otherRoute in pickableRoutes) {
2892 if (otherRoute == pickableRoute)
2895 if ([otherRoute[AVController_RouteDescriptionKey_RouteName] rangeOfString:displayName].location != NSNotFound) {
2896 includeLocalizedDeviceModelName = YES;
2901 if (includeLocalizedDeviceModelName)
2902 displayName = [NSString stringWithFormat:@"%@ %@", localizedDeviceModel, displayName];
2904 displayName = localizedDeviceModel;
2911 UNUSED_PARAM(player);
2917 String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
2920 return emptyString();
2922 String wirelessTargetName;
2923 #if !PLATFORM(IOS_FAMILY)
2924 if (m_playbackTarget)
2925 wirelessTargetName = m_playbackTarget->deviceName();
2927 wirelessTargetName = externalDeviceDisplayNameForPlayer(m_avPlayer.get());
2930 return wirelessTargetName;
2933 bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
2936 return !m_allowsWirelessVideoPlayback;
2938 m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
2939 INFO_LOG(LOGIDENTIFIER, "- ", !m_allowsWirelessVideoPlayback);
2941 return !m_allowsWirelessVideoPlayback;
2944 void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
2946 INFO_LOG(LOGIDENTIFIER, "- ", disabled);
2947 m_allowsWirelessVideoPlayback = !disabled;
2951 setDelayCallbacks(true);
2952 [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
2953 setDelayCallbacks(false);
2956 #if !PLATFORM(IOS_FAMILY)
2958 void MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
2960 m_playbackTarget = WTFMove(target);
2962 m_outputContext = m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation ? toMediaPlaybackTargetMac(m_playbackTarget.get())->outputContext() : nullptr;
2964 INFO_LOG(LOGIDENTIFIER);
2966 if (!m_playbackTarget->hasActiveRoute())
2967 setShouldPlayToPlaybackTarget(false);
2970 void MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(bool shouldPlay)
2972 if (m_shouldPlayToPlaybackTarget == shouldPlay)
2975 m_shouldPlayToPlaybackTarget = shouldPlay;
2977 if (!m_playbackTarget)
2980 INFO_LOG(LOGIDENTIFIER, "- ", shouldPlay);
2982 if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation) {
2983 AVOutputContext *newContext = shouldPlay ? m_outputContext.get() : nil;
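// Only push a new output context to AVPlayer when it actually differs from the one already in use.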
2988 RetainPtr<AVOutputContext> currentContext = m_avPlayer.get().outputContext;
2989 if ((!newContext && !currentContext.get()) || [currentContext.get() isEqual:newContext])
2992 setDelayCallbacks(true);
2993 m_avPlayer.get().outputContext = newContext;
2994 setDelayCallbacks(false);
2999 ASSERT(m_playbackTarget->targetType() == MediaPlaybackTarget::Mock);
3001 setDelayCallbacks(true);
3002 auto weakThis = makeWeakPtr(*this);
3003 scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis] {
3006 weakThis->playbackTargetIsWirelessDidChange();
3008 setDelayCallbacks(false);
3011 #endif // !PLATFORM(IOS_FAMILY)
3013 void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
3015 #if PLATFORM(IOS_FAMILY)
3019 if ([m_avPlayer respondsToSelector:@selector(setUsesExternalPlaybackWhileExternalScreenIsActive:)])
3020 [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:(player()->fullscreenMode() == MediaPlayer::VideoFullscreenModeStandard) || player()->isVideoFullscreenStandby()];
3026 void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
3028 m_cachedItemStatus = status;
3033 void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
3035 m_pendingStatusChanges++;
3038 void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
3040 m_cachedLikelyToKeepUp = likelyToKeepUp;
3042 ASSERT(m_pendingStatusChanges);
3043 if (!--m_pendingStatusChanges)
3047 void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
3049 m_pendingStatusChanges++;
3052 void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
3054 m_cachedBufferEmpty = bufferEmpty;
3056 ASSERT(m_pendingStatusChanges);
3057 if (!--m_pendingStatusChanges)
3061 void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
3063 m_pendingStatusChanges++;