Use NeverDestroyed instead of DEPRECATED_DEFINE_STATIC_LOCAL
diff --git a/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.mm b/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.mm
index e87715d..3d6ce72 100644
--- a/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.mm
+++ b/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.mm
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2011-2014 Apple Inc. All rights reserved.
+ * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -28,6 +28,7 @@
 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
 #import "MediaPlayerPrivateAVFoundationObjC.h"
 
+#import "AVFoundationSPI.h"
 #import "AVTrackPrivateAVFObjCImpl.h"
 #import "AudioSourceProviderAVFObjC.h"
 #import "AudioTrackPrivateAVFObjC.h"
 #import "CDMSessionAVFoundationObjC.h"
 #import "Cookie.h"
 #import "ExceptionCodePlaceholder.h"
+#import "Extensions3D.h"
 #import "FloatConversion.h"
 #import "FloatConversion.h"
 #import "GraphicsContext.h"
+#import "GraphicsContext3D.h"
 #import "GraphicsContextCG.h"
 #import "InbandMetadataTextTrackPrivateAVF.h"
 #import "InbandTextTrackPrivateAVFObjC.h"
 #import "OutOfBandTextTrackPrivateAVF.h"
 #import "URL.h"
 #import "Logging.h"
+#import "MediaPlaybackTargetMac.h"
+#import "MediaPlaybackTargetMock.h"
 #import "MediaSelectionGroupAVFObjC.h"
 #import "MediaTimeAVFoundation.h"
 #import "PlatformTimeRanges.h"
+#import "QuartzCoreSPI.h"
 #import "SecurityOrigin.h"
 #import "SerializedPlatformRepresentationMac.h"
-#import "SoftLinking.h"
+#import "TextEncoding.h"
 #import "TextTrackRepresentation.h"
 #import "UUID.h"
 #import "VideoTrackPrivateAVFObjC.h"
 #import "WebCoreAVFResourceLoader.h"
 #import "WebCoreCALayerExtras.h"
 #import "WebCoreSystemInterface.h"
+#import <functional>
+#import <map>
 #import <objc/runtime.h>
 #import <runtime/DataView.h>
 #import <runtime/JSCInlines.h>
@@ -66,9 +74,9 @@
 #import <runtime/Uint32Array.h>
 #import <runtime/Uint8Array.h>
 #import <wtf/CurrentTime.h>
-#import <wtf/Functional.h>
 #import <wtf/ListHashSet.h>
 #import <wtf/NeverDestroyed.h>
+#import <wtf/OSObjectPtr.h>
 #import <wtf/text/CString.h>
 #import <wtf/text/StringBuilder.h>
 
 #include "TextTrack.h"
 #endif
 
-#import <Foundation/NSGeometry.h>
 #import <AVFoundation/AVFoundation.h>
 #if PLATFORM(IOS)
+#import "WAKAppKitStubs.h"
 #import <CoreImage/CoreImage.h>
+#import <mach/mach_port.h>
 #else
+#import <Foundation/NSGeometry.h>
 #import <QuartzCore/CoreImage.h>
 #endif
-#import <CoreMedia/CoreMedia.h>
 
 #if USE(VIDEOTOOLBOX)
 #import <CoreVideo/CoreVideo.h>
 #import <VideoToolbox/VideoToolbox.h>
 #endif
 
+#if USE(CFNETWORK)
+#include "CFNSURLConnectionSPI.h"
+#endif
+
+#if PLATFORM(IOS)
+#include <OpenGLES/ES3/glext.h>
+#endif
+
 namespace std {
 template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
     typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
@@ -107,6 +124,15 @@ template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionA
     for (CALayer* layer in self.sublayers)
         layer.frame = bounds;
 }
+
+- (void)setPosition:(CGPoint)position
+{
+    if (!CATransform3DIsIdentity(self.transform)) {
+        // Pre-apply to the position the transform added in the WebProcess to fix <rdar://problem/18316542>.
+        position = CGPointApplyAffineTransform(position, CATransform3DGetAffineTransform(self.transform));
+    }
+    [super setPosition:position];
+}
 @end
 
 #if ENABLE(AVF_CAPTIONS)
@@ -118,13 +144,6 @@ template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionA
 @end
 #endif
 
-#if PLATFORM(IOS)
-@class AVPlayerItem;
-@interface AVPlayerItem (WebKitExtensions)
-@property (nonatomic, copy) NSString* dataYouTubeID;
-@end
-#endif
-
 @interface AVURLAsset (WebKitExtensions)
 @property (nonatomic, readonly) NSURL *resolvedURL;
 @end
@@ -137,8 +156,13 @@ typedef AVMetadataItem AVMetadataItemType;
 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
 
+#pragma mark - Soft Linking
+
+// Soft-linking headers must be included last since they #define functions, constants, etc.
+#import "CoreMediaSoftLink.h"
+
 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
-SOFT_LINK_FRAMEWORK_OPTIONAL(CoreMedia)
+
 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreVideo)
 
@@ -146,11 +170,6 @@ SOFT_LINK_FRAMEWORK_OPTIONAL(CoreVideo)
 SOFT_LINK_FRAMEWORK_OPTIONAL(VideoToolbox)
 #endif
 
-SOFT_LINK(CoreMedia, CMTimeCompare, int32_t, (CMTime time1, CMTime time2), (time1, time2))
-SOFT_LINK(CoreMedia, CMTimeMakeWithSeconds, CMTime, (Float64 seconds, int32_t preferredTimeScale), (seconds, preferredTimeScale))
-SOFT_LINK(CoreMedia, CMTimeGetSeconds, Float64, (CMTime time), (time))
-SOFT_LINK(CoreMedia, CMTimeRangeGetEnd, CMTime, (CMTimeRange range), (range))
-
 SOFT_LINK(CoreVideo, CVPixelBufferGetWidth, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
 SOFT_LINK(CoreVideo, CVPixelBufferGetHeight, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
 SOFT_LINK(CoreVideo, CVPixelBufferGetBaseAddress, void*, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
@@ -176,6 +195,8 @@ SOFT_LINK_CLASS(AVFoundation, AVMetadataItem)
 SOFT_LINK_CLASS(CoreImage, CIContext)
 SOFT_LINK_CLASS(CoreImage, CIImage)
 
+SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString*)
+SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString*)
 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
@@ -193,8 +214,6 @@ SOFT_LINK_POINTER(CoreVideo, kCVPixelBufferPixelFormatTypeKey, NSString *)
 
 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
 
-SOFT_LINK_CONSTANT(CoreMedia, kCMTimeZero, CMTime)
-
 #define AVPlayer getAVPlayerClass()
 #define AVPlayerItem getAVPlayerItemClass()
 #define AVPlayerLayer getAVPlayerLayerClass()
@@ -202,6 +221,8 @@ SOFT_LINK_CONSTANT(CoreMedia, kCMTimeZero, CMTime)
 #define AVAssetImageGenerator getAVAssetImageGeneratorClass()
 #define AVMetadataItem getAVMetadataItemClass()
 
+#define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
+#define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
@@ -280,11 +301,26 @@ SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceID3, NSString*)
 
 #if PLATFORM(IOS)
 SOFT_LINK_POINTER(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
-
 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
 #endif
 
-#define kCMTimeZero getkCMTimeZero()
+#if PLATFORM(IOS)
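+// CoreVideo's OpenGL ES texture-cache functions are soft-linked so the framework loads lazily on first use.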
+SOFT_LINK(CoreVideo, CVOpenGLESTextureCacheCreate, CVReturn, (CFAllocatorRef allocator, CFDictionaryRef cacheAttributes, CVEAGLContext eaglContext, CFDictionaryRef textureAttributes, CVOpenGLESTextureCacheRef* cacheOut), (allocator, cacheAttributes, eaglContext, textureAttributes, cacheOut))
+SOFT_LINK(CoreVideo, CVOpenGLESTextureCacheCreateTextureFromImage, CVReturn, (CFAllocatorRef allocator, CVOpenGLESTextureCacheRef textureCache, CVImageBufferRef sourceImage, CFDictionaryRef textureAttributes, GLenum target, GLint internalFormat, GLsizei width, GLsizei height, GLenum format, GLenum type, size_t planeIndex, CVOpenGLESTextureRef* textureOut), (allocator, textureCache, sourceImage, textureAttributes, target, internalFormat, width, height, format, type, planeIndex, textureOut))
+SOFT_LINK(CoreVideo, CVOpenGLESTextureCacheFlush, void, (CVOpenGLESTextureCacheRef textureCache, CVOptionFlags options), (textureCache, options))
+SOFT_LINK(CoreVideo, CVOpenGLESTextureGetTarget, GLenum, (CVOpenGLESTextureRef image), (image))
+SOFT_LINK(CoreVideo, CVOpenGLESTextureGetName, GLuint, (CVOpenGLESTextureRef image), (image))
+SOFT_LINK_POINTER(CoreVideo, kCVPixelBufferIOSurfaceOpenGLESFBOCompatibilityKey, NSString *)
+#define kCVPixelBufferIOSurfaceOpenGLESFBOCompatibilityKey getkCVPixelBufferIOSurfaceOpenGLESFBOCompatibilityKey()
+#else
+SOFT_LINK(CoreVideo, CVOpenGLTextureCacheCreate, CVReturn, (CFAllocatorRef allocator, CFDictionaryRef cacheAttributes, CGLContextObj cglContext, CGLPixelFormatObj cglPixelFormat, CFDictionaryRef textureAttributes, CVOpenGLTextureCacheRef* cacheOut), (allocator, cacheAttributes, cglContext, cglPixelFormat, textureAttributes, cacheOut))
+SOFT_LINK(CoreVideo, CVOpenGLTextureCacheCreateTextureFromImage, CVReturn, (CFAllocatorRef allocator, CVOpenGLTextureCacheRef textureCache, CVImageBufferRef sourceImage, CFDictionaryRef attributes, CVOpenGLTextureRef* textureOut), (allocator, textureCache, sourceImage, attributes, textureOut))
+SOFT_LINK(CoreVideo, CVOpenGLTextureCacheFlush, void, (CVOpenGLTextureCacheRef textureCache, CVOptionFlags options), (textureCache, options))
+SOFT_LINK(CoreVideo, CVOpenGLTextureGetTarget, GLenum, (CVOpenGLTextureRef image), (image))
+SOFT_LINK(CoreVideo, CVOpenGLTextureGetName, GLuint, (CVOpenGLTextureRef image), (image))
+SOFT_LINK_POINTER(CoreVideo, kCVPixelBufferIOSurfaceOpenGLFBOCompatibilityKey, NSString *)
+#define kCVPixelBufferIOSurfaceOpenGLFBOCompatibilityKey getkCVPixelBufferIOSurfaceOpenGLFBOCompatibilityKey()
+#endif
 
 using namespace WebCore;
 
@@ -341,7 +377,9 @@ namespace WebCore {
 
 static NSArray *assetMetadataKeyNames();
 static NSArray *itemKVOProperties();
-static NSArray* assetTrackMetadataKeyNames();
+static NSArray *assetTrackMetadataKeyNames();
+static NSArray *playerKVOProperties();
+static AVAssetTrack* firstEnabledTrack(NSArray* tracks);
 
 #if !LOG_DISABLED
 static const char *boolString(bool val)
@@ -354,7 +392,7 @@ static const char *boolString(bool val)
 typedef HashMap<MediaPlayer*, MediaPlayerPrivateAVFoundationObjC*> PlayerToPrivateMapType;
 static PlayerToPrivateMapType& playerToPrivateMap()
 {
-    DEPRECATED_DEFINE_STATIC_LOCAL(PlayerToPrivateMapType, map, ());
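+    // NeverDestroyed gives the map static storage without registering an exit-time destructor.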
+    static NeverDestroyed<PlayerToPrivateMapType> map;
     return map;
 };
 #endif
@@ -383,15 +421,63 @@ static dispatch_queue_t globalPullDelegateQueue()
 }
 #endif
 
-PassOwnPtr<MediaPlayerPrivateInterface> MediaPlayerPrivateAVFoundationObjC::create(MediaPlayer* player)
-{ 
-    return adoptPtr(new MediaPlayerPrivateAVFoundationObjC(player));
-}
+#if USE(CFNETWORK)
+class WebCoreNSURLAuthenticationChallengeClient : public RefCounted<WebCoreNSURLAuthenticationChallengeClient>, public AuthenticationClient {
+public:
+    static RefPtr<WebCoreNSURLAuthenticationChallengeClient> create(NSURLAuthenticationChallenge *challenge)
+    {
+        return adoptRef(new WebCoreNSURLAuthenticationChallengeClient(challenge));
+    }
+
+    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::ref;
+    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::deref;
+
+private:
+    WebCoreNSURLAuthenticationChallengeClient(NSURLAuthenticationChallenge *challenge)
+        : m_challenge(challenge)
+    {
+        ASSERT(m_challenge);
+    }
+
+    virtual void refAuthenticationClient() override { ref(); }
+    virtual void derefAuthenticationClient() override { deref(); }
+
+    virtual void receivedCredential(const AuthenticationChallenge&, const Credential& credential) override
+    {
+        [[m_challenge sender] useCredential:credential.nsCredential() forAuthenticationChallenge:m_challenge.get()];
+    }
+
+    virtual void receivedRequestToContinueWithoutCredential(const AuthenticationChallenge&) override
+    {
+        [[m_challenge sender] continueWithoutCredentialForAuthenticationChallenge:m_challenge.get()];
+    }
+
+    virtual void receivedCancellation(const AuthenticationChallenge&) override
+    {
+        [[m_challenge sender] cancelAuthenticationChallenge:m_challenge.get()];
+    }
+
+    virtual void receivedRequestToPerformDefaultHandling(const AuthenticationChallenge&) override
+    {
+        if ([[m_challenge sender] respondsToSelector:@selector(performDefaultHandlingForAuthenticationChallenge:)])
+            [[m_challenge sender] performDefaultHandlingForAuthenticationChallenge:m_challenge.get()];
+    }
+
+    virtual void receivedChallengeRejection(const AuthenticationChallenge&) override
+    {
+        if ([[m_challenge sender] respondsToSelector:@selector(rejectProtectionSpaceAndContinueWithChallenge:)])
+            [[m_challenge sender] rejectProtectionSpaceAndContinueWithChallenge:m_challenge.get()];
+    }
+
+    RetainPtr<NSURLAuthenticationChallenge> m_challenge;
+};
+#endif
 
 void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
 {
     if (isAvailable())
-        registrar(create, getSupportedTypes, supportsType, 0, 0, 0, supportsKeySystem);
+        registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player); },
+            getSupportedTypes, supportsType, 0, 0, 0, supportsKeySystem);
 }
 
 MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
@@ -422,7 +508,7 @@ MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlay
     , m_shouldBufferData(true)
     , m_cachedIsReadyForDisplay(false)
     , m_haveBeenAskedToCreateLayer(false)
-#if ENABLE(IOS_AIRPLAY)
+#if ENABLE(WIRELESS_PLAYBACK_TARGET)
     , m_allowsWirelessVideoPlayback(true)
 #endif
 {
@@ -491,10 +577,9 @@ void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
         if (m_timeObserver)
             [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
         m_timeObserver = nil;
-        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"rate"];
-#if ENABLE(IOS_AIRPLAY)
-        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"externalPlaybackActive"];
-#endif
+
+        for (NSString *keyName in playerKVOProperties())
+            [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
         m_avPlayer = nil;
     }
 
@@ -512,8 +597,10 @@ void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
     m_cachedTracks = nullptr;
 
 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
-    if (m_provider)
+    if (m_provider) {
         m_provider->setPlayerItem(nullptr);
+        m_provider->setAudioTrack(nullptr);
+    }
 #endif
 
     setIgnoreLoadStateChanges(false);
@@ -563,6 +650,7 @@ void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
 {
 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
     destroyVideoOutput();
+    destroyOpenGLVideoOutput();
 #endif
     destroyImageGenerator();
 }
@@ -594,7 +682,7 @@ void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
         if (!m_videoLayer)
             createAVPlayerLayer();
 
-#if USE(VIDEOTOOLBOX) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1090
+#if USE(VIDEOTOOLBOX)
         if (!m_videoOutput)
             createVideoOutput();
 #endif
@@ -608,15 +696,16 @@ void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
     if (!m_avPlayer)
         return;
 
-    m_videoLayer = adoptNS([[AVPlayerLayer alloc] init]);
+    m_videoLayer = adoptNS([allocAVPlayerLayerInstance() init]);
     [m_videoLayer setPlayer:m_avPlayer.get()];
-    [m_videoLayer setBackgroundColor:cachedCGColor(Color::black, ColorSpaceDeviceRGB)];
+    [m_videoLayer setBackgroundColor:cachedCGColor(Color::black)];
 #ifndef NDEBUG
     [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
 #endif
     [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
     updateVideoLayerGravity();
-    IntSize defaultSize = player()->client().mediaPlayerContentBoxRect().pixelSnappedSize();
+    [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
+    IntSize defaultSize = snappedIntRect(player()->client().mediaPlayerContentBoxRect()).size();
     LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoLayer(%p) - returning %p", this, m_videoLayer.get());
 
 #if PLATFORM(IOS)
@@ -633,6 +722,8 @@ void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
         [m_videoInlineLayer insertSublayer:m_videoLayer.get() atIndex:0];
         [m_videoLayer setFrame:m_videoInlineLayer.get().bounds];
     }
+    if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
+        [m_videoLayer setPIPModeEnabled:(player()->fullscreenMode() & MediaPlayer::VideoFullscreenModePictureInPicture)];
 #else
     [m_videoLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
 #endif
@@ -657,6 +748,21 @@ void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
     m_videoLayer = nil;
 }
 
+MediaTime MediaPlayerPrivateAVFoundationObjC::getStartDate() const
+{
+    // The item's currentDate advances with playback, so subtract the current playback offset (both converted to milliseconds) to recover the stream's start date.
+    double date = [[m_avPlayerItem currentDate] timeIntervalSince1970] * 1000;
+
+    // AVFoundation returns 0 if the media file doesn't have a start date, and no live stream dates from the epoch (1970), so treat 0 as "no date".
+    if (!date)
+        return MediaTime::invalidTime();
+
+    double currentTime = CMTimeGetSeconds([m_avPlayerItem currentTime]) * 1000;
+
+    // Round away the small error introduced by subtracting the two millisecond offsets.
+    return MediaTime::createWithDouble(round(date - currentTime));
+}
+
 bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
 {
     if (currentRenderingMode() == MediaRenderingToLayer)
@@ -788,9 +894,12 @@ void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
     if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
         [options.get() setObject: [NSNumber numberWithBool: TRUE] forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];
 
+#if PLATFORM(IOS)
+    // FIXME: rdar://problem/20354688
     String identifier = player()->sourceApplicationIdentifier();
     if (!identifier.isEmpty() && AVURLAssetClientBundleIdentifierKey)
         [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];
+#endif
 
 #if ENABLE(AVF_CAPTIONS)
     const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
@@ -833,7 +942,7 @@ void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
 #endif
 
     NSURL *cocoaURL = canonicalURL(url);
-    m_avAsset = adoptNS([[AVURLAsset alloc] initWithURL:cocoaURL options:options.get()]);
+    m_avAsset = adoptNS([allocAVURLAssetInstance() initWithURL:cocoaURL options:options.get()]);
 
 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
     [[m_avAsset.get() resourceLoader] setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];
@@ -870,21 +979,24 @@ void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
 
     setDelayCallbacks(true);
 
-    m_avPlayer = adoptNS([[AVPlayer alloc] init]);
-    [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"rate" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
-#if ENABLE(IOS_AIRPLAY)
-    [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"externalPlaybackActive" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
-    updateDisableExternalPlayback();
-#endif
+    m_avPlayer = adoptNS([allocAVPlayerInstance() init]);
+    for (NSString *keyName in playerKVOProperties())
+        [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
 
 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
     [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];
 #endif
 
-#if ENABLE(IOS_AIRPLAY)
+#if ENABLE(WIRELESS_PLAYBACK_TARGET)
+    updateDisableExternalPlayback();
     [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
 #endif
 
+#if ENABLE(WIRELESS_PLAYBACK_TARGET) && !PLATFORM(IOS)
+    if (m_shouldPlayToPlaybackTarget)
+        setShouldPlayToPlaybackTarget(true);
+#endif
+
     if (player()->client().mediaPlayerIsVideo())
         createAVPlayerLayer();
 
@@ -904,7 +1016,7 @@ void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
     setDelayCallbacks(true);
 
     // Create the player item so we can load media data. 
-    m_avPlayerItem = adoptNS([[AVPlayerItem alloc] initWithAsset:m_avAsset.get()]);
+    m_avPlayerItem = adoptNS([allocAVPlayerItemInstance() initWithAsset:m_avAsset.get()]);
 
     [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];
 
@@ -912,20 +1024,16 @@ void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
     for (NSString *keyName in itemKVOProperties())
         [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];
 
+    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];
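+    // Spectral keeps the original pitch at non-1x playback rates; Varispeed lets the pitch track the rate.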
+
     if (m_avPlayer)
         setAVPlayerItem(m_avPlayerItem.get());
 
-#if PLATFORM(IOS)
-    AtomicString value;
-    if (player()->doesHaveAttribute("data-youtube-id", &value))
-        [m_avPlayerItem.get() setDataYouTubeID: value];
- #endif
-
 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
     const NSTimeInterval legibleOutputAdvanceInterval = 2;
 
     RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
-    m_legibleOutput = adoptNS([[AVPlayerItemLegibleOutput alloc] initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
+    m_legibleOutput = adoptNS([allocAVPlayerItemLegibleOutputInstance() initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
     [m_legibleOutput.get() setSuppressesPlayerRendering:YES];
 
     [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
@@ -934,15 +1042,11 @@ void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
     [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
 #endif
 
-#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
-        [m_avPlayerItem selectMediaOptionAutomaticallyInMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
-        [m_avPlayerItem selectMediaOptionAutomaticallyInMediaSelectionGroup:safeMediaSelectionGroupForAudibleMedia()];
-        [m_avPlayerItem selectMediaOptionAutomaticallyInMediaSelectionGroup:safeMediaSelectionGroupForVisualMedia()];
-#endif
-
 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
-    if (m_provider)
+    if (m_provider) {
         m_provider->setPlayerItem(m_avPlayerItem.get());
+        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
+    }
 #endif
 
     setDelayCallbacks(false);
@@ -969,31 +1073,29 @@ void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
 {
     LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);
 
-    dispatch_group_t metadataLoadingGroup = dispatch_group_create();
-    dispatch_group_enter(metadataLoadingGroup);
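+    // OSObjectPtr reference-counts the group; the copies captured by the blocks below keep it alive until the notify block runs.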
+    OSObjectPtr<dispatch_group_t> metadataLoadingGroup = adoptOSObject(dispatch_group_create());
+    dispatch_group_enter(metadataLoadingGroup.get());
     auto weakThis = createWeakPtr();
     [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{
 
         callOnMainThread([weakThis, metadataLoadingGroup] {
             if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                 for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
-                    dispatch_group_enter(metadataLoadingGroup);
+                    dispatch_group_enter(metadataLoadingGroup.get());
                     [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
-                        dispatch_group_leave(metadataLoadingGroup);
+                        dispatch_group_leave(metadataLoadingGroup.get());
                     }];
                 }
             }
-            dispatch_group_leave(metadataLoadingGroup);
+            dispatch_group_leave(metadataLoadingGroup.get());
         });
     }];
 
-    dispatch_group_notify(metadataLoadingGroup, dispatch_get_main_queue(), ^{
+    dispatch_group_notify(metadataLoadingGroup.get(), dispatch_get_main_queue(), ^{
         callOnMainThread([weakThis] {
             if (weakThis)
                 [weakThis->m_objcObserver.get() metadataLoaded];
         });
-
-        dispatch_release(metadataLoadingGroup);
     });
 }
 
@@ -1044,27 +1146,35 @@ void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer*
 
     [CATransaction begin];
     [CATransaction setDisableActions:YES];
-
+
+    CAContext *oldContext = [m_videoLayer context];
+    CAContext *newContext = nil;
+    
     if (m_videoFullscreenLayer && m_videoLayer) {
-        [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
-        [m_videoLayer removeFromSuperlayer];
         [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
+        [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
+        newContext = [m_videoFullscreenLayer context];
     } else if (m_videoInlineLayer && m_videoLayer) {
         [m_videoLayer setFrame:[m_videoInlineLayer bounds]];
         [m_videoLayer removeFromSuperlayer];
         [m_videoInlineLayer insertSublayer:m_videoLayer.get() atIndex:0];
+        newContext = [m_videoInlineLayer context];
     } else if (m_videoLayer)
         [m_videoLayer removeFromSuperlayer];
 
+    if (oldContext && newContext && oldContext != newContext) {
+        mach_port_t fencePort = [oldContext createFencePort];
+        [newContext setFencePort:fencePort];
+        mach_port_deallocate(mach_task_self(), fencePort);
+    }
     [CATransaction commit];
 
     if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
         syncTextTrackBounds();
         [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
     }
-#if ENABLE(IOS_AIRPLAY)
+
     updateDisableExternalPlayback();
-#endif
 }
 
 void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
@@ -1074,11 +1184,7 @@ void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame
         return;
 
     if (m_videoLayer) {
-        [m_videoLayer setStyle:nil]; // This enables actions, i.e. implicit animations.
-        [CATransaction begin];
         [m_videoLayer setFrame:CGRectMake(0, 0, frame.width(), frame.height())];
-        [CATransaction commit];
-        [m_videoLayer web_disableAllActions];
     }
     syncTextTrackBounds();
 }
@@ -1098,8 +1204,18 @@ void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::
         videoGravity = AVLayerVideoGravityResizeAspectFill;
     else
         ASSERT_NOT_REACHED();
+
+    if ([m_videoLayer videoGravity] == videoGravity)
+        return;
 
     [m_videoLayer setVideoGravity:videoGravity];
+    syncTextTrackBounds();
+}
+
+void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenMode(MediaPlayer::VideoFullscreenMode mode)
+{
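+    // -setPIPModeEnabled: is SPI and may be unavailable at runtime, hence the respondsToSelector: check.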
+    if (m_videoLayer && [m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
+        [m_videoLayer setPIPModeEnabled:(mode & MediaPlayer::VideoFullscreenModePictureInPicture)];
 }
 
 NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
@@ -1186,7 +1302,7 @@ MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
     if (CMTIME_IS_INDEFINITE(cmDuration))
         return MediaTime::positiveInfiniteTime();
 
-    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %.0f", this, toString(MediaTime::invalidTime()).utf8().data());
+    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %s", this, toString(MediaTime::invalidTime()).utf8().data());
     return MediaTime::invalidTime();
 }
 
@@ -1216,6 +1332,8 @@ void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const
 
     auto weakThis = createWeakPtr();
 
+    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::seekToTime(%p) - calling seekToTime", this);
+
     [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
         callOnMainThread([weakThis, finished] {
             auto _this = weakThis.get();
@@ -1252,15 +1370,15 @@ void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCap
     LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - set to %s", this, boolString(closedCaptionsVisible));
 }
 
-void MediaPlayerPrivateAVFoundationObjC::updateRate()
+void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
 {
     setDelayCallbacks(true);
-    m_cachedRate = requestedRate();
-    [m_avPlayer.get() setRate:requestedRate()];
+    m_cachedRate = rate;
+    [m_avPlayer.get() setRate:rate];
     setDelayCallbacks(false);
 }
 
-float MediaPlayerPrivateAVFoundationObjC::rate() const
+double MediaPlayerPrivateAVFoundationObjC::rate() const
 {
     if (!metaDataAvailable())
         return 0;
@@ -1268,9 +1386,15 @@ float MediaPlayerPrivateAVFoundationObjC::rate() const
     return m_cachedRate;
 }
 
+void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
+{
+    if (m_avPlayerItem)
+        [m_avPlayerItem setAudioTimePitchAlgorithm:(preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];
+}
+
 std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
 {
-    auto timeRanges = PlatformTimeRanges::create();
+    auto timeRanges = std::make_unique<PlatformTimeRanges>();
 
     if (!m_avPlayerItem)
         return timeRanges;
@@ -1323,14 +1447,6 @@ MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
 
 MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
 {
-#if !PLATFORM(IOS) && __MAC_OS_X_VERSION_MIN_REQUIRED <= 1080
-    // AVFoundation on Mountain Lion will occasionally not send a KVO notification
-    // when loadedTimeRanges changes when there is no video output. In that case
-    // update the cached value explicitly.
-    if (!hasLayerRenderer() && !hasContextRenderer())
-        m_cachedLoadedRanges = [m_avPlayerItem loadedTimeRanges];
-#endif
-
     if (!m_cachedLoadedRanges)
         return MediaTime::zeroTime();
 
@@ -1362,7 +1478,7 @@ unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
     return m_cachedTotalBytes;
 }
 
-void MediaPlayerPrivateAVFoundationObjC::setAsset(id asset)
+void MediaPlayerPrivateAVFoundationObjC::setAsset(RetainPtr<id> asset)
 {
     m_avAsset = asset;
 }
@@ -1396,9 +1512,19 @@ MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::
     return MediaPlayerAVAssetStatusLoaded;
 }
 
-void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext* context, const IntRect& rect)
+long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
 {
-    if (!metaDataAvailable() || context->paintingDisabled())
+    if (!m_avAsset)
+        return 0;
+
+    NSError *error = nil;
+    [m_avAsset statusOfValueForKey:@"playable" error:&error];
+    return [error code];
+}
+
+void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& rect)
+{
+    if (!metaDataAvailable() || context.paintingDisabled())
         return;
 
     setDelayCallbacks(true);
@@ -1417,9 +1543,9 @@ void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsCont
     m_videoFrameHasDrawn = true;
 }
 
-void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext* context, const IntRect& rect)
+void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext& context, const FloatRect& rect)
 {
-    if (!metaDataAvailable() || context->paintingDisabled())
+    if (!metaDataAvailable() || context.paintingDisabled())
         return;
 
     // We can ignore the request if we are already rendering to a layer.
@@ -1433,39 +1559,39 @@ void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext* context, const I
     paintCurrentFrameInContext(context, rect);
 }
 
-void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext* context, const IntRect& rect)
+void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext& context, const FloatRect& rect)
 {
     LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(%p)", this);
 
     RetainPtr<CGImageRef> image = createImageForTimeInRect(currentTime(), rect);
     if (image) {
-        GraphicsContextStateSaver stateSaver(*context);
-        context->translate(rect.x(), rect.y() + rect.height());
-        context->scale(FloatSize(1.0f, -1.0f));
-        context->setImageInterpolationQuality(InterpolationLow);
+        GraphicsContextStateSaver stateSaver(context);
+        context.translate(rect.x(), rect.y() + rect.height());
+        context.scale(FloatSize(1.0f, -1.0f));
+        context.setImageInterpolationQuality(InterpolationLow);
         IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
-        CGContextDrawImage(context->platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
+        CGContextDrawImage(context.platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
         image = 0;
     }
 }
 
-static HashSet<String> mimeTypeCache()
+static const HashSet<String>& avfMIMETypes()
 {
-    DEPRECATED_DEFINE_STATIC_LOCAL(HashSet<String>, cache, ());
-    static bool typeListInitialized = false;
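+    // An immediately invoked lambda builds the set exactly once, replacing the typeListInitialized flag.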
+    static NeverDestroyed<HashSet<String>> cache = [] () {
+        HashSet<String> types;
 
-    if (typeListInitialized)
-        return cache;
-    typeListInitialized = true;
+        NSArray *nsTypes = [AVURLAsset audiovisualMIMETypes];
+        for (NSString *mimeType in nsTypes)
+            types.add([mimeType lowercaseString]);
 
-    NSArray *types = [AVURLAsset audiovisualMIMETypes];
-    for (NSString *mimeType in types)
-        cache.add([mimeType lowercaseString]);
+        return types;
+    }();
 
     return cache;
-} 
+}
 
-RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const IntRect& rect)
+RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
 {
     if (!m_imageGenerator)
         createImageGenerator();
@@ -1477,7 +1603,7 @@ RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRe
 
     [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
     RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
-    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), deviceRGBColorSpaceRef()));
+    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), sRGBColorSpaceRef()));
 
 #if !LOG_DISABLED
     double duration = monotonicallyIncreasingTime() - start;
@@ -1489,13 +1615,13 @@ RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRe
 
 void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String>& supportedTypes)
 {
-    supportedTypes = mimeTypeCache();
+    supportedTypes = avfMIMETypes();
 } 
 
 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
 static bool keySystemIsSupported(const String& keySystem)
 {
-    if (equalIgnoringCase(keySystem, "com.apple.fps") || equalIgnoringCase(keySystem, "com.apple.fps.1_0"))
+    if (equalIgnoringASCIICase(keySystem, "com.apple.fps") || equalIgnoringASCIICase(keySystem, "com.apple.fps.1_0") || equalIgnoringASCIICase(keySystem, "org.w3c.clearkey"))
         return true;
     return false;
 }
@@ -1510,6 +1636,10 @@ MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const
     // 1. Check whether the Key System is supported with the specified container and codec type(s) by following the steps for the first matching condition from the following list:
     //    If keySystem is null, continue to the next step.
     if (!parameters.keySystem.isNull() && !parameters.keySystem.isEmpty()) {
+        // "Clear Key" is only supported with HLS:
+        if (equalIgnoringASCIICase(parameters.keySystem, "org.w3c.clearkey") && !parameters.type.isEmpty() && !equalIgnoringASCIICase(parameters.type, "application/x-mpegurl"))
+            return MediaPlayer::IsNotSupported;
+
         // If keySystem contains an unrecognized or unsupported Key System, return the empty string
         if (!keySystemIsSupported(parameters.keySystem))
             return MediaPlayer::IsNotSupported;
@@ -1525,8 +1655,14 @@ MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const
     if (parameters.isMediaSource)
         return MediaPlayer::IsNotSupported;
 #endif
+#if ENABLE(MEDIA_STREAM)
+    if (parameters.isMediaStream)
+        return MediaPlayer::IsNotSupported;
+#endif
+    if (isUnsupportedMIMEType(parameters.type))
+        return MediaPlayer::IsNotSupported;
 
-    if (!mimeTypeCache().contains(parameters.type))
+    if (!staticMIMETypeList().contains(parameters.type) && !avfMIMETypes().contains(parameters.type))
         return MediaPlayer::IsNotSupported;
 
     // The spec says:
@@ -1535,17 +1671,24 @@ MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const
         return MediaPlayer::MayBeSupported;
 
     NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type, (NSString *)parameters.codecs];
-    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;;
+    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
 }
 
 bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
 {
 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
     if (!keySystem.isEmpty()) {
+        // "Clear Key" is only supported with HLS:
+        if (equalIgnoringASCIICase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringASCIICase(mimeType, "application/x-mpegurl"))
+            return false;
+
         if (!keySystemIsSupported(keySystem))
             return false;
 
-        if (!mimeType.isEmpty() && !mimeTypeCache().contains(mimeType))
+        if (!mimeType.isEmpty() && isUnsupportedMIMEType(mimeType))
+            return false;
+
+        if (!mimeType.isEmpty() && !staticMIMETypeList().contains(mimeType) && !avfMIMETypes().contains(mimeType))
             return false;
 
         return true;
@@ -1558,6 +1701,34 @@ bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySyst
 }
 
 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
+#if ENABLE(ENCRYPTED_MEDIA_V2)
+static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
+{
+    if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
+        [infoRequest setContentLength:keyData->byteLength()];
+        [infoRequest setByteRangeAccessSupported:YES];
+    }
+
+    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
+        long long start = [dataRequest currentOffset];
+        long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);
+
+        if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
+            [request finishLoadingWithError:nil];
+            return;
+        }
+
+        ASSERT(start <= std::numeric_limits<int>::max());
+        ASSERT(end <= std::numeric_limits<int>::max());
+        RefPtr<ArrayBuffer> requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
+        RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
+        [dataRequest respondWithData:nsData.get()];
+    }
+
+    [request finishLoading];
+}
+#endif
+
 bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
 {
     String scheme = [[[avRequest request] URL] scheme];
@@ -1585,6 +1756,27 @@ bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetR
 
         m_keyURIToRequestMap.set(keyURI, avRequest);
         return true;
+#if ENABLE(ENCRYPTED_MEDIA_V2)
+    } else if (scheme == "clearkey") {
+        String keyID = [[[avRequest request] URL] resourceSpecifier];
+        StringView keyIDView(keyID);
+        CString utf8EncodedKeyId = UTF8Encoding().encode(keyIDView, URLEncodedEntitiesForUnencodables);
+
+        RefPtr<Uint8Array> initData = Uint8Array::create(utf8EncodedKeyId.length());
+        initData->setRange((JSC::Uint8Adaptor::Type*)utf8EncodedKeyId.data(), utf8EncodedKeyId.length(), 0);
+
+        auto keyData = player()->cachedKeyForKeyId(keyID);
+        if (keyData) {
+            fulfillRequestWithKeyData(avRequest, keyData.get());
+            return false;
+        }
+
+        if (!player()->keyNeeded(initData.get()))
+            return false;
+
+        m_keyURIToRequestMap.set(keyID, avRequest);
+        return true;
+#endif
     }
 #endif
 
@@ -1597,14 +1789,14 @@ bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetR
 bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForResponseToAuthenticationChallenge(NSURLAuthenticationChallenge* nsChallenge)
 {
 #if USE(CFNETWORK)
-    UNUSED_PARAM(nsChallenge);
-    // FIXME: <rdar://problem/15799844>
-    return false;
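+    // Under CFNetwork, build the challenge from its CF representation, with a client that forwards WebCore's response to the NS challenge sender.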
+    RefPtr<WebCoreNSURLAuthenticationChallengeClient> client = WebCoreNSURLAuthenticationChallengeClient::create(nsChallenge);
+    RetainPtr<CFURLAuthChallengeRef> cfChallenge = adoptCF([nsChallenge _createCFAuthChallenge]);
+    AuthenticationChallenge challenge(cfChallenge.get(), client.get());
 #else
     AuthenticationChallenge challenge(nsChallenge);
+#endif
 
     return player()->shouldWaitForResponseToAuthenticationChallenge(challenge);
-#endif
 }
 
 void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
@@ -1625,7 +1817,7 @@ void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLo
 
 bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
 {
-    return AVFoundationLibrary() && CoreMediaLibrary();
+    return AVFoundationLibrary() && isCoreMediaFrameworkAvailable();
 }
 
 MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
@@ -1637,6 +1829,15 @@ MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaT
     return timeValue;
 }
 
+double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
+{
+#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 1010
+    return 0;
+#else
+    return 5;
+#endif
+}
+
 void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
 {
     if (!m_videoLayer)
@@ -1691,7 +1892,7 @@ void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
         hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
 #endif
 
-        presentationSizeDidChange(firstEnabledVideoTrack ? IntSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : IntSize());
+        presentationSizeDidChange(firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize());
     } else {
         bool hasVideo = false;
         bool hasAudio = false;
@@ -1764,6 +1965,11 @@ void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
     if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
         characteristicsChanged();
 
+#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
+    if (m_provider)
+        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
+#endif
+
     setDelayCharacteristicsChangedNotification(false);
 }
 
@@ -1814,9 +2020,9 @@ void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* t
 
 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
 template <typename RefT, typename PassRefT>
-void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
+void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, const Vector<String>& characteristics, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
 {
-    group->updateOptions();
+    group->updateOptions(characteristics);
 
     // Only add selection options which do not have an associated persistant track.
     ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
@@ -1883,13 +2089,14 @@ void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
     determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
 
 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
+    Vector<String> characteristics = player()->preferredAudioCharacteristics();
     if (!m_audibleGroup) {
         if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
-            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group);
+            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, characteristics);
     }
 
     if (m_audibleGroup)
-        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
+        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, characteristics, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
 #endif
 
     for (auto& track : m_audioTracks)
@@ -1911,11 +2118,11 @@ void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
     if (!m_visualGroup) {
         if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
-            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group);
+            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, Vector<String>());
     }
 
     if (m_visualGroup)
-        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
+        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, Vector<String>(), &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
 #endif
 
     for (auto& track : m_audioTracks)
@@ -1974,8 +2181,11 @@ void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRep
 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
 AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
 {
-    if (!m_provider)
+    if (!m_provider) {
         m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
+        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
+    }
+
     return m_provider.get();
 }
 #endif
@@ -1985,17 +2195,17 @@ void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
     if (!m_avAsset)
         return;
 
-    setNaturalSize(roundedIntSize(m_cachedPresentationSize));
+    setNaturalSize(m_cachedPresentationSize);
 }
     
 bool MediaPlayerPrivateAVFoundationObjC::hasSingleSecurityOrigin() const 
 {
-    if (!m_avAsset)
+    if (!m_avAsset || [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] != AVKeyValueStatusLoaded)
         return false;
     
-    RefPtr<SecurityOrigin> resolvedOrigin = SecurityOrigin::create(URL([m_avAsset resolvedURL]));
-    RefPtr<SecurityOrigin> requestedOrigin = SecurityOrigin::createFromString(assetURL());
-    return resolvedOrigin->isSameSchemeHostPort(requestedOrigin.get());
+    Ref<SecurityOrigin> resolvedOrigin(SecurityOrigin::create(resolvedURL()));
+    Ref<SecurityOrigin> requestedOrigin(SecurityOrigin::createFromString(assetURL()));
+    return resolvedOrigin.get().isSameSchemeHostPort(&requestedOrigin.get());
 }
 
 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
@@ -2007,16 +2217,12 @@ void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
         return;
 
 #if USE(VIDEOTOOLBOX)
-#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1090
     NSDictionary* attributes = nil;
 #else
-    NSDictionary* attributes = @{ (NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_422YpCbCr8) };
-#endif
-#else
     NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                 nil];
 #endif
-    m_videoOutput = adoptNS([[getAVPlayerItemVideoOutputClass() alloc] initWithPixelBufferAttributes:attributes]);
+    m_videoOutput = adoptNS([allocAVPlayerItemVideoOutputInstance() initWithPixelBufferAttributes:attributes]);
     ASSERT(m_videoOutput);
 
     [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];
@@ -2127,7 +2333,7 @@ static RetainPtr<CGImageRef> createImageFromPixelBuffer(CVPixelBufferRef pixelBu
     CGDataProviderDirectCallbacks providerCallbacks = { 0, CVPixelBufferGetBytePointerCallback, CVPixelBufferReleaseBytePointerCallback, 0, CVPixelBufferReleaseInfoCallback };
     RetainPtr<CGDataProviderRef> provider = adoptCF(CGDataProviderCreateDirect(pixelBuffer, byteLength, &providerCallbacks));
 
-    return adoptCF(CGImageCreate(width, height, 8, 32, bytesPerRow, deviceRGBColorSpaceRef(), bitmapInfo, provider.get(), NULL, false, kCGRenderingIntentDefault));
+    return adoptCF(CGImageCreate(width, height, 8, 32, bytesPerRow, sRGBColorSpaceRef(), bitmapInfo, provider.get(), NULL, false, kCGRenderingIntentDefault));
 }
 
 void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
@@ -2141,9 +2347,9 @@ void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
         m_lastImage = createImageFromPixelBuffer(pixelBuffer.get());
 }
 
-void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext* context, const IntRect& outputRect)
+void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext& context, const FloatRect& outputRect)
 {
-    if (m_videoOutput && !m_lastImage)
+    if (m_videoOutput && !m_lastImage && !videoOutputHasAvailableFrame())
         waitForVideoOutputMediaDataWillChange();
 
     updateLastImage();
@@ -2157,13 +2363,13 @@ void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext* c
 
     LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(%p)", this);
 
-    GraphicsContextStateSaver stateSaver(*context);
+    GraphicsContextStateSaver stateSaver(context);
     FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
     AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
-    FloatRect transformedOutputRect = videoTransform.inverse().mapRect(outputRect);
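+    // A singular preferredTransform has no inverse; fall back to the identity transform.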
+    FloatRect transformedOutputRect = videoTransform.inverse().valueOr(AffineTransform()).mapRect(outputRect);
 
-    context->concatCTM(videoTransform);
-    context->drawNativeImage(m_lastImage.get(), imageRect.size(), ColorSpaceDeviceRGB, transformedOutputRect, imageRect);
+    context.concatCTM(videoTransform);
+    context.drawNativeImage(m_lastImage.get(), imageRect.size(), transformedOutputRect, imageRect);
 
     // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
     // video frame, destroy it now that it is no longer needed.
@@ -2172,6 +2378,260 @@ void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext* c
 
 }
 
+void MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput()
+{
+    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput(%p)", this);
+
+    if (!m_avPlayerItem || m_openGLVideoOutput)
+        return;
+
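+    // Request IOSurface-backed pixel buffers that can be bound directly as OpenGL(ES) framebuffer attachments.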
+#if PLATFORM(IOS)
+    NSDictionary* attributes = @{(NSString *)kCVPixelBufferIOSurfaceOpenGLESFBOCompatibilityKey: @YES};
+#else
+    NSDictionary* attributes = @{(NSString *)kCVPixelBufferIOSurfaceOpenGLFBOCompatibilityKey: @YES};
+#endif
+    m_openGLVideoOutput = adoptNS([allocAVPlayerItemVideoOutputInstance() initWithPixelBufferAttributes:attributes]);
+    ASSERT(m_openGLVideoOutput);
+
+    [m_avPlayerItem.get() addOutput:m_openGLVideoOutput.get()];
+
+    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput(%p) - returning %p", this, m_openGLVideoOutput.get());
+}
+
+void MediaPlayerPrivateAVFoundationObjC::destroyOpenGLVideoOutput()
+{
+    if (!m_openGLVideoOutput)
+        return;
+
+    if (m_avPlayerItem)
+        [m_avPlayerItem.get() removeOutput:m_openGLVideoOutput.get()];
+    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyOpenGLVideoOutput(%p) - destroying %p", this, m_openGLVideoOutput.get());
+
+    m_openGLVideoOutput = 0;
+}
+
+void MediaPlayerPrivateAVFoundationObjC::updateLastOpenGLImage()
+{
+    if (!m_openGLVideoOutput)
+        return;
+
+    CMTime currentTime = [m_openGLVideoOutput itemTimeForHostTime:CACurrentMediaTime()];
+    if (![m_openGLVideoOutput hasNewPixelBufferForItemTime:currentTime])
+        return;
+
+    m_lastOpenGLImage = adoptCF([m_openGLVideoOutput copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
+}
+
+#if !LOG_DISABLED
+
+#define STRINGIFY_PAIR(e) e, #e
+static std::map<uint32_t, const char*>& enumToStringMap()
+{
+    static NeverDestroyed<std::map<uint32_t, const char*>> map;
+    if (map.get().empty()) {
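+        // Populate on first use; emplace ignores duplicate keys (e.g. GL_RGB appears in both the internal-format and format groups).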
+        map.get().emplace(STRINGIFY_PAIR(GL_RGB));
+        map.get().emplace(STRINGIFY_PAIR(GL_RGBA));
+        map.get().emplace(STRINGIFY_PAIR(GL_LUMINANCE_ALPHA));
+        map.get().emplace(STRINGIFY_PAIR(GL_LUMINANCE));
+        map.get().emplace(STRINGIFY_PAIR(GL_ALPHA));
+        map.get().emplace(STRINGIFY_PAIR(GL_R8));
+        map.get().emplace(STRINGIFY_PAIR(GL_R16F));
+        map.get().emplace(STRINGIFY_PAIR(GL_R32F));
+        map.get().emplace(STRINGIFY_PAIR(GL_R8UI));
+        map.get().emplace(STRINGIFY_PAIR(GL_R8I));
+        map.get().emplace(STRINGIFY_PAIR(GL_R16UI));
+        map.get().emplace(STRINGIFY_PAIR(GL_R16I));
+        map.get().emplace(STRINGIFY_PAIR(GL_R32UI));
+        map.get().emplace(STRINGIFY_PAIR(GL_R32I));
+        map.get().emplace(STRINGIFY_PAIR(GL_RG8));
+        map.get().emplace(STRINGIFY_PAIR(GL_RG16F));
+        map.get().emplace(STRINGIFY_PAIR(GL_RG32F));
+        map.get().emplace(STRINGIFY_PAIR(GL_RG8UI));
+        map.get().emplace(STRINGIFY_PAIR(GL_RG8I));
+        map.get().emplace(STRINGIFY_PAIR(GL_RG16UI));
+        map.get().emplace(STRINGIFY_PAIR(GL_RG16I));
+        map.get().emplace(STRINGIFY_PAIR(GL_RG32UI));
+        map.get().emplace(STRINGIFY_PAIR(GL_RG32I));
+        map.get().emplace(STRINGIFY_PAIR(GL_RGB8));
+        map.get().emplace(STRINGIFY_PAIR(GL_SRGB8));
+        map.get().emplace(STRINGIFY_PAIR(GL_RGBA8));
+        map.get().emplace(STRINGIFY_PAIR(GL_SRGB8_ALPHA8));
+        map.get().emplace(STRINGIFY_PAIR(GL_RGBA4));
+        map.get().emplace(STRINGIFY_PAIR(GL_RGB10_A2));
+        map.get().emplace(STRINGIFY_PAIR(GL_DEPTH_COMPONENT16));
+        map.get().emplace(STRINGIFY_PAIR(GL_DEPTH_COMPONENT24));
+        map.get().emplace(STRINGIFY_PAIR(GL_DEPTH_COMPONENT32F));
+        map.get().emplace(STRINGIFY_PAIR(GL_DEPTH24_STENCIL8));
+        map.get().emplace(STRINGIFY_PAIR(GL_DEPTH32F_STENCIL8));
+        map.get().emplace(STRINGIFY_PAIR(GL_RED));
+        map.get().emplace(STRINGIFY_PAIR(GL_RG_INTEGER));
+        map.get().emplace(STRINGIFY_PAIR(GL_DEPTH_STENCIL));
+        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_BYTE));
+        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_SHORT_5_6_5));
+        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_SHORT_4_4_4_4));
+        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_SHORT_5_5_5_1));
+        map.get().emplace(STRINGIFY_PAIR(GL_BYTE));
+        map.get().emplace(STRINGIFY_PAIR(GL_HALF_FLOAT));
+        map.get().emplace(STRINGIFY_PAIR(GL_FLOAT));
+        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_SHORT));
+        map.get().emplace(STRINGIFY_PAIR(GL_SHORT));
+        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_INT));
+        map.get().emplace(STRINGIFY_PAIR(GL_INT));
+        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_INT_2_10_10_10_REV));
+        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_INT_24_8));
+        map.get().emplace(STRINGIFY_PAIR(GL_FLOAT_32_UNSIGNED_INT_24_8_REV));
+
+#if PLATFORM(IOS)
+        map.get().emplace(STRINGIFY_PAIR(GL_RED_INTEGER));
+        map.get().emplace(STRINGIFY_PAIR(GL_RGB_INTEGER));
+        map.get().emplace(STRINGIFY_PAIR(GL_RG8_SNORM));
+        map.get().emplace(STRINGIFY_PAIR(GL_RGB565));
+        map.get().emplace(STRINGIFY_PAIR(GL_RGB8_SNORM));
+        map.get().emplace(STRINGIFY_PAIR(GL_R11F_G11F_B10F));
+        map.get().emplace(STRINGIFY_PAIR(GL_RGB9_E5));
+        map.get().emplace(STRINGIFY_PAIR(GL_RGB16F));
+        map.get().emplace(STRINGIFY_PAIR(GL_RGB32F));
+        map.get().emplace(STRINGIFY_PAIR(GL_RGB8UI));
+        map.get().emplace(STRINGIFY_PAIR(GL_RGB8I));
+        map.get().emplace(STRINGIFY_PAIR(GL_RGB16UI));
+        map.get().emplace(STRINGIFY_PAIR(GL_RGB16I));
+        map.get().emplace(STRINGIFY_PAIR(GL_RGB32UI));
+        map.get().emplace(STRINGIFY_PAIR(GL_RGB32I));
+        map.get().emplace(STRINGIFY_PAIR(GL_RGBA8_SNORM));
+        map.get().emplace(STRINGIFY_PAIR(GL_RGBA16F));
+        map.get().emplace(STRINGIFY_PAIR(GL_RGBA32F));
+        map.get().emplace(STRINGIFY_PAIR(GL_RGBA8UI));
+        map.get().emplace(STRINGIFY_PAIR(GL_RGBA8I));
+        map.get().emplace(STRINGIFY_PAIR(GL_RGB10_A2UI));
+        map.get().emplace(STRINGIFY_PAIR(GL_RGBA16UI));
+        map.get().emplace(STRINGIFY_PAIR(GL_RGBA16I));
+        map.get().emplace(STRINGIFY_PAIR(GL_RGBA32I));
+        map.get().emplace(STRINGIFY_PAIR(GL_RGBA32UI));
+        map.get().emplace(STRINGIFY_PAIR(GL_RGB5_A1));
+        map.get().emplace(STRINGIFY_PAIR(GL_RG));
+        map.get().emplace(STRINGIFY_PAIR(GL_RGBA_INTEGER));
+        map.get().emplace(STRINGIFY_PAIR(GL_DEPTH_COMPONENT));
+        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_INT_10F_11F_11F_REV));
+        map.get().emplace(STRINGIFY_PAIR(GL_UNSIGNED_INT_5_9_9_9_REV));
+#endif
+    }
+    return map.get();
+}
+
+#endif // !LOG_DISABLED
+
+bool MediaPlayerPrivateAVFoundationObjC::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
+{
+    if (flipY || premultiplyAlpha)
+        return false;
+
+    ASSERT(context);
+
+    if (!m_openGLVideoOutput)
+        createOpenGLVideoOutput();
+
+    updateLastOpenGLImage();
+
+    if (!m_lastOpenGLImage)
+        return false;
+
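+    // Lazily create the Core Video texture cache used to wrap the pixel buffer in a GL texture.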
+    if (!m_openGLTextureCache) {
+#if PLATFORM(IOS)
+        CVOpenGLESTextureCacheRef cache = nullptr;
+        CVReturn error = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, nullptr, context->platformGraphicsContext3D(), nullptr, &cache);
+#else
+        CVOpenGLTextureCacheRef cache = nullptr;
+        CVReturn error = CVOpenGLTextureCacheCreate(kCFAllocatorDefault, nullptr, context->platformGraphicsContext3D(), CGLGetPixelFormat(context->platformGraphicsContext3D()), nullptr, &cache);
+#endif
+        if (error != kCVReturnSuccess)
+            return false;
+        m_openGLTextureCache = adoptCF(cache);
+    }
+
+    size_t width = CVPixelBufferGetWidth(m_lastOpenGLImage.get());
+    size_t height = CVPixelBufferGetHeight(m_lastOpenGLImage.get());
+
+#if PLATFORM(IOS)
+    CVOpenGLESTextureRef bareVideoTexture = nullptr;
+    if (kCVReturnSuccess != CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, m_openGLTextureCache.get(), m_lastOpenGLImage.get(), nullptr, outputTarget, internalFormat, width, height, format, type, level, &bareVideoTexture))
+        return false;
+    RetainPtr<CVOpenGLESTextureRef> videoTexture = adoptCF(bareVideoTexture);
+    Platform3DObject videoTextureName = CVOpenGLESTextureGetName(videoTexture.get());
+    GC3Denum videoTextureTarget = CVOpenGLESTextureGetTarget(videoTexture.get());
+#else
+    CVOpenGLTextureRef bareVideoTexture = nullptr;
+    if (kCVReturnSuccess != CVOpenGLTextureCacheCreateTextureFromImage(kCFAllocatorDefault, m_openGLTextureCache.get(), m_lastOpenGLImage.get(), nullptr, &bareVideoTexture))
+        return false;
+    RetainPtr<CVOpenGLTextureRef> videoTexture = adoptCF(bareVideoTexture);
+    Platform3DObject videoTextureName = CVOpenGLTextureGetName(videoTexture.get());
+    GC3Denum videoTextureTarget = CVOpenGLTextureGetTarget(videoTexture.get());
+#endif
+
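+    // Flush the texture cache on the main thread afterwards so resources it no
+    // longer needs can be recycled.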
+    auto weakThis = createWeakPtr();
+    dispatch_async(dispatch_get_main_queue(), [weakThis] {
+        if (!weakThis)
+            return;
+
+        if (auto cache = weakThis->m_openGLTextureCache.get())
+#if PLATFORM(IOS)
+            CVOpenGLESTextureCacheFlush(cache, 0);
+#else
+            CVOpenGLTextureCacheFlush(cache, 0);
+#endif
+    });
+
+    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::copyVideoTextureToPlatformTexture(%p) - internalFormat: %s, format: %s, type: %s", this, enumToStringMap()[internalFormat], enumToStringMap()[format], enumToStringMap()[type]);
+
+    // Save the original bound texture & framebuffer names so we can re-bind them after copying the video texture.
+    GC3Dint boundTexture = 0;
+    GC3Dint boundReadFramebuffer = 0;
+    context->getIntegerv(GraphicsContext3D::TEXTURE_BINDING_2D, &boundTexture);
+    context->getIntegerv(GraphicsContext3D::READ_FRAMEBUFFER_BINDING, &boundReadFramebuffer);
+
+    context->bindTexture(videoTextureTarget, videoTextureName);
+    context->texParameteri(GraphicsContext3D::TEXTURE_2D, GraphicsContext3D::TEXTURE_MIN_FILTER, GraphicsContext3D::LINEAR);
+    context->texParameterf(GraphicsContext3D::TEXTURE_2D, GraphicsContext3D::TEXTURE_WRAP_S, GraphicsContext3D::CLAMP_TO_EDGE);
+    context->texParameterf(GraphicsContext3D::TEXTURE_2D, GraphicsContext3D::TEXTURE_WRAP_T, GraphicsContext3D::CLAMP_TO_EDGE);
+
+    // Create a framebuffer object to represent the video texture's memory.
+    Platform3DObject readFramebuffer = context->createFramebuffer();
+
+    // Make that framebuffer the read source from which drawing commands will read pixels.
+    context->bindFramebuffer(GraphicsContext3D::READ_FRAMEBUFFER, readFramebuffer);
+
+    // Allocate uninitialized memory for the output texture.
+    context->bindTexture(outputTarget, outputTexture);
+    context->texParameteri(GraphicsContext3D::TEXTURE_2D, GraphicsContext3D::TEXTURE_MIN_FILTER, GraphicsContext3D::LINEAR);
+    context->texParameterf(GraphicsContext3D::TEXTURE_2D, GraphicsContext3D::TEXTURE_WRAP_S, GraphicsContext3D::CLAMP_TO_EDGE);
+    context->texParameterf(GraphicsContext3D::TEXTURE_2D, GraphicsContext3D::TEXTURE_WRAP_T, GraphicsContext3D::CLAMP_TO_EDGE);
+    context->texImage2DDirect(outputTarget, level, internalFormat, width, height, 0, format, type, nullptr);
+
+    // Attach the video texture to the framebuffer.
+    context->framebufferTexture2D(GraphicsContext3D::READ_FRAMEBUFFER, GraphicsContext3D::COLOR_ATTACHMENT0, videoTextureTarget, videoTextureName, level);
+
+    GC3Denum status = context->checkFramebufferStatus(GraphicsContext3D::READ_FRAMEBUFFER);
+    if (status != GraphicsContext3D::FRAMEBUFFER_COMPLETE)
+        return false;
+
+    // Copy texture from the read framebuffer (and thus the video texture) to the output texture.
+    context->copyTexImage2D(outputTarget, level, internalFormat, 0, 0, width, height, 0);
+
+    // Restore the previous texture and framebuffer bindings.
+    context->bindTexture(outputTarget, boundTexture);
+    context->bindFramebuffer(GraphicsContext3D::READ_FRAMEBUFFER, boundReadFramebuffer);
+
+    // Clean up after ourselves.
+    context->deleteFramebuffer(readFramebuffer);
+
+    return !context->getError();
+}
+
 PassNativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
 {
     updateLastImage();
@@ -2281,12 +2741,32 @@ RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::tak
     return m_keyURIToRequestMap.take(keyURI);
 }
 
-std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem)
+void MediaPlayerPrivateAVFoundationObjC::keyAdded()
+{
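+    // Fulfill every pending request we now have a key for, collecting the fulfilled
+    // key IDs first; removing map entries while iterating would not be safe.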
+    Vector<String> fulfilledKeyIds;
+
+    for (auto& pair : m_keyURIToRequestMap) {
+        const String& keyId = pair.key;
+        const RetainPtr<AVAssetResourceLoadingRequest>& request = pair.value;
+
+        auto keyData = player()->cachedKeyForKeyId(keyId);
+        if (!keyData)
+            continue;
+
+        fulfillRequestWithKeyData(request.get(), keyData.get());
+        fulfilledKeyIds.append(keyId);
+    }
+
+    for (auto& keyId : fulfilledKeyIds)
+        m_keyURIToRequestMap.remove(keyId);
+}
+
+std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem, CDMSessionClient* client)
 {
     if (!keySystemIsSupported(keySystem))
         return nullptr;
 
-    return std::make_unique<CDMSessionAVFoundationObjC>(this);
+    return std::make_unique<CDMSessionAVFoundationObjC>(this, client);
 }
 #endif
 
@@ -2327,6 +2807,17 @@ void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
 }
 #endif
 
+NSArray* MediaPlayerPrivateAVFoundationObjC::safeAVAssetTracksForAudibleMedia()
+{
+    if (!m_avAsset)
+        return nil;
+
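+    // Asking the asset for its tracks before the "tracks" key has finished loading
+    // may block, so only proceed once loading is complete.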
+    if ([m_avAsset.get() statusOfValueForKey:@"tracks" error:NULL] != AVKeyValueStatusLoaded)
+        return nil;
+
+    return [m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible];
+}
+
 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
 bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
 {
@@ -2519,14 +3010,24 @@ String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
     return m_languageOfPrimaryAudioTrack;
 }
 
-#if ENABLE(IOS_AIRPLAY) && PLATFORM(IOS)
+#if ENABLE(WIRELESS_PLAYBACK_TARGET)
 bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
 {
-    if (!m_avPlayer)
-        return false;
+    bool wirelessTarget = false;
+
+#if !PLATFORM(IOS)
+    if (m_playbackTarget) {
+        if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation)
+            wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
+        else
+            wirelessTarget = m_shouldPlayToPlaybackTarget && m_playbackTarget->hasActiveRoute();
+    }
+#else
+    wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
+#endif
 
-    bool wirelessTarget = [m_avPlayer.get() isExternalPlaybackActive];
     LOG(Media, "MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless(%p) - returning %s", this, boolString(wirelessTarget));
+
     return wirelessTarget;
 }
 
@@ -2535,6 +3036,7 @@ MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wire
     if (!m_avPlayer)
         return MediaPlayer::TargetTypeNone;
 
+#if PLATFORM(IOS)
     switch (wkExernalDeviceTypeForPlayer(m_avPlayer.get())) {
     case wkExternalPlaybackTypeNone:
         return MediaPlayer::TargetTypeNone;
@@ -2546,14 +3048,24 @@ MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wire
 
     ASSERT_NOT_REACHED();
     return MediaPlayer::TargetTypeNone;
+
+#else
+    return MediaPlayer::TargetTypeAirPlay;
+#endif
 }
 
 String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
 {
     if (!m_avPlayer)
         return emptyString();
-    
-    String wirelessTargetName = wkExernalDeviceDisplayNameForPlayer(m_avPlayer.get());
+
+    String wirelessTargetName;
+#if !PLATFORM(IOS)
+    if (m_playbackTarget)
+        wirelessTargetName = m_playbackTarget->deviceName();
+#else
+    wirelessTargetName = wkExernalDeviceDisplayNameForPlayer(m_avPlayer.get());
+#endif
     LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName(%p) - returning %s", this, wirelessTargetName.utf8().data());
 
     return wirelessTargetName;
@@ -2563,7 +3075,7 @@ bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
 {
     if (!m_avPlayer)
         return !m_allowsWirelessVideoPlayback;
-    
+
     m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
     LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled(%p) - returning %s", this, boolString(!m_allowsWirelessVideoPlayback));
 
@@ -2576,16 +3088,77 @@ void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool d
     m_allowsWirelessVideoPlayback = !disabled;
     if (!m_avPlayer)
         return;
-    
+
+    setDelayCallbacks(true);
     [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
+    setDelayCallbacks(false);
+}
+
+#if !PLATFORM(IOS)
+void MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
+{
+    m_playbackTarget = WTFMove(target);
+
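+    // Only an AVFoundation target carries an AVOutputContext; a mock target leaves it null.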
+    m_outputContext = m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation ? toMediaPlaybackTargetMac(m_playbackTarget.get())->outputContext() : nullptr;
+
+    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(%p) - target = %p, device name = %s", this, m_outputContext.get(), m_playbackTarget->deviceName().utf8().data());
+
+    if (!m_playbackTarget->hasActiveRoute())
+        setShouldPlayToPlaybackTarget(false);
 }
 
+void MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(bool shouldPlay)
+{
+    if (m_shouldPlayToPlaybackTarget == shouldPlay)
+        return;
+
+    m_shouldPlayToPlaybackTarget = shouldPlay;
+
+    if (!m_playbackTarget)
+        return;
+
+    if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation) {
+        AVOutputContext *newContext = shouldPlay ? m_outputContext.get() : nil;
+
+        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(%p) - target = %p, shouldPlay = %s", this, newContext, boolString(shouldPlay));
+
+        if (!m_avPlayer)
+            return;
+
+        RetainPtr<AVOutputContext> currentContext = m_avPlayer.get().outputContext;
+        if ((!newContext && !currentContext.get()) || [currentContext.get() isEqual:newContext])
+            return;
+
+        setDelayCallbacks(true);
+        m_avPlayer.get().outputContext = newContext;
+        setDelayCallbacks(false);
+
+        return;
+    }
+
+    ASSERT(m_playbackTarget->targetType() == MediaPlaybackTarget::Mock);
+
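+    // A mock target has no output context to hand to AVPlayer; just notify the
+    // client asynchronously that the wireless state changed.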
+    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(%p) - target = {Mock}, shouldPlay = %s", this, boolString(shouldPlay));
+
+    setDelayCallbacks(true);
+    auto weakThis = createWeakPtr();
+    scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis] {
+        if (!weakThis)
+            return;
+        weakThis->playbackTargetIsWirelessDidChange();
+    }));
+    setDelayCallbacks(false);
+}
+#endif // !PLATFORM(IOS)
+
 void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
 {
     if (!m_avPlayer)
         return;
 
+#if PLATFORM(IOS)
     [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:m_videoFullscreenLayer != nil];
+#endif
 }
 #endif
 
@@ -2716,14 +3289,14 @@ void MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(RetainPtr<NSArray> me
     if (seeking())
         return;
 
+    if (!m_metadataTrack)
+        processMetadataTrack();
+
     if (!metadata || [metadata isKindOfClass:[NSNull class]]) {
         m_metadataTrack->updatePendingCueEndTimes(mediaTime);
         return;
     }
 
-    if (!m_metadataTrack)
-        processMetadataTrack();
-
     // Set the duration of all incomplete cues before adding new ones.
     MediaTime earliestStartTime = MediaTime::positiveInfiniteTime();
     for (AVMetadataItemType *item in m_currentMetaData.get()) {
@@ -2817,7 +3390,7 @@ void MediaPlayerPrivateAVFoundationObjC::rateDidChange(double rate)
     rateChanged();
 }
     
-#if ENABLE(IOS_AIRPLAY)
+#if ENABLE(WIRELESS_PLAYBACK_TARGET)
 void MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange()
 {
     playbackTargetIsWirelessChanged();
@@ -2834,6 +3407,14 @@ void MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange(bool newVal
     m_cachedCanPlayFastReverse = newValue;
 }
 
+URL MediaPlayerPrivateAVFoundationObjC::resolvedURL() const
+{
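+    // Use the asset's resolved URL only once the key has loaded; until then fall
+    // back to the URL the base class was given.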
+    if (!m_avAsset || [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] != AVKeyValueStatusLoaded)
+        return MediaPlayerPrivateAVFoundation::resolvedURL();
+
+    return URL([m_avAsset resolvedURL]);
+}
+
 NSArray* assetMetadataKeyNames()
 {
     static NSArray* keys;
@@ -2844,6 +3425,7 @@ NSArray* assetMetadataKeyNames()
                     @"preferredVolume",
                     @"preferredRate",
                     @"playable",
+                    @"resolvedURL",
                     @"tracks",
                     @"availableMediaCharacteristicsWithMediaSelectionOptions",
                    nil];
@@ -2880,6 +3462,15 @@ NSArray* assetTrackMetadataKeyNames()
     return keys;
 }
 
+NSArray* playerKVOProperties()
+{
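+    // AVPlayer properties observed via KVO; keep this list in sync with the handling
+    // in -observeValueForKeyPath:ofObject:change:context:.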
+    static NSArray* keys = [[NSArray alloc] initWithObjects:@"rate",
+#if ENABLE(WIRELESS_PLAYBACK_TARGET)
+                            @"externalPlaybackActive", @"allowsExternalPlayback",
+#endif
+                            nil];
+    return keys;
+}
 } // namespace WebCore
 
 @implementation WebCoreAVFMovieObserver
@@ -2920,81 +3511,88 @@ NSArray* assetTrackMetadataKeyNames()
     UNUSED_PARAM(object);
     id newValue = [change valueForKey:NSKeyValueChangeNewKey];
 
-    LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - keyPath = %s", self, [keyPath UTF8String]);
-
     if (!m_callback)
         return;
 
     bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];
 
-    WTF::Function<void ()> function;
+#if !LOG_DISABLED
+    if (willChange)
+        LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - will change, keyPath = %s", self, [keyPath UTF8String]);
+    else {
+        RetainPtr<NSString> valueString = adoptNS([[NSString alloc] initWithFormat:@"%@", newValue]);
+        LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - did change, keyPath = %s, value = %s", self, [keyPath UTF8String], [valueString.get() UTF8String]);
+    }
+#endif
+
+    std::function<void ()> function;
 
     if (context == MediaPlayerAVFoundationObservationContextAVPlayerLayer) {
         if ([keyPath isEqualToString:@"readyForDisplay"])
-            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange, m_callback, [newValue boolValue]);
+            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange, m_callback, [newValue boolValue]);
     }
 
     if (context == MediaPlayerAVFoundationObservationContextPlayerItemTrack) {
         if ([keyPath isEqualToString:@"enabled"])
-            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange, m_callback, [newValue boolValue]);
+            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange, m_callback, [newValue boolValue]);
     }
 
     if (context == MediaPlayerAVFoundationObservationContextPlayerItem && willChange) {
         if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
-            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange, m_callback);
+            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange, m_callback);
         else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
-            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange, m_callback);
+            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange, m_callback);
         else if ([keyPath isEqualToString:@"playbackBufferFull"])
-            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange, m_callback);
+            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange, m_callback);
     }
 
     if (context == MediaPlayerAVFoundationObservationContextPlayerItem && !willChange) {
         // A value changed for an AVPlayerItem
         if ([keyPath isEqualToString:@"status"])
-            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange, m_callback, [newValue intValue]);
+            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange, m_callback, [newValue intValue]);
         else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
-            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange, m_callback, [newValue boolValue]);
+            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange, m_callback, [newValue boolValue]);
         else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
-            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange, m_callback, [newValue boolValue]);
+            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange, m_callback, [newValue boolValue]);
         else if ([keyPath isEqualToString:@"playbackBufferFull"])
-            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange, m_callback, [newValue boolValue]);
+            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange, m_callback, [newValue boolValue]);
         else if ([keyPath isEqualToString:@"asset"])
-            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::setAsset, m_callback, RetainPtr<NSArray>(newValue));
+            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::setAsset, m_callback, RetainPtr<id>(newValue));
         else if ([keyPath isEqualToString:@"loadedTimeRanges"])
-            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
+            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
         else if ([keyPath isEqualToString:@"seekableTimeRanges"])
-            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
+            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
         else if ([keyPath isEqualToString:@"tracks"])
-            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::tracksDidChange, m_callback, RetainPtr<NSArray>(newValue));
+            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::tracksDidChange, m_callback, RetainPtr<NSArray>(newValue));
         else if ([keyPath isEqualToString:@"hasEnabledAudio"])
-            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange, m_callback, [newValue boolValue]);
+            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange, m_callback, [newValue boolValue]);
         else if ([keyPath isEqualToString:@"presentationSize"])
-            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange, m_callback, FloatSize([newValue sizeValue]));
+            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange, m_callback, FloatSize([newValue sizeValue]));
         else if ([keyPath isEqualToString:@"duration"])
-            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::durationDidChange, m_callback, toMediaTime([newValue CMTimeValue]));
+            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::durationDidChange, m_callback, toMediaTime([newValue CMTimeValue]));
         else if ([keyPath isEqualToString:@"timedMetadata"] && newValue) {
             MediaTime now;
             CMTime itemTime = [(AVPlayerItemType *)object currentTime];
             if (CMTIME_IS_NUMERIC(itemTime))
                 now = std::max(toMediaTime(itemTime), MediaTime::zeroTime());
-            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::metadataDidArrive, m_callback, RetainPtr<NSArray>(newValue), now);
+            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::metadataDidArrive, m_callback, RetainPtr<NSArray>(newValue), now);
         } else if ([keyPath isEqualToString:@"canPlayFastReverse"])
-            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange, m_callback, [newValue boolValue]);
+            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange, m_callback, [newValue boolValue]);
         else if ([keyPath isEqualToString:@"canPlayFastForward"])
-            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange, m_callback, [newValue boolValue]);
+            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange, m_callback, [newValue boolValue]);
     }
 
     if (context == MediaPlayerAVFoundationObservationContextPlayer && !willChange) {
         // A value changed for an AVPlayer.
         if ([keyPath isEqualToString:@"rate"])
-            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::rateDidChange, m_callback, [newValue doubleValue]);
-#if ENABLE(IOS_AIRPLAY)
-        else if ([keyPath isEqualToString:@"externalPlaybackActive"])
-            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange, m_callback);
+            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::rateDidChange, m_callback, [newValue doubleValue]);
+#if ENABLE(WIRELESS_PLAYBACK_TARGET)
+        else if ([keyPath isEqualToString:@"externalPlaybackActive"] || [keyPath isEqualToString:@"allowsExternalPlayback"])
+            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange, m_callback);
 #endif
     }
     
-    if (function.isNull())
+    if (!function)
         return;
 
     auto weakThis = m_callback->createWeakPtr();