[Mac] 10X slower than Chrome when drawing a video into a canvas
authorjer.noble@apple.com <jer.noble@apple.com@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
Tue, 19 Nov 2013 21:37:02 +0000 (21:37 +0000)
committerjer.noble@apple.com <jer.noble@apple.com@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
Tue, 19 Nov 2013 21:37:02 +0000 (21:37 +0000)
https://bugs.webkit.org/show_bug.cgi?id=124599

Reviewed by Dean Jackson.

Improve performance by creating a CGImageRef which directly references the CVPixelBuffer provided
by AVPlayerItemVideoOutput:
* platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.h:
* platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.mm:
(WebCore::CVPixelBufferGetBytePointerCallback):
(WebCore::CVPixelBufferReleaseBytePointerCallback):
(WebCore::CVPixelBufferReleaseInfoCallback):
(WebCore::createImageFromPixelBuffer):
(WebCore::MediaPlayerPrivateAVFoundationObjC::updateLastImage):

Additionally, when asked to paint with an AVPlayerItemVideoOutput, block until the output notifies
its delegate that a pixel buffer is available:
* platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.h:
* platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.mm:
(WebCore::globalPullDelegateQueue):
(WebCore::MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC):
(WebCore::MediaPlayerPrivateAVFoundationObjC::createVideoOutput):
(WebCore::MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput):
(WebCore::MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime):
(WebCore::MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange):
(WebCore::MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange):
(-[WebCoreAVFPullDelegate initWithCallback:]):
(-[WebCoreAVFPullDelegate outputMediaDataWillChange:]):
(-[WebCoreAVFPullDelegate outputSequenceWasFlushed:]):

To further optimize video -> canvas drawing, add a method which can return a PassNativeImage to be
drawn directly onto the canvas, rather than rendering into an intermediate context:
* html/HTMLVideoElement.cpp:
(WebCore::HTMLVideoElement::nativeImageForCurrentTime):
* html/HTMLVideoElement.h:
* html/canvas/CanvasRenderingContext2D.cpp:
(WebCore::CanvasRenderingContext2D::drawImage):
* platform/graphics/MediaPlayer.cpp:
(WebCore::MediaPlayer::nativeImageForCurrentTime):
* platform/graphics/MediaPlayer.h:
* platform/graphics/MediaPlayerPrivate.h:
(WebCore::MediaPlayerPrivateInterface::nativeImageForCurrentTime):

git-svn-id: https://svn.webkit.org/repository/webkit/trunk@159518 268f45cc-cd09-0410-ab3c-d52691b4dbfc

Source/WebCore/ChangeLog
Source/WebCore/html/HTMLVideoElement.cpp
Source/WebCore/html/HTMLVideoElement.h
Source/WebCore/html/canvas/CanvasRenderingContext2D.cpp
Source/WebCore/platform/graphics/MediaPlayer.cpp
Source/WebCore/platform/graphics/MediaPlayer.h
Source/WebCore/platform/graphics/MediaPlayerPrivate.h
Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.h
Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.mm

index dadcaec..280ccb3 100644 (file)
@@ -1,3 +1,48 @@
+2013-11-19  Jer Noble  <jer.noble@apple.com>
+
+        [Mac] 10X slower than Chrome when drawing a video into a canvas
+        https://bugs.webkit.org/show_bug.cgi?id=124599
+
+        Reviewed by Dean Jackson.
+
+        Improve performance by creating a CGImageRef which directly references the CVPixelBuffer provided
+        by AVPlayerItemVideoOutput:
+        * platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.h:
+        * platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.mm:
+        (WebCore::CVPixelBufferGetBytePointerCallback):
+        (WebCore::CVPixelBufferReleaseBytePointerCallback):
+        (WebCore::CVPixelBufferReleaseInfoCallback):
+        (WebCore::createImageFromPixelBuffer):
+        (WebCore::MediaPlayerPrivateAVFoundationObjC::updateLastImage):
+
+        Additionally, when asked to paint with an AVPlayerItemVideoOutput, block until the output notifies
+        its delegate that a pixel buffer is available:
+        * platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.h:
+        * platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.mm:
+        (WebCore::globalPullDelegateQueue):
+        (WebCore::MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC):
+        (WebCore::MediaPlayerPrivateAVFoundationObjC::createVideoOutput):
+        (WebCore::MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput):
+        (WebCore::MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime):
+        (WebCore::MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange):
+        (WebCore::MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange):
+        (-[WebCoreAVFPullDelegate initWithCallback:]):
+        (-[WebCoreAVFPullDelegate outputMediaDataWillChange:]):
+        (-[WebCoreAVFPullDelegate outputSequenceWasFlushed:]):
+        
+        To further optimize video -> canvas drawing, add a method which can return a PassNativeImage to be
+        drawn directly onto the canvas, rather than rendering into an intermediary context:
+        * html/HTMLVideoElement.cpp:
+        (WebCore::HTMLVideoElement::nativeImageForCurrentTime):
+        * html/HTMLVideoElement.h:
+        * html/canvas/CanvasRenderingContext2D.cpp:
+        (WebCore::CanvasRenderingContext2D::drawImage):
+        * platform/graphics/MediaPlayer.cpp:
+        (WebCore::MediaPlayer::nativeImageForCurrentTime):
+        * platform/graphics/MediaPlayer.h:
+        * platform/graphics/MediaPlayerPrivate.h:
+        (WebCore::MediaPlayerPrivateInterface::nativeImageForCurrentTime):
+
 2013-11-19  Brady Eidson  <beidson@apple.com>
 
         Consolidate IDBBackingStore*Interface and IDBBackingStore*LevelDB
index 09315b4..ca72150 100644 (file)
@@ -268,6 +268,14 @@ bool HTMLVideoElement::hasAvailableVideoFrame() const
     return player()->hasVideo() && player()->hasAvailableVideoFrame();
 }
 
+PassNativeImagePtr HTMLVideoElement::nativeImageForCurrentTime()
+{
+    if (!player())
+        return 0;
+
+    return player()->nativeImageForCurrentTime();
+}
+
 void HTMLVideoElement::webkitEnterFullscreen(ExceptionCode& ec)
 {
     if (isFullscreen())
index ef57f1d..284544e 100644 (file)
@@ -63,6 +63,8 @@ public:
     // Used by canvas to gain raw pixel access
     void paintCurrentFrameInContext(GraphicsContext*, const IntRect&);
 
+    PassNativeImagePtr nativeImageForCurrentTime();
+
     // Used by WebGL to do GPU-GPU textures copy if possible.
     // See more details at MediaPlayer::copyVideoTextureToPlatformTexture() defined in Source/WebCore/platform/graphics/MediaPlayer.h.
     bool copyVideoTextureToPlatformTexture(GraphicsContext3D*, Platform3DObject texture, GC3Dint level, GC3Denum type, GC3Denum internalFormat, bool premultiplyAlpha, bool flipY);
index 5ba78a0..7890eb8 100755 (executable)
@@ -1450,6 +1450,18 @@ void CanvasRenderingContext2D::drawImage(HTMLVideoElement* video, const FloatRec
 
     checkOrigin(video);
 
+#if USE(CG)
+    if (PassNativeImagePtr image = video->nativeImageForCurrentTime()) {
+        c->drawNativeImage(image, FloatSize(video->videoWidth(), video->videoHeight()), ColorSpaceDeviceRGB, dstRect, srcRect);
+        if (rectContainsCanvas(dstRect))
+            didDrawEntireCanvas();
+        else
+            didDraw(dstRect);
+
+        return;
+    }
+#endif
+
     GraphicsContextStateSaver stateSaver(*c);
     c->clip(dstRect);
     c->translate(dstRect.x(), dstRect.y());
index c48fb8a..6a88e43 100644 (file)
@@ -751,6 +751,11 @@ bool MediaPlayer::copyVideoTextureToPlatformTexture(GraphicsContext3D* context,
     return m_private->copyVideoTextureToPlatformTexture(context, texture, level, type, internalFormat, premultiplyAlpha, flipY);
 }
 
+PassNativeImagePtr MediaPlayer::nativeImageForCurrentTime()
+{
+    return m_private->nativeImageForCurrentTime();
+}
+
 MediaPlayer::SupportsType MediaPlayer::supportsType(const MediaEngineSupportParameters& parameters, const MediaPlayerSupportsTypeClient* client)
 {
     // 4.8.10.3 MIME types - The canPlayType(type) method must return the empty string if type is a type that the 
index f63eeb1..2a696f2 100644 (file)
@@ -37,6 +37,7 @@
 #include "IntRect.h"
 #include "URL.h"
 #include "LayoutRect.h"
+#include "NativeImagePtr.h"
 #include "Timer.h"
 #include "VideoTrackPrivate.h"
 #include <runtime/Uint8Array.h>
@@ -376,6 +377,8 @@ public:
     // http://src.chromium.org/viewvc/chrome/trunk/src/gpu/command_buffer/service/gles2_cmd_copy_texture_chromium.cc via shaders.
     bool copyVideoTextureToPlatformTexture(GraphicsContext3D*, Platform3DObject texture, GC3Dint level, GC3Denum type, GC3Denum internalFormat, bool premultiplyAlpha, bool flipY);
 
+    PassNativeImagePtr nativeImageForCurrentTime();
+
     enum NetworkState { Empty, Idle, Loading, Loaded, FormatError, NetworkError, DecodeError };
     NetworkState networkState();
 
index 4f98b56..3d31a45 100644 (file)
@@ -121,6 +121,7 @@ public:
 
     virtual void paintCurrentFrameInContext(GraphicsContext* c, const IntRect& r) { paint(c, r); }
     virtual bool copyVideoTextureToPlatformTexture(GraphicsContext3D*, Platform3DObject, GC3Dint, GC3Denum, GC3Denum, bool, bool) { return false; }
+    virtual PassNativeImagePtr nativeImageForCurrentTime() { return 0; }
 
     virtual void setPreload(MediaPlayer::Preload) { }
 
index f5a78c8..1562944 100644 (file)
@@ -43,6 +43,7 @@ OBJC_CLASS AVPlayerLayer;
 OBJC_CLASS AVURLAsset;
 OBJC_CLASS NSArray;
 OBJC_CLASS WebCoreAVFMovieObserver;
+OBJC_CLASS WebCoreAVFPullDelegate;
 
 typedef struct objc_object* id;
 
@@ -106,6 +107,10 @@ public:
     void durationDidChange(double);
     void rateDidChange(double);
 
+#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
+    void outputMediaDataWillChange(AVPlayerItemVideoOutput*);
+#endif
+
 private:
     MediaPlayerPrivateAVFoundationObjC(MediaPlayer*);
 
@@ -176,8 +181,11 @@ private:
     void createVideoOutput();
     void destroyVideoOutput();
     RetainPtr<CVPixelBufferRef> createPixelBuffer();
+    void updateLastImage();
     bool videoOutputHasAvailableFrame();
     void paintWithVideoOutput(GraphicsContext*, const IntRect&);
+    virtual PassNativeImagePtr nativeImageForCurrentTime() OVERRIDE;
+    void waitForVideoOutputMediaDataWillChange();
 #endif
 
 #if ENABLE(ENCRYPTED_MEDIA)
@@ -218,7 +226,9 @@ private:
     RetainPtr<AVAssetImageGenerator> m_imageGenerator;
 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
     RetainPtr<AVPlayerItemVideoOutput> m_videoOutput;
-    RetainPtr<CVPixelBufferRef> m_lastImage;
+    RetainPtr<WebCoreAVFPullDelegate> m_videoOutputDelegate;
+    RetainPtr<CGImageRef> m_lastImage;
+    dispatch_semaphore_t m_videoOutputSemaphore;
 #endif
 
 #if USE(VIDEOTOOLBOX)
index 1dbbb3a..ec07c99 100644 (file)
@@ -115,7 +115,6 @@ SOFT_LINK_CONSTANT(CoreMedia, kCMTimeZero, CMTime)
 
 #define AVPlayer getAVPlayerClass()
 #define AVPlayerItem getAVPlayerItemClass()
-#define AVPlayerItemVideoOutput getAVPlayerItemVideoOutputClass()
 #define AVPlayerLayer getAVPlayerLayerClass()
 #define AVURLAsset getAVURLAssetClass()
 #define AVAssetImageGenerator getAVAssetImageGeneratorClass()
@@ -195,6 +194,17 @@ enum MediaPlayerAVFoundationObservationContext {
 @end
 #endif
 
+#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
+@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
+    MediaPlayerPrivateAVFoundationObjC* m_callback;
+    dispatch_semaphore_t m_semaphore;
+}
+- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
+- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
+- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
+@end
+#endif
+
 namespace WebCore {
 
 static NSArray *assetMetadataKeyNames();
@@ -228,6 +238,18 @@ static dispatch_queue_t globalLoaderDelegateQueue()
 }
 #endif
 
+#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
+static dispatch_queue_t globalPullDelegateQueue()
+{
+    static dispatch_queue_t globalQueue;
+    static dispatch_once_t onceToken;
+    dispatch_once(&onceToken, ^{
+        globalQueue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
+    });
+    return globalQueue;
+}
+#endif
+
 PassOwnPtr<MediaPlayerPrivateInterface> MediaPlayerPrivateAVFoundationObjC::create(MediaPlayer* player)
 { 
     return adoptPtr(new MediaPlayerPrivateAVFoundationObjC(player));
@@ -244,6 +266,10 @@ MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlay
     , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
     , m_videoFrameHasDrawn(false)
     , m_haveCheckedPlayability(false)
+#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
+    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
+    , m_videoOutputSemaphore(0)
+#endif
 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
     , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
 #endif
@@ -1222,11 +1248,15 @@ void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
     NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                 nil];
 #endif
-    m_videoOutput = adoptNS([[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:attributes]);
+    m_videoOutput = adoptNS([[getAVPlayerItemVideoOutputClass() alloc] initWithPixelBufferAttributes:attributes]);
     ASSERT(m_videoOutput);
 
+    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];
+
     [m_avPlayerItem.get() addOutput:m_videoOutput.get()];
 
+    waitForVideoOutputMediaDataWillChange();
+
     LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p) - returning %p", this, m_videoOutput.get());
 }
 
@@ -1297,7 +1327,44 @@ bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
     return [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]];
 }
 
-void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext* context, const IntRect& rect)
+static const void* CVPixelBufferGetBytePointerCallback(void* info)
+{
+    CVPixelBufferRef pixelBuffer = (CVPixelBufferRef)info;
+    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
+    return CVPixelBufferGetBaseAddress(pixelBuffer);
+}
+
+static void CVPixelBufferReleaseBytePointerCallback(void *info, const void *)
+{
+    CVPixelBufferRef pixelBuffer = (CVPixelBufferRef)info;
+    CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
+}
+
+static void CVPixelBufferReleaseInfoCallback(void *info)
+{
+    CVPixelBufferRef pixelBuffer = (CVPixelBufferRef)info;
+    CFRelease(pixelBuffer);
+}
+
+static RetainPtr<CGImageRef> createImageFromPixelBuffer(CVPixelBufferRef pixelBuffer)
+{
+    // pixelBuffer will be of type kCVPixelFormatType_32BGRA.
+    ASSERT(CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_32BGRA);
+
+    size_t width = CVPixelBufferGetWidth(pixelBuffer);
+    size_t height = CVPixelBufferGetHeight(pixelBuffer);
+    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
+    size_t byteLength = CVPixelBufferGetDataSize(pixelBuffer);
+    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaFirst;
+
+    CFRetain(pixelBuffer); // Balanced by CVPixelBufferReleaseInfoCallback in providerCallbacks.
+    CGDataProviderDirectCallbacks providerCallbacks = { 0, CVPixelBufferGetBytePointerCallback, CVPixelBufferReleaseBytePointerCallback, 0, CVPixelBufferReleaseInfoCallback };
+    RetainPtr<CGDataProviderRef> provider = adoptCF(CGDataProviderCreateDirect(pixelBuffer, byteLength, &providerCallbacks));
+
+    return adoptCF(CGImageCreate(width, height, 8, 32, bytesPerRow, deviceRGBColorSpaceRef(), bitmapInfo, provider.get(), NULL, false, kCGRenderingIntentDefault));
+}
+
+void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
 {
     RetainPtr<CVPixelBufferRef> pixelBuffer = createPixelBuffer();
 
@@ -1305,7 +1372,12 @@ void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext* c
     // for the requested time has already been retrieved. In this case, the last valid image (if any)
     // should be displayed.
     if (pixelBuffer)
-        m_lastImage = pixelBuffer;
+        m_lastImage = createImageFromPixelBuffer(pixelBuffer.get());
+}
+
+void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext* context, const IntRect& rect)
+{
+    updateLastImage();
 
     if (m_lastImage) {
         GraphicsContextStateSaver stateSaver(*context);
@@ -1313,22 +1385,9 @@ void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext* c
         context->scale(FloatSize(1.0f, -1.0f));
 
         CGRect outputRect = { CGPointZero, rect.size() };
-        CGRect imageRect = CGRectMake(0, 0, CVPixelBufferGetWidth(m_lastImage.get()), CVPixelBufferGetHeight(m_lastImage.get()));
-#if PLATFORM(IOS)
-        // ciContext does not use a RetainPtr for results of contextWithCGContext:, as the returned value
-        // is autoreleased, and there is no non-autoreleased version of that function.
-        CIContext* ciContext = [getCIContextClass() contextWithOptions:nil];
-        RetainPtr<CIImage> image = adoptNS([[getCIImageClass() alloc] initWithCVPixelBuffer:m_lastImage.get()]);
-        RetainPtr<CGImage> cgImage = adoptCF([ciContext createCGImage:image.get() fromRect:imageRect]);
-        context->drawNativeImage(cgImage.get(), FloatSize(imageRect.size), ColorSpaceDeviceRGB, FloatRect(outputRect), FloatRect(imageRect), 1);
-#else
+        CGRect imageRect = CGRectMake(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
 
-        // ciContext does not use a RetainPtr for results of contextWithCGContext:, as the returned value
-        // is autoreleased, and there is no non-autoreleased version of that function.
-        CIContext* ciContext = [CIContext contextWithCGContext:context->platformContext() options:nil];
-        RetainPtr<CIImage> image = adoptNS([[CIImage alloc] initWithCVImageBuffer:m_lastImage.get() options:@{ kCIImageColorSpace: (id)deviceRGBColorSpaceRef() }]);
-        [ciContext drawImage:image.get() inRect:outputRect fromRect:imageRect];
-#endif
+        context->drawNativeImage(m_lastImage.get(), FloatSize(imageRect.size), ColorSpaceDeviceRGB, FloatRect(outputRect), FloatRect(imageRect));
 
         // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
         // video frame, destroy it now that it is no longer needed.
@@ -1336,6 +1395,31 @@ void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext* c
             destroyImageGenerator();
     }
 }
+
+PassNativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
+{
+    updateLastImage();
+    return m_lastImage.get();
+}
+
+void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
+{
+    if (!m_videoOutputSemaphore)
+        m_videoOutputSemaphore = dispatch_semaphore_create(0);
+
+    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];
+
+    // Wait for 1 second.
+    long result = dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(0, 1 * NSEC_PER_SEC));
+
+    if (result)
+        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange(%p) timed out", this);
+}
+
+void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutput*)
+{
+    dispatch_semaphore_signal(m_videoOutputSemaphore);
+}
 #endif
 
 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
@@ -1987,4 +2071,27 @@ NSArray* itemKVOProperties()
 @end
 #endif
 
+#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
+@implementation WebCoreAVFPullDelegate
+- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
+{
+    self = [super init];
+    if (self)
+        m_callback = callback;
+    return self;
+}
+
+- (void)outputMediaDataWillChange:(AVPlayerItemVideoOutput *)output
+{
+    m_callback->outputMediaDataWillChange(output);
+}
+
+- (void)outputSequenceWasFlushed:(AVPlayerItemVideoOutput *)output
+{
+    UNUSED_PARAM(output);
+    // No-op.
+}
+@end
+#endif
+
 #endif