[MSE][Mac] Add AVSampleBufferRendererSynchronizer support.
authorjer.noble@apple.com <jer.noble@apple.com@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
Thu, 19 Dec 2013 07:01:59 +0000 (07:01 +0000)
committerjer.noble@apple.com <jer.noble@apple.com@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
Thu, 19 Dec 2013 07:01:59 +0000 (07:01 +0000)
https://bugs.webkit.org/show_bug.cgi?id=125954

Reviewed by Eric Carlson.

Instead of slaving all the various renderers' CMTimebases to one master timebase,
use AVSampleBufferRenderSynchronizer, which essentially does the same thing.

* platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.h:
* platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.mm:
(WebCore::CMTimebaseEffectiveRateChangedCallback): Added; call effectiveRateChanged().
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::MediaPlayerPrivateMediaSourceAVFObjC): Set up
    the synchronizer and all the observers.
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::~MediaPlayerPrivateMediaSourceAVFObjC): Tear down
    the same.
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::isAvailable): Require the
    AVSampleBufferRenderSynchronizer class.
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::playInternal): Convert Clock -> Synchronizer.
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::pauseInternal): Ditto.
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::paused): Ditto.
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::currentTimeDouble): Ditto.
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::seekInternal): Ditto.
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::setRateDouble): Ditto.
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::ensureLayer): Ditto.
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::destroyLayer): Ditto.
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::effectiveRateChanged): Ditto.
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::addDisplayLayer): Ditto.
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::removeDisplayLayer): Ditto.
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::addAudioRenderer): Ditto.
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::removeAudioRenderer): Ditto.

Drive-by fix: audio samples can't be subdivided, and video samples are
rarely combined, so remove the call to CMSampleBufferCallForEachSample:
* platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.mm:
(WebCore::SourceBufferPrivateAVFObjC::didProvideMediaDataForTrackID):

git-svn-id: https://svn.webkit.org/repository/webkit/trunk@160825 268f45cc-cd09-0410-ab3c-d52691b4dbfc

Source/WebCore/ChangeLog
Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.h
Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.mm
Source/WebCore/platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.mm

index c407c35..e6d4bf8 100644 (file)
@@ -1,3 +1,41 @@
+2013-12-18  Jer Noble  <jer.noble@apple.com>
+
+        [MSE][Mac] Add AVSampleBufferRendererSynchronizer support.
+        https://bugs.webkit.org/show_bug.cgi?id=125954
+
+        Reviewed by Eric Carlson.
+
+        Instead of slaving all the various renderers' CMTimebases to one master timebase,
+        use AVSampleBufferRenderSynchronizer, which essentially does the same thing.
+
+        * platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.h:
+        * platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.mm:
+        (WebCore::CMTimebaseEffectiveRateChangedCallback): Added; call effectiveRateChanged().
+        (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::MediaPlayerPrivateMediaSourceAVFObjC): Set up
+            the synchronizer and all the observers.
+        (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::~MediaPlayerPrivateMediaSourceAVFObjC): Tear down
+            the same.
+        (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::isAvailable): Require the
+            AVSampleBufferRenderSynchronizer class.
+        (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::playInternal): Convert Clock -> Synchronizer.
+        (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::pauseInternal): Ditto.
+        (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::paused): Ditto.
+        (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::currentTimeDouble): Ditto.
+        (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::seekInternal): Ditto.
+        (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::setRateDouble): Ditto.
+        (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::ensureLayer): Ditto.
+        (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::destroyLayer): Ditto.
+        (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::effectiveRateChanged): Ditto.
+        (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::addDisplayLayer): Ditto.
+        (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::removeDisplayLayer): Ditto.
+        (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::addAudioRenderer): Ditto.
+        (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::removeAudioRenderer): Ditto.
+
+        Drive-by fix: audio samples can't be subdivided, and video samples are
+        rarely combined, so remove the call to CMSampleBufferCallForEachSample:
+        * platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.mm:
+        (WebCore::SourceBufferPrivateAVFObjC::didProvideMediaDataForTrackID):
+
 2013-12-18  Andreas Kling  <akling@apple.com>
 
         CSS: Fall back to cache-less cascade when encountering explicitly inherited value.
index 46993d0..201818e 100644 (file)
@@ -36,6 +36,7 @@
 OBJC_CLASS AVAsset;
 OBJC_CLASS AVSampleBufferAudioRenderer;
 OBJC_CLASS AVSampleBufferDisplayLayer;
+OBJC_CLASS AVSampleBufferRenderSynchronizer;
 
 typedef struct OpaqueCMTimebase* CMTimebaseRef;
 
@@ -67,6 +68,8 @@ public:
     void setHasAvailableVideoFrame(bool flag) { m_hasAvailableVideoFrame = flag; }
     void durationChanged();
 
+    void effectiveRateChanged();
+
 private:
     // MediaPlayerPrivateInterface
     virtual void load(const String& url) OVERRIDE;
@@ -161,9 +164,12 @@ private:
     RetainPtr<AVAsset> m_asset;
     RetainPtr<AVSampleBufferDisplayLayer> m_sampleBufferDisplayLayer;
     Vector<RetainPtr<AVSampleBufferAudioRenderer>> m_sampleBufferAudioRenderers;
-    std::unique_ptr<PlatformClockCM> m_clock;
+    RetainPtr<AVSampleBufferRenderSynchronizer> m_synchronizer;
+    RetainPtr<id> m_timeJumpedObserver;
     MediaPlayer::NetworkState m_networkState;
     MediaPlayer::ReadyState m_readyState;
+    double m_rate;
+    bool m_playing;
     bool m_seeking;
     mutable bool m_loadingProgressed;
     bool m_hasAvailableVideoFrame;
index 02ba246..724832a 100644 (file)
@@ -30,6 +30,7 @@
 
 #import "HTMLMediaSource.h"
 #import "MediaSourcePrivateAVFObjC.h"
+#import "MediaTimeMac.h"
 #import "PlatformClockCM.h"
 #import "SoftLinking.h"
 #import <AVFoundation/AVSampleBufferDisplayLayer.h>
@@ -50,10 +51,21 @@ SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVAsset)
 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVURLAsset)
 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferAudioRenderer)
 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferDisplayLayer)
+SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferRenderSynchronizer)
 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVStreamDataParser)
 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVVideoPerformanceMetrics)
 
-SOFT_LINK(CoreMedia, FigReadOnlyTimebaseSetTargetTimebase, OSStatus, (CMTimebaseRef timebase, CMTimebaseRef newTargetTimebase), (timebase, newTargetTimebase))
+typedef struct opaqueCMNotificationCenter *CMNotificationCenterRef;
+typedef void (*CMNotificationCallback)(CMNotificationCenterRef inCenter, const void *inListener, CFStringRef inNotificationName, const void *inNotifyingObject, CFTypeRef inNotificationPayload);
+
+SOFT_LINK(CoreMedia, CMNotificationCenterGetDefaultLocalCenter, CMNotificationCenterRef, (void), ());
+SOFT_LINK(CoreMedia, CMNotificationCenterAddListener, OSStatus, (CMNotificationCenterRef center, const void* listener, CMNotificationCallback callback, CFStringRef notification, const void* object, UInt32 flags), (center, listener, callback, notification, object, flags))
+SOFT_LINK(CoreMedia, CMNotificationCenterRemoveListener, OSStatus, (CMNotificationCenterRef center, const void* listener, CMNotificationCallback callback, CFStringRef notification, const void* object), (center, listener, callback, notification, object))
+SOFT_LINK(CoreMedia, CMTimeGetSeconds, Float64, (CMTime time), (time))
+SOFT_LINK(CoreMedia, CMTimebaseGetTime, CMTime, (CMTimebaseRef timebase), (timebase))
+
+SOFT_LINK_CONSTANT(CoreMedia, kCMTimebaseNotification_EffectiveRateChanged, CFStringRef)
+#define kCMTimebaseNotification_EffectiveRateChanged getkCMTimebaseNotification_EffectiveRateChanged()
 
 #pragma mark -
 #pragma mark AVVideoPerformanceMetrics
@@ -81,23 +93,63 @@ SOFT_LINK(CoreMedia, FigReadOnlyTimebaseSetTargetTimebase, OSStatus, (CMTimebase
 @end
 #endif
 
+#pragma mark -
+#pragma mark AVSampleBufferRenderSynchronizer
+
+@interface AVSampleBufferRenderSynchronizer : NSObject
+- (CMTimebaseRef)timebase;
+- (float)rate;
+- (void)setRate:(float)rate;
+- (void)setRate:(float)rate time:(CMTime)time;
+- (NSArray *)renderers;
+- (void)addRenderer:(id)renderer;
+- (void)removeRenderer:(id)renderer atTime:(CMTime)time withCompletionHandler:(void (^)(BOOL didRemoveRenderer))completionHandler;
+- (id)addPeriodicTimeObserverForInterval:(CMTime)interval queue:(dispatch_queue_t)queue usingBlock:(void (^)(CMTime time))block;
+- (void)removeTimeObserver:(id)observer;
+@end
+
 namespace WebCore {
 
 #pragma mark -
 #pragma mark MediaPlayerPrivateMediaSourceAVFObjC
 
+static void CMTimebaseEffectiveRateChangedCallback(CMNotificationCenterRef, const void *listener, CFStringRef, const void *, CFTypeRef)
+{
+    MediaPlayerPrivateMediaSourceAVFObjC* player = (MediaPlayerPrivateMediaSourceAVFObjC*)listener;
+    callOnMainThread(bind(&MediaPlayerPrivateMediaSourceAVFObjC::effectiveRateChanged, player));
+}
+
 MediaPlayerPrivateMediaSourceAVFObjC::MediaPlayerPrivateMediaSourceAVFObjC(MediaPlayer* player)
     : m_player(player)
-    , m_clock(new PlatformClockCM())
+    , m_synchronizer(adoptNS([[getAVSampleBufferRenderSynchronizerClass() alloc] init]))
     , m_networkState(MediaPlayer::Empty)
     , m_readyState(MediaPlayer::HaveNothing)
+    , m_rate(1)
+    , m_playing(0)
     , m_seeking(false)
     , m_loadingProgressed(false)
 {
+    CMTimebaseRef timebase = [m_synchronizer timebase];
+    CMNotificationCenterRef nc = CMNotificationCenterGetDefaultLocalCenter();
+    CMNotificationCenterAddListener(nc, this, CMTimebaseEffectiveRateChangedCallback, kCMTimebaseNotification_EffectiveRateChanged, timebase, 0);
+
+    // addPeriodicTimeObserverForInterval: throws an exception if you pass a non-numeric CMTime, so just use
+    // an arbitrarily large time value of once an hour:
+    m_timeJumpedObserver = [m_synchronizer addPeriodicTimeObserverForInterval:toCMTime(MediaTime::createWithDouble(3600)) queue:dispatch_get_main_queue() usingBlock:^(CMTime){
+        if (m_seeking) {
+            m_seeking = false;
+            m_player->timeChanged();
+        }
+    }];
 }
 
 MediaPlayerPrivateMediaSourceAVFObjC::~MediaPlayerPrivateMediaSourceAVFObjC()
 {
+    CMTimebaseRef timebase = [m_synchronizer timebase];
+    CMNotificationCenterRef nc = CMNotificationCenterGetDefaultLocalCenter();
+    CMNotificationCenterRemoveListener(nc, this, CMTimebaseEffectiveRateChangedCallback, kCMTimebaseNotification_EffectiveRateChanged, timebase);
+
+    [m_synchronizer removeTimeObserver:m_timeJumpedObserver.get()];
 }
 
 #pragma mark -
@@ -116,7 +168,7 @@ PassOwnPtr<MediaPlayerPrivateInterface> MediaPlayerPrivateMediaSourceAVFObjC::cr
 
 bool MediaPlayerPrivateMediaSourceAVFObjC::isAvailable()
 {
-    return AVFoundationLibrary() && CoreMediaLibrary() && getAVStreamDataParserClass() && getAVSampleBufferAudioRendererClass();
+    return AVFoundationLibrary() && CoreMediaLibrary() && getAVStreamDataParserClass() && getAVSampleBufferAudioRendererClass() && getAVSampleBufferRenderSynchronizerClass();
 }
 
 static HashSet<String> mimeTypeCache()
@@ -206,8 +258,8 @@ void MediaPlayerPrivateMediaSourceAVFObjC::play()
 
 void MediaPlayerPrivateMediaSourceAVFObjC::playInternal()
 {
-    m_clock->start();
-    m_player->rateChanged();
+    m_playing = true;
+    [m_synchronizer setRate:m_rate];
 }
 
 void MediaPlayerPrivateMediaSourceAVFObjC::pause()
@@ -217,13 +269,13 @@ void MediaPlayerPrivateMediaSourceAVFObjC::pause()
 
 void MediaPlayerPrivateMediaSourceAVFObjC::pauseInternal()
 {
-    m_clock->stop();
-    m_player->rateChanged();
+    m_playing = false;
+    [m_synchronizer setRate:0];
 }
 
 bool MediaPlayerPrivateMediaSourceAVFObjC::paused() const
 {
-    return !m_clock->isRunning();
+    return !m_playing;
 }
 
 void MediaPlayerPrivateMediaSourceAVFObjC::setVolume(float volume)
@@ -271,7 +323,7 @@ double MediaPlayerPrivateMediaSourceAVFObjC::durationDouble() const
 
 double MediaPlayerPrivateMediaSourceAVFObjC::currentTimeDouble() const
 {
-    return m_clock->currentTime();
+    return CMTimeGetSeconds(CMTimebaseGetTime([m_synchronizer timebase]));
 }
 
 double MediaPlayerPrivateMediaSourceAVFObjC::startTimeDouble() const
@@ -293,9 +345,8 @@ void MediaPlayerPrivateMediaSourceAVFObjC::seekWithTolerance(double time, double
 void MediaPlayerPrivateMediaSourceAVFObjC::seekInternal(double time, double negativeThreshold, double positiveThreshold)
 {
     MediaTime seekTime = m_mediaSourcePrivate->seekToTime(MediaTime::createWithDouble(time), MediaTime::createWithDouble(positiveThreshold), MediaTime::createWithDouble(negativeThreshold));
-    m_clock->setCurrentMediaTime(seekTime);
-    m_seeking = false;
-    m_player->timeChanged();
+
+    [m_synchronizer setRate:(m_playing ? m_rate : 0) time:toCMTime(seekTime)];
 }
 
 bool MediaPlayerPrivateMediaSourceAVFObjC::seeking() const
@@ -305,8 +356,9 @@ bool MediaPlayerPrivateMediaSourceAVFObjC::seeking() const
 
 void MediaPlayerPrivateMediaSourceAVFObjC::setRateDouble(double rate)
 {
-    m_clock->setPlayRate(rate);
-    m_player->rateChanged();
+    m_rate = rate;
+    if (m_playing)
+        [m_synchronizer setRate:m_rate];
 }
 
 MediaPlayer::NetworkState MediaPlayerPrivateMediaSourceAVFObjC::networkState() const
@@ -437,7 +489,7 @@ void MediaPlayerPrivateMediaSourceAVFObjC::ensureLayer()
         return;
 
     m_sampleBufferDisplayLayer = adoptNS([[getAVSampleBufferDisplayLayerClass() alloc] init]);
-    [m_sampleBufferDisplayLayer setControlTimebase:m_clock->timebase()];
+    [m_synchronizer addRenderer:m_sampleBufferDisplayLayer.get()];
 }
 
 void MediaPlayerPrivateMediaSourceAVFObjC::destroyLayer()
@@ -445,7 +497,10 @@ void MediaPlayerPrivateMediaSourceAVFObjC::destroyLayer()
     if (!m_sampleBufferDisplayLayer)
         return;
 
-    [m_sampleBufferDisplayLayer setControlTimebase:0];
+    CMTime currentTime = CMTimebaseGetTime([m_synchronizer timebase]);
+    [m_synchronizer removeRenderer:m_sampleBufferDisplayLayer.get() atTime:currentTime withCompletionHandler:^(BOOL){
+        // No-op.
+    }];
     m_sampleBufferDisplayLayer = nullptr;
 }
 
@@ -454,6 +509,11 @@ void MediaPlayerPrivateMediaSourceAVFObjC::durationChanged()
     m_player->durationChanged();
 }
 
+void MediaPlayerPrivateMediaSourceAVFObjC::effectiveRateChanged()
+{
+    m_player->rateChanged();
+}
+
 void MediaPlayerPrivateMediaSourceAVFObjC::setReadyState(MediaPlayer::ReadyState readyState)
 {
     if (m_readyState == readyState)
@@ -479,7 +539,7 @@ void MediaPlayerPrivateMediaSourceAVFObjC::addDisplayLayer(AVSampleBufferDisplay
         return;
 
     m_sampleBufferDisplayLayer = displayLayer;
-    [m_sampleBufferDisplayLayer setControlTimebase:m_clock->timebase()];
+    [m_synchronizer addRenderer:m_sampleBufferDisplayLayer.get()];
     m_player->mediaPlayerClient()->mediaPlayerRenderingModeChanged(m_player);
 
     // FIXME: move this somewhere appropriate:
@@ -491,6 +551,11 @@ void MediaPlayerPrivateMediaSourceAVFObjC::removeDisplayLayer(AVSampleBufferDisp
     if (displayLayer != m_sampleBufferDisplayLayer)
         return;
 
+    CMTime currentTime = CMTimebaseGetTime([m_synchronizer timebase]);
+    [m_synchronizer removeRenderer:m_sampleBufferDisplayLayer.get() atTime:currentTime withCompletionHandler:^(BOOL){
+        // No-op.
+    }];
+
     m_sampleBufferDisplayLayer = nullptr;
     m_player->mediaPlayerClient()->mediaPlayerRenderingModeChanged(m_player);
 }
@@ -501,7 +566,7 @@ void MediaPlayerPrivateMediaSourceAVFObjC::addAudioRenderer(AVSampleBufferAudioR
         return;
 
     m_sampleBufferAudioRenderers.append(audioRenderer);
-    FigReadOnlyTimebaseSetTargetTimebase([audioRenderer timebase], m_clock->timebase());
+    [m_synchronizer addRenderer:audioRenderer];
     m_player->mediaPlayerClient()->mediaPlayerRenderingModeChanged(m_player);
 }
 
@@ -511,6 +576,11 @@ void MediaPlayerPrivateMediaSourceAVFObjC::removeAudioRenderer(AVSampleBufferAud
     if (pos == notFound)
         return;
 
+    CMTime currentTime = CMTimebaseGetTime([m_synchronizer timebase]);
+    [m_synchronizer removeRenderer:audioRenderer atTime:currentTime withCompletionHandler:^(BOOL){
+        // No-op.
+    }];
+
     m_sampleBufferAudioRenderers.remove(pos);
     m_player->mediaPlayerClient()->mediaPlayerRenderingModeChanged(m_player);
 }
index 7a6d4ff..2102818 100644 (file)
@@ -358,18 +358,11 @@ struct ProcessCodedFrameInfo {
     const String& mediaType;
 };
 
-static OSStatus callProcessCodedFrameForEachSample(CMSampleBufferRef sampleBuffer, CMItemCount, void *refcon)
-{
-    ProcessCodedFrameInfo* info = static_cast<ProcessCodedFrameInfo*>(refcon);
-    return info->sourceBuffer->processCodedFrame(info->trackID, sampleBuffer, info->mediaType) ? noErr : paramErr;
-}
-
 void SourceBufferPrivateAVFObjC::didProvideMediaDataForTrackID(int trackID, CMSampleBufferRef sampleBuffer, const String& mediaType, unsigned flags)
 {
     UNUSED_PARAM(flags);
 
-    ProcessCodedFrameInfo info = {this, trackID, mediaType};
-    CMSampleBufferCallForEachSample(sampleBuffer, &callProcessCodedFrameForEachSample, &info);
+    processCodedFrame(trackID, sampleBuffer, mediaType);
 }
 
 bool SourceBufferPrivateAVFObjC::processCodedFrame(int trackID, CMSampleBufferRef sampleBuffer, const String&)