+2013-12-18 Jer Noble <jer.noble@apple.com>
+
+ [MSE][Mac] Add AVSampleBufferRenderSynchronizer support.
+ https://bugs.webkit.org/show_bug.cgi?id=125954
+
+ Reviewed by Eric Carlson.
+
+ Instead of slaving all the various renderers' CMTimebases to one master timebase,
+ use AVSampleBufferRenderSynchronizer, which essentially does the same thing.
+
+ * platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.h:
+ * platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.mm:
+ (WebCore::CMTimebaseEffectiveRateChangedCallback): Added; call effectiveRateChanged().
+ (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::MediaPlayerPrivateMediaSourceAVFObjC): Set up
+ the synchronizer and all the observers.
+ (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::~MediaPlayerPrivateMediaSourceAVFObjC): Tear down
+ the same.
+ (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::isAvailable): Require the
+ AVSampleBufferRenderSynchronizer class.
+ (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::playInternal): Convert Clock -> Synchronizer.
+ (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::pauseInternal): Ditto.
+ (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::paused): Ditto.
+ (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::currentTimeDouble): Ditto.
+ (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::seekInternal): Ditto.
+ (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::setRateDouble): Ditto.
+ (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::ensureLayer): Ditto.
+ (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::destroyLayer): Ditto.
+ (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::effectiveRateChanged): Ditto.
+ (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::addDisplayLayer): Ditto.
+ (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::removeDisplayLayer): Ditto.
+ (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::addAudioRenderer): Ditto.
+ (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::removeAudioRenderer): Ditto.
+
+ Drive-by fix; audio samples can't be subdivided, and video samples are
+ rarely combined, so remove the call to CMSampleBufferCallForEachSample:
+ * platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.mm:
+ (WebCore::SourceBufferPrivateAVFObjC::didProvideMediaDataForTrackID):
+
2013-12-18 Andreas Kling <akling@apple.com>
CSS: Fall back to cache-less cascade when encountering explicitly inherited value.
#import "HTMLMediaSource.h"
#import "MediaSourcePrivateAVFObjC.h"
+#import "MediaTimeMac.h"
#import "PlatformClockCM.h"
#import "SoftLinking.h"
#import <AVFoundation/AVSampleBufferDisplayLayer.h>
SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVURLAsset)
SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferAudioRenderer)
SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferDisplayLayer)
+SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferRenderSynchronizer)
SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVStreamDataParser)
SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVVideoPerformanceMetrics)
-SOFT_LINK(CoreMedia, FigReadOnlyTimebaseSetTargetTimebase, OSStatus, (CMTimebaseRef timebase, CMTimebaseRef newTargetTimebase), (timebase, newTargetTimebase))
+typedef struct opaqueCMNotificationCenter *CMNotificationCenterRef;
+typedef void (*CMNotificationCallback)(CMNotificationCenterRef inCenter, const void *inListener, CFStringRef inNotificationName, const void *inNotifyingObject, CFTypeRef inNotificationPayload);
+
+SOFT_LINK(CoreMedia, CMNotificationCenterGetDefaultLocalCenter, CMNotificationCenterRef, (void), ())
+SOFT_LINK(CoreMedia, CMNotificationCenterAddListener, OSStatus, (CMNotificationCenterRef center, const void* listener, CMNotificationCallback callback, CFStringRef notification, const void* object, UInt32 flags), (center, listener, callback, notification, object, flags))
+SOFT_LINK(CoreMedia, CMNotificationCenterRemoveListener, OSStatus, (CMNotificationCenterRef center, const void* listener, CMNotificationCallback callback, CFStringRef notification, const void* object), (center, listener, callback, notification, object))
+SOFT_LINK(CoreMedia, CMTimeGetSeconds, Float64, (CMTime time), (time))
+SOFT_LINK(CoreMedia, CMTimebaseGetTime, CMTime, (CMTimebaseRef timebase), (timebase))
+
+SOFT_LINK_CONSTANT(CoreMedia, kCMTimebaseNotification_EffectiveRateChanged, CFStringRef)
+#define kCMTimebaseNotification_EffectiveRateChanged getkCMTimebaseNotification_EffectiveRateChanged()
#pragma mark -
#pragma mark AVVideoPerformanceMetrics
@end
#endif
+#pragma mark -
+#pragma mark AVSampleBufferRenderSynchronizer
+
+@interface AVSampleBufferRenderSynchronizer : NSObject
+- (CMTimebaseRef)timebase;
+- (float)rate;
+- (void)setRate:(float)rate;
+- (void)setRate:(float)rate time:(CMTime)time;
+- (NSArray *)renderers;
+- (void)addRenderer:(id)renderer;
+- (void)removeRenderer:(id)renderer atTime:(CMTime)time withCompletionHandler:(void (^)(BOOL didRemoveRenderer))completionHandler;
+- (id)addPeriodicTimeObserverForInterval:(CMTime)interval queue:(dispatch_queue_t)queue usingBlock:(void (^)(CMTime time))block;
+- (void)removeTimeObserver:(id)observer;
+@end
+
namespace WebCore {
#pragma mark -
#pragma mark MediaPlayerPrivateMediaSourceAVFObjC
+static void CMTimebaseEffectiveRateChangedCallback(CMNotificationCenterRef, const void *listener, CFStringRef, const void *, CFTypeRef)
+{
+ MediaPlayerPrivateMediaSourceAVFObjC* player = (MediaPlayerPrivateMediaSourceAVFObjC*)listener;
+ callOnMainThread(bind(&MediaPlayerPrivateMediaSourceAVFObjC::effectiveRateChanged, player));
+}
+
MediaPlayerPrivateMediaSourceAVFObjC::MediaPlayerPrivateMediaSourceAVFObjC(MediaPlayer* player)
: m_player(player)
- , m_clock(new PlatformClockCM())
+ , m_synchronizer(adoptNS([[getAVSampleBufferRenderSynchronizerClass() alloc] init]))
, m_networkState(MediaPlayer::Empty)
, m_readyState(MediaPlayer::HaveNothing)
+ , m_rate(1)
+ , m_playing(false)
, m_seeking(false)
, m_loadingProgressed(false)
{
+ CMTimebaseRef timebase = [m_synchronizer timebase];
+ CMNotificationCenterRef nc = CMNotificationCenterGetDefaultLocalCenter();
+ CMNotificationCenterAddListener(nc, this, CMTimebaseEffectiveRateChangedCallback, kCMTimebaseNotification_EffectiveRateChanged, timebase, 0);
+
+ // addPeriodicTimeObserverForInterval: throws an exception if you pass a non-numeric CMTime, so just use
+ // an arbitrarily large time value of once an hour:
+ m_timeJumpedObserver = [m_synchronizer addPeriodicTimeObserverForInterval:toCMTime(MediaTime::createWithDouble(3600)) queue:dispatch_get_main_queue() usingBlock:^(CMTime){
+ if (m_seeking) {
+ m_seeking = false;
+ m_player->timeChanged();
+ }
+ }];
}
MediaPlayerPrivateMediaSourceAVFObjC::~MediaPlayerPrivateMediaSourceAVFObjC()
{
+ CMTimebaseRef timebase = [m_synchronizer timebase];
+ CMNotificationCenterRef nc = CMNotificationCenterGetDefaultLocalCenter();
+ CMNotificationCenterRemoveListener(nc, this, CMTimebaseEffectiveRateChangedCallback, kCMTimebaseNotification_EffectiveRateChanged, timebase);
+
+ [m_synchronizer removeTimeObserver:m_timeJumpedObserver.get()];
}
#pragma mark -
bool MediaPlayerPrivateMediaSourceAVFObjC::isAvailable()
{
- return AVFoundationLibrary() && CoreMediaLibrary() && getAVStreamDataParserClass() && getAVSampleBufferAudioRendererClass();
+ return AVFoundationLibrary() && CoreMediaLibrary() && getAVStreamDataParserClass() && getAVSampleBufferAudioRendererClass() && getAVSampleBufferRenderSynchronizerClass();
}
static HashSet<String> mimeTypeCache()
void MediaPlayerPrivateMediaSourceAVFObjC::playInternal()
{
- m_clock->start();
- m_player->rateChanged();
+ m_playing = true;
+ [m_synchronizer setRate:m_rate];
}
void MediaPlayerPrivateMediaSourceAVFObjC::pause()
void MediaPlayerPrivateMediaSourceAVFObjC::pauseInternal()
{
- m_clock->stop();
- m_player->rateChanged();
+ m_playing = false;
+ [m_synchronizer setRate:0];
}
bool MediaPlayerPrivateMediaSourceAVFObjC::paused() const
{
- return !m_clock->isRunning();
+ return !m_playing;
}
void MediaPlayerPrivateMediaSourceAVFObjC::setVolume(float volume)
double MediaPlayerPrivateMediaSourceAVFObjC::currentTimeDouble() const
{
- return m_clock->currentTime();
+ return CMTimeGetSeconds(CMTimebaseGetTime([m_synchronizer timebase]));
}
double MediaPlayerPrivateMediaSourceAVFObjC::startTimeDouble() const
void MediaPlayerPrivateMediaSourceAVFObjC::seekInternal(double time, double negativeThreshold, double positiveThreshold)
{
MediaTime seekTime = m_mediaSourcePrivate->seekToTime(MediaTime::createWithDouble(time), MediaTime::createWithDouble(positiveThreshold), MediaTime::createWithDouble(negativeThreshold));
- m_clock->setCurrentMediaTime(seekTime);
- m_seeking = false;
- m_player->timeChanged();
+
+ [m_synchronizer setRate:(m_playing ? m_rate : 0) time:toCMTime(seekTime)];
}
bool MediaPlayerPrivateMediaSourceAVFObjC::seeking() const
void MediaPlayerPrivateMediaSourceAVFObjC::setRateDouble(double rate)
{
- m_clock->setPlayRate(rate);
- m_player->rateChanged();
+ m_rate = rate;
+ if (m_playing)
+ [m_synchronizer setRate:m_rate];
}
MediaPlayer::NetworkState MediaPlayerPrivateMediaSourceAVFObjC::networkState() const
return;
m_sampleBufferDisplayLayer = adoptNS([[getAVSampleBufferDisplayLayerClass() alloc] init]);
- [m_sampleBufferDisplayLayer setControlTimebase:m_clock->timebase()];
+ [m_synchronizer addRenderer:m_sampleBufferDisplayLayer.get()];
}
void MediaPlayerPrivateMediaSourceAVFObjC::destroyLayer()
if (!m_sampleBufferDisplayLayer)
return;
- [m_sampleBufferDisplayLayer setControlTimebase:0];
+ CMTime currentTime = CMTimebaseGetTime([m_synchronizer timebase]);
+ [m_synchronizer removeRenderer:m_sampleBufferDisplayLayer.get() atTime:currentTime withCompletionHandler:^(BOOL){
+ // No-op.
+ }];
m_sampleBufferDisplayLayer = nullptr;
}
m_player->durationChanged();
}
+void MediaPlayerPrivateMediaSourceAVFObjC::effectiveRateChanged()
+{
+ m_player->rateChanged();
+}
+
void MediaPlayerPrivateMediaSourceAVFObjC::setReadyState(MediaPlayer::ReadyState readyState)
{
if (m_readyState == readyState)
return;
m_sampleBufferDisplayLayer = displayLayer;
- [m_sampleBufferDisplayLayer setControlTimebase:m_clock->timebase()];
+ [m_synchronizer addRenderer:m_sampleBufferDisplayLayer.get()];
m_player->mediaPlayerClient()->mediaPlayerRenderingModeChanged(m_player);
// FIXME: move this somewhere appropriate:
if (displayLayer != m_sampleBufferDisplayLayer)
return;
+ CMTime currentTime = CMTimebaseGetTime([m_synchronizer timebase]);
+ [m_synchronizer removeRenderer:m_sampleBufferDisplayLayer.get() atTime:currentTime withCompletionHandler:^(BOOL){
+ // No-op.
+ }];
+
m_sampleBufferDisplayLayer = nullptr;
m_player->mediaPlayerClient()->mediaPlayerRenderingModeChanged(m_player);
}
return;
m_sampleBufferAudioRenderers.append(audioRenderer);
- FigReadOnlyTimebaseSetTargetTimebase([audioRenderer timebase], m_clock->timebase());
+ [m_synchronizer addRenderer:audioRenderer];
m_player->mediaPlayerClient()->mediaPlayerRenderingModeChanged(m_player);
}
if (pos == notFound)
return;
+ CMTime currentTime = CMTimebaseGetTime([m_synchronizer timebase]);
+ [m_synchronizer removeRenderer:audioRenderer atTime:currentTime withCompletionHandler:^(BOOL){
+ // No-op.
+ }];
+
m_sampleBufferAudioRenderers.remove(pos);
m_player->mediaPlayerClient()->mediaPlayerRenderingModeChanged(m_player);
}