[MSE][Mac] Add AVSampleBufferAudioRenderer support.
author     jer.noble@apple.com <jer.noble@apple.com@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
           Thu, 19 Dec 2013 01:14:26 +0000 (01:14 +0000)
committer  jer.noble@apple.com <jer.noble@apple.com@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
           Thu, 19 Dec 2013 01:14:26 +0000 (01:14 +0000)
https://bugs.webkit.org/show_bug.cgi?id=125905

Reviewed by Eric Carlson.

On platforms which support AVSampleBufferAudioRenderer, add support
for playback of audio CMSampleBufferRefs generated by AVStreamDataParser.
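
For context, AVSampleBufferAudioRenderer follows the same pull-model API as
AVSampleBufferDisplayLayer. A minimal sketch of the feed loop, assuming a
hypothetical dequeueParsedAudioSample() that hands out CMSampleBufferRefs
produced by the parser (not part of this patch):

    // Sketch only: the renderer asks for data, and we enqueue parsed
    // audio samples until it reports it has enough.
    AVSampleBufferAudioRenderer *renderer = [[AVSampleBufferAudioRenderer alloc] init];
    [renderer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
        while ([renderer isReadyForMoreMediaData]) {
            CMSampleBufferRef sample = dequeueParsedAudioSample(); // hypothetical queue
            if (!sample)
                break;
            [renderer enqueueSampleBuffer:sample];
            CFRelease(sample);
        }
    }];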

* platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.h:
* platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.mm:
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::isAvailable): Require AVSampleBufferAudioRenderer.
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::setVolume): Pass through to every audio renderer.
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::setMuted): Ditto.
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::addAudioRenderer): Slave the renderer's
    timebase to the master clock.
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::removeAudioRenderer): Remove the renderer
    and notify the client that the rendering mode changed.
* platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.h:
* platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.mm:
(WebCore::SourceBufferPrivateAVFObjC::SourceBufferPrivateAVFObjC): Drive-by fix; initialize
    m_enabledVideoTrackID.
(WebCore::SourceBufferPrivateAVFObjC::~SourceBufferPrivateAVFObjC): Call destroyRenderers().
(WebCore::callProcessCodedFrameForEachSample): Drive-by fix; convert the bool return to an OSStatus.
(WebCore::SourceBufferPrivateAVFObjC::destroyRenderers): Added; flush and destroy the audio
    renderers.
(WebCore::SourceBufferPrivateAVFObjC::removedFromMediaSource): Call destroyRenderers().
(WebCore::SourceBufferPrivateAVFObjC::trackDidChangeEnabled): Enable or disable the audio
    renderer in response.
(WebCore::SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples): Added an
    audio-specific version.
(WebCore::SourceBufferPrivateAVFObjC::enqueueSample): Ditto.
(WebCore::SourceBufferPrivateAVFObjC::isReadyForMoreSamples): Ditto.
(WebCore::SourceBufferPrivateAVFObjC::didBecomeReadyForMoreSamples): Ditto.
(WebCore::SourceBufferPrivateAVFObjC::notifyClientWhenReadyForMoreSamples): Ditto.

git-svn-id: https://svn.webkit.org/repository/webkit/trunk@160810 268f45cc-cd09-0410-ab3c-d52691b4dbfc

Source/WebCore/ChangeLog
Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.h
Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.mm
Source/WebCore/platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.h
Source/WebCore/platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.mm

diff --git a/Source/WebCore/ChangeLog b/Source/WebCore/ChangeLog
index 8dd1470..514d04c 100644
@@ -1,3 +1,39 @@
+2013-12-17  Jer Noble  <jer.noble@apple.com>
+
+        [MSE][Mac] Add AVSampleBufferAudioRenderer support.
+        https://bugs.webkit.org/show_bug.cgi?id=125905
+
+        Reviewed by Eric Carlson.
+
+        On platforms which support AVSampleBufferAudioRenderer, add support
+        for playback of audio CMSampleBufferRefs generated by AVStreamDataParser.
+
+        * platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.h:
+        * platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.mm:
+        (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::isAvailable): Require AVSampleBufferAudioRenderer.
+        (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::setVolume): Pass through to every audio renderer.
+        (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::setMuted): Ditto.
+        (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::addAudioRenderer): Slave the renderer's
+            timebase to the master clock.
+        (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::removeAudioRenderer): Remove the renderer and notify the client that the rendering mode changed.
+        * platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.h:
+        * platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.mm:
+        (WebCore::SourceBufferPrivateAVFObjC::SourceBufferPrivateAVFObjC): Drive-by fix; initialize
+            m_enabledVideoTrackID.
+        (WebCore::SourceBufferPrivateAVFObjC::~SourceBufferPrivateAVFObjC): Call destroyRenderers().
+        (WebCore::callProcessCodedFrameForEachSample): Drive-by fix; convert the bool return to an OSStatus.
+        (WebCore::SourceBufferPrivateAVFObjC::destroyRenderers): Added; flush and destroy the audio
+            renderers.
+        (WebCore::SourceBufferPrivateAVFObjC::removedFromMediaSource): Call destroyRenderers().
+        (WebCore::SourceBufferPrivateAVFObjC::trackDidChangeEnabled): Enable or disable the audio
+            renderer in response.
+        (WebCore::SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples): Added an
+            audio-specific version.
+        (WebCore::SourceBufferPrivateAVFObjC::enqueueSample): Ditto.
+        (WebCore::SourceBufferPrivateAVFObjC::isReadyForMoreSamples): Ditto.
+        (WebCore::SourceBufferPrivateAVFObjC::didBecomeReadyForMoreSamples): Ditto.
+        (WebCore::SourceBufferPrivateAVFObjC::notifyClientWhenReadyForMoreSamples): Ditto.
+
 2013-12-18  Seokju Kwon  <seokju@webkit.org>
 
         Web Inspector: Remove leftover code from InspectorController after r108965
diff --git a/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.h b/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.h
index c484f39..46993d0 100644
 #include "MediaPlayerPrivate.h"
 #include "SourceBufferPrivateClient.h"
 #include <wtf/MediaTime.h>
+#include <wtf/Vector.h>
 
 OBJC_CLASS AVAsset;
+OBJC_CLASS AVSampleBufferAudioRenderer;
 OBJC_CLASS AVSampleBufferDisplayLayer;
 
 typedef struct OpaqueCMTimebase* CMTimebaseRef;
@@ -52,6 +54,9 @@ public:
     void addDisplayLayer(AVSampleBufferDisplayLayer*);
     void removeDisplayLayer(AVSampleBufferDisplayLayer*);
 
+    void addAudioRenderer(AVSampleBufferAudioRenderer*);
+    void removeAudioRenderer(AVSampleBufferAudioRenderer*);
+
     virtual MediaPlayer::NetworkState networkState() const OVERRIDE;
     virtual MediaPlayer::ReadyState readyState() const OVERRIDE;
     void setReadyState(MediaPlayer::ReadyState);
@@ -82,6 +87,10 @@ private:
 
     virtual bool paused() const OVERRIDE;
 
+    virtual void setVolume(float volume) OVERRIDE;
+    virtual bool supportsMuting() const OVERRIDE { return true; }
+    virtual void setMuted(bool) OVERRIDE;
+
     virtual bool supportsScanning() const OVERRIDE;
 
     virtual IntSize naturalSize() const OVERRIDE;
@@ -151,6 +160,7 @@ private:
     RefPtr<MediaSourcePrivateAVFObjC> m_mediaSourcePrivate;
     RetainPtr<AVAsset> m_asset;
     RetainPtr<AVSampleBufferDisplayLayer> m_sampleBufferDisplayLayer;
+    Vector<RetainPtr<AVSampleBufferAudioRenderer>> m_sampleBufferAudioRenderers;
     std::unique_ptr<PlatformClockCM> m_clock;
     MediaPlayer::NetworkState m_networkState;
     MediaPlayer::ReadyState m_readyState;
diff --git a/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.mm b/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.mm
index dafb0b2..02ba246 100644
@@ -48,10 +48,13 @@ SOFT_LINK_FRAMEWORK_OPTIONAL(CoreMedia)
 
 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVAsset)
 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVURLAsset)
+SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferAudioRenderer)
 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferDisplayLayer)
 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVStreamDataParser)
 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVVideoPerformanceMetrics)
 
+SOFT_LINK(CoreMedia, FigReadOnlyTimebaseSetTargetTimebase, OSStatus, (CMTimebaseRef timebase, CMTimebaseRef newTargetTimebase), (timebase, newTargetTimebase))
+
 #pragma mark -
 #pragma mark AVVideoPerformanceMetrics
 
@@ -66,6 +69,18 @@ SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVVideoPerformanceMetrics)
 - (AVVideoPerformanceMetrics *)videoPerformanceMetrics;
 @end
 
+
+#pragma mark -
+#pragma mark AVSampleBufferAudioRenderer
+
+#if __MAC_OS_X_VERSION_MIN_REQUIRED <= 1090
+@interface AVSampleBufferAudioRenderer : NSObject
+- (CMTimebaseRef)timebase;
+- (void)setVolume:(float)volume;
+- (void)setMuted:(BOOL)muted;
+@end
+#endif
+
 namespace WebCore {
 
 #pragma mark -
@@ -101,7 +116,7 @@ PassOwnPtr<MediaPlayerPrivateInterface> MediaPlayerPrivateMediaSourceAVFObjC::cr
 
 bool MediaPlayerPrivateMediaSourceAVFObjC::isAvailable()
 {
-    return AVFoundationLibrary() && CoreMediaLibrary() && getAVStreamDataParserClass();
+    return AVFoundationLibrary() && CoreMediaLibrary() && getAVStreamDataParserClass() && getAVSampleBufferAudioRendererClass();
 }
 
 static HashSet<String> mimeTypeCache()
@@ -211,11 +226,23 @@ bool MediaPlayerPrivateMediaSourceAVFObjC::paused() const
     return !m_clock->isRunning();
 }
 
+void MediaPlayerPrivateMediaSourceAVFObjC::setVolume(float volume)
+{
+    for (auto it = m_sampleBufferAudioRenderers.begin(), end = m_sampleBufferAudioRenderers.end(); it != end; ++it)
+        [*it setVolume:volume];
+}
+
 bool MediaPlayerPrivateMediaSourceAVFObjC::supportsScanning() const
 {
     return true;
 }
 
+void MediaPlayerPrivateMediaSourceAVFObjC::setMuted(bool muted)
+{
+    for (auto it = m_sampleBufferAudioRenderers.begin(), end = m_sampleBufferAudioRenderers.end(); it != end; ++it)
+        [*it setMuted:muted];
+}
+
 IntSize MediaPlayerPrivateMediaSourceAVFObjC::naturalSize() const
 {
     // FIXME(125156): Report the intrinsic size of the enabled video track.
@@ -468,6 +495,26 @@ void MediaPlayerPrivateMediaSourceAVFObjC::removeDisplayLayer(AVSampleBufferDisp
     m_player->mediaPlayerClient()->mediaPlayerRenderingModeChanged(m_player);
 }
 
+void MediaPlayerPrivateMediaSourceAVFObjC::addAudioRenderer(AVSampleBufferAudioRenderer* audioRenderer)
+{
+    if (m_sampleBufferAudioRenderers.contains(audioRenderer))
+        return;
+
+    m_sampleBufferAudioRenderers.append(audioRenderer);
+    FigReadOnlyTimebaseSetTargetTimebase([audioRenderer timebase], m_clock->timebase());
+    m_player->mediaPlayerClient()->mediaPlayerRenderingModeChanged(m_player);
+}
+
+void MediaPlayerPrivateMediaSourceAVFObjC::removeAudioRenderer(AVSampleBufferAudioRenderer* audioRenderer)
+{
+    size_t pos = m_sampleBufferAudioRenderers.find(audioRenderer);
+    if (pos == notFound)
+        return;
+
+    m_sampleBufferAudioRenderers.remove(pos);
+    m_player->mediaPlayerClient()->mediaPlayerRenderingModeChanged(m_player);
+}
+
 }
 
 #endif
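
Why the timebase slaving matters: AVSampleBufferAudioRenderer vends its timebase
read-only, so addAudioRenderer() above soft-links the private
FigReadOnlyTimebaseSetTargetTimebase() to retarget the renderer's timebase at the
player's master timebase. Once slaved, setting the time and rate on the master
clock drives every attached renderer together. A rough analogue of that
relationship using only public CoreMedia calls (an illustration of the mechanism,
not the SPI path this patch takes):

    #include <CoreMedia/CMSync.h>

    // Create a master timebase driven by the host clock, then a second
    // timebase slaved to it. Rate and time changes on the master propagate
    // to every slaved timebase, keeping all renderers on one clock.
    CMTimebaseRef master = NULL;
    CMTimebaseCreateWithMasterClock(kCFAllocatorDefault, CMClockGetHostTimeClock(), &master);

    CMTimebaseRef slaved = NULL;
    CMTimebaseCreateWithMasterTimebase(kCFAllocatorDefault, master, &slaved);

    CMTimebaseSetTime(master, kCMTimeZero);
    CMTimebaseSetRate(master, 1.0); // start playback on the shared clock
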
diff --git a/Source/WebCore/platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.h b/Source/WebCore/platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.h
index a2db3b5..c054c1d 100644
@@ -29,6 +29,7 @@
 #if ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)
 
 #include "SourceBufferPrivate.h"
+#include <map>
 #include <wtf/Deque.h>
 #include <wtf/HashMap.h>
 #include <wtf/MediaTime.h>
@@ -39,6 +40,7 @@
 
 OBJC_CLASS AVAsset;
 OBJC_CLASS AVStreamDataParser;
+OBJC_CLASS AVSampleBufferAudioRenderer;
 OBJC_CLASS AVSampleBufferDisplayLayer;
 OBJC_CLASS NSError;
 OBJC_CLASS NSObject;
@@ -94,6 +96,13 @@ private:
     virtual void enqueueSample(PassRefPtr<MediaSample>, AtomicString trackID) OVERRIDE;
     virtual bool isReadyForMoreSamples(AtomicString trackID) OVERRIDE;
     virtual void setActive(bool) OVERRIDE;
+    virtual void notifyClientWhenReadyForMoreSamples(AtomicString trackID) OVERRIDE;
+
+    void flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>>, AVSampleBufferAudioRenderer*);
+    void flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>>, AVSampleBufferDisplayLayer*);
+
+    void didBecomeReadyForMoreSamples(int trackID);
+    void destroyRenderers();
 
     Vector<RefPtr<VideoTrackPrivate>> m_videoTracks;
     Vector<RefPtr<AudioTrackPrivate>> m_audioTracks;
@@ -101,6 +110,7 @@ private:
     RetainPtr<AVStreamDataParser> m_parser;
     RetainPtr<AVAsset> m_asset;
     RetainPtr<AVSampleBufferDisplayLayer> m_displayLayer;
+    std::map<int, RetainPtr<AVSampleBufferAudioRenderer>> m_audioRenderers;
     RetainPtr<NSObject> m_delegate;
 
     MediaSourcePrivateAVFObjC* m_mediaSource;
diff --git a/Source/WebCore/platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.mm b/Source/WebCore/platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.mm
index edfabc8..7a6d4ff 100644
@@ -59,6 +59,7 @@ SOFT_LINK_FRAMEWORK_OPTIONAL(CoreMedia)
 
 SOFT_LINK_CLASS(AVFoundation, AVAssetTrack)
 SOFT_LINK_CLASS(AVFoundation, AVStreamDataParser)
+SOFT_LINK_CLASS(AVFoundation, AVSampleBufferAudioRenderer)
 SOFT_LINK_CLASS(AVFoundation, AVSampleBufferDisplayLayer)
 
 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMediaTypeVideo, NSString *)
@@ -107,9 +108,7 @@ SOFT_LINK(CoreMedia, CMSetAttachment, void, (CMAttachmentBearerRef target, CFStr
 
 #pragma mark -
 #pragma mark AVStreamDataParser
-@class AVStreamDataParserInternal;
 
-NS_CLASS_AVAILABLE(TBD, TBD)
 @interface AVStreamDataParser : NSObject
 - (void)setDelegate:(id)delegate;
 - (void)appendStreamData:(NSData *)data;
@@ -118,6 +117,21 @@ NS_CLASS_AVAILABLE(TBD, TBD)
 @end
 
 #pragma mark -
+#pragma mark AVSampleBufferAudioRenderer
+
+#if __MAC_OS_X_VERSION_MIN_REQUIRED <= 1090
+@interface AVSampleBufferAudioRenderer : NSObject
+- (NSInteger)status;
+- (NSError*)error;
+- (void)enqueueSampleBuffer:(CMSampleBufferRef)sampleBuffer;
+- (void)flush;
+- (BOOL)isReadyForMoreMediaData;
+- (void)requestMediaDataWhenReadyOnQueue:(dispatch_queue_t)queue usingBlock:(void (^)(void))block;
+- (void)stopRequestingMediaData;
+@end
+#endif
+
+#pragma mark -
 #pragma mark WebAVStreamDataParserListener
 
 @interface WebAVStreamDataParserListener : NSObject {
@@ -290,18 +304,13 @@ SourceBufferPrivateAVFObjC::SourceBufferPrivateAVFObjC(MediaSourcePrivateAVFObjC
     , m_mediaSource(parent)
     , m_client(0)
     , m_parsingSucceeded(true)
+    , m_enabledVideoTrackID(-1)
 {
 }
 
 SourceBufferPrivateAVFObjC::~SourceBufferPrivateAVFObjC()
 {
-    if (m_displayLayer) {
-        if (m_mediaSource)
-            m_mediaSource->player()->removeDisplayLayer(m_displayLayer.get());
-        [m_displayLayer flushAndRemoveImage];
-        [m_displayLayer stopRequestingMediaData];
-        m_displayLayer = nullptr;
-    }
+    destroyRenderers();
 }
 
 void SourceBufferPrivateAVFObjC::didParseStreamDataAsAsset(AVAsset* asset)
@@ -352,7 +361,7 @@ struct ProcessCodedFrameInfo {
 static OSStatus callProcessCodedFrameForEachSample(CMSampleBufferRef sampleBuffer, CMItemCount, void *refcon)
 {
     ProcessCodedFrameInfo* info = static_cast<ProcessCodedFrameInfo*>(refcon);
-    return info->sourceBuffer->processCodedFrame(info->trackID, sampleBuffer, info->mediaType);
+    return info->sourceBuffer->processCodedFrame(info->trackID, sampleBuffer, info->mediaType) ? noErr : paramErr;
 }
 
 void SourceBufferPrivateAVFObjC::didProvideMediaDataForTrackID(int trackID, CMSampleBufferRef sampleBuffer, const String& mediaType, unsigned flags)
@@ -401,7 +410,7 @@ void SourceBufferPrivateAVFObjC::abort()
     notImplemented();
 }
 
-void SourceBufferPrivateAVFObjC::removedFromMediaSource()
+void SourceBufferPrivateAVFObjC::destroyRenderers()
 {
     if (m_displayLayer) {
         if (m_mediaSource)
@@ -411,6 +420,21 @@ void SourceBufferPrivateAVFObjC::removedFromMediaSource()
         m_displayLayer = nullptr;
     }
 
+    for (auto it = m_audioRenderers.begin(), end = m_audioRenderers.end(); it != end; ++it) {
+        AVSampleBufferAudioRenderer* renderer = it->second.get();
+        if (m_mediaSource)
+            m_mediaSource->player()->removeAudioRenderer(renderer);
+        [renderer flush];
+        [renderer stopRequestingMediaData];
+    }
+
+    m_audioRenderers.clear();
+}
+
+void SourceBufferPrivateAVFObjC::removedFromMediaSource()
+{
+    destroyRenderers();
+
     if (m_mediaSource)
         m_mediaSource->removeSourceBuffer(this);
 }
@@ -468,18 +492,38 @@ void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(VideoTrackPrivateMediaSou
         if (!m_displayLayer) {
             m_displayLayer = [[getAVSampleBufferDisplayLayerClass() alloc] init];
             [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
-                if (m_client)
-                    m_client->sourceBufferPrivateDidBecomeReadyForMoreSamples(this, AtomicString::number(trackID));
+                didBecomeReadyForMoreSamples(trackID);
             }];
-            if (m_mediaSource)
-                m_mediaSource->player()->addDisplayLayer(m_displayLayer.get());
         }
+        if (m_mediaSource)
+            m_mediaSource->player()->addDisplayLayer(m_displayLayer.get());
     }
 }
 
-void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(AudioTrackPrivateMediaSourceAVFObjC*)
+void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(AudioTrackPrivateMediaSourceAVFObjC* track)
 {
-    // No-op.
+    int trackID = track->trackID();
+
+    if (!track->enabled()) {
+        AVSampleBufferAudioRenderer* renderer = m_audioRenderers[trackID].get();
+        [m_parser setShouldProvideMediaData:NO forTrackID:trackID];
+        if (m_mediaSource)
+            m_mediaSource->player()->removeAudioRenderer(renderer);
+    } else {
+        [m_parser setShouldProvideMediaData:YES forTrackID:trackID];
+        AVSampleBufferAudioRenderer* renderer;
+        if (!m_audioRenderers.count(trackID)) {
+            renderer = [[getAVSampleBufferAudioRendererClass() alloc] init];
+            [renderer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
+                didBecomeReadyForMoreSamples(trackID);
+            }];
+            m_audioRenderers[trackID] = renderer;
+        } else
+            renderer = m_audioRenderers[trackID].get();
+
+        if (m_mediaSource)
+            m_mediaSource->player()->addAudioRenderer(renderer);
+    }
 }
 
 static RetainPtr<CMSampleBufferRef> createNonDisplayingCopy(CMSampleBufferRef sampleBuffer)
@@ -498,14 +542,20 @@ static RetainPtr<CMSampleBufferRef> createNonDisplayingCopy(CMSampleBufferRef sa
     return adoptCF(newSampleBuffer);
 }
 
-void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>> mediaSamples, AtomicString trackID)
+void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>> mediaSamples, AtomicString trackIDString)
 {
-    if (trackID.toInt() != m_enabledVideoTrackID)
-        return;
+    int trackID = trackIDString.toInt();
+    LOG(Media, "SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(%p) samples: %lu, trackID: %d", this, mediaSamples.size(), trackID);
 
-    LOG(Media, "SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(%p) samples: %d samples, trackId: %d", this, mediaSamples.size(), trackID.toInt());
+    if (trackID == m_enabledVideoTrackID)
+        flushAndEnqueueNonDisplayingSamples(mediaSamples, m_displayLayer.get());
+    else if (m_audioRenderers.count(trackID))
+        flushAndEnqueueNonDisplayingSamples(mediaSamples, m_audioRenderers[trackID].get());
+}
 
-    [m_displayLayer flush];
+void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>> mediaSamples, AVSampleBufferAudioRenderer* renderer)
+{
+    [renderer flush];
 
     for (auto it = mediaSamples.begin(), end = mediaSamples.end(); it != end; ++it) {
         RefPtr<MediaSample>& mediaSample = *it;
@@ -515,16 +565,33 @@ void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefP
 
         RetainPtr<CMSampleBufferRef> sampleBuffer = createNonDisplayingCopy(platformSample.sample.cmSampleBuffer);
 
-        [m_displayLayer enqueueSampleBuffer:sampleBuffer.get()];
+        [renderer enqueueSampleBuffer:sampleBuffer.get()];
+    }
+}
+
+void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>> mediaSamples, AVSampleBufferDisplayLayer* layer)
+{
+    [layer flush];
+
+    for (auto it = mediaSamples.begin(), end = mediaSamples.end(); it != end; ++it) {
+        RefPtr<MediaSample>& mediaSample = *it;
+
+        PlatformSample platformSample = mediaSample->platformSample();
+        ASSERT(platformSample.type == PlatformSample::CMSampleBufferType);
+
+        RetainPtr<CMSampleBufferRef> sampleBuffer = createNonDisplayingCopy(platformSample.sample.cmSampleBuffer);
+
+        [layer enqueueSampleBuffer:sampleBuffer.get()];
     }
 
     if (m_mediaSource)
         m_mediaSource->player()->setHasAvailableVideoFrame(false);
 }
 
-void SourceBufferPrivateAVFObjC::enqueueSample(PassRefPtr<MediaSample> prpMediaSample, AtomicString trackID)
+void SourceBufferPrivateAVFObjC::enqueueSample(PassRefPtr<MediaSample> prpMediaSample, AtomicString trackIDString)
 {
-    if (trackID.toInt() != m_enabledVideoTrackID)
+    int trackID = trackIDString.toInt();
+    if (trackID != m_enabledVideoTrackID && !m_audioRenderers.count(trackID))
         return;
 
     RefPtr<MediaSample> mediaSample = prpMediaSample;
@@ -533,15 +600,25 @@ void SourceBufferPrivateAVFObjC::enqueueSample(PassRefPtr<MediaSample> prpMediaS
     if (platformSample.type != PlatformSample::CMSampleBufferType)
         return;
 
-    [m_displayLayer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
-    if (m_mediaSource)
-        m_mediaSource->player()->setHasAvailableVideoFrame(true);
+    if (trackID == m_enabledVideoTrackID) {
+        [m_displayLayer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
+        if (m_mediaSource)
+            m_mediaSource->player()->setHasAvailableVideoFrame(true);
+    } else
+        [m_audioRenderers[trackID] enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
 }
 
-bool SourceBufferPrivateAVFObjC::isReadyForMoreSamples(AtomicString trackID)
+bool SourceBufferPrivateAVFObjC::isReadyForMoreSamples(AtomicString trackIDString)
 {
-    UNUSED_PARAM(trackID);
-    return [m_displayLayer isReadyForMoreMediaData];
+    int trackID = trackIDString.toInt();
+    if (trackID == m_enabledVideoTrackID)
+        return [m_displayLayer isReadyForMoreMediaData];
+    else if (m_audioRenderers.count(trackID))
+        return [m_audioRenderers[trackID] isReadyForMoreMediaData];
+    else
+        ASSERT_NOT_REACHED();
+
+    return false;
 }
 
 void SourceBufferPrivateAVFObjC::setActive(bool isActive)
@@ -563,6 +640,36 @@ void SourceBufferPrivateAVFObjC::seekToTime(MediaTime time)
         m_client->sourceBufferPrivateSeekToTime(this, time);
 }
 
+void SourceBufferPrivateAVFObjC::didBecomeReadyForMoreSamples(int trackID)
+{
+    if (trackID == m_enabledVideoTrackID)
+        [m_displayLayer stopRequestingMediaData];
+    else if (m_audioRenderers.count(trackID))
+        [m_audioRenderers[trackID] stopRequestingMediaData];
+    else {
+        ASSERT_NOT_REACHED();
+        return;
+    }
+
+    if (m_client)
+        m_client->sourceBufferPrivateDidBecomeReadyForMoreSamples(this, AtomicString::number(trackID));
+}
+
+void SourceBufferPrivateAVFObjC::notifyClientWhenReadyForMoreSamples(AtomicString trackIDString)
+{
+    int trackID = trackIDString.toInt();
+    if (trackID == m_enabledVideoTrackID) {
+        [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
+            didBecomeReadyForMoreSamples(trackID);
+        }];
+    } else if (m_audioRenderers.count(trackID)) {
+        [m_audioRenderers[trackID] requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
+            didBecomeReadyForMoreSamples(trackID);
+        }];
+    } else
+        ASSERT_NOT_REACHED();
+}
+
 }
 
 #endif // ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)
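
A note on the non-displaying copies enqueued above: createNonDisplayingCopy() is
not shown in full in this diff. Conceptually it copies the buffer and marks each
sample with the kCMSampleAttachmentKey_DoNotDisplay attachment, so the renderer
primes its decoder without presenting anything. A sketch consistent with the
call sites above (an illustration, not necessarily the committed body):

    static RetainPtr<CMSampleBufferRef> createNonDisplayingCopy(CMSampleBufferRef sampleBuffer)
    {
        // Copy the sample buffer; fall back to the original on failure.
        CMSampleBufferRef newSampleBuffer = 0;
        if (CMSampleBufferCreateCopy(kCFAllocatorDefault, sampleBuffer, &newSampleBuffer) != noErr)
            return sampleBuffer;

        // Mark every sample "do not display" so it is decoded but never shown.
        CFArrayRef attachmentsArray = CMSampleBufferGetSampleAttachmentsArray(newSampleBuffer, true);
        for (CFIndex i = 0; i < CFArrayGetCount(attachmentsArray); ++i) {
            CFMutableDictionaryRef attachments = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachmentsArray, i);
            CFDictionarySetValue(attachments, kCMSampleAttachmentKey_DoNotDisplay, kCFBooleanTrue);
        }

        return adoptCF(newSampleBuffer);
    }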