Seeking an MSE video will begin playing audio long before rendering video
author    jer.noble@apple.com <jer.noble@apple.com@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
Mon, 12 Jun 2017 22:57:18 +0000 (22:57 +0000)
committer    jer.noble@apple.com <jer.noble@apple.com@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
Mon, 12 Jun 2017 22:57:18 +0000 (22:57 +0000)
https://bugs.webkit.org/show_bug.cgi?id=173269

Reviewed by Eric Carlson.

Add a notification listener which will be messaged when a to-be-displayed sample
is decoded to signal that the 'seeked' event should fire. Consolidate all the various
flush methods to funnel into a single flushVideo().
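
A minimal sketch of the mechanism, outside the patch itself: tag a copy of the
first displayable sample so CoreMedia posts a "buffer consumed" notification once
the decoder has actually processed it, and only then report that a video frame is
available. The kCMSampleBuffer* and CMNotificationCenter* names are CoreMedia SPI
that this patch soft-links rather than public-header API, and
didDecodeFirstVisibleSample() is a hypothetical stand-in for the player callback.

    // Sketch only (Objective-C++); assumes the soft-linked CoreMedia SPI used by this patch.
    #import <AVFoundation/AVFoundation.h>   // AVSampleBufferDisplayLayer
    #import <CoreMedia/CoreMedia.h>         // CMSampleBuffer, CMSetAttachment

    extern void didDecodeFirstVisibleSample(); // hypothetical hook, not part of the patch

    static void bufferConsumed(CMNotificationCenterRef, const void*, CFStringRef name, const void*, CFTypeRef)
    {
        // Posted once the decoder has consumed the tagged sample, i.e. the earliest
        // point at which the frame can be on screen and 'seeked' may safely fire.
        if (CFEqual(name, kCMSampleBufferConsumerNotification_BufferConsumed))
            didDecodeFirstVisibleSample();
    }

    static void enqueueFirstVisibleSample(AVSampleBufferDisplayLayer *layer, CMSampleBufferRef sample, const void* listener)
    {
        // Listen for the "consumed" notification on the default local notification center.
        CMNotificationCenterAddListener(CMNotificationCenterGetDefaultLocalCenter(), listener,
            bufferConsumed, kCMSampleBufferConsumerNotification_BufferConsumed, nullptr, 0);

        // Attach the request to a copy so the caller's buffer is left untouched.
        CMSampleBufferRef rawCopy = nullptr;
        if (CMSampleBufferCreateCopy(kCFAllocatorDefault, sample, &rawCopy) != noErr)
            return;
        CMSetAttachment(rawCopy, kCMSampleBufferAttachmentKey_PostNotificationWhenConsumed,
            (__bridge CFTypeRef)@{ @"context" : @"opaque per-buffer context" }, kCMAttachmentMode_ShouldNotPropagate);
        [layer enqueueSampleBuffer:rawCopy];
        CFRelease(rawCopy);
    }

In the patch itself the attached context is a WebBufferConsumedContext holding a
WeakPtr back to the SourceBufferPrivateAVFObjC, and the callback bounces to the
main thread before telling the player it has an available video frame.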

* platform/cf/CoreMediaSoftLink.cpp:
* platform/cf/CoreMediaSoftLink.h:
* platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.h:
* platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.h:
* platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.mm:
(-[WebBufferConsumedContext initWithParent:buffer:]):
(-[WebBufferConsumedContext dealloc]):
(-[WebBufferConsumedContext parent]):
(-[WebBufferConsumedContext buffer]):
(WebCore::bufferWasConsumedCallback):
(WebCore::SourceBufferPrivateAVFObjC::SourceBufferPrivateAVFObjC):
(WebCore::SourceBufferPrivateAVFObjC::~SourceBufferPrivateAVFObjC):
(WebCore::SourceBufferPrivateAVFObjC::flush):
(WebCore::SourceBufferPrivateAVFObjC::flushVideo):
(WebCore::SourceBufferPrivateAVFObjC::enqueueSample):
(WebCore::SourceBufferPrivateAVFObjC::bufferWasConsumed):
(WebCore::SourceBufferPrivateAVFObjC::willSeek):

git-svn-id: https://svn.webkit.org/repository/webkit/trunk@218150 268f45cc-cd09-0410-ab3c-d52691b4dbfc

Source/WebCore/ChangeLog
Source/WebCore/platform/cf/CoreMediaSoftLink.cpp
Source/WebCore/platform/cf/CoreMediaSoftLink.h
Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.h
Source/WebCore/platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.h
Source/WebCore/platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.mm

diff --git a/Source/WebCore/ChangeLog b/Source/WebCore/ChangeLog
index 1c6bec1..8a181bc 100644
@@ -1,3 +1,32 @@
+2017-06-12  Jer Noble  <jer.noble@apple.com>
+
+        Seeking an MSE video will begin playing audio long before rendering video
+        https://bugs.webkit.org/show_bug.cgi?id=173269
+
+        Reviewed by Eric Carlson.
+
+        Add a notification listener which will be messaged when a to-be-displayed sample
+        is decoded to signal that the 'seeked' event should fire. Consolidate all the various
+        flush methods to funnel into a single flushVideo().
+
+        * platform/cf/CoreMediaSoftLink.cpp:
+        * platform/cf/CoreMediaSoftLink.h:
+        * platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.h:
+        * platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.h:
+        * platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.mm:
+        (-[WebBufferConsumedContext initWithParent:buffer:]):
+        (-[WebBufferConsumedContext dealloc]):
+        (-[WebBufferConsumedContext parent]):
+        (-[WebBufferConsumedContext buffer]):
+        (WebCore::bufferWasConsumedCallback):
+        (WebCore::SourceBufferPrivateAVFObjC::SourceBufferPrivateAVFObjC):
+        (WebCore::SourceBufferPrivateAVFObjC::~SourceBufferPrivateAVFObjC):
+        (WebCore::SourceBufferPrivateAVFObjC::flush):
+        (WebCore::SourceBufferPrivateAVFObjC::flushVideo):
+        (WebCore::SourceBufferPrivateAVFObjC::enqueueSample):
+        (WebCore::SourceBufferPrivateAVFObjC::bufferWasConsumed):
+        (WebCore::SourceBufferPrivateAVFObjC::willSeek):
+
 2017-06-12  Eric Carlson  <eric.carlson@apple.com>
 
         [MediaStream iOS] Set audio session mode and options when capturing
diff --git a/Source/WebCore/platform/cf/CoreMediaSoftLink.cpp b/Source/WebCore/platform/cf/CoreMediaSoftLink.cpp
index d8b707e..7851458 100644
@@ -128,8 +128,10 @@ SOFT_LINK_CONSTANT_FOR_SOURCE(WebCore, CoreMedia, kCMSampleAttachmentKey_NotSync
 SOFT_LINK_CONSTANT_FOR_SOURCE(WebCore, CoreMedia, kCMSampleBufferAttachmentKey_DisplayEmptyMediaImmediately, CFStringRef)
 SOFT_LINK_CONSTANT_FOR_SOURCE(WebCore, CoreMedia, kCMSampleBufferAttachmentKey_DrainAfterDecoding, CFStringRef)
 SOFT_LINK_CONSTANT_FOR_SOURCE(WebCore, CoreMedia, kCMSampleBufferAttachmentKey_EmptyMedia, CFStringRef)
+SOFT_LINK_CONSTANT_FOR_SOURCE(WebCore, CoreMedia, kCMSampleBufferAttachmentKey_PostNotificationWhenConsumed, CFStringRef)
 SOFT_LINK_CONSTANT_FOR_SOURCE(WebCore, CoreMedia, kCMSampleBufferAttachmentKey_ResetDecoderBeforeDecoding, CFStringRef)
 SOFT_LINK_CONSTANT_FOR_SOURCE(WebCore, CoreMedia, kCMSampleAttachmentKey_DisplayImmediately, CFStringRef)
+SOFT_LINK_CONSTANT_FOR_SOURCE(WebCore, CoreMedia, kCMSampleBufferConsumerNotification_BufferConsumed, CFStringRef)
 
 SOFT_LINK_CONSTANT_FOR_SOURCE(WebCore, CoreMedia, kCMTimebaseNotification_EffectiveRateChanged, CFStringRef)
 SOFT_LINK_CONSTANT_FOR_SOURCE(WebCore, CoreMedia, kCMTimebaseNotification_TimeJumped, CFStringRef)
diff --git a/Source/WebCore/platform/cf/CoreMediaSoftLink.h b/Source/WebCore/platform/cf/CoreMediaSoftLink.h
index 9aa4a75..52c51eb 100644
@@ -156,7 +156,7 @@ SOFT_LINK_FUNCTION_FOR_HEADER(WebCore, CoreMedia, CMSampleBufferGetSampleTimingI
 SOFT_LINK_FUNCTION_FOR_HEADER(WebCore, CoreMedia, CMSampleBufferGetTotalSampleSize, size_t, (CMSampleBufferRef sbuf), (sbuf))
 #define CMSampleBufferGetTotalSampleSize softLink_CoreMedia_CMSampleBufferGetTotalSampleSize
 SOFT_LINK_FUNCTION_FOR_HEADER(WebCore, CoreMedia, CMSetAttachment, void, (CMAttachmentBearerRef target, CFStringRef key, CFTypeRef value, CMAttachmentMode attachmentMode), (target, key, value, attachmentMode))
-#define CMSampleBufferGetTotalSampleSize softLink_CoreMedia_CMSampleBufferGetTotalSampleSize
+#define CMSetAttachment softLink_CoreMedia_CMSetAttachment
 SOFT_LINK_FUNCTION_FOR_HEADER(WebCore, CoreMedia, CMTimebaseCreateWithMasterClock, OSStatus, (CFAllocatorRef allocator, CMClockRef masterClock, CMTimebaseRef *timebaseOut), (allocator, masterClock, timebaseOut))
 #define CMTimebaseCreateWithMasterClock softLink_CoreMedia_CMTimebaseCreateWithMasterClock
 SOFT_LINK_FUNCTION_FOR_HEADER(WebCore, CoreMedia, CMTimebaseGetTime, CMTime, (CMTimebaseRef timebase), (timebase))
@@ -216,12 +216,16 @@ SOFT_LINK_CONSTANT_FOR_HEADER(WebCore, CoreMedia, kCMSampleBufferAttachmentKey_D
 #define kCMSampleBufferAttachmentKey_DrainAfterDecoding get_CoreMedia_kCMSampleBufferAttachmentKey_DrainAfterDecoding()
 SOFT_LINK_CONSTANT_FOR_HEADER(WebCore, CoreMedia, kCMSampleBufferAttachmentKey_EmptyMedia, CFStringRef)
 #define kCMSampleBufferAttachmentKey_EmptyMedia get_CoreMedia_kCMSampleBufferAttachmentKey_EmptyMedia()
+SOFT_LINK_CONSTANT_FOR_HEADER(WebCore, CoreMedia, kCMSampleBufferAttachmentKey_PostNotificationWhenConsumed, CFStringRef)
+#define kCMSampleBufferAttachmentKey_PostNotificationWhenConsumed get_CoreMedia_kCMSampleBufferAttachmentKey_PostNotificationWhenConsumed()
 SOFT_LINK_CONSTANT_FOR_HEADER(WebCore, CoreMedia, kCMSampleBufferAttachmentKey_ResetDecoderBeforeDecoding, CFStringRef)
 #define kCMSampleBufferAttachmentKey_ResetDecoderBeforeDecoding get_CoreMedia_kCMSampleBufferAttachmentKey_ResetDecoderBeforeDecoding()
 SOFT_LINK_CONSTANT_FOR_HEADER(WebCore, CoreMedia, kCMTimebaseNotification_EffectiveRateChanged, CFStringRef)
 #define kCMTimebaseNotification_EffectiveRateChanged get_CoreMedia_kCMTimebaseNotification_EffectiveRateChanged()
 SOFT_LINK_CONSTANT_FOR_HEADER(WebCore, CoreMedia, kCMTimebaseNotification_TimeJumped, CFStringRef)
 #define kCMTimebaseNotification_TimeJumped get_CoreMedia_kCMTimebaseNotification_TimeJumped()
+SOFT_LINK_CONSTANT_FOR_HEADER(WebCore, CoreMedia, kCMSampleBufferConsumerNotification_BufferConsumed, CFStringRef)
+#define kCMSampleBufferConsumerNotification_BufferConsumed get_CoreMedia_kCMSampleBufferConsumerNotification_BufferConsumed()
 SOFT_LINK_FUNCTION_FOR_HEADER(WebCore, CoreMedia, CMAudioFormatDescriptionGetStreamBasicDescription, const AudioStreamBasicDescription *, (CMAudioFormatDescriptionRef desc), (desc))
 #define CMAudioFormatDescriptionGetStreamBasicDescription softLink_CoreMedia_CMAudioFormatDescriptionGetStreamBasicDescription
 SOFT_LINK_FUNCTION_FOR_HEADER(WebCore, CoreMedia, CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer, OSStatus, (CMSampleBufferRef sbuf, size_t *bufferListSizeNeededOut, AudioBufferList *bufferListOut, size_t bufferListSize, CFAllocatorRef bbufStructAllocator, CFAllocatorRef bbufMemoryAllocator, uint32_t flags, CMBlockBufferRef *blockBufferOut), (sbuf, bufferListSizeNeededOut, bufferListOut, bufferListSize, bbufStructAllocator, bbufMemoryAllocator, flags, blockBufferOut))
diff --git a/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.h b/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.h
index 7c9f2f7..f7d8c76 100644
@@ -83,6 +83,7 @@ public:
     void seekCompleted();
     void setLoadingProgresssed(bool flag) { m_loadingProgressed = flag; }
     void setHasAvailableVideoFrame(bool);
+    bool hasAvailableVideoFrame() const override;
     void setHasAvailableAudioSample(AVSampleBufferAudioRenderer*, bool);
     bool allRenderersHaveAvailableSamples() const { return m_allRenderersHaveAvailableSamples; }
     void updateAllRenderersHaveAvailableSamples();
@@ -183,8 +184,6 @@ private:
     void paint(GraphicsContext&, const FloatRect&) override;
     void paintCurrentFrameInContext(GraphicsContext&, const FloatRect&) override;
     bool copyVideoTextureToPlatformTexture(GraphicsContext3D*, Platform3DObject, GC3Denum target, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY) override;
-    
-    bool hasAvailableVideoFrame() const override;
 
     bool supportsAcceleratedRendering() const override;
     // called when the rendering system flips the into or out of accelerated rendering mode.
diff --git a/Source/WebCore/platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.h b/Source/WebCore/platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.h
index a0a017e..081aa08 100644
@@ -114,6 +114,8 @@ public:
     void setVideoLayer(AVSampleBufferDisplayLayer*);
     void setDecompressionSession(WebCoreDecompressionSession*);
 
+    void bufferWasConsumed();
+
 private:
     explicit SourceBufferPrivateAVFObjC(MediaSourcePrivateAVFObjC*);
 
@@ -136,7 +138,7 @@ private:
     void destroyParser();
     void destroyRenderers();
 
-    void flush(AVSampleBufferDisplayLayer *);
+    void flushVideo();
     void flush(AVSampleBufferAudioRenderer *);
 
     WeakPtr<SourceBufferPrivateAVFObjC> createWeakPtr() { return m_weakFactory.createWeakPtr(); }
diff --git a/Source/WebCore/platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.mm b/Source/WebCore/platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.mm
index 1bdd987..30082b7 100644
@@ -364,6 +364,30 @@ SOFT_LINK_CONSTANT(AVFoundation, AVSampleBufferDisplayLayerFailedToDecodeNotific
 }
 @end
 
+#pragma mark -
+
+@interface WebBufferConsumedContext : NSObject {
+    WeakPtr<WebCore::SourceBufferPrivateAVFObjC> _parent;
+}
+@property (readonly) WebCore::SourceBufferPrivateAVFObjC* parent;
+@end
+
+@implementation WebBufferConsumedContext
+- (id)initWithParent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>)parent
+{
+    self = [super init];
+    if (self)
+        _parent = parent;
+    return self;
+}
+
+@dynamic parent;
+- (WebCore::SourceBufferPrivateAVFObjC*)parent
+{
+    return _parent.get();
+}
+@end
+
 namespace WebCore {
 
 #pragma mark -
@@ -402,6 +426,29 @@ protected:
 #pragma mark -
 #pragma mark SourceBufferPrivateAVFObjC
 
+static NSString *kBufferConsumedContext = @"BufferConsumedContext";
+
+static void bufferWasConsumedCallback(CMNotificationCenterRef, const void*, CFStringRef notificationName, const void*, CFTypeRef payload)
+{
+    if (!isMainThread()) {
+        callOnMainThread([notificationName, payload = retainPtr(payload)] {
+            bufferWasConsumedCallback(nullptr, nullptr, notificationName, nullptr, payload.get());
+        });
+        return;
+    }
+
+    if (!CFEqual(kCMSampleBufferConsumerNotification_BufferConsumed, notificationName))
+        return;
+
+    ASSERT(CFGetTypeID(payload) == CFDictionaryGetTypeID());
+    auto context = (WebBufferConsumedContext *)[(NSDictionary *)payload valueForKey:kBufferConsumedContext];
+    if (!context)
+        return;
+
+    if (auto sourceBuffer = context.parent)
+        sourceBuffer->bufferWasConsumed();
+}
+
 RefPtr<SourceBufferPrivateAVFObjC> SourceBufferPrivateAVFObjC::create(MediaSourcePrivateAVFObjC* parent)
 {
     return adoptRef(new SourceBufferPrivateAVFObjC(parent));
@@ -416,6 +463,7 @@ SourceBufferPrivateAVFObjC::SourceBufferPrivateAVFObjC(MediaSourcePrivateAVFObjC
     , m_isAppendingGroup(adoptOSObject(dispatch_group_create()))
     , m_mediaSource(parent)
 {
+    CMNotificationCenterAddListener(CMNotificationCenterGetDefaultLocalCenter(), this, bufferWasConsumedCallback, kCMSampleBufferConsumerNotification_BufferConsumed, nullptr, 0);
 }
 
 SourceBufferPrivateAVFObjC::~SourceBufferPrivateAVFObjC()
@@ -424,6 +472,8 @@ SourceBufferPrivateAVFObjC::~SourceBufferPrivateAVFObjC()
     destroyParser();
     destroyRenderers();
 
+    CMNotificationCenterRemoveListener(CMNotificationCenterGetDefaultLocalCenter(), this, bufferWasConsumedCallback, kCMSampleBufferConsumerNotification_BufferConsumed, nullptr);
+
     if (m_hasSessionSemaphore)
         dispatch_semaphore_signal(m_hasSessionSemaphore.get());
 }
@@ -799,19 +849,10 @@ void SourceBufferPrivateAVFObjC::setCDMSession(CDMSessionMediaSourceAVFObjC* ses
 
 void SourceBufferPrivateAVFObjC::flush()
 {
-    if (m_displayLayer)
-        [m_displayLayer flushAndRemoveImage];
-
-    if (m_decompressionSession) {
-        m_decompressionSession->flush();
-        m_decompressionSession->notifyWhenHasAvailableVideoFrame([weakThis = createWeakPtr()] {
-            if (weakThis && weakThis->m_mediaSource)
-                weakThis->m_mediaSource->player()->setHasAvailableVideoFrame(true);
-        });
-    }
+    flushVideo();
 
     for (auto& renderer : m_audioRenderers.values())
-        [renderer flush];
+        flush(renderer.get());
 }
 
 void SourceBufferPrivateAVFObjC::registerForErrorNotifications(SourceBufferPrivateAVFObjCErrorClient* client)
@@ -870,21 +911,23 @@ void SourceBufferPrivateAVFObjC::flush(const AtomicString& trackIDString)
     LOG(MediaSource, "SourceBufferPrivateAVFObjC::flush(%p) - trackId: %d", this, trackID);
 
     if (trackID == m_enabledVideoTrackID) {
-        flush(m_displayLayer.get());
-        if (m_decompressionSession) {
-            m_decompressionSession->flush();
-            m_decompressionSession->notifyWhenHasAvailableVideoFrame([weakThis = createWeakPtr()] {
-                if (weakThis && weakThis->m_mediaSource)
-                    weakThis->m_mediaSource->player()->setHasAvailableVideoFrame(true);
-            });
-        }
+        flushVideo();
     } else if (m_audioRenderers.contains(trackID))
         flush(m_audioRenderers.get(trackID).get());
 }
 
-void SourceBufferPrivateAVFObjC::flush(AVSampleBufferDisplayLayer *renderer)
+void SourceBufferPrivateAVFObjC::flushVideo()
 {
-    [renderer flush];
+    [m_displayLayer flush];
+
+    if (m_decompressionSession) {
+        m_decompressionSession->flush();
+        m_decompressionSession->notifyWhenHasAvailableVideoFrame([weakThis = createWeakPtr()] {
+            if (weakThis && weakThis->m_mediaSource)
+                weakThis->m_mediaSource->player()->setHasAvailableVideoFrame(true);
+        });
+    }
+
     m_cachedSize = std::nullopt;
 
     if (m_mediaSource) {
@@ -932,18 +975,30 @@ void SourceBufferPrivateAVFObjC::enqueueSample(Ref<MediaSample>&& sample, const
             m_decompressionSession->enqueueSample(platformSample.sample.cmSampleBuffer);
 
         if (m_displayLayer) {
-            [m_displayLayer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
-            if (m_mediaSource)
-                m_mediaSource->player()->setHasAvailableVideoFrame(!sample->isNonDisplaying());
+            if (m_mediaSource && !m_mediaSource->player()->hasAvailableVideoFrame() && !sample->isNonDisplaying()) {
+                auto context = adoptNS([[WebBufferConsumedContext alloc] initWithParent:createWeakPtr()]);
+                CMSampleBufferRef rawSampleCopy;
+                CMSampleBufferCreateCopy(kCFAllocatorDefault, platformSample.sample.cmSampleBuffer, &rawSampleCopy);
+                auto sampleCopy = adoptCF(rawSampleCopy);
+                CMSetAttachment(sampleCopy.get(), kCMSampleBufferAttachmentKey_PostNotificationWhenConsumed, @{kBufferConsumedContext: context.get()}, kCMAttachmentMode_ShouldNotPropagate);
+                [m_displayLayer enqueueSampleBuffer:sampleCopy.get()];
+            } else
+                [m_displayLayer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
         }
     } else {
         auto renderer = m_audioRenderers.get(trackID);
         [renderer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
-        if (m_mediaSource)
-            m_mediaSource->player()->setHasAvailableAudioSample(renderer.get(), !sample->isNonDisplaying());
+        if (m_mediaSource && !sample->isNonDisplaying())
+            m_mediaSource->player()->setHasAvailableAudioSample(renderer.get(), true);
     }
 }
 
+void SourceBufferPrivateAVFObjC::bufferWasConsumed()
+{
+    if (m_mediaSource)
+        m_mediaSource->player()->setHasAvailableVideoFrame(true);
+}
+
 bool SourceBufferPrivateAVFObjC::isReadyForMoreSamples(const AtomicString& trackIDString)
 {
     int trackID = trackIDString.toInt();
@@ -971,11 +1026,7 @@ MediaTime SourceBufferPrivateAVFObjC::fastSeekTimeForMediaTime(MediaTime time, M
 
 void SourceBufferPrivateAVFObjC::willSeek()
 {
-    if (m_displayLayer)
-        flush(m_displayLayer.get());
-
-    for (auto& renderer : m_audioRenderers.values())
-        flush(renderer.get());
+    flush();
 }
 
 void SourceBufferPrivateAVFObjC::seekToTime(MediaTime time)