MSE-to-Canvas painting can become "stuck" during heavy workloads
author	jer.noble@apple.com <jer.noble@apple.com@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
Thu, 31 Aug 2017 18:48:45 +0000 (18:48 +0000)
committer	jer.noble@apple.com <jer.noble@apple.com@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
Thu, 31 Aug 2017 18:48:45 +0000 (18:48 +0000)
https://bugs.webkit.org/show_bug.cgi?id=176170

Reviewed by Eric Carlson.

During heavy workloads, the trigger from CMBufferQueue notifying us that we have dipped below
the "low-water mark" of decoded (and decoding) frames will not fire. Instead of using a trigger
(since it will not fire when the number of "frames being decoded" changes, just the number of
decoded frames), just call maybeBecomeReadyForMoreMediaData() whenever the number of frames in
the decoded queue decreases, or when the number of frames being decoded decreases.

* platform/graphics/cocoa/WebCoreDecompressionSession.h:
* platform/graphics/cocoa/WebCoreDecompressionSession.mm:
(WebCore::WebCoreDecompressionSession::maybeBecomeReadyForMoreMediaData):
(WebCore::WebCoreDecompressionSession::enqueueSample):
(WebCore::WebCoreDecompressionSession::decodeSample):
(WebCore::WebCoreDecompressionSession::handleDecompressionOutput):
(WebCore::WebCoreDecompressionSession::getFirstVideoFrame):
(WebCore::WebCoreDecompressionSession::automaticDequeue):
(WebCore::WebCoreDecompressionSession::imageForTime):
(WebCore::WebCoreDecompressionSession::maybeBecomeReadyForMoreMediaDataCallback): Deleted.

git-svn-id: https://svn.webkit.org/repository/webkit/trunk@221430 268f45cc-cd09-0410-ab3c-d52691b4dbfc

Source/WebCore/ChangeLog
Source/WebCore/platform/graphics/cocoa/WebCoreDecompressionSession.h
Source/WebCore/platform/graphics/cocoa/WebCoreDecompressionSession.mm

index 679f038..7eace2b 100644 (file)
@@ -1,3 +1,27 @@
+2017-08-31  Jer Noble  <jer.noble@apple.com>
+
+        MSE-to-Canvas painting can become "stuck" during heavy workloads
+        https://bugs.webkit.org/show_bug.cgi?id=176170
+
+        Reviewed by Eric Carlson.
+
+        During heavy workloads, the trigger from CMBufferQueue notifying us that we have dipped below
+        the "low-water mark" of decoded (and decoding) frames will not fire. Instead of using a trigger
+        (since it will not fire when the number of "frames being decoded" changes, just the number of
+        decoded frames), just call maybeBecomeReadyForMoreMediaData() whenever the number of frames in
+        the decoded queue decreases, or when the number of frames being decoded decreases.
+
+        * platform/graphics/cocoa/WebCoreDecompressionSession.h:
+        * platform/graphics/cocoa/WebCoreDecompressionSession.mm:
+        (WebCore::WebCoreDecompressionSession::maybeBecomeReadyForMoreMediaData):
+        (WebCore::WebCoreDecompressionSession::enqueueSample):
+        (WebCore::WebCoreDecompressionSession::decodeSample):
+        (WebCore::WebCoreDecompressionSession::handleDecompressionOutput):
+        (WebCore::WebCoreDecompressionSession::getFirstVideoFrame):
+        (WebCore::WebCoreDecompressionSession::automaticDequeue):
+        (WebCore::WebCoreDecompressionSession::imageForTime):
+        (WebCore::WebCoreDecompressionSession::maybeBecomeReadyForMoreMediaDataCallback): Deleted.
+
 2017-08-31  Youenn Fablet  <youenn@apple.com>
 
         Move consume promise from FetchBody to FetchBodyConsumer
index 473dea4..4d8e794 100644 (file)
@@ -38,7 +38,6 @@
 
 typedef CFTypeRef CMBufferRef;
 typedef struct opaqueCMBufferQueue *CMBufferQueueRef;
-typedef struct opaqueCMBufferQueueTriggerToken *CMBufferQueueTriggerToken;
 typedef struct opaqueCMSampleBuffer *CMSampleBufferRef;
 typedef struct OpaqueCMTimebase* CMTimebaseRef;
 typedef signed long CMItemCount;
@@ -91,7 +90,6 @@ private:
     static CMTime getPresentationTime(CMBufferRef, void* refcon);
     static CMTime getDuration(CMBufferRef, void* refcon);
     static CFComparisonResult compareBuffers(CMBufferRef buf1, CMBufferRef buf2, void* refcon);
-    static void maybeBecomeReadyForMoreMediaDataCallback(void* refcon, CMBufferQueueTriggerToken);
     void maybeBecomeReadyForMoreMediaData();
 
     static const CMItemCount kMaximumCapacity = 120;
@@ -108,7 +106,6 @@ private:
     OSObjectPtr<dispatch_source_t> m_timerSource;
     std::function<void()> m_notificationCallback;
     std::function<void()> m_hasAvailableFrameCallback;
-    CMBufferQueueTriggerToken m_didBecomeReadyTrigger { nullptr };
 
     bool m_invalidated { false };
     int m_framesBeingDecoded { 0 };
index 61f1543..5bd1da5 100644 (file)
@@ -85,18 +85,13 @@ void WebCoreDecompressionSession::setTimebase(CMTimebaseRef timebase)
     }
 }
 
-void WebCoreDecompressionSession::maybeBecomeReadyForMoreMediaDataCallback(void* refcon, CMBufferQueueTriggerToken)
-{
-    WebCoreDecompressionSession* session = static_cast<WebCoreDecompressionSession*>(refcon);
-    session->maybeBecomeReadyForMoreMediaData();
-}
-
 void WebCoreDecompressionSession::maybeBecomeReadyForMoreMediaData()
 {
-    LOG(Media, "WebCoreDecompressionSession::maybeBecomeReadyForMoreMediaData(%p) - isReadyForMoreMediaData(%d), hasCallback(%d)", this, isReadyForMoreMediaData(), !!m_notificationCallback);
     if (!isReadyForMoreMediaData() || !m_notificationCallback)
         return;
 
+    LOG(Media, "WebCoreDecompressionSession::maybeBecomeReadyForMoreMediaData(%p) - isReadyForMoreMediaData(%d), hasCallback(%d)", this, isReadyForMoreMediaData(), !!m_notificationCallback);
+
     if (isMainThread()) {
         m_notificationCallback();
         return;
@@ -148,8 +143,6 @@ void WebCoreDecompressionSession::enqueueSample(CMSampleBufferRef sampleBuffer,
 #pragma pack(pop)
         CMBufferQueueCreate(kCFAllocatorDefault, kMaximumCapacity, &callbacks.callbacks, &outQueue);
         m_producerQueue = adoptCF(outQueue);
-
-        CMBufferQueueInstallTriggerWithIntegerThreshold(m_producerQueue.get(), maybeBecomeReadyForMoreMediaDataCallback, this, kCMBufferQueueTrigger_WhenBufferCountBecomesLessThan, kLowWaterMark, &m_didBecomeReadyTrigger);
     }
 
     if (!m_consumerQueue) {
@@ -242,6 +235,8 @@ void WebCoreDecompressionSession::decodeSample(CMSampleBufferRef sample, bool di
     if (!shouldDecodeSample(sample, displaying)) {
         ++m_totalVideoFrames;
         ++m_droppedVideoFrames;
+        --m_framesBeingDecoded;
+        maybeBecomeReadyForMoreMediaData();
         return;
     }
 
@@ -264,6 +259,8 @@ void WebCoreDecompressionSession::handleDecompressionOutput(bool displaying, OSS
     CMVideoFormatDescriptionRef rawImageBufferDescription = nullptr;
     if (status != noErr || noErr != CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, rawImageBuffer, &rawImageBufferDescription)) {
         ++m_corruptedVideoFrames;
+        --m_framesBeingDecoded;
+        maybeBecomeReadyForMoreMediaData();
         return;
     }
     RetainPtr<CMVideoFormatDescriptionRef> imageBufferDescription = adoptCF(rawImageBufferDescription);
@@ -277,6 +274,8 @@ void WebCoreDecompressionSession::handleDecompressionOutput(bool displaying, OSS
     CMSampleBufferRef rawImageSampleBuffer = nullptr;
     if (noErr != CMSampleBufferCreateReadyWithImageBuffer(kCFAllocatorDefault, rawImageBuffer, imageBufferDescription.get(), &imageBufferTiming, &rawImageSampleBuffer)) {
         ++m_corruptedVideoFrames;
+        --m_framesBeingDecoded;
+        maybeBecomeReadyForMoreMediaData();
         return;
     }
 
@@ -298,6 +297,9 @@ RetainPtr<CVPixelBufferRef> WebCoreDecompressionSession::getFirstVideoFrame()
     RetainPtr<CMSampleBufferRef> currentSample = adoptCF((CMSampleBufferRef)CMBufferQueueDequeueAndRetain(m_producerQueue.get()));
     RetainPtr<CVPixelBufferRef> imageBuffer = (CVPixelBufferRef)CMSampleBufferGetImageBuffer(currentSample.get());
     ASSERT(CFGetTypeID(imageBuffer.get()) == CVPixelBufferGetTypeID());
+
+    maybeBecomeReadyForMoreMediaData();
+
     return imageBuffer;
 }
 
@@ -309,12 +311,16 @@ void WebCoreDecompressionSession::automaticDequeue()
     auto time = toMediaTime(CMTimebaseGetTime(m_timebase.get()));
     LOG(Media, "WebCoreDecompressionSession::automaticDequeue(%p) - purging all samples before time(%s)", this, toString(time).utf8().data());
 
+    MediaTime nextFireTime = MediaTime::positiveInfiniteTime();
+    bool releasedImageBuffers = false;
+
     while (CMSampleBufferRef firstSample = (CMSampleBufferRef)CMBufferQueueGetHead(m_producerQueue.get())) {
         MediaTime presentationTimestamp = toMediaTime(CMSampleBufferGetPresentationTimeStamp(firstSample));
         MediaTime duration = toMediaTime(CMSampleBufferGetDuration(firstSample));
         MediaTime presentationEndTimestamp = presentationTimestamp + duration;
         if (time > presentationEndTimestamp) {
             CFRelease(CMBufferQueueDequeueAndRetain(m_producerQueue.get()));
+            releasedImageBuffers = true;
             continue;
         }
 
@@ -323,12 +329,16 @@ void WebCoreDecompressionSession::automaticDequeue()
         auto end = toMediaTime(CMBufferQueueGetEndPresentationTimeStamp(m_producerQueue.get()));
         LOG(Media, "WebCoreDecompressionSession::automaticDequeue(%p) - queue(%s -> %s)", this, toString(begin).utf8().data(), toString(end).utf8().data());
 #endif
-        CMTimebaseSetTimerDispatchSourceNextFireTime(m_timebase.get(), m_timerSource.get(), toCMTime(presentationEndTimestamp), 0);
-        return;
+
+        nextFireTime = presentationEndTimestamp;
+        break;
     }
 
+    if (releasedImageBuffers)
+        maybeBecomeReadyForMoreMediaData();
+
     LOG(Media, "WebCoreDecompressionSession::automaticDequeue(%p) - queue empty", this, toString(time).utf8().data());
-    CMTimebaseSetTimerDispatchSourceNextFireTime(m_timebase.get(), m_timerSource.get(), kCMTimePositiveInfinity, 0);
+    CMTimebaseSetTimerDispatchSourceNextFireTime(m_timebase.get(), m_timerSource.get(), toCMTime(nextFireTime), 0);
 }
 
 void WebCoreDecompressionSession::enqueueDecodedSample(CMSampleBufferRef sample, bool displaying)
@@ -425,6 +435,8 @@ RetainPtr<CVPixelBufferRef> WebCoreDecompressionSession::imageForTime(const Medi
         return nullptr;
     }
 
+    bool releasedImageBuffers = false;
+
     while (CMSampleBufferRef firstSample = (CMSampleBufferRef)CMBufferQueueGetHead(m_producerQueue.get())) {
         MediaTime presentationTimestamp = toMediaTime(CMSampleBufferGetPresentationTimeStamp(firstSample));
         MediaTime duration = toMediaTime(CMSampleBufferGetDuration(firstSample));
@@ -433,6 +445,7 @@ RetainPtr<CVPixelBufferRef> WebCoreDecompressionSession::imageForTime(const Medi
             return nullptr;
         if (!allowEarlier && presentationEndTimestamp < time) {
             CFRelease(CMBufferQueueDequeueAndRetain(m_producerQueue.get()));
+            releasedImageBuffers = true;
             continue;
         }
 
@@ -443,6 +456,8 @@ RetainPtr<CVPixelBufferRef> WebCoreDecompressionSession::imageForTime(const Medi
         if (m_timebase)
             CMTimebaseSetTimerDispatchSourceToFireImmediately(m_timebase.get(), m_timerSource.get());
 
+        maybeBecomeReadyForMoreMediaData();
+
         LOG(Media, "WebCoreDecompressionSession::imageForTime(%p) - found sample for time(%s) in queue(%s -> %s)", this, toString(time).utf8().data(), toString(startTime).utf8().data(), toString(endTime).utf8().data());
         return imageBuffer;
     }
@@ -450,6 +465,9 @@ RetainPtr<CVPixelBufferRef> WebCoreDecompressionSession::imageForTime(const Medi
     if (m_timebase)
         CMTimebaseSetTimerDispatchSourceToFireImmediately(m_timebase.get(), m_timerSource.get());
 
+    if (releasedImageBuffers)
+        maybeBecomeReadyForMoreMediaData();
+
     LOG(Media, "WebCoreDecompressionSession::imageForTime(%p) - no matching sample for time(%s) in queue(%s -> %s)", this, toString(time).utf8().data(), toString(startTime).utf8().data(), toString(endTime).utf8().data());
     return nullptr;
 }