[GStreamer][v4l2] Synchronous video texture flushing support
https://bugs.webkit.org/show_bug.cgi?id=195453
Reviewed by Xabier Rodriguez-Calvar.
The v4l2 video decoder currently requires that downstream users of
the graphics resources complete any pending draw call and release
resources before returning from the DRAIN query.
To accomplish this the player monitors the pipeline and whenever a
v4l2 decoder is added, synchronous video texture flushing support
is enabled. Additionally and for all decoder configurations, a
flush is performed before disposing of the player.
* platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp:
(WebCore::MediaPlayerPrivateGStreamer::playbinDeepElementAddedCallback):
Monitor elements added to the decodebin bin.
(WebCore::MediaPlayerPrivateGStreamer::decodebinElementAdded): Set
a flag if a v4l2 decoder was added in decodebin.
(WebCore::MediaPlayerPrivateGStreamer::createGSTPlayBin): Connect
to the deep-element-added signal so as to monitor pipeline
topology updates.
* platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h:
* platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.cpp:
(WebCore::MediaPlayerPrivateGStreamerBase::~MediaPlayerPrivateGStreamerBase):
Flush video texture before disposing of the player.
(WebCore::MediaPlayerPrivateGStreamerBase::flushCurrentBuffer):
Synchronously flush if the pipeline contains a v4l2 decoder.
(WebCore::MediaPlayerPrivateGStreamerBase::createGLAppSink): Monitor push events only.
* platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.h:
* platform/graphics/texmap/TextureMapperPlatformLayerProxy.cpp:
(WebCore::TextureMapperPlatformLayerProxy::pushNextBuffer): New
boolean flag used mostly to trigger synchronous flush conditions.
(WebCore::TextureMapperPlatformLayerProxy::dropCurrentBufferWhilePreservingTexture):
Optionally drop the current buffer in a synchronous manner. By
default the method keeps operating asynchronously.
* platform/graphics/texmap/TextureMapperPlatformLayerProxy.h:
git-svn-id: https://svn.webkit.org/repository/webkit/trunk@242701 268f45cc-cd09-0410-ab3c-d52691b4dbfc
+2019-03-11 Philippe Normand <pnormand@igalia.com>
+
+ [GStreamer][v4l2] Synchronous video texture flushing support
+ https://bugs.webkit.org/show_bug.cgi?id=195453
+
+ Reviewed by Xabier Rodriguez-Calvar.
+
+ The v4l2 video decoder currently requires that downstream users of
+ the graphics resources complete any pending draw call and release
+ resources before returning from the DRAIN query.
+
+ To accomplish this the player monitors the pipeline and whenever a
+ v4l2 decoder is added, synchronous video texture flushing support
+ is enabled. Additionally and for all decoder configurations, a
+ flush is performed before disposing of the player.
+
+ * platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp:
+ (WebCore::MediaPlayerPrivateGStreamer::playbinDeepElementAddedCallback):
+ Monitor elements added to the decodebin bin.
+ (WebCore::MediaPlayerPrivateGStreamer::decodebinElementAdded): Set
+ a flag if a v4l2 decoder was added in decodebin.
+ (WebCore::MediaPlayerPrivateGStreamer::createGSTPlayBin): Connect
+ to the deep-element-added signal so as to monitor pipeline
+ topology updates.
+ * platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h:
+ * platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.cpp:
+ (WebCore::MediaPlayerPrivateGStreamerBase::~MediaPlayerPrivateGStreamerBase):
+ Flush video texture before disposing of the player.
+ (WebCore::MediaPlayerPrivateGStreamerBase::flushCurrentBuffer):
+ Synchronously flush if the pipeline contains a v4l2 decoder.
+ (WebCore::MediaPlayerPrivateGStreamerBase::createGLAppSink): Monitor push events only.
+ * platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.h:
+ * platform/graphics/texmap/TextureMapperPlatformLayerProxy.cpp:
+ (WebCore::TextureMapperPlatformLayerProxy::pushNextBuffer): New
+ boolean flag used mostly to trigger synchronous flush conditions.
+ (WebCore::TextureMapperPlatformLayerProxy::dropCurrentBufferWhilePreservingTexture):
+ Optionally drop the current buffer in a synchronous manner. By
+ default the method keeps operating asynchronously.
+ * platform/graphics/texmap/TextureMapperPlatformLayerProxy.h:
+
2019-03-11 Antti Koivisto <antti@apple.com>
Rename contentOffsetInCompostingLayer to contentOffsetInCompositingLayer
g_object_set(m_pipeline.get(), "mute", m_player->muted(), nullptr);
+ g_signal_connect(GST_BIN_CAST(m_pipeline.get()), "deep-element-added", G_CALLBACK(+[](GstBin*, GstBin* subBin, GstElement* element, MediaPlayerPrivateGStreamer* player) {
+ GUniquePtr<char> binName(gst_element_get_name(GST_ELEMENT_CAST(subBin)));
+ if (!g_str_has_prefix(binName.get(), "decodebin"))
+ return;
+
+ GUniquePtr<char> elementName(gst_element_get_name(element));
+ player->m_isVideoDecoderVideo4Linux = g_str_has_prefix(elementName.get(), "v4l2");
+ }), this);
+
g_signal_connect_swapped(m_pipeline.get(), "source-setup", G_CALLBACK(sourceSetupCallback), this);
if (m_isLegacyPlaybin) {
g_signal_connect_swapped(m_pipeline.get(), "video-changed", G_CALLBACK(videoChangedCallback), this);
MediaPlayerPrivateGStreamerBase::~MediaPlayerPrivateGStreamerBase()
{
+ flushCurrentBuffer();
#if USE(TEXTURE_MAPPER_GL) && USE(NICOSIA)
downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).invalidateClient();
#endif
void MediaPlayerPrivateGStreamerBase::flushCurrentBuffer()
{
- GST_DEBUG_OBJECT(pipeline(), "Flushing video sample");
auto sampleLocker = holdLock(m_sampleMutex);
if (m_sample) {
}
auto proxyOperation =
- [](TextureMapperPlatformLayerProxy& proxy)
+ [shouldWait = m_isVideoDecoderVideo4Linux, pipeline = pipeline()](TextureMapperPlatformLayerProxy& proxy)
{
- LockHolder locker(proxy.lock());
+ GST_DEBUG_OBJECT(pipeline, "Flushing video sample %s", shouldWait ? "synchronously" : "");
+ if (!shouldWait)
+ proxy.lock().lock();
if (proxy.isActive())
- proxy.dropCurrentBufferWhilePreservingTexture();
+ proxy.dropCurrentBufferWhilePreservingTexture(shouldWait);
};
#if USE(NICOSIA)
g_signal_connect(appsink, "new-preroll", G_CALLBACK(newPrerollCallback), this);
GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(appsink, "sink"));
- gst_pad_add_probe(pad.get(), static_cast<GstPadProbeType>(GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM | GST_PAD_PROBE_TYPE_EVENT_FLUSH), [] (GstPad*, GstPadProbeInfo* info, gpointer userData) -> GstPadProbeReturn {
+ gst_pad_add_probe(pad.get(), static_cast<GstPadProbeType>(GST_PAD_PROBE_TYPE_PUSH | GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM | GST_PAD_PROBE_TYPE_EVENT_FLUSH), [] (GstPad*, GstPadProbeInfo* info, gpointer userData) -> GstPadProbeReturn {
// In some platforms (e.g. OpenMAX on the Raspberry Pi) when a resolution change occurs the
// pipeline has to be drained before a frame with the new resolution can be decoded.
// In this context, it's important that we don't hold references to any previous frame
HashSet<uint32_t> m_handledProtectionEvents;
bool m_waitingForKey { false };
#endif
+
+ mutable bool m_isVideoDecoderVideo4Linux { false };
};
}
#if USE(GLIB_EVENT_LOOP)
#include <wtf/glib/RunLoopSourcePriority.h>
#endif
+#include <wtf/Scope.h>
static const Seconds releaseUnusedSecondsTolerance { 1_s };
static const Seconds releaseUnusedBuffersTimerInterval = { 500_ms };
{
ASSERT(m_lock.isHeld());
m_pendingBuffer = WTFMove(newBuffer);
+ m_wasBufferDropped = false;
if (m_compositor)
m_compositor->onNewBufferAvailable();
appendToUnusedBuffers(WTFMove(prevBuffer));
}
-void TextureMapperPlatformLayerProxy::dropCurrentBufferWhilePreservingTexture()
+void TextureMapperPlatformLayerProxy::dropCurrentBufferWhilePreservingTexture(bool shouldWait)
{
- ASSERT(m_lock.isHeld());
+ if (!shouldWait)
+ ASSERT(m_lock.isHeld());
if (m_pendingBuffer && m_pendingBuffer->hasManagedTexture()) {
m_usedBuffers.append(WTFMove(m_pendingBuffer));
return;
m_compositorThreadUpdateFunction =
- [this] {
+ [this, shouldWait] {
LockHolder locker(m_lock);
+ auto maybeNotifySynchronousOperation = WTF::makeScopeExit([this, shouldWait]() {
+ if (shouldWait) {
+ LockHolder holder(m_wasBufferDroppedLock);
+ m_wasBufferDropped = true;
+ m_wasBufferDroppedCondition.notifyAll();
+ }
+ });
+
if (!m_compositor || !m_targetLayer || !m_currentBuffer)
return;
if (prevBuffer->hasManagedTexture())
appendToUnusedBuffers(WTFMove(prevBuffer));
};
+
+ if (shouldWait) {
+ LockHolder holder(m_wasBufferDroppedLock);
+ m_wasBufferDropped = false;
+ }
+
m_compositorThreadUpdateTimer->startOneShot(0_s);
+ if (shouldWait) {
+ LockHolder holder(m_wasBufferDroppedLock);
+ m_wasBufferDroppedCondition.wait(m_wasBufferDroppedLock, [this] {
+ return m_wasBufferDropped;
+ });
+ }
}
bool TextureMapperPlatformLayerProxy::scheduleUpdateOnCompositorThread(Function<void()>&& updateFunction)
#if USE(COORDINATED_GRAPHICS)
#include "TextureMapperGLHeaders.h"
+#include <wtf/Condition.h>
#include <wtf/Function.h>
#include <wtf/Lock.h>
#include <wtf/RunLoop.h>
WEBCORE_EXPORT void invalidate();
WEBCORE_EXPORT void swapBuffer();
- void dropCurrentBufferWhilePreservingTexture();
+ void dropCurrentBufferWhilePreservingTexture(bool shouldWait = false);
bool scheduleUpdateOnCompositorThread(Function<void()>&&);
Lock m_lock;
+ Lock m_wasBufferDroppedLock;
+ Condition m_wasBufferDroppedCondition;
+ bool m_wasBufferDropped { false };
+
Vector<std::unique_ptr<TextureMapperPlatformLayerBuffer>> m_usedBuffers;
std::unique_ptr<RunLoop::Timer<TextureMapperPlatformLayerProxy>> m_releaseUnusedBuffersTimer;