PeerConnection should respect tracks that are muted at the time they are added
author: commit-queue@webkit.org <commit-queue@webkit.org@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
Wed, 31 May 2017 20:51:12 +0000 (20:51 +0000)
committer: commit-queue@webkit.org <commit-queue@webkit.org@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
Wed, 31 May 2017 20:51:12 +0000 (20:51 +0000)
https://bugs.webkit.org/show_bug.cgi?id=172771

Patch by Youenn Fablet <youenn@apple.com> on 2017-05-31
Reviewed by Eric Carlson.

Source/WebCore:

Tests: webrtc/peer-connection-audio-unmute.html
       webrtc/video-unmute.html

Making sure that muted/enabled state of sources are correctly handled at creation time of the outgoing webrtc sources.
This should trigger silent audio and black frames.

* platform/mediastream/mac/RealtimeOutgoingAudioSource.cpp:
(WebCore::RealtimeOutgoingAudioSource::RealtimeOutgoingAudioSource):
(WebCore::RealtimeOutgoingAudioSource::setSource):
(WebCore::RealtimeOutgoingAudioSource::initializeConverter):
* platform/mediastream/mac/RealtimeOutgoingAudioSource.h:
* platform/mediastream/mac/RealtimeOutgoingVideoSource.cpp:
(WebCore::RealtimeOutgoingVideoSource::RealtimeOutgoingVideoSource):
(WebCore::RealtimeOutgoingVideoSource::setSource):
(WebCore::RealtimeOutgoingVideoSource::sourceMutedChanged):
(WebCore::RealtimeOutgoingVideoSource::sourceEnabledChanged):
(WebCore::RealtimeOutgoingVideoSource::initializeFromSource):
(WebCore::RealtimeOutgoingVideoSource::AddOrUpdateSink):
(WebCore::RealtimeOutgoingVideoSource::RemoveSink):
(WebCore::RealtimeOutgoingVideoSource::sendBlackFramesIfNeeded):
(WebCore::RealtimeOutgoingVideoSource::setSizeFromSource): Deleted.
(WebCore::RealtimeOutgoingVideoSource::sendBlackFrames): Deleted.
* platform/mediastream/mac/RealtimeOutgoingVideoSource.h:

LayoutTests:

* webrtc/audio-replace-track.html:
* webrtc/peer-connection-audio-unmute-expected.txt: Added.
* webrtc/peer-connection-audio-unmute.html: Added.
* webrtc/routines.js:
* webrtc/video-unmute-expected.txt: Added.
* webrtc/video-unmute.html: Added.

git-svn-id: https://svn.webkit.org/repository/webkit/trunk@217624 268f45cc-cd09-0410-ab3c-d52691b4dbfc

12 files changed:
LayoutTests/ChangeLog
LayoutTests/webrtc/audio-replace-track.html
LayoutTests/webrtc/peer-connection-audio-unmute-expected.txt [new file with mode: 0644]
LayoutTests/webrtc/peer-connection-audio-unmute.html [new file with mode: 0644]
LayoutTests/webrtc/routines.js
LayoutTests/webrtc/video-unmute-expected.txt [new file with mode: 0644]
LayoutTests/webrtc/video-unmute.html [new file with mode: 0644]
Source/WebCore/ChangeLog
Source/WebCore/platform/mediastream/mac/RealtimeOutgoingAudioSource.cpp
Source/WebCore/platform/mediastream/mac/RealtimeOutgoingAudioSource.h
Source/WebCore/platform/mediastream/mac/RealtimeOutgoingVideoSource.cpp
Source/WebCore/platform/mediastream/mac/RealtimeOutgoingVideoSource.h

index a4c57ae..b057c87 100644 (file)
@@ -1,3 +1,17 @@
+2017-05-31  Youenn Fablet  <youenn@apple.com>
+
+        PeerConnection should respect tracks that are muted at the time they are added
+        https://bugs.webkit.org/show_bug.cgi?id=172771
+
+        Reviewed by Eric Carlson.
+
+        * webrtc/audio-replace-track.html:
+        * webrtc/peer-connection-audio-unmute-expected.txt: Added.
+        * webrtc/peer-connection-audio-unmute.html: Added.
+        * webrtc/routines.js:
+        * webrtc/video-unmute-expected.txt: Added.
+        * webrtc/video-unmute.html: Added.
+
 2017-05-31  Antti Koivisto  <antti@apple.com>
 
         CrashTracer: Regression : com.apple.WebKit.WebContent at STP responsible :: com.apple.WebCore: WebCore::SharedBuffer::data const + 11
index 7640106..0156dde 100644 (file)
@@ -9,18 +9,6 @@
 </head>
 <body>
     <script>
-    async function doHumAnalysis(stream, expected)
-    {
-        var context = new webkitAudioContext();
-        for (var cptr = 0; cptr < 10; cptr++) {
-            var results = await analyseAudio(stream, 200, context);
-            if (results.heardHum === expected)
-                return true;
-        }
-        await context.close();
-        return false;
-    }
-
     var sender;
     var remoteStream;
     var secondStream;
diff --git a/LayoutTests/webrtc/peer-connection-audio-unmute-expected.txt b/LayoutTests/webrtc/peer-connection-audio-unmute-expected.txt
new file mode 100644 (file)
index 0000000..ef03ecd
--- /dev/null
@@ -0,0 +1,3 @@
+
+PASS Muting a local audio track before adding it should be correctly handled 
+
diff --git a/LayoutTests/webrtc/peer-connection-audio-unmute.html b/LayoutTests/webrtc/peer-connection-audio-unmute.html
new file mode 100644 (file)
index 0000000..fec001a
--- /dev/null
@@ -0,0 +1,45 @@
+<!DOCTYPE html>
+<html>
+<head>
+    <meta charset="utf-8">
+    <title>A muted audio track that is added should not cause audio to be sent</title>
+    <script src="../resources/testharness.js"></script>
+    <script src="../resources/testharnessreport.js"></script>
+</head>
+<body>
+    <script src ="routines.js"></script>
+    <script>
+    promise_test((test) => {
+        if (window.testRunner)
+            testRunner.setUserMediaPermission(true);
+
+        var localTrack;
+        return navigator.mediaDevices.getUserMedia({audio: true}).then((localStream) => {
+            localTrack = localStream.getAudioTracks()[0];
+            localTrack.enabled = false;
+            var remoteStream;
+            return new Promise((resolve, reject) => {
+                createConnections((firstConnection) => {
+                    firstConnection.addTrack(localTrack, localStream);
+                }, (secondConnection) => {
+                    secondConnection.ontrack = (trackEvent) => {
+                        remoteStream = trackEvent.streams[0];
+                        resolve();
+                    };
+                });
+            }).then(() => {
+                return doHumAnalysis(remoteStream, false).then((result) => {
+                    assert_true(result, "Should not hear hum");
+                });
+            }).then(() => {
+                localTrack.enabled = true;
+            }).then(() => {
+                return doHumAnalysis(remoteStream, true).then((result) => {
+                    assert_true(result, "Should hear hum");
+                });
+            });
+        });
+    }, "Muting a local audio track before adding it should be correctly handled");
+    </script>
+</body>
+</html>
index 874edd0..f276544 100644 (file)
@@ -145,3 +145,15 @@ function waitForVideoSize(video, width, height, count)
         return waitForVideoSize(video, width, height, count);
     });
 }
+
+async function doHumAnalysis(stream, expected)
+{
+    var context = new webkitAudioContext();
+    for (var cptr = 0; cptr < 10; cptr++) {
+        var results = await analyseAudio(stream, 200, context);
+        if (results.heardHum === expected)
+            return true;
+    }
+    await context.close();
+    return false;
+}
diff --git a/LayoutTests/webrtc/video-unmute-expected.txt b/LayoutTests/webrtc/video-unmute-expected.txt
new file mode 100644 (file)
index 0000000..57e88e7
--- /dev/null
@@ -0,0 +1,6 @@
+   
+
+PASS Setting video exchange 
+PASS Track is enabled, video should be black 
+PASS Track is enabled, video should not be black 
+
diff --git a/LayoutTests/webrtc/video-unmute.html b/LayoutTests/webrtc/video-unmute.html
new file mode 100644 (file)
index 0000000..a8ca1c0
--- /dev/null
@@ -0,0 +1,90 @@
+<!doctype html>
+<html>
+    <head>
+        <meta charset="utf-8">
+        <title>A muted video track that is added should send black frames until unmuted</title>
+        <script src="../resources/testharness.js"></script>
+        <script src="../resources/testharnessreport.js"></script>
+    </head>
+    <body>
+        <video id="localVideo" autoplay playsInline width="320" height="240"></video>
+        <video id="video" autoplay playsInline width="320" height="240"></video>
+        <canvas id="canvas1" width="320" height="240"></canvas>
+        <canvas id="canvas2" width="320" height="240"></canvas>
+        <canvas id="canvas3" width="320" height="240"></canvas>
+        <script src ="routines.js"></script>
+        <script>
+function isVideoBlack(id)
+{
+    var canvas = document.getElementById(id);
+    canvas.width = video.videoWidth;
+    canvas.height = video.videoHeight;
+    canvas.getContext('2d').drawImage(video, 0, 0, canvas.width, canvas.height);
+
+    imageData = canvas.getContext('2d').getImageData(0, 0, canvas.width, canvas.height);
+    data = imageData.data;
+    for (var cptr = 0; cptr < canvas.width * canvas.height; ++cptr) {
+        // Approximately black pixels.
+        if (data[4 * cptr] > 10 || data[4 * cptr + 1] > 10 || data[4 * cptr + 2] > 10)
+            return false;
+    }
+    return true;
+}
+
+function pollVideoBlackCheck(expected, id, resolve)
+{
+    if (isVideoBlack(id) === expected) {
+        resolve();
+        return;
+    }
+
+    setTimeout(() => pollVideoBlackCheck(expected, id, resolve), 50);
+}
+
+function checkVideoBlack(expected, id)
+{
+    return new Promise((resolve, reject) => {
+        pollVideoBlackCheck(expected, id, resolve);
+        setTimeout(() => reject("checkVideoBlack timed out for " + id + " expected " + expected), 5000);
+    });
+}
+
+var track;
+var remoteTrack;
+promise_test((test) => {
+    if (window.testRunner)
+        testRunner.setUserMediaPermission(true);
+
+    return navigator.mediaDevices.getUserMedia({video: {width: 320, height: 240, facingMode: "environment"}}).then((localStream) => {
+        track = localStream.getVideoTracks()[0];
+        track.enabled = false;
+        localVideo.srcObject = localStream;
+        return new Promise((resolve, reject) => {
+            createConnections((firstConnection) => {
+                firstConnection.addTrack(track, localStream);
+            }, (secondConnection) => {
+                secondConnection.ontrack = (trackEvent) => {
+                    remoteTrack = trackEvent.track;
+                    resolve(trackEvent.streams[0]); 
+                };
+            });
+            setTimeout(() => reject("Test timed out"), 5000);
+        });
+    }).then((remoteStream) => {
+        video.srcObject = remoteStream;
+        return video.play();
+    });
+}, "Setting video exchange");
+
+promise_test((test) => {
+    return checkVideoBlack(true, "canvas1");
+}, "Track is enabled, video should be black");
+
+promise_test((test) => {
+    track.enabled = true;
+    return checkVideoBlack(false, "canvas2");
+}, "Track is enabled, video should not be black");
+
+        </script>
+    </body>
+</html>
index d9a2bda..8474a9e 100644 (file)
@@ -1,3 +1,34 @@
+2017-05-31  Youenn Fablet  <youenn@apple.com>
+
+        PeerConnection should respect tracks that are muted at the time they are added
+        https://bugs.webkit.org/show_bug.cgi?id=172771
+
+        Reviewed by Eric Carlson.
+
+        Tests: webrtc/peer-connection-audio-unmute.html
+               webrtc/video-unmute.html
+
+        Making sure that muted/enabled state of sources are correctly handled at creation time of the outgoing webrtc sources.
+        This should trigger silent audio and black frames.
+
+        * platform/mediastream/mac/RealtimeOutgoingAudioSource.cpp:
+        (WebCore::RealtimeOutgoingAudioSource::RealtimeOutgoingAudioSource):
+        (WebCore::RealtimeOutgoingAudioSource::setSource):
+        (WebCore::RealtimeOutgoingAudioSource::initializeConverter):
+        * platform/mediastream/mac/RealtimeOutgoingAudioSource.h:
+        * platform/mediastream/mac/RealtimeOutgoingVideoSource.cpp:
+        (WebCore::RealtimeOutgoingVideoSource::RealtimeOutgoingVideoSource):
+        (WebCore::RealtimeOutgoingVideoSource::setSource):
+        (WebCore::RealtimeOutgoingVideoSource::sourceMutedChanged):
+        (WebCore::RealtimeOutgoingVideoSource::sourceEnabledChanged):
+        (WebCore::RealtimeOutgoingVideoSource::initializeFromSource):
+        (WebCore::RealtimeOutgoingVideoSource::AddOrUpdateSink):
+        (WebCore::RealtimeOutgoingVideoSource::RemoveSink):
+        (WebCore::RealtimeOutgoingVideoSource::sendBlackFramesIfNeeded):
+        (WebCore::RealtimeOutgoingVideoSource::setSizeFromSource): Deleted.
+        (WebCore::RealtimeOutgoingVideoSource::sendBlackFrames): Deleted.
+        * platform/mediastream/mac/RealtimeOutgoingVideoSource.h:
+
 2017-05-31  Antti Koivisto  <antti@apple.com>
 
         CrashTracer: Regression : com.apple.WebKit.WebContent at STP responsible :: com.apple.WebCore: WebCore::SharedBuffer::data const + 11
index 8cd8262..032e481 100644 (file)
@@ -51,6 +51,7 @@ RealtimeOutgoingAudioSource::RealtimeOutgoingAudioSource(Ref<RealtimeMediaSource
     , m_sampleConverter(AudioSampleDataSource::create(LibWebRTCAudioFormat::sampleRate * 2))
 {
     m_audioSource->addObserver(*this);
+    initializeConverter();
 }
 
 bool RealtimeOutgoingAudioSource::setSource(Ref<RealtimeMediaSource>&& newSource)
@@ -59,10 +60,15 @@ bool RealtimeOutgoingAudioSource::setSource(Ref<RealtimeMediaSource>&& newSource
     m_audioSource = WTFMove(newSource);
     m_audioSource->addObserver(*this);
 
+    initializeConverter();
+    return true;
+}
+
+void RealtimeOutgoingAudioSource::initializeConverter()
+{
     m_muted = m_audioSource->muted();
     m_enabled = m_audioSource->enabled();
     m_sampleConverter->setMuted(m_muted || !m_enabled);
-    return true;
 }
 
 void RealtimeOutgoingAudioSource::stop()
index 8dacd67..2794539 100644 (file)
@@ -73,6 +73,8 @@ private:
 
     void pullAudioData();
 
+    void initializeConverter();
+
     Vector<webrtc::AudioTrackSinkInterface*> m_sinks;
     Ref<RealtimeMediaSource> m_audioSource;
     Ref<AudioSampleDataSource> m_sampleConverter;
index 485cd2b..a5b96c7 100644 (file)
@@ -48,7 +48,7 @@ RealtimeOutgoingVideoSource::RealtimeOutgoingVideoSource(Ref<RealtimeMediaSource
     , m_blackFrameTimer(*this, &RealtimeOutgoingVideoSource::sendOneBlackFrame)
 {
     m_videoSource->addObserver(*this);
-    setSizeFromSource();
+    initializeFromSource();
 }
 
 bool RealtimeOutgoingVideoSource::setSource(Ref<RealtimeMediaSource>&& newSource)
@@ -65,9 +65,7 @@ bool RealtimeOutgoingVideoSource::setSource(Ref<RealtimeMediaSource>&& newSource
     m_videoSource = WTFMove(newSource);
     m_videoSource->addObserver(*this);
 
-    setSizeFromSource();
-    m_muted = m_videoSource->muted();
-    m_enabled = m_videoSource->enabled();
+    initializeFromSource();
 
     return true;
 }
@@ -79,18 +77,23 @@ void RealtimeOutgoingVideoSource::stop()
     m_isStopped = true;
 }
 
+void RealtimeOutgoingVideoSource::updateBlackFramesSending()
+{
+    if (!m_muted && m_enabled && m_blackFrameTimer.isActive()) {
+        m_blackFrameTimer.stop();
+        return;
+    }
+
+    sendBlackFramesIfNeeded();
+}
+
 void RealtimeOutgoingVideoSource::sourceMutedChanged()
 {
     ASSERT(m_muted != m_videoSource->muted());
 
     m_muted = m_videoSource->muted();
 
-    if (m_muted && m_sinks.size() && m_enabled) {
-        sendBlackFrames();
-        return;
-    }
-    if (m_blackFrameTimer.isActive())
-        m_blackFrameTimer.stop();
+    updateBlackFramesSending();
 }
 
 void RealtimeOutgoingVideoSource::sourceEnabledChanged()
@@ -99,19 +102,19 @@ void RealtimeOutgoingVideoSource::sourceEnabledChanged()
 
     m_enabled = m_videoSource->enabled();
 
-    if (!m_enabled && m_sinks.size() && !m_muted) {
-        sendBlackFrames();
-        return;
-    }
-    if (m_blackFrameTimer.isActive())
-        m_blackFrameTimer.stop();
+    updateBlackFramesSending();
 }
 
-void RealtimeOutgoingVideoSource::setSizeFromSource()
+void RealtimeOutgoingVideoSource::initializeFromSource()
 {
     const auto& settings = m_videoSource->settings();
     m_width = settings.width();
     m_height = settings.height();
+
+    m_muted = m_videoSource->muted();
+    m_enabled = m_videoSource->enabled();
+
+    sendBlackFramesIfNeeded();
 }
 
 bool RealtimeOutgoingVideoSource::GetStats(Stats*)
@@ -128,15 +131,36 @@ void RealtimeOutgoingVideoSource::AddOrUpdateSink(rtc::VideoSinkInterface<webrtc
 
     if (!m_sinks.contains(sink))
         m_sinks.append(sink);
+
+    callOnMainThread([protectedThis = makeRef(*this)]() {
+        protectedThis->sendBlackFramesIfNeeded();
+    });
 }
 
 void RealtimeOutgoingVideoSource::RemoveSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink)
 {
     m_sinks.removeFirst(sink);
+
+    if (m_sinks.size())
+        return;
+
+    callOnMainThread([protectedThis = makeRef(*this)]() {
+        if (protectedThis->m_blackFrameTimer.isActive())
+            protectedThis->m_blackFrameTimer.stop();
+    });
 }
 
-void RealtimeOutgoingVideoSource::sendBlackFrames()
+void RealtimeOutgoingVideoSource::sendBlackFramesIfNeeded()
 {
+    if (m_blackFrameTimer.isActive())
+        return;
+
+    if (!m_sinks.size())
+        return;
+
+    if (!m_muted && m_enabled)
+        return;
+
     if (!m_blackFrame) {
         auto width = m_width;
         auto height = m_height;
index d80e2de..724f571 100644 (file)
@@ -60,9 +60,10 @@ private:
     RealtimeOutgoingVideoSource(Ref<RealtimeMediaSource>&&);
 
     void sendFrame(rtc::scoped_refptr<webrtc::VideoFrameBuffer>&&);
-    void sendBlackFrames();
+    void sendBlackFramesIfNeeded();
     void sendOneBlackFrame();
-    void setSizeFromSource();
+    void initializeFromSource();
+    void updateBlackFramesSending();
 
     // Notifier API
     void RegisterObserver(webrtc::ObserverInterface*) final { }
@@ -84,7 +85,7 @@ private:
     // RealtimeMediaSource::Observer API
     void sourceMutedChanged() final;
     void sourceEnabledChanged() final;
-    void sourceSettingsChanged() final { setSizeFromSource(); }
+    void sourceSettingsChanged() final { initializeFromSource(); }
     void videoSampleAvailable(MediaSample&) final;
 
     Vector<rtc::VideoSinkInterface<webrtc::VideoFrame>*> m_sinks;