run-webkit-tests is always creating mock libwebrtc tracks
author    commit-queue@webkit.org <commit-queue@webkit.org@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
          Wed, 15 Mar 2017 16:38:10 +0000 (16:38 +0000)
committer commit-queue@webkit.org <commit-queue@webkit.org@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
          Wed, 15 Mar 2017 16:38:10 +0000 (16:38 +0000)
https://bugs.webkit.org/show_bug.cgi?id=169658

Patch by Youenn Fablet <youenn@apple.com> on 2017-03-15
Reviewed by Alex Christensen.

Source/WebCore:

Tests: webrtc/peer-connection-audio-mute.html
       webrtc/video-mute.html

Create real libwebrtc audio and video tracks when the TwoRealPeerConnections mock factory is in use, instead of always returning mock tracks.
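For reference, a condensed sketch of the factory-side change (the complete hunks are in the MockLibWebRTCPeerConnection.cpp diff below); the m_testCase member and realPeerConnectionFactory() helper come from the patch, and CreateVideoTrack mirrors the same logic:

rtc::scoped_refptr<webrtc::AudioTrackInterface> MockLibWebRTCPeerConnectionFactory::CreateAudioTrack(const std::string& id, webrtc::AudioSourceInterface* source)
{
    // Forward to the real libwebrtc factory so genuine audio flows end to end
    // when the test has selected the "TwoRealPeerConnections" configuration.
    if (m_testCase == "TwoRealPeerConnections")
        return realPeerConnectionFactory()->CreateAudioTrack(id, source);

    // Every other mock configuration keeps returning an inert mock track.
    return new rtc::RefCountedObject<MockLibWebRTCAudioTrack>(id, source);
}

Layout tests opt into this path with internals.useMockRTCPeerConnectionFactory("TwoRealPeerConnections"), as the new audio and video mute tests below do.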

* testing/MockLibWebRTCPeerConnection.cpp:
(WebCore::MockLibWebRTCPeerConnectionFactory::CreateVideoTrack):
(WebCore::MockLibWebRTCPeerConnectionFactory::CreateAudioTrack):
* testing/MockLibWebRTCPeerConnection.h:

LayoutTests:

* TestExpectations:
* webrtc/audio-peer-connection-webaudio.html:
* webrtc/peer-connection-audio-mute-expected.txt: Added.
* webrtc/peer-connection-audio-mute.html: Added.
* webrtc/routines.js:
(analyseAudio):
* webrtc/video-expected.txt:
* webrtc/video-mute-expected.txt: Added.
* webrtc/video-mute.html: Added.
* webrtc/video.html:

git-svn-id: https://svn.webkit.org/repository/webkit/trunk@213983 268f45cc-cd09-0410-ab3c-d52691b4dbfc

14 files changed:
LayoutTests/ChangeLog
LayoutTests/TestExpectations
LayoutTests/TestExpectations.orig
LayoutTests/webrtc/audio-peer-connection-webaudio.html
LayoutTests/webrtc/peer-connection-audio-mute-expected.txt [new file with mode: 0644]
LayoutTests/webrtc/peer-connection-audio-mute.html [new file with mode: 0644]
LayoutTests/webrtc/routines.js
LayoutTests/webrtc/video-expected.txt
LayoutTests/webrtc/video-mute-expected.txt [new file with mode: 0644]
LayoutTests/webrtc/video-mute.html [new file with mode: 0644]
LayoutTests/webrtc/video.html
Source/WebCore/ChangeLog
Source/WebCore/testing/MockLibWebRTCPeerConnection.cpp
Source/WebCore/testing/MockLibWebRTCPeerConnection.h

diff --git a/LayoutTests/ChangeLog b/LayoutTests/ChangeLog
index 3dc64a6..23a0826 100644 (file)
@@ -1,5 +1,23 @@
 2017-03-15  Youenn Fablet  <youenn@apple.com>
 
+        run-webkit-tests is always creating mock libwebrtc tracks
+        https://bugs.webkit.org/show_bug.cgi?id=169658
+
+        Reviewed by Alex Christensen.
+
+        * TestExpectations:
+        * webrtc/audio-peer-connection-webaudio.html:
+        * webrtc/peer-connection-audio-mute-expected.txt: Added.
+        * webrtc/peer-connection-audio-mute.html: Added.
+        * webrtc/routines.js:
+        (analyseAudio):
+        * webrtc/video-expected.txt:
+        * webrtc/video-mute-expected.txt: Added.
+        * webrtc/video-mute.html: Added.
+        * webrtc/video.html:
+
+2017-03-15  Youenn Fablet  <youenn@apple.com>
+
         Preventive clean-up: ensure RTCPeerConnection stays valid when calling postTask
         https://bugs.webkit.org/show_bug.cgi?id=169661
 
diff --git a/LayoutTests/TestExpectations b/LayoutTests/TestExpectations
index 57232ec..d261698 100644 (file)
@@ -708,7 +708,7 @@ webkit.org/b/146182 editing/selection/leak-document-with-selection-inside.html [
 media/session [ Skip ]
 
 # WebRTC backend not enabled by default on Mac/iOS release bots.
-# GTK enables some of this tests on their TestExpectations file.
+# GTK enables some of these tests on their TestExpectations file.
 [ Release ] webrtc [ Skip ]
 
 [ Debug ] webrtc/audio-peer-connection-webaudio.html [ Failure ]
diff --git a/LayoutTests/TestExpectations.orig b/LayoutTests/TestExpectations.orig
index 24a5ece..57232ec 100644 (file)
@@ -733,6 +733,18 @@ imported/w3c/web-platform-tests/XMLHttpRequest/setrequestheader-content-type.htm
 imported/w3c/web-platform-tests/XMLHttpRequest/open-url-redirected-worker-origin.htm [ Skip ]
 imported/w3c/web-platform-tests/html/webappapis/system-state-and-capabilities/the-navigator-object/NavigatorID.html [ Skip ]
 imported/w3c/web-platform-tests/html/webappapis/system-state-and-capabilities/the-navigator-object/NavigatorID.worker.html [ Skip ]
+imported/w3c/web-platform-tests/XMLHttpRequest/anonymous-mode-unsupported.htm [ Failure ]
+imported/w3c/web-platform-tests/XMLHttpRequest/open-after-setrequestheader.htm [ Failure ]
+imported/w3c/web-platform-tests/XMLHttpRequest/open-referer.htm [ Failure ]
+imported/w3c/web-platform-tests/XMLHttpRequest/send-accept-language.htm [ Failure ]
+imported/w3c/web-platform-tests/XMLHttpRequest/setrequestheader-allow-empty-value.htm [ Failure ]
+imported/w3c/web-platform-tests/XMLHttpRequest/setrequestheader-allow-whitespace-in-value.htm [ Failure ]
+imported/w3c/web-platform-tests/XMLHttpRequest/setrequestheader-case-insensitive.htm [ Failure ]
+imported/w3c/web-platform-tests/XMLHttpRequest/setrequestheader-header-allowed.htm [ Failure ]
+imported/w3c/web-platform-tests/XMLHttpRequest/setrequestheader-header-forbidden.htm [ Failure ]
+imported/w3c/web-platform-tests/XMLHttpRequest/setrequestheader-open-setrequestheader.htm [ Failure ]
+imported/w3c/web-platform-tests/html/dom/interfaces.worker.html [ Failure ]
+imported/w3c/web-platform-tests/html/webappapis/scripting/events/event-handler-attributes-body-window.html [ Failure ]
 
 # Only iOS WK1 has testRunner.setPagePaused.
 fast/dom/timer-fire-after-page-pause.html [ Skip ]
diff --git a/LayoutTests/webrtc/audio-peer-connection-webaudio.html b/LayoutTests/webrtc/audio-peer-connection-webaudio.html
index 6dc2498..5ffa19a 100644 (file)
@@ -7,75 +7,28 @@
     <script src="../resources/testharnessreport.js"></script>
     <script src ="routines.js"></script>
     <script>
-    var test = async_test(() => {
+    promise_test((test) => {
         if (window.testRunner)
             testRunner.setUserMediaPermission(true);
 
-        var heardHum = false;
-        var heardBop = false;
-        var heardBip = false;
-
-        navigator.mediaDevices.getUserMedia({audio: true}).then((stream) => {
+       return navigator.mediaDevices.getUserMedia({audio: true}).then((stream) => {
             if (window.internals)
                 internals.useMockRTCPeerConnectionFactory("TwoRealPeerConnections");
-
-            createConnections((firstConnection) => {
-                firstConnection.addStream(stream);
-            }, (secondConnection) => {
-                secondConnection.onaddstream = (streamEvent) => {
-                    var context = new webkitAudioContext();
-                    var sourceNode = context.createMediaStreamSource(streamEvent.stream);
-                    var analyser = context.createAnalyser();
-                    var gain = context.createGain();
-
-                    analyser.fftSize = 2048;
-                    analyser.smoothingTimeConstant = 0;
-                    analyser.minDecibels = -100;
-                    analyser.maxDecibels = 0;
-                    gain.gain.value = 0;
-
-                    sourceNode.connect(analyser);
-                    analyser.connect(gain);
-                    gain.connect(context.destination);
-
-                    function analyse() {
-                        var freqDomain = new Uint8Array(analyser.frequencyBinCount);
-                        analyser.getByteFrequencyData(freqDomain);
-
-                        var hasFrequency = expectedFrequency => {
-                            var bin = Math.floor(expectedFrequency * analyser.fftSize / context.sampleRate);
-                            return bin < freqDomain.length && freqDomain[bin] >= 150;
-                        };
-
-                        if (!heardHum)
-                            heardHum = hasFrequency(150);
-
-                        if (!heardBip)
-                            heardBip = hasFrequency(1500);
-
-                        if (!heardBop)
-                            heardBop = hasFrequency(500);
-
-                        if (heardHum && heardBip && heardBop)
-                            done();
-                    };
-
-                    var done = test.step_func_done(() => {
-                        clearTimeout(timeout);
-                        clearInterval(interval);
-
-                        assert_true(heardHum, "heard hum");
-                        assert_true(heardBip, "heard bip");
-                        assert_true(heardBop, "heard bop");
-                        test.done();
-                    });
-
-                    var timeout = setTimeout(done, 3000);
-                    var interval = setInterval(analyse, 1000 / 30);
-                    analyse();
-                }
+            return new Promise((resolve, reject) => {
+                createConnections((firstConnection) => {
+                    firstConnection.addStream(stream);
+                }, (secondConnection) => {
+                    secondConnection.onaddstream = (streamEvent) => { resolve(streamEvent.stream); };
+                });
+                setTimeout(() => reject("Test timed out"), 5000);
+            }).then((stream) => {
+                return analyseAudio(stream, 1000);
+            }).then((results) => {
+                assert_true(results.heardHum, "heard hum");
+                assert_true(results.heardBip, "heard bip");
+                assert_true(results.heardBop, "heard bop");
             });
-        });
+         });
     }, "Basic audio playback through a peer connection");
     </script>
 </head>
diff --git a/LayoutTests/webrtc/peer-connection-audio-mute-expected.txt b/LayoutTests/webrtc/peer-connection-audio-mute-expected.txt
new file mode 100644 (file)
index 0000000..d39af1e
--- /dev/null
@@ -0,0 +1,3 @@
+
+FAIL Muting and unmuting an audio track assert_true: heard hum expected true got false
+
diff --git a/LayoutTests/webrtc/peer-connection-audio-mute.html b/LayoutTests/webrtc/peer-connection-audio-mute.html
new file mode 100644 (file)
index 0000000..2796e8b
--- /dev/null
@@ -0,0 +1,65 @@
+<!DOCTYPE html>
+<html>
+<head>
+    <meta charset="utf-8">
+    <title>Testing local audio capture playback causes "playing" event to fire</title>
+    <script src="../resources/testharness.js"></script>
+    <script src="../resources/testharnessreport.js"></script>
+</head>
+<body>
+    <script src ="routines.js"></script>
+    <script>
+    promise_test((test) => {
+        if (window.testRunner)
+            testRunner.setUserMediaPermission(true);
+
+        return navigator.mediaDevices.getUserMedia({audio: true}).then((stream) => {
+            if (window.internals)
+                internals.useMockRTCPeerConnectionFactory("TwoRealPeerConnections");
+
+            var stream;
+            return new Promise((resolve, reject) => {
+                createConnections((firstConnection) => {
+                    firstConnection.addStream(stream);
+                }, (secondConnection) => {
+                    secondConnection.onaddstream = (streamEvent) => {
+                        stream = streamEvent.stream;
+                        resolve();
+                    };
+                });
+            }).then(() => {
+                return waitFor(500);
+            }).then(() => {
+                return analyseAudio(stream, 500).then((results) => {
+                    assert_true(results.heardHum, "heard hum");
+                    assert_true(results.heardBip, "heard bip");
+                    assert_true(results.heardBop, "heard bop");
+                });
+            }).then(() => {
+                stream.getAudioTracks().forEach((track) => {
+                    track.enabled = false;
+                });
+                return waitFor(500);
+            }).then(() => {
+                return analyseAudio(stream, 500).then((results) => {
+                    assert_false(results.heardHum, "heard hum");
+                    assert_false(results.heardBip, "heard bip");
+                    assert_false(results.heardBop, "heard bop");
+                });
+            }).then(() => {
+                stream.getAudioTracks().forEach((track) => {
+                    track.enabled = true;
+                });
+                return waitFor(500);
+            }).then(() => {
+                return analyseAudio(stream, 500).then((results) => {
+                    assert_true(results.heardHum, "heard hum");
+                    assert_true(results.heardBip, "heard bip");
+                    assert_true(results.heardBop, "heard bop");
+                });
+            });
+        });
+    }, "Muting and unmuting an audio track");
+    </script>
+</body>
+</html>
diff --git a/LayoutTests/webrtc/routines.js b/LayoutTests/webrtc/routines.js
index 54ef33d..b841f11 100644 (file)
@@ -71,3 +71,62 @@ function onAddIceCandidateError(error)
 {
     assert_unreached();
 }
+
+function analyseAudio(stream, duration)
+{
+    return new Promise((resolve, reject) => {
+        var context = new webkitAudioContext();
+        var sourceNode = context.createMediaStreamSource(stream);
+        var analyser = context.createAnalyser();
+        var gain = context.createGain();
+
+        var results = { heardHum: false, heardBip: false, heardBop: false };
+
+        analyser.fftSize = 2048;
+        analyser.smoothingTimeConstant = 0;
+        analyser.minDecibels = -100;
+        analyser.maxDecibels = 0;
+        gain.gain.value = 0;
+
+        sourceNode.connect(analyser);
+        analyser.connect(gain);
+        gain.connect(context.destination);
+
+       function analyse() {
+           var freqDomain = new Uint8Array(analyser.frequencyBinCount);
+           analyser.getByteFrequencyData(freqDomain);
+
+           var hasFrequency = expectedFrequency => {
+                var bin = Math.floor(expectedFrequency * analyser.fftSize / context.sampleRate);
+                return bin < freqDomain.length && freqDomain[bin] >= 150;
+           };
+
+           if (!results.heardHum)
+                results.heardHum = hasFrequency(150);
+
+           if (!results.heardBip)
+               results.heardBip = hasFrequency(1500);
+
+           if (!results.heardBop)
+                results.heardBop = hasFrequency(500);
+
+            if (results.heardHum && results.heardBip && results.heardBop)
+                done();
+        };
+
+       function done() {
+            clearTimeout(timeout);
+            clearInterval(interval);
+            resolve(results);
+       }
+
+        var timeout = setTimeout(done, 3 * duration);
+        var interval = setInterval(analyse, duration / 30);
+        analyse();
+    });
+}
+
+function waitFor(duration)
+{
+    return new Promise((resolve) => setTimeout(resolve, duration));
+}
diff --git a/LayoutTests/webrtc/video-expected.txt b/LayoutTests/webrtc/video-expected.txt
index 0f27ada..ce31e19 100644 (file)
@@ -1,4 +1,4 @@
 
 
-FAIL Basic video exchange assert_true: expected true got false
+PASS Basic video exchange 
 
diff --git a/LayoutTests/webrtc/video-mute-expected.txt b/LayoutTests/webrtc/video-mute-expected.txt
new file mode 100644 (file)
index 0000000..2d261a5
--- /dev/null
@@ -0,0 +1,4 @@
+
+
+PASS Video muted/unmuted track 
+
diff --git a/LayoutTests/webrtc/video-mute.html b/LayoutTests/webrtc/video-mute.html
new file mode 100644 (file)
index 0000000..1bafaba
--- /dev/null
@@ -0,0 +1,69 @@
+<!doctype html>
+<html>
+    <head>
+        <meta charset="utf-8">
+        <title>Testing basic video exchange from offerer to receiver</title>
+        <script src="../resources/testharness.js"></script>
+        <script src="../resources/testharnessreport.js"></script>
+    </head>
+    <body>
+        <video id="video" autoplay=""></video>
+        <canvas id="canvas" width="640" height="480"></canvas>
+        <script src ="routines.js"></script>
+        <script>
+video = document.getElementById("video");
+canvas = document.getElementById("canvas");
+// FIXME: We should use tracks
+
+function isVideoBlack()
+{
+    canvas.width = video.videoWidth;
+    canvas.height = video.videoHeight;
+    canvas.getContext('2d').drawImage(video, 0, 0, canvas.width, canvas.height);
+
+    imageData = canvas.getContext('2d').getImageData(10, 325, 250, 1);
+    data = imageData.data;
+    for (var cptr = 0; cptr < canvas.width * canvas.height; ++cptr) {
+        if (data[4 * cptr] || data[4 * cptr + 1] || data[4 * cptr + 2])
+            return false;
+    }
+    return true;
+}
+
+var track;
+promise_test((test) => {
+    if (window.testRunner)
+        testRunner.setUserMediaPermission(true);
+
+    return navigator.mediaDevices.getUserMedia({ video: true}).then((stream) => {
+        return new Promise((resolve, reject) => {
+            if (window.internals)
+                internals.useMockRTCPeerConnectionFactory("TwoRealPeerConnections");
+
+            createConnections((firstConnection) => {
+                firstConnection.addStream(stream);
+            }, (secondConnection) => {
+                secondConnection.onaddstream = (streamEvent) => { resolve(streamEvent.stream); };
+            });
+            setTimeout(() => reject("Test timed out"), 5000);
+        });
+    }).then((stream) => {
+        video.srcObject = stream;
+        track = stream.getVideoTracks()[0];
+        return video.play();
+    }).then(() => {
+         assert_false(isVideoBlack());
+    }).then(() => {
+        track.enabled = false;
+        return waitFor(500);
+    }).then(() => {
+        assert_true(isVideoBlack());
+        track.enabled = true;
+        return waitFor(500);
+    }).then(() => {
+        assert_false(isVideoBlack());
+    });
+}, "Video muted/unmuted track");
+        </script>
+    </body>
+</html>
diff --git a/LayoutTests/webrtc/video.html b/LayoutTests/webrtc/video.html
index 8e390f8..5aad5dd 100644 (file)
         <canvas id="canvas" width="640" height="480"></canvas>
         <script src ="routines.js"></script>
         <script>
-if (window.internals)
-    internals.useMockRTCPeerConnectionFactory("TwoRealPeerConnections");
-
-if (window.testRunner)
-    testRunner.setUserMediaPermission(true);
-
 video = document.getElementById("video");
 canvas = document.getElementById("canvas");
 // FIXME: We should use tracks
 
 function testImage()
 {
-    try {
-        canvas.width = video.videoWidth;
-        canvas.height = video.videoHeight;
-        canvas.getContext('2d').drawImage(video, 0, 0, canvas.width, canvas.height);
-
-        imageData = canvas.getContext('2d').getImageData(10, 325, 250, 1);
-        data = imageData.data;
+    canvas.width = video.videoWidth;
+    canvas.height = video.videoHeight;
+    canvas.getContext('2d').drawImage(video, 0, 0, canvas.width, canvas.height);
 
-        var index = 20;
-        assert_true(data[index] < 100);
-        assert_true(data[index + 1] < 100);
-        assert_true(data[index + 2] < 100);
+    imageData = canvas.getContext('2d').getImageData(10, 325, 250, 1);
+    data = imageData.data;
 
-        index = 80;
-        assert_true(data[index] > 200);
-        assert_true(data[index + 1] > 200);
-        assert_true(data[index + 2] > 200);
+    var index = 20;
+    assert_true(data[index] < 100);
+    assert_true(data[index + 1] < 100);
+    assert_true(data[index + 2] < 100);
 
-        index += 80;
-        assert_true(data[index] > 200);
-        assert_true(data[index + 1] > 200);
-        assert_true(data[index + 2] < 100);
-
-        finishTest();
-    } catch(e) {
-        errorTest(e);
-    }
-}
+    index = 80;
+    assert_true(data[index] > 200);
+    assert_true(data[index + 1] > 200);
+    assert_true(data[index + 2] > 200);
 
-function testStream(stream)
-{
-    video.srcObject = stream;
-    // Video may play with black frames
-    video.onplay = setTimeout(() => {
-        testImage();
-    }, 1000);
+    index += 80;
+    assert_true(data[index] > 200);
+    assert_true(data[index + 1] > 200);
+    assert_true(data[index + 2] < 100);
 }
 
-var finishTest, errorTest;
 promise_test((test) => {
+    if (window.testRunner)
+        testRunner.setUserMediaPermission(true);
+
     return navigator.mediaDevices.getUserMedia({ video: true}).then((stream) => {
         return new Promise((resolve, reject) => {
-            finishTest = resolve;
-            errorTest = reject;
+            if (window.internals)
+                internals.useMockRTCPeerConnectionFactory("TwoRealPeerConnections");
+
             createConnections((firstConnection) => {
                 firstConnection.addStream(stream);
             }, (secondConnection) => {
-                secondConnection.onaddstream = (streamEvent) => { testStream(streamEvent.stream); };
+                secondConnection.onaddstream = (streamEvent) => { resolve(streamEvent.stream); };
             });
+            setTimeout(() => reject("Test timed out"), 5000);
         });
+    }).then((stream) => {
+        video.srcObject = stream;
+        return video.play();
+    }).then(() => {
+        testImage();
     });
 }, "Basic video exchange");
         </script>
diff --git a/Source/WebCore/ChangeLog b/Source/WebCore/ChangeLog
index 385faca..83077f7 100644 (file)
@@ -1,5 +1,22 @@
 2017-03-15  Youenn Fablet  <youenn@apple.com>
 
+        run-webkit-tests is always creating mock libwebrtc tracks
+        https://bugs.webkit.org/show_bug.cgi?id=169658
+
+        Reviewed by Alex Christensen.
+
+        Tests: webrtc/peer-connection-audio-mute.html
+               webrtc/video-mute.html
+
+        Create real libwebrtc audio and video tracks when the TwoRealPeerConnections mock factory is in use, instead of always returning mock tracks.
+
+        * testing/MockLibWebRTCPeerConnection.cpp:
+        (WebCore::MockLibWebRTCPeerConnectionFactory::CreateVideoTrack):
+        (WebCore::MockLibWebRTCPeerConnectionFactory::CreateAudioTrack):
+        * testing/MockLibWebRTCPeerConnection.h:
+
+2017-03-15  Youenn Fablet  <youenn@apple.com>
+
         Preventive clean-up: ensure RTCPeerConnection stays valid when calling postTask
         https://bugs.webkit.org/show_bug.cgi?id=169661
 
diff --git a/Source/WebCore/testing/MockLibWebRTCPeerConnection.cpp b/Source/WebCore/testing/MockLibWebRTCPeerConnection.cpp
index 76e696c..000ccb5 100644 (file)
@@ -189,6 +189,20 @@ rtc::scoped_refptr<webrtc::PeerConnectionInterface> MockLibWebRTCPeerConnectionF
     return new rtc::RefCountedObject<MockLibWebRTCPeerConnection>(*observer);
 }
 
+rtc::scoped_refptr<webrtc::VideoTrackInterface> MockLibWebRTCPeerConnectionFactory::CreateVideoTrack(const std::string& id, webrtc::VideoTrackSourceInterface* source)
+{
+    if (m_testCase == "TwoRealPeerConnections")
+        return realPeerConnectionFactory()->CreateVideoTrack(id, source);
+    return new rtc::RefCountedObject<MockLibWebRTCVideoTrack>(id, source);
+}
+
+rtc::scoped_refptr<webrtc::AudioTrackInterface> MockLibWebRTCPeerConnectionFactory::CreateAudioTrack(const std::string& id, webrtc::AudioSourceInterface* source)
+{
+    if (m_testCase == "TwoRealPeerConnections")
+        return realPeerConnectionFactory()->CreateAudioTrack(id, source);
+    return new rtc::RefCountedObject<MockLibWebRTCAudioTrack>(id, source);
+}
+
 rtc::scoped_refptr<webrtc::MediaStreamInterface> MockLibWebRTCPeerConnectionFactory::CreateLocalMediaStream(const std::string& label)
 {
     return new rtc::RefCountedObject<webrtc::MediaStream>(label);
diff --git a/Source/WebCore/testing/MockLibWebRTCPeerConnection.h b/Source/WebCore/testing/MockLibWebRTCPeerConnection.h
index ecca299..9f32069 100644 (file)
@@ -244,8 +244,9 @@ private:
     rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> CreateVideoSource(cricket::VideoCapturer*) final { return nullptr; }
     rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> CreateVideoSource(cricket::VideoCapturer*, const webrtc::MediaConstraintsInterface*) final { return nullptr; }
 
-    rtc::scoped_refptr<webrtc::VideoTrackInterface> CreateVideoTrack(const std::string& id, webrtc::VideoTrackSourceInterface* source) final { return new rtc::RefCountedObject<MockLibWebRTCVideoTrack>(id, source); }
-    rtc::scoped_refptr<webrtc::AudioTrackInterface> CreateAudioTrack(const std::string& id, webrtc::AudioSourceInterface* source) final { return new rtc::RefCountedObject<MockLibWebRTCAudioTrack>(id, source); }
+    rtc::scoped_refptr<webrtc::VideoTrackInterface> CreateVideoTrack(const std::string&, webrtc::VideoTrackSourceInterface*) final;
+    rtc::scoped_refptr<webrtc::AudioTrackInterface> CreateAudioTrack(const std::string&, webrtc::AudioSourceInterface*) final;
+
     bool StartAecDump(rtc::PlatformFile, int64_t) final { return false; }
     void StopAecDump() final { }