[iOS] Unset active media capture source when stopped capturing
authorcommit-queue@webkit.org <commit-queue@webkit.org@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
Fri, 12 May 2017 00:52:45 +0000 (00:52 +0000)
committercommit-queue@webkit.org <commit-queue@webkit.org@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
Fri, 12 May 2017 00:52:45 +0000 (00:52 +0000)
https://bugs.webkit.org/show_bug.cgi?id=171815
<rdar://problem/32117885>

Patch by Youenn Fablet <youenn@apple.com> on 2017-05-11
Reviewed by Eric Carlson.

Source/WebCore:

Test: platform/ios/mediastream/getUserMedia-single-capture.html

Introducing SingleSourceFactory template class to be used by capture factories for iOS.
This class ensures that only one source is active at a time.
Update all capture sources accordingly.
Ensure sources are no longer considered as active sources when being destroyed.
Add support for mock sources and introducing m_isProducingData for them as well.

Update WebRTC outgoing source classes to handle the case of a replaced track, resetting the enabled/mute
state according to the new source.

Update the way we handle timestamps for audio data. We now consider that we read/write as a flow.
This allows smooth audio track replacing.

* platform/mediastream/RealtimeMediaSource.h:
* platform/mediastream/mac/AVAudioCaptureSource.mm:
(WebCore::AVAudioCaptureSource::~AVAudioCaptureSource):
(WebCore::AVAudioCaptureSourceFactory::setActiveSource): Deleted.
* platform/mediastream/mac/AVVideoCaptureSource.mm:
(WebCore::AVVideoCaptureSource::~AVVideoCaptureSource):
(WebCore::AVVideoCaptureSourceFactory::setActiveSource): Deleted.
* platform/mediastream/mac/CoreAudioCaptureSource.cpp:
(WebCore::CoreAudioCaptureSource::~CoreAudioCaptureSource):
(WebCore::CoreAudioCaptureSource::stopProducingData):
(WebCore::CoreAudioCaptureSourceFactory::setActiveSource): Deleted.
* platform/mediastream/mac/RealtimeOutgoingAudioSource.cpp:
(WebCore::RealtimeOutgoingAudioSource::setSource):
(WebCore::RealtimeOutgoingAudioSource::audioSamplesAvailable):
(WebCore::RealtimeOutgoingAudioSource::pullAudioData):
* platform/mediastream/mac/RealtimeOutgoingAudioSource.h:
* platform/mediastream/mac/RealtimeOutgoingVideoSource.cpp:
(WebCore::RealtimeOutgoingVideoSource::setSource):
(WebCore::RealtimeOutgoingVideoSource::sourceMutedChanged):
(WebCore::RealtimeOutgoingVideoSource::sourceEnabledChanged):
* platform/mock/MockRealtimeAudioSource.cpp:
(WebCore::mockAudioCaptureSourceFactory):
(WebCore::MockRealtimeAudioSource::factory):
(WebCore::MockRealtimeAudioSource::~MockRealtimeAudioSource):
(WebCore::MockRealtimeAudioSource::startProducingData):
(WebCore::MockRealtimeAudioSource::stopProducingData):
* platform/mock/MockRealtimeAudioSource.h:
* platform/mock/MockRealtimeVideoSource.cpp:
(WebCore::mockVideoCaptureSourceFactory):
(WebCore::MockRealtimeVideoSource::factory):
(WebCore::MockRealtimeVideoSource::~MockRealtimeVideoSource):
(WebCore::MockRealtimeVideoSource::startProducingData):
(WebCore::MockRealtimeVideoSource::stopProducingData):
* platform/mock/MockRealtimeVideoSource.h:
(WebCore::MockRealtimeVideoSource::~MockRealtimeVideoSource): Deleted.

LayoutTests:

Improving the existing tests for better reliability and debuggability.
Updating tests to create fewer webkitAudioContext instances.

* platform/ios/mediastream/getUserMedia-single-capture-expected.txt: Added.
* platform/ios/mediastream/getUserMedia-single-capture.html: Added.
* webrtc/audio-peer-connection-webaudio.html:
* webrtc/audio-replace-track-expected.txt:
* webrtc/audio-replace-track.html:
* webrtc/peer-connection-audio-mute.html:
* webrtc/peer-connection-audio-mute2.html:
* webrtc/peer-connection-remote-audio-mute.html:
* webrtc/peer-connection-remote-audio-mute2.html:
* webrtc/routines.js:
* webrtc/video-replace-track-expected.txt:
* webrtc/video-replace-track.html:

git-svn-id: https://svn.webkit.org/repository/webkit/trunk@216712 268f45cc-cd09-0410-ab3c-d52691b4dbfc

25 files changed:
LayoutTests/ChangeLog
LayoutTests/platform/ios/mediastream/getUserMedia-single-capture-expected.txt [new file with mode: 0644]
LayoutTests/platform/ios/mediastream/getUserMedia-single-capture.html [new file with mode: 0644]
LayoutTests/webrtc/audio-peer-connection-webaudio.html
LayoutTests/webrtc/audio-replace-track-expected.txt
LayoutTests/webrtc/audio-replace-track.html
LayoutTests/webrtc/peer-connection-audio-mute.html
LayoutTests/webrtc/peer-connection-audio-mute2.html
LayoutTests/webrtc/peer-connection-remote-audio-mute.html
LayoutTests/webrtc/peer-connection-remote-audio-mute2.html
LayoutTests/webrtc/routines.js
LayoutTests/webrtc/video-replace-track-expected.txt
LayoutTests/webrtc/video-replace-track.html
Source/WebCore/ChangeLog
Source/WebCore/platform/mediastream/RealtimeMediaSource.h
Source/WebCore/platform/mediastream/mac/AVAudioCaptureSource.mm
Source/WebCore/platform/mediastream/mac/AVVideoCaptureSource.mm
Source/WebCore/platform/mediastream/mac/CoreAudioCaptureSource.cpp
Source/WebCore/platform/mediastream/mac/RealtimeOutgoingAudioSource.cpp
Source/WebCore/platform/mediastream/mac/RealtimeOutgoingAudioSource.h
Source/WebCore/platform/mediastream/mac/RealtimeOutgoingVideoSource.cpp
Source/WebCore/platform/mock/MockRealtimeAudioSource.cpp
Source/WebCore/platform/mock/MockRealtimeAudioSource.h
Source/WebCore/platform/mock/MockRealtimeVideoSource.cpp
Source/WebCore/platform/mock/MockRealtimeVideoSource.h

index bba005b..43bb88c 100644 (file)
@@ -1,3 +1,27 @@
+2017-05-11  Youenn Fablet  <youenn@apple.com>
+
+        [iOS] Unset active media capture source when stopped capturing
+        https://bugs.webkit.org/show_bug.cgi?id=171815
+        <rdar://problem/32117885>
+
+        Reviewed by Eric Carlson.
+
+        Improving the existing tests for better reliability and debuggability.
+        Updating tests to create fewer webkitAudioContext instances.
+
+        * platform/ios/mediastream/getUserMedia-single-capture-expected.txt: Added.
+        * platform/ios/mediastream/getUserMedia-single-capture.html: Added.
+        * webrtc/audio-peer-connection-webaudio.html:
+        * webrtc/audio-replace-track-expected.txt:
+        * webrtc/audio-replace-track.html:
+        * webrtc/peer-connection-audio-mute.html:
+        * webrtc/peer-connection-audio-mute2.html:
+        * webrtc/peer-connection-remote-audio-mute.html:
+        * webrtc/peer-connection-remote-audio-mute2.html:
+        * webrtc/routines.js:
+        * webrtc/video-replace-track-expected.txt:
+        * webrtc/video-replace-track.html:
+
 2017-05-11  Matt Lewis  <jlewis3@apple.com>
 
         Unreviewed, rolling out r216677.
diff --git a/LayoutTests/platform/ios/mediastream/getUserMedia-single-capture-expected.txt b/LayoutTests/platform/ios/mediastream/getUserMedia-single-capture-expected.txt
new file mode 100644 (file)
index 0000000..999ecfb
--- /dev/null
@@ -0,0 +1,3 @@
+
+PASS Testing successive getUserMedia calls 
+
diff --git a/LayoutTests/platform/ios/mediastream/getUserMedia-single-capture.html b/LayoutTests/platform/ios/mediastream/getUserMedia-single-capture.html
new file mode 100644 (file)
index 0000000..689f41d
--- /dev/null
@@ -0,0 +1,39 @@
+<!doctype html>
+<html>
+    <head>
+        <meta charset="utf-8">
+        <title>iOS specific constraints of one active capture source at a time</title>
+        <script src="../../../resources/testharness.js"></script>
+        <script src="../../../resources/testharnessreport.js"></script>
+    </head>
+    <body>
+        <script>
+promise_test((test) => {
+    if (window.testRunner)
+        testRunner.setUserMediaPermission(true);
+
+    var firstStream;
+    var audioTrack;
+    var videoTrack;
+    return navigator.mediaDevices.getUserMedia({ audio: true, video: true }).then((stream) => {
+        firstStream = stream;
+        audioTrack = firstStream.getAudioTracks()[0];
+        videoTrack = firstStream.getVideoTracks()[0];
+
+        assert_false(audioTrack.muted, "audio track is active");
+        assert_false(videoTrack.muted, "video track is active");
+
+        return navigator.mediaDevices.getUserMedia({ audio: true});
+    }).then((stream) => {
+        assert_true(audioTrack.muted, "audio track is muted");
+        assert_false(videoTrack.muted, "video track is active");
+
+        return navigator.mediaDevices.getUserMedia({ video: true});
+    }).then((stream) => {
+        assert_true(audioTrack.muted, "audio track is muted");
+        assert_true(videoTrack.muted, "video track is muted");
+    });
+}, "Testing successive getUserMedia calls");
+        </script>
+    </body>
+</html>
index 3d64c53..ce90305 100644 (file)
@@ -7,6 +7,7 @@
     <script src="../resources/testharnessreport.js"></script>
     <script src ="routines.js"></script>
     <script>
+    var context = new webkitAudioContext();
     promise_test((test) => {
         if (window.testRunner)
             testRunner.setUserMediaPermission(true);
                 setTimeout(() => reject("Test timed out"), 5000);
             });
         }).then((remoteStream) => {
-            return analyseAudio(remoteStream, 1000);
+            return analyseAudio(remoteStream, 1000, context);
         }).then((results) => {
             assert_true(results.heardHum, "heard hum");
             assert_true(results.heardBip, "heard bip");
             assert_true(results.heardBop, "heard bop");
+        }).then(() => {
+            return context.close();
         });
     }, "Basic audio playback through a peer connection");
     </script>
index 47b9b81..ee27233 100644 (file)
@@ -1,3 +1,6 @@
 
-PASS Replacing audio track from a peer connection 
+PASS Starting an audio connection 
+PASS Ensuring mock audio source is received on the remote side 
+PASS Using replaceTrack for audio 
+PASS Ensuring remote audio gets the replacing track data 
 
index 193fa5e..7640106 100644 (file)
@@ -6,30 +6,29 @@
     <script src="../resources/testharness.js"></script>
     <script src="../resources/testharnessreport.js"></script>
     <script src ="routines.js"></script>
+</head>
+<body>
     <script>
     async function doHumAnalysis(stream, expected)
     {
+        var context = new webkitAudioContext();
         for (var cptr = 0; cptr < 10; cptr++) {
-            var results = await analyseAudio(stream, 200);
+            var results = await analyseAudio(stream, 200, context);
             if (results.heardHum === expected)
                 return true;
         }
+        await context.close();
         return false;
     }
 
+    var sender;
+    var remoteStream;
+    var secondStream;
     promise_test((test) => {
         if (window.testRunner)
             testRunner.setUserMediaPermission(true);
 
-        var sender;
-        var firsStream;
-        var secondStream;
-        var remoteStream;
-        return navigator.mediaDevices.getUserMedia({ audio: { sampleRate: { exact: 48000 } } }).then((stream) => {
-            firstStream = stream;
-            return navigator.mediaDevices.getUserMedia({ audio: { sampleRate: { exact: 48000 } } });
-        }).then((stream) => {
-            secondStream = stream;
+        return navigator.mediaDevices.getUserMedia({ audio: { sampleRate: { exact: 48000 } } }).then((firstStream) => {
             return new Promise((resolve, reject) => {
                 createConnections((firstConnection) => {
                     sender = firstConnection.addTrack(firstStream.getAudioTracks()[0], firstStream);
             });
         }).then((stream) => {
             remoteStream = stream;
-            return doHumAnalysis(remoteStream, true)
-        }).then((result) => {
+         });
+    }, "Starting an audio connection");
+
+    promise_test(() => {
+        return doHumAnalysis(remoteStream, true).then((result) => {
             assert_true(result, "heard hum 1");
+        });
+    }, "Ensuring mock audio source is received on the remote side");
+
+    promise_test(() => {
+        return navigator.mediaDevices.getUserMedia({ audio: { sampleRate: { exact: 48000 } } }).then((stream) => {
+            secondStream = stream;
             return sender.replaceTrack(secondStream.getAudioTracks()[0], secondStream);
         }).then(() => {
             assert_true(sender.track === secondStream.getAudioTracks()[0]);
             return waitFor(500);
-        }).then((results) => {
-            return doHumAnalysis(remoteStream, true)
-        }).then((results) => {
+        });
+    }, "Using replaceTrack for audio");
+
+    promise_test(() => {
+        return doHumAnalysis(remoteStream, true).then((results) => {
             assert_true(results, "heard hum 2");
         });
-    }, "Replacing audio track from a peer connection");
+    }, "Ensuring remote audio gets the replacing track data");
     </script>
-</head>
-<body>
 </body>
 </html>
index 83ad1e4..199c591 100644 (file)
@@ -9,6 +9,7 @@
 <body>
     <script src ="routines.js"></script>
     <script>
+    var context = new webkitAudioContext();
     promise_test((test) => {
         if (window.testRunner)
             testRunner.setUserMediaPermission(true);
             }).then(() => {
                 return waitFor(500);
             }).then(() => {
-                return analyseAudio(remoteStream, 500).then((results) => {
+                return analyseAudio(remoteStream, 500, context).then((results) => {
                     assert_true(results.heardHum, "heard hum from remote enabled track");
                 });
             }).then(() => {
                 localTrack.enabled = false;
                 return waitFor(500);
             }).then(() => {
-                return analyseAudio(remoteStream, 500).then((results) => {
+                return analyseAudio(remoteStream, 500, context).then((results) => {
                     assert_false(results.heardHum, "not heard hum from remote disabled track");
                 });
+            }).then(() => {
+                return context.close();
             });
         });
     }, "Muting a local audio track and making sure the remote track is silent");
index d906096..78db59f 100644 (file)
@@ -9,6 +9,7 @@
 <body>
     <script src ="routines.js"></script>
     <script>
+    var context = new webkitAudioContext();
     promise_test((test) => {
         if (window.testRunner)
             testRunner.setUserMediaPermission(true);
             }).then(() => {
                 return waitFor(500);
             }).then(() => {
-                return analyseAudio(remoteStream, 500).then((results) => {
+                return analyseAudio(remoteStream, 500, context).then((results) => {
                     assert_true(results.heardHum, "heard hum from remote enabled track");
                 });
             }).then(() => {
                 localTrack.enabled = false;
                 return waitFor(500);
             }).then(() => {
-                return analyseAudio(remoteStream, 500).then((results) => {
+                return analyseAudio(remoteStream, 500, context).then((results) => {
                     assert_false(results.heardHum, "not heard hum from remote disabled track");
                 });
             }).then(() => {
                 localTrack.enabled = true;
                 return waitFor(500);
             }).then(() => {
-                return analyseAudio(remoteStream, 500).then((results) => {
+                return analyseAudio(remoteStream, 500, context).then((results) => {
                     assert_true(results.heardHum, "heard hum from remote reenabled track");
                 });
+            }).then(() => {
+                return context.close();
             });
         });
     }, "Muting and unmuting a local audio track");
index 2d342f4..59456c6 100644 (file)
@@ -9,6 +9,7 @@
 <body>
     <script src ="routines.js"></script>
     <script>
+    var context = new webkitAudioContext();
     promise_test((test) => {
         if (window.testRunner)
             testRunner.setUserMediaPermission(true);
                     };
                 });
             }).then(() => {
-                return analyseAudio(remoteStream, 200).then((results) => {
+                return analyseAudio(remoteStream, 200, context).then((results) => {
                     assert_true(results.heardHum, "heard hum from remote enabled track");
                 });
             }).then(() => {
                 remoteTrack.enabled = false;
                 return waitFor(100);
             }).then(() => {
-                return analyseAudio(remoteStream, 200).then((results) => {
+                return analyseAudio(remoteStream, 200, context).then((results) => {
                     assert_false(results.heardHum, "not heard hum from remote disabled track");
                 });
+            }).then(() => {
+                return context.close();
             });
         });
     }, "Muting an incoming audio track");
index 39247ce..518ec19 100644 (file)
@@ -9,6 +9,7 @@
 <body>
     <script src ="routines.js"></script>
     <script>
+    var context = new webkitAudioContext();
     promise_test((test) => {
         if (window.testRunner)
             testRunner.setUserMediaPermission(true);
                     };
                 });
             }).then(() => {
-                return analyseAudio(remoteStream, 500).then((results) => {
+                return analyseAudio(remoteStream, 500, context).then((results) => {
                     assert_true(results.heardHum, "heard hum from remote enabled track");
                 });
             }).then(() => {
                 remoteTrack.enabled = false;
                 return waitFor(100);
             }).then(() => {
-                return analyseAudio(remoteStream, 500).then((results) => {
+                return analyseAudio(remoteStream, 500, context).then((results) => {
                     assert_false(results.heardHum, "not heard hum from remote disabled track");
                 });
             }).then(() => {
                 remoteTrack.enabled = true;
                 return waitFor(100);
             }).then(() => {
-                return analyseAudio(remoteStream, 500).then((results) => {
+                return analyseAudio(remoteStream, 500, context).then((results) => {
                     assert_true(results.heardHum, "heard hum from remote reenabled track");
                 });
+            }).then(() => {
+                return context.close();
             });
         });
     }, "Muting and unmuting an incoming audio track");
index eee04a9..7e1a2ec 100644 (file)
@@ -72,11 +72,11 @@ function onAddIceCandidateError(error)
     assert_unreached();
 }
 
-function analyseAudio(stream, duration)
+function analyseAudio(stream, duration, context)
 {
     return new Promise((resolve, reject) => {
-        var context = new webkitAudioContext();
         var sourceNode = context.createMediaStreamSource(stream);
+
         var analyser = context.createAnalyser();
         var gain = context.createGain();
 
index 8359913..bbea763 100644 (file)
@@ -1,5 +1,10 @@
 
 PASS Switching from front to back camera 
-FAIL Switching from front to back camera, with lower resolution assert_true: backStream should be smaller expected true got false
-FAIL Switching from front to back camera, with higher resolution assert_true: front stream should be smaller expected true got false
-
+PASS Switching from front to back camera, with lower resolution 
+PASS Switching from front to back camera, with higher resolution 
+PASS testFrontCameraImage test1 
+PASS testBackCameraImage test1 
+PASS testFrontCameraImage test2 
+PASS testBackCameraImage test2 
+PASS testFrontCameraImage test3 
index 22085eb..1272471 100644 (file)
@@ -25,26 +25,30 @@ function grabImagePixels()
     return imageData.data;
  }
 
-function testFrontCameraImage()
+function testFrontCameraImage(testName)
 {
-    data = grabImagePixels();
+    test(() => {
+        data = grabImagePixels();
 
-    assert_true(data[0] < 20);
-    assert_true(data[1] < 20);
-    assert_true(data[2] < 20);
+        assert_true(data[0] < 20, "1");
+        assert_true(data[1] < 20, "2");
+        assert_true(data[2] < 20, "3");
+    }, "testFrontCameraImage " + testName);
 }
 
-function testBackCameraImage()
+function testBackCameraImage(testName)
 {
-    data = grabImagePixels();
+    test(() => {
+        data = grabImagePixels();
 
-    assert_true(data[0] > 100);
-    assert_true(data[1] > 100);
-    assert_true(data[2] > 100);
+        assert_true(data[0] > 100, "1");
+        assert_true(data[1] > 100, "2");
+        assert_true(data[2] > 100, "3");
 
-    assert_true(data[0] < 200);
-    assert_true(data[1] < 200);
-    assert_true(data[2] < 200);
+        assert_true(data[0] < 200, "4");
+        assert_true(data[1] < 200, "5");
+        assert_true(data[2] < 200, "6");
+    }, "testBackCameraImage " + testName);
 }
 
 promise_test((test) => {
@@ -56,14 +60,10 @@ promise_test((test) => {
     var backStream;
     return navigator.mediaDevices.getUserMedia({ video: { facingMode: { exact: ["user"] } } }).then((stream) => {
         frontStream = stream;
-        return navigator.mediaDevices.getUserMedia({ video: { facingMode: { exact: ["environment"] } } });
-    }).then((stream) => {
-        backStream = stream;
-    }).then(() => {
         return new Promise((resolve, reject) => {
             createConnections((firstConnection) => {
                 sender = firstConnection.addTrack(frontStream.getVideoTracks()[0], frontStream);
-            }, (secondConnection) => {
+           }, (secondConnection) => {
                 secondConnection.ontrack = (trackEvent) => {
                     resolve(trackEvent.streams[0]);
                 };
@@ -74,8 +74,10 @@ promise_test((test) => {
         video.srcObject = remoteStream;
         return video.play();
     }).then(() => {
-        testFrontCameraImage();
-    }).then(() => {
+        testFrontCameraImage("test1");
+        return navigator.mediaDevices.getUserMedia({ video: { facingMode: { exact: ["environment"] } } });
+    }).then((stream) => {
+        backStream = stream;
         var currentTrack = sender.track;
         promise = sender.replaceTrack(backStream.getVideoTracks()[0]);
         assert_true(currentTrack === sender.track);
@@ -84,7 +86,7 @@ promise_test((test) => {
         assert_true(sender.track === backStream.getVideoTracks()[0]);
         return waitFor(500);
     }).then(() => {
-        testBackCameraImage();
+        testBackCameraImage("test1");
     });
 }, "Switching from front to back camera");
 
@@ -96,14 +98,9 @@ promise_test((test) => {
     var frontStream;
     var backStream;
 
-    return navigator.mediaDevices.getUserMedia({ video: { height: { min: 400 }, facingMode: { exact: ["user"] } } }).then((stream) => {
+    return navigator.mediaDevices.getUserMedia({ video: { width: 640, height: 480, facingMode: { exact: ["user"] } } }).then((stream) => {
         frontStream = stream;
-        return navigator.mediaDevices.getUserMedia({ video: { height: { max: 400 }, facingMode: { exact: ["environment"] } } });
-    }).then((stream) => {
-        backStream = stream;
-        assert_true(frontStream.getVideoTracks()[0].getSettings().height >= 400, "frontStream should be bigger");
-        assert_true(backStream.getVideoTracks()[0].getSettings().height < 400, "backStream should be smaller");
-    }).then(() => {
+        assert_true(frontStream.getVideoTracks()[0].getSettings().height === 480, "frontStream should be big");
         return new Promise((resolve, reject) => {
             createConnections((firstConnection) => {
                 sender = firstConnection.addTrack(frontStream.getVideoTracks()[0], frontStream);
@@ -118,13 +115,16 @@ promise_test((test) => {
         video.srcObject = remoteStream;
         return video.play();
     }).then(() => {
-        testFrontCameraImage();
-    }).then(() => {
+        testFrontCameraImage("test2");
+        return navigator.mediaDevices.getUserMedia({ video: { width: 320, height: 240, facingMode: { exact: ["environment"] } } });
+    }).then((stream) => {
+        backStream = stream;
+        assert_true(backStream.getVideoTracks()[0].getSettings().height === 240, "backStream should be small");
         return sender.replaceTrack(backStream.getVideoTracks()[0]);
     }).then(() => {
         return waitFor(500);
     }).then(() => {
-        testBackCameraImage();
+        testBackCameraImage("test2");
     });
 }, "Switching from front to back camera, with lower resolution");
 
@@ -136,13 +136,9 @@ promise_test((test) => {
     var frontStream;
     var backStream;
 
-    return navigator.mediaDevices.getUserMedia({ video: { height: { max: 400 }, facingMode: { exact: ["user"] } } }).then((stream) => {
+    return navigator.mediaDevices.getUserMedia({ video: { width: 320, height: 240, facingMode: { exact: ["user"] } } }).then((stream) => {
         frontStream = stream;
-        return navigator.mediaDevices.getUserMedia({ video: { height: { min: 400 }, facingMode: { exact: ["environment"] } } });
-    }).then((stream) => {
-        backStream = stream;
-        assert_true(frontStream.getVideoTracks()[0].getSettings().height < 400, "front stream should be smaller");
-        assert_true(backStream.getVideoTracks()[0].getSettings().height >= 400, "back stream should be bigger");
+        assert_true(frontStream.getVideoTracks()[0].getSettings().height === 240, "front stream should be small");
     }).then(() => {
         return new Promise((resolve, reject) => {
             createConnections((firstConnection) => {
@@ -158,13 +154,12 @@ promise_test((test) => {
         video.srcObject = remoteStream;
         return video.play();
     }).then(() => {
-        testFrontCameraImage();
-    }).then(() => {
+        testFrontCameraImage("test3");
+        return navigator.mediaDevices.getUserMedia({ video: { width: 640, height: 480 , facingMode: { exact: ["environment"] } } });
+    }).then((stream) => {
+        backStream = stream;
+        assert_true(backStream.getVideoTracks()[0].getSettings().height === 480, "back stream should be big");
         return promise_rejects(test, "InvalidModificationError", sender.replaceTrack(backStream.getVideoTracks()[0]));
-    }).then(() => {
-        return waitFor(500);
-    }).then(() => {
-        testBackCameraImage();
     });
 }, "Switching from front to back camera, with higher resolution");
         </script>
index 7fbd3ae..ad960a1 100644 (file)
@@ -1,3 +1,61 @@
+2017-05-11  Youenn Fablet  <youenn@apple.com>
+
+        [iOS] Unset active media capture source when stopped capturing
+        https://bugs.webkit.org/show_bug.cgi?id=171815
+        <rdar://problem/32117885>
+
+        Reviewed by Eric Carlson.
+
+        Test: platform/ios/mediastream/getUserMedia-single-capture.html
+
+        Introducing SingleSourceFactory template class to be used by capture factories for iOS.
+        This class ensures that only one source is active at a time.
+        Update all capture sources accordingly.
+        Ensure sources are no longer considered as active sources when being destroyed.
+        Add support for mock sources and introducing m_isProducingData for them as well.
+
+        Update WebRTC outgoing source classes to handle the case of a replaced track, resetting the enabled/mute
+        state according to the new source.
+
+        Update the way we handle timestamps for audio data. We now consider that we read/write as a flow.
+        This allows smooth audio track replacing.
+
+        * platform/mediastream/RealtimeMediaSource.h:
+        * platform/mediastream/mac/AVAudioCaptureSource.mm:
+        (WebCore::AVAudioCaptureSource::~AVAudioCaptureSource):
+        (WebCore::AVAudioCaptureSourceFactory::setActiveSource): Deleted.
+        * platform/mediastream/mac/AVVideoCaptureSource.mm:
+        (WebCore::AVVideoCaptureSource::~AVVideoCaptureSource):
+        (WebCore::AVVideoCaptureSourceFactory::setActiveSource): Deleted.
+        * platform/mediastream/mac/CoreAudioCaptureSource.cpp:
+        (WebCore::CoreAudioCaptureSource::~CoreAudioCaptureSource):
+        (WebCore::CoreAudioCaptureSource::stopProducingData):
+        (WebCore::CoreAudioCaptureSourceFactory::setActiveSource): Deleted.
+        * platform/mediastream/mac/RealtimeOutgoingAudioSource.cpp:
+        (WebCore::RealtimeOutgoingAudioSource::setSource):
+        (WebCore::RealtimeOutgoingAudioSource::audioSamplesAvailable):
+        (WebCore::RealtimeOutgoingAudioSource::pullAudioData):
+        * platform/mediastream/mac/RealtimeOutgoingAudioSource.h:
+        * platform/mediastream/mac/RealtimeOutgoingVideoSource.cpp:
+        (WebCore::RealtimeOutgoingVideoSource::setSource):
+        (WebCore::RealtimeOutgoingVideoSource::sourceMutedChanged):
+        (WebCore::RealtimeOutgoingVideoSource::sourceEnabledChanged):
+        * platform/mock/MockRealtimeAudioSource.cpp:
+        (WebCore::mockAudioCaptureSourceFactory):
+        (WebCore::MockRealtimeAudioSource::factory):
+        (WebCore::MockRealtimeAudioSource::~MockRealtimeAudioSource):
+        (WebCore::MockRealtimeAudioSource::startProducingData):
+        (WebCore::MockRealtimeAudioSource::stopProducingData):
+        * platform/mock/MockRealtimeAudioSource.h:
+        * platform/mock/MockRealtimeVideoSource.cpp:
+        (WebCore::mockVideoCaptureSourceFactory):
+        (WebCore::MockRealtimeVideoSource::factory):
+        (WebCore::MockRealtimeVideoSource::~MockRealtimeVideoSource):
+        (WebCore::MockRealtimeVideoSource::startProducingData):
+        (WebCore::MockRealtimeVideoSource::stopProducingData):
+        * platform/mock/MockRealtimeVideoSource.h:
+        (WebCore::MockRealtimeVideoSource::~MockRealtimeVideoSource): Deleted.
+
 2017-05-11  Timothy Horton  <timothy_horton@apple.com>
 
         Fix the iOS build
index 15a8d8a..0367739 100644 (file)
@@ -77,7 +77,7 @@ public:
 
         // Observer state queries.
         virtual bool preventSourceFromStopping() { return false; }
-        
+
         // Called on the main thread.
         virtual void videoSampleAvailable(MediaSample&) { }
 
@@ -85,6 +85,25 @@ public:
         virtual void audioSamplesAvailable(const MediaTime&, const PlatformAudioData&, const AudioStreamDescription&, size_t /*numberOfFrames*/) { }
     };
 
+    template<typename Source> class SingleSourceFactory {
+    public:
+        void setActiveSource(Source& source)
+        {
+            if (m_activeSource && m_activeSource->isProducingData())
+                m_activeSource->setMuted(true);
+            m_activeSource = &source;
+        }
+
+        void unsetActiveSource(Source& source)
+        {
+            if (m_activeSource == &source)
+                m_activeSource = nullptr;
+        }
+
+    private:
+        RealtimeMediaSource* m_activeSource { nullptr };
+    };
+
     class AudioCaptureFactory {
     public:
         virtual ~AudioCaptureFactory() = default;
index 72ab9ff..aeb24fd 100644 (file)
@@ -77,24 +77,16 @@ SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
 
 namespace WebCore {
 
-class AVAudioCaptureSourceFactory : public RealtimeMediaSource::AudioCaptureFactory {
+class AVAudioCaptureSourceFactory : public RealtimeMediaSource::AudioCaptureFactory
+#if PLATFORM(IOS)
+    , public RealtimeMediaSource::SingleSourceFactory<AVAudioCaptureSource>
+#endif
+{
 public:
     CaptureSourceOrError createAudioCaptureSource(const String& deviceID, const MediaConstraints* constraints) final {
         AVCaptureDeviceTypedef *device = [getAVCaptureDeviceClass() deviceWithUniqueID:deviceID];
         return device ? AVAudioCaptureSource::create(device, emptyString(), constraints) : CaptureSourceOrError();
     }
-
-#if PLATFORM(IOS)
-    void setActiveSource(AVAudioCaptureSource& source)
-    {
-        if (m_activeSource && m_activeSource->isProducingData())
-            m_activeSource->setMuted(true);
-        m_activeSource = &source;
-    }
-
-private:
-    AVAudioCaptureSource* m_activeSource { nullptr };
-#endif
 };
 
 CaptureSourceOrError AVAudioCaptureSource::create(AVCaptureDeviceTypedef* device, const AtomicString& id, const MediaConstraints* constraints)
@@ -127,6 +119,9 @@ AVAudioCaptureSource::AVAudioCaptureSource(AVCaptureDeviceTypedef* device, const
 
 AVAudioCaptureSource::~AVAudioCaptureSource()
 {
+#if PLATFORM(IOS)
+    avAudioCaptureSourceFactory().unsetActiveSource(*this);
+#endif
     shutdownCaptureSession();
 }
 
index ff5c348..10600da 100644 (file)
@@ -110,7 +110,11 @@ const OSType videoCaptureFormat = kCVPixelFormatType_420YpCbCr8Planar;
 const OSType videoCaptureFormat = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
 #endif
 
-class AVVideoCaptureSourceFactory : public RealtimeMediaSource::VideoCaptureFactory {
+class AVVideoCaptureSourceFactory : public RealtimeMediaSource::VideoCaptureFactory
+#if PLATFORM(IOS)
+    , public RealtimeMediaSource::SingleSourceFactory<AVVideoCaptureSource>
+#endif
+{
 public:
     CaptureSourceOrError createVideoCaptureSource(const String& deviceID, const MediaConstraints* constraints) final {
         AVCaptureDeviceTypedef *device = [getAVCaptureDeviceClass() deviceWithUniqueID:deviceID];
@@ -118,18 +122,6 @@ public:
             return { };
         return AVVideoCaptureSource::create(device, emptyString(), constraints);
     }
-
-#if PLATFORM(IOS)
-    void setActiveSource(AVVideoCaptureSource& source)
-    {
-        if (m_activeSource && m_activeSource->isProducingData())
-            m_activeSource->setMuted(true);
-        m_activeSource = &source;
-    }
-
-private:
-    AVVideoCaptureSource* m_activeSource { nullptr };
-#endif
 };
 
 CaptureSourceOrError AVVideoCaptureSource::create(AVCaptureDeviceTypedef* device, const AtomicString& id, const MediaConstraints* constraints)
@@ -162,6 +154,9 @@ AVVideoCaptureSource::AVVideoCaptureSource(AVCaptureDeviceTypedef* device, const
 
 AVVideoCaptureSource::~AVVideoCaptureSource()
 {
+#if PLATFORM(IOS)
+    avVideoCaptureSourceFactory().unsetActiveSource(*this);
+#endif
 }
 
 static void updateSizeMinMax(int& min, int& max, int value)
index 222d363..8af81fb 100644 (file)
 
 namespace WebCore {
 
-class CoreAudioCaptureSourceFactory : public RealtimeMediaSource::AudioCaptureFactory {
+class CoreAudioCaptureSourceFactory : public RealtimeMediaSource::AudioCaptureFactory
+#if PLATFORM(IOS)
+    , public RealtimeMediaSource::SingleSourceFactory<CoreAudioCaptureSource>
+#endif
+{
 public:
     CaptureSourceOrError createAudioCaptureSource(const String& deviceID, const MediaConstraints* constraints) final {
         return CoreAudioCaptureSource::create(deviceID, constraints);
     }
-
-#if PLATFORM(IOS)
-    void setActiveSource(CoreAudioCaptureSource& source)
-    {
-        if (m_activeSource && m_activeSource->isProducingData())
-            m_activeSource->setMuted(true);
-        m_activeSource = &source;
-    }
-
-private:
-    CoreAudioCaptureSource* m_activeSource { nullptr };
-#endif
 };
 
 static CoreAudioCaptureSourceFactory& coreAudioCaptureSourceFactory()
@@ -669,6 +661,10 @@ CoreAudioCaptureSource::CoreAudioCaptureSource(const String& deviceID, const Str
 
 CoreAudioCaptureSource::~CoreAudioCaptureSource()
 {
+#if PLATFORM(IOS)
+    coreAudioCaptureSourceFactory().unsetActiveSource(*this);
+#endif
+
     CoreAudioSharedUnit::singleton().removeClient(*this);
 }
 
@@ -707,7 +703,7 @@ void CoreAudioCaptureSource::stopProducingData()
 {
     if (!m_isProducingData)
         return;
-    
+
     CoreAudioSharedUnit::singleton().stopProducingData();
     m_isProducingData = false;
     m_muted = true;
index 87d31e5..fa89fe8 100644 (file)
@@ -58,6 +58,10 @@ bool RealtimeOutgoingAudioSource::setSource(Ref<RealtimeMediaSource>&& newSource
     m_audioSource->removeObserver(*this);
     m_audioSource = WTFMove(newSource);
     m_audioSource->addObserver(*this);
+
+    m_muted = m_audioSource->muted();
+    m_enabled = m_audioSource->enabled();
+    m_sampleConverter->setMuted(m_muted || !m_enabled);
     return true;
 }
 
@@ -78,7 +82,7 @@ void RealtimeOutgoingAudioSource::sourceEnabledChanged()
     m_sampleConverter->setMuted(m_muted || !m_enabled);
 }
 
-void RealtimeOutgoingAudioSource::audioSamplesAvailable(const MediaTime& time, const PlatformAudioData& audioData, const AudioStreamDescription& streamDescription, size_t sampleCount)
+void RealtimeOutgoingAudioSource::audioSamplesAvailable(const MediaTime&, const PlatformAudioData& audioData, const AudioStreamDescription& streamDescription, size_t sampleCount)
 {
     if (m_inputStreamDescription != streamDescription) {
         m_inputStreamDescription = toCAAudioStreamDescription(streamDescription);
@@ -89,7 +93,9 @@ void RealtimeOutgoingAudioSource::audioSamplesAvailable(const MediaTime& time, c
         status = m_sampleConverter->setOutputFormat(m_outputStreamDescription.streamDescription());
         ASSERT(!status);
     }
-    m_sampleConverter->pushSamples(time, audioData, sampleCount);
+
+    m_sampleConverter->pushSamples(MediaTime(m_writeCount, LibWebRTCAudioFormat::sampleRate), audioData, sampleCount);
+    m_writeCount += sampleCount;
 
     LibWebRTCProvider::callOnWebRTCSignalingThread([protectedThis = makeRef(*this)] {
         protectedThis->pullAudioData();
@@ -109,8 +115,8 @@ void RealtimeOutgoingAudioSource::pullAudioData()
     bufferList.mBuffers[0].mDataByteSize = bufferSize;
     bufferList.mBuffers[0].mData = m_audioBuffer.data();
 
-    m_sampleConverter->pullAvalaibleSamplesAsChunks(bufferList, chunkSampleCount, m_startFrame, [this, chunkSampleCount] {
-        m_startFrame += chunkSampleCount;
+    m_sampleConverter->pullAvalaibleSamplesAsChunks(bufferList, chunkSampleCount, m_readCount, [this, chunkSampleCount] {
+        m_readCount += chunkSampleCount;
         for (auto sink : m_sinks)
             sink->OnData(m_audioBuffer.data(), LibWebRTCAudioFormat::sampleSize, m_outputStreamDescription.sampleRate(), m_outputStreamDescription.numberOfChannels(), chunkSampleCount);
     });
index 92add2e..8dacd67 100644 (file)
@@ -80,7 +80,8 @@ private:
     CAAudioStreamDescription m_outputStreamDescription;
 
     Vector<uint8_t> m_audioBuffer;
-    uint64_t m_startFrame { 0 };
+    uint64_t m_readCount { 0 };
+    uint64_t m_writeCount { 0 };
     bool m_muted { false };
     bool m_enabled { true };
 };
index 6967e79..2d18709 100644 (file)
@@ -62,7 +62,11 @@ bool RealtimeOutgoingVideoSource::setSource(Ref<RealtimeMediaSource>&& newSource
     m_videoSource->removeObserver(*this);
     m_videoSource = WTFMove(newSource);
     m_videoSource->addObserver(*this);
+
     setSizeFromSource();
+    m_muted = m_videoSource->muted();
+    m_enabled = m_videoSource->enabled();
+
     return true;
 }
 
@@ -76,6 +80,7 @@ void RealtimeOutgoingVideoSource::stop()
 void RealtimeOutgoingVideoSource::sourceMutedChanged()
 {
     ASSERT(m_muted != m_videoSource->muted());
+
     m_muted = m_videoSource->muted();
 
     if (m_muted && m_sinks.size() && m_enabled)
@@ -85,6 +90,7 @@ void RealtimeOutgoingVideoSource::sourceMutedChanged()
 void RealtimeOutgoingVideoSource::sourceEnabledChanged()
 {
     ASSERT(m_enabled != m_videoSource->enabled());
+
     m_enabled = m_videoSource->enabled();
 
     if (!m_enabled && m_sinks.size() && !m_muted)
index e014852..581be29 100644 (file)
 
 namespace WebCore {
 
-class MockRealtimeAudioSourceFactory : public RealtimeMediaSource::AudioCaptureFactory {
+class MockRealtimeAudioSourceFactory : public RealtimeMediaSource::AudioCaptureFactory
+#if PLATFORM(IOS)
+    , public RealtimeMediaSource::SingleSourceFactory<MockRealtimeAudioSource>
+#endif
+{
 public:
     CaptureSourceOrError createAudioCaptureSource(const String& deviceID, const MediaConstraints* constraints) final {
         for (auto& device : MockRealtimeMediaSource::audioDevices()) {
@@ -70,18 +74,30 @@ RefPtr<MockRealtimeAudioSource> MockRealtimeAudioSource::createMuted(const Strin
 }
 #endif
 
-RealtimeMediaSource::AudioCaptureFactory& MockRealtimeAudioSource::factory()
+static MockRealtimeAudioSourceFactory& mockAudioCaptureSourceFactory()
 {
     static NeverDestroyed<MockRealtimeAudioSourceFactory> factory;
     return factory.get();
 }
 
+RealtimeMediaSource::AudioCaptureFactory& MockRealtimeAudioSource::factory()
+{
+    return mockAudioCaptureSourceFactory();
+}
+
 MockRealtimeAudioSource::MockRealtimeAudioSource(const String& name)
     : MockRealtimeMediaSource(createCanonicalUUIDString(), RealtimeMediaSource::Type::Audio, name)
     , m_timer(RunLoop::current(), this, &MockRealtimeAudioSource::tick)
 {
 }
 
+MockRealtimeAudioSource::~MockRealtimeAudioSource()
+{
+#if PLATFORM(IOS)
+    mockAudioCaptureSourceFactory().unsetActiveSource(*this);
+#endif
+}
+
 void MockRealtimeAudioSource::updateSettings(RealtimeMediaSourceSettings& settings)
 {
     settings.setVolume(volume());
@@ -105,6 +121,15 @@ void MockRealtimeAudioSource::initializeSupportedConstraints(RealtimeMediaSource
 
 void MockRealtimeAudioSource::startProducingData()
 {
+    if (m_isProducingData)
+        return;
+
+    m_isProducingData = true;
+
+#if PLATFORM(IOS)
+    mockAudioCaptureSourceFactory().setActiveSource(*this);
+#endif
+
     if (!sampleRate())
         setSampleRate(!deviceIndex() ? 44100 : 48000);
 
@@ -116,6 +141,11 @@ void MockRealtimeAudioSource::startProducingData()
 
 void MockRealtimeAudioSource::stopProducingData()
 {
+    if (!m_isProducingData)
+        return;
+
+    m_isProducingData = false;
+
     MockRealtimeMediaSource::stopProducingData();
     m_timer.stop();
     m_elapsedTime += monotonicallyIncreasingTime() - m_startTime;
index 364522b..67ee59e 100644 (file)
@@ -47,13 +47,14 @@ public:
 
     static AudioCaptureFactory& factory();
 
-    virtual ~MockRealtimeAudioSource() = default;
+    virtual ~MockRealtimeAudioSource();
 
 protected:
     MockRealtimeAudioSource(const String& name = ASCIILiteral("Mock audio device"));
 
     void startProducingData() final;
     void stopProducingData() final;
+    bool isProducingData() const final { return m_isProducingData; }
 
     virtual void render(double) { }
 
@@ -79,6 +80,7 @@ private:
     double m_startTime { NAN };
     double m_lastRenderTime { NAN };
     double m_elapsedTime { 0 };
+    bool m_isProducingData { false };
 };
 
 } // namespace WebCore
index bd28746..0f17919 100644 (file)
 
 namespace WebCore {
 
-class MockRealtimeVideoSourceFactory : public RealtimeMediaSource::VideoCaptureFactory {
+class MockRealtimeVideoSourceFactory : public RealtimeMediaSource::VideoCaptureFactory
+#if PLATFORM(IOS)
+    , public RealtimeMediaSource::SingleSourceFactory<MockRealtimeVideoSource>
+#endif
+{
 public:
     CaptureSourceOrError createVideoCaptureSource(const String& deviceID, const MediaConstraints* constraints) final {
         for (auto& device : MockRealtimeMediaSource::videoDevices()) {
@@ -77,12 +81,17 @@ RefPtr<MockRealtimeVideoSource> MockRealtimeVideoSource::createMuted(const Strin
 }
 #endif
 
-RealtimeMediaSource::VideoCaptureFactory& MockRealtimeVideoSource::factory()
+static MockRealtimeVideoSourceFactory& mockVideoCaptureSourceFactory()
 {
     static NeverDestroyed<MockRealtimeVideoSourceFactory> factory;
     return factory.get();
 }
 
+RealtimeMediaSource::VideoCaptureFactory& MockRealtimeVideoSource::factory()
+{
+    return mockVideoCaptureSourceFactory();
+}
+
 MockRealtimeVideoSource::MockRealtimeVideoSource(const String& name)
     : MockRealtimeMediaSource(createCanonicalUUIDString(), RealtimeMediaSource::Type::Video, name)
     , m_timer(RunLoop::current(), this, &MockRealtimeVideoSource::generateFrame)
@@ -94,8 +103,24 @@ MockRealtimeVideoSource::MockRealtimeVideoSource(const String& name)
     m_dashWidths.uncheckedAppend(6);
 }
 
+MockRealtimeVideoSource::~MockRealtimeVideoSource()
+{
+#if PLATFORM(IOS)
+    mockVideoCaptureSourceFactory().unsetActiveSource(*this);
+#endif
+}
+
 void MockRealtimeVideoSource::startProducingData()
 {
+    if (m_isProducingData)
+        return;
+
+    m_isProducingData = true;
+
+#if PLATFORM(IOS)
+    mockVideoCaptureSourceFactory().setActiveSource(*this);
+#endif
+
     MockRealtimeMediaSource::startProducingData();
     if (size().isEmpty()) {
         setWidth(640);
@@ -108,6 +133,11 @@ void MockRealtimeVideoSource::startProducingData()
 
 void MockRealtimeVideoSource::stopProducingData()
 {
+    if (!m_isProducingData)
+        return;
+
+    m_isProducingData = false;
+
     MockRealtimeMediaSource::stopProducingData();
     m_timer.stop();
     m_elapsedTime += monotonicallyIncreasingTime() - m_startTime;
index aa60ec7..c439add 100644 (file)
@@ -51,7 +51,7 @@ public:
 
     static VideoCaptureFactory& factory();
 
-    virtual ~MockRealtimeVideoSource() { }
+    virtual ~MockRealtimeVideoSource();
 
 protected:
     MockRealtimeVideoSource(const String&);
@@ -69,6 +69,7 @@ private:
 
     void startProducingData() final;
     void stopProducingData() final;
+    bool isProducingData() const final { return m_isProducingData; }
 
     void drawAnimation(GraphicsContext&);
     void drawText(GraphicsContext&);
@@ -102,6 +103,7 @@ private:
     unsigned m_frameNumber { 0 };
 
     RunLoop::Timer<MockRealtimeVideoSource> m_timer;
+    bool m_isProducingData { false };
 };
 
 } // namespace WebCore