[MediaStream] A stream's first video frame should be rendered
LayoutTests/fast/mediastream/MediaStream-video-element-displays-buffer.html
<!DOCTYPE html>
<html>
<head>
    <script src="../../resources/js-test-pre.js"></script>
</head>
<body onload="start()">
<p id="description"></p>
<div id="console"></div>
<video controls width="680" height="360"></video>
<video controls width="680" height="360"></video>
<canvas width="680" height="360"></canvas>
<script>
    let mediaStream;
    let videos;
    let buffer;
    // canvas and context stay global because the evalAndLog()/shouldBe() strings reference them.
    let canvas;
    let context;
    let currentTest = 0;

    // Each helper checks one [r, g, b, a] pixel from ImageData.data.
    function isPixelTransparent(pixel)
    {
        return pixel[0] === 0 && pixel[1] === 0 && pixel[2] === 0 && pixel[3] === 0;
    }

    function isPixelBlack(pixel)
    {
        return pixel[0] === 0 && pixel[1] === 0 && pixel[2] === 0 && pixel[3] === 255;
    }

    function isPixelGray(pixel)
    {
        return pixel[0] === 128 && pixel[1] === 128 && pixel[2] === 128 && pixel[3] === 255;
    }

    function verifyFramesBeingDisplayed()
    {
        videos[currentTest].removeEventListener('playing', verifyFramesBeingDisplayed, false);

        canvas = document.querySelector('canvas');
        context = canvas.getContext('2d');

        // Sample a point that the drawn frame will cover: it should be transparent
        // on the cleared canvas, and neither transparent nor black once the video
        // frame has been painted.
        context.clearRect(0, 0, canvas.width, canvas.height);
        let x = canvas.width * 0.035;
        let y = canvas.height * 0.6 + 2 + x;
        buffer = context.getImageData(x, y, 1, 1).data;
        shouldBeTrue('isPixelTransparent(buffer)');

        evalAndLog(`context.drawImage(videos[${currentTest}], 0, 0, ${canvas.width}, ${canvas.height})`);
        buffer = context.getImageData(x, y, 1, 1).data;
        shouldBeFalse('isPixelTransparent(buffer)');
        shouldBeFalse('isPixelBlack(buffer)');

        // Near the top-left corner the test expects black from the default mock
        // camera (first pass) and gray from the "environment" camera (second pass).
        x = canvas.width * 0.05;
        y = canvas.height * 0.05;
        buffer = context.getImageData(x, y, 1, 1).data;
        shouldBeFalse('isPixelTransparent(buffer)');
        if (!currentTest)
            shouldBeTrue('isPixelBlack(buffer)');
        else
            shouldBeTrue('isPixelGray(buffer)');

        if (currentTest >= 1) {
            finishJSTest();
            return;
        }

        videos[currentTest].pause();
        ++currentTest;
        requestNextStream();
    }

    function setupVideoElement(stream)
    {
        mediaStream = stream;
        testPassed('mediaDevices.getUserMedia generated a stream successfully.');
        evalAndLog(`videos[${currentTest}].srcObject = mediaStream`);
    }

    function canplay()
    {
        videos[currentTest].removeEventListener('canplay', canplay, false);
        evalAndLog(`videos[${currentTest}].play()`);
    }

    function requestNextStream()
    {
        // The first pass uses the default camera; the second asks for a
        // back-facing camera via the "environment" facingMode constraint.
        debug(`<br> === checking pixels from ${!currentTest ? "front" : "back"} camera ===`);
        let constraints = {video : !currentTest ? true : {facingMode: "environment"}};
        navigator.mediaDevices.getUserMedia(constraints).then(setupVideoElement);
    }

    function start()
    {
        description("Tests that the stream displays captured buffers to the video element.");
        if (window.testRunner)
            testRunner.setUserMediaPermission(true);

        videos = Array.from(document.getElementsByTagName('video'));
        videos.forEach((video) => {
            video.addEventListener('canplay', canplay, false);
            video.addEventListener('playing', verifyFramesBeingDisplayed, false);
        });
        requestNextStream();
    }

    window.jsTestIsAsync = true;
</script>
<script src="../../resources/js-test-post.js"></script>
</body>
</html>
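
For reference, the core pattern this test exercises, stripped of the js-test harness, is sketched below: attach a getUserMedia stream to a video element, wait for playback to begin, draw the current frame into a canvas, and sample a pixel to confirm something other than a blank frame was rendered. The element lookups, the checkFirstFrame name, and the use of await video.play() in place of an explicit 'playing' listener are illustrative assumptions, not part of the test itself.

    async function checkFirstFrame() {
        // Illustrative: assumes the page contains one <video> and one <canvas>.
        const video = document.querySelector('video');
        const canvas = document.querySelector('canvas');
        const context = canvas.getContext('2d');

        // {video: true} selects a default camera; {video: {facingMode: "environment"}}
        // would prefer a back-facing one, as in the second half of the test.
        video.srcObject = await navigator.mediaDevices.getUserMedia({ video: true });
        await video.play();

        // Once playback has started, the current frame can be drawn and sampled.
        context.drawImage(video, 0, 0, canvas.width, canvas.height);
        const pixel = context.getImageData(canvas.width / 2, canvas.height / 2, 1, 1).data;

        // A fully transparent pixel would mean nothing was painted at all.
        const rendered = !(pixel[0] === 0 && pixel[1] === 0 && pixel[2] === 0 && pixel[3] === 0);
        console.log(rendered ? 'first frame rendered' : 'no frame rendered');
    }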