<!DOCTYPE html>
<html>
<head>
    <script src="../../resources/js-test-pre.js"></script>
    <script src="./resources/getUserMedia-helper.js"></script>
</head>
<body onload="start()">
<p id="description"></p>
<div id="console"></div>
<video controls width="680" height="360"></video>
<video controls width="680" height="360"></video>
<canvas width="680" height="360"></canvas>
<script>
    let mediaStream;
    let videos;
    let buffer;
    let canvas;
    let context;
    let currentTest = 0;

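    // Pixel predicates: each takes an [r, g, b, a] array as returned by getImageData.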
    function isPixelTransparent(pixel)
    {
        return pixel[0] === 0 && pixel[1] === 0 && pixel[2] === 0 && pixel[3] === 0;
    }

    function isPixelBlack(pixel)
    {
        return pixel[0] === 0 && pixel[1] === 0 && pixel[2] === 0 && pixel[3] === 255;
    }

    function isPixelGray(pixel)
    {
        return pixel[0] === 128 && pixel[1] === 128 && pixel[2] === 128 && pixel[3] === 255;
    }

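    // Draws the current frame of the active video element into the canvas and
    // verifies that actual image data, not an empty buffer, was rendered.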
    function verifyFramesBeingDisplayed()
    {
        videos[currentTest].removeEventListener('playing', verifyFramesBeingDisplayed, false);

        canvas = document.querySelector('canvas');
        context = canvas.getContext('2d');

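        // Clear the canvas and confirm the sampled pixel starts out transparent.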
        context.clearRect(0, 0, canvas.width, canvas.height);
        let x = canvas.width * 0.035;
        let y = canvas.height * 0.6 + 2 + x;
        buffer = context.getImageData(x, y, 1, 1).data;
        shouldBeTrue('isPixelTransparent(buffer)');

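        // Draw the current video frame; the same pixel should now hold opaque, non-black data.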
        evalAndLog(`context.drawImage(videos[${currentTest}], 0, 0, ${canvas.width}, ${canvas.height})`);
        buffer = context.getImageData(x, y, 1, 1).data;
        shouldBeFalse('isPixelTransparent(buffer)');
        shouldBeFalse('isPixelBlack(buffer)');

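        // Sample near the top-left corner: expected to be black for the first stream and gray for the second.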
        x = canvas.width * 0.05;
        y = canvas.height * 0.05;
        buffer = context.getImageData(x, y, 1, 1).data;
        shouldBeFalse('isPixelTransparent(buffer)');
        if (!currentTest)
            shouldBeTrue('isPixelBlack(buffer)');
        else
            shouldBeTrue('isPixelGray(buffer)');

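        // Finish once both streams have been verified.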
        if (currentTest >= 1) {
            finishJSTest();
            return;
        }

        videos[currentTest].pause();
        ++currentTest;
        requestNextStream();
    }

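    // getUserMedia success callback: attach the captured stream to the current video element.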
    function setupVideoElement(stream)
    {
        mediaStream = stream;
        testPassed('mediaDevices.getUserMedia generated a stream successfully.');
        evalAndLog(`videos[${currentTest}].srcObject = mediaStream`);
    }

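    // Start playback once the element reports it can play the stream.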
    function canplay()
    {
        videos[currentTest].removeEventListener('canplay', canplay, false);
        evalAndLog(`videos[${currentTest}].play()`);
    }

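    // Request the front camera (default constraints) for the first pass and the
    // back camera (facingMode: "environment") for the second.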
    function requestNextStream()
    {
        debug(`<br> === checking pixels from ${!currentTest ? "front" : "back"} camera ===`);
        let constraints = { video: !currentTest ? true : { facingMode: "environment" } };
        getUserMedia("allow", constraints, setupVideoElement);
    }

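    // Entry point: register listeners on both video elements and request the first stream.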
    function start()
    {
        description("Tests that the stream displays captured buffers to the video element.");

        videos = Array.from(document.getElementsByTagName('video'));
        videos.forEach((video) => {
            video.addEventListener('canplay', canplay, false);
            video.addEventListener('playing', verifyFramesBeingDisplayed, false);
        });
        requestNextStream();
    }

    window.jsTestIsAsync = true;
</script>
<script src="../../resources/js-test-post.js"></script>
</body>
</html>