run-perf-tests should record individual values instead of statistics
[WebKit-https.git] / Tools / Scripts / webkitpy / performance_tests / perftest_unittest.py
1 #!/usr/bin/python
2 # Copyright (C) 2012 Google Inc. All rights reserved.
3 #
4 # Redistribution and use in source and binary forms, with or without
5 # modification, are permitted provided that the following conditions are
6 # met:
7 #
8 #     * Redistributions of source code must retain the above copyright
9 # notice, this list of conditions and the following disclaimer.
10 #     * Redistributions in binary form must reproduce the above
11 # copyright notice, this list of conditions and the following disclaimer
12 # in the documentation and/or other materials provided with the
13 # distribution.
14 #     * Neither the name of Google Inc. nor the names of its
15 # contributors may be used to endorse or promote products derived from
16 # this software without specific prior written permission.
17 #
18 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
19 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
20 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
21 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
22 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
23 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
24 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
25 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
26 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
28 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29
30 import StringIO
31 import math
32 import unittest
33
34 from webkitpy.common.host_mock import MockHost
35 from webkitpy.common.system.outputcapture import OutputCapture
36 from webkitpy.layout_tests.port.driver import DriverOutput
37 from webkitpy.layout_tests.port.test import TestDriver
38 from webkitpy.layout_tests.port.test import TestPort
39 from webkitpy.performance_tests.perftest import ChromiumStylePerfTest
40 from webkitpy.performance_tests.perftest import PageLoadingPerfTest
41 from webkitpy.performance_tests.perftest import PerfTest
42 from webkitpy.performance_tests.perftest import PerfTestFactory
43 from webkitpy.performance_tests.perftest import ReplayPerfTest
44
45
class MainTest(unittest.TestCase):
    """Tests for PerfTest.parse_output."""

    def test_parse_output(self):
        """A well-formed result block parses into the per-test statistics plus
        the raw individual values, and the summary is logged in RESULT form."""
        output = DriverOutput('\n'.join([
            'Running 20 times',
            'Ignoring warm-up run (1115)',
            '',
            'Time:',
            'values 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19 ms',
            'avg 1100 ms',
            'median 1101 ms',
            'stdev 11 ms',
            'min 1080 ms',
            'max 1120 ms']), image=None, image_hash=None, audio=None)
        output_capture = OutputCapture()
        output_capture.capture_output()
        try:
            test = PerfTest(None, 'some-test', '/path/some-dir/some-test')
            self.assertEqual(test.parse_output(output),
                {'some-test': {'avg': 1100.0, 'median': 1101.0, 'min': 1080.0, 'max': 1120.0, 'stdev': 11.0, 'unit': 'ms',
                    'values': list(range(1, 20))}})
        finally:
            # Restore inside finally so a failed assertion doesn't leave
            # stdout/stderr captured for the rest of the test run.
            actual_stdout, actual_stderr, actual_logs = output_capture.restore_output()
        self.assertEqual(actual_stdout, '')
        self.assertEqual(actual_stderr, '')
        self.assertEqual(actual_logs, 'RESULT some-test= 1100.0 ms\nmedian= 1101.0 ms, stdev= 11.0 ms, min= 1080.0 ms, max= 1120.0 ms\n')

    def test_parse_output_with_failing_line(self):
        """An unrecognizable line aborts parsing: parse_output returns None
        and only the offending line is logged."""
        output = DriverOutput('\n'.join([
            'Running 20 times',
            'Ignoring warm-up run (1115)',
            '',
            'some-unrecognizable-line',
            '',
            'Time:',
            'values 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19 ms',
            'avg 1100 ms',
            'median 1101 ms',
            'stdev 11 ms',
            'min 1080 ms',
            'max 1120 ms']), image=None, image_hash=None, audio=None)
        output_capture = OutputCapture()
        output_capture.capture_output()
        try:
            test = PerfTest(None, 'some-test', '/path/some-dir/some-test')
            self.assertEqual(test.parse_output(output), None)
        finally:
            actual_stdout, actual_stderr, actual_logs = output_capture.restore_output()
        self.assertEqual(actual_stdout, '')
        self.assertEqual(actual_stderr, '')
        self.assertEqual(actual_logs, 'some-unrecognizable-line\n')
98
class TestPageLoadingPerfTest(unittest.TestCase):
    """Tests for PageLoadingPerfTest.run."""

    class MockDriver(object):
        """Driver stub that replays a scripted sequence of results.

        Each entry in |values| is either a number (reported as the test time
        of a successful load) or a string (reported as a driver error).
        """

        def __init__(self, values):
            self._values = values
            self._index = 0

        def run_test(self, input, stop_when_done):
            # |input| shadows the builtin, but the name mirrors the real
            # driver's run_test signature, so it is kept.
            value = self._values[self._index]
            self._index += 1
            if isinstance(value, str):
                return DriverOutput('some output', image=None, image_hash=None, audio=None, error=value)
            # Reuse the already-fetched value instead of re-indexing the list.
            return DriverOutput('some output', image=None, image_hash=None, audio=None, test_time=value)

    def test_run(self):
        """The first run is discarded as warm-up; the remaining 19 values (in
        ms) are reported individually along with their statistics."""
        test = PageLoadingPerfTest(None, 'some-test', '/path/some-dir/some-test')
        driver = TestPageLoadingPerfTest.MockDriver(range(1, 21))
        output_capture = OutputCapture()
        output_capture.capture_output()
        try:
            self.assertEqual(test.run(driver, None),
                {'some-test': {'max': 20000, 'avg': 11000.0, 'median': 11000, 'stdev': math.sqrt(570 * 1000 * 1000), 'min': 2000, 'unit': 'ms',
                    'values': [i * 1000 for i in range(2, 21)]}})
        finally:
            actual_stdout, actual_stderr, actual_logs = output_capture.restore_output()
        self.assertEqual(actual_stdout, '')
        self.assertEqual(actual_stderr, '')
        self.assertEqual(actual_logs, 'RESULT some-test= 11000.0 ms\nmedian= 11000 ms, stdev= 23874.6727726 ms, min= 2000 ms, max= 20000 ms\n')

    def test_run_with_bad_output(self):
        """A driver error on any iteration makes run() return None and log
        the error against the test name."""
        output_capture = OutputCapture()
        output_capture.capture_output()
        try:
            test = PageLoadingPerfTest(None, 'some-test', '/path/some-dir/some-test')
            driver = TestPageLoadingPerfTest.MockDriver([1, 2, 3, 4, 5, 6, 7, 'some error', 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20])
            self.assertEqual(test.run(driver, None), None)
        finally:
            actual_stdout, actual_stderr, actual_logs = output_capture.restore_output()
        self.assertEqual(actual_stdout, '')
        self.assertEqual(actual_stderr, '')
        self.assertEqual(actual_logs, 'error: some-test\nsome error\n')
140
141
class TestReplayPerfTest(unittest.TestCase):
    """Tests ReplayPerfTest's prepare()/run_single() flow using a scripted
    test driver and a mocked web-page-replay server."""

    class ReplayTestPort(TestPort):
        """TestPort whose driver delegates run_test to a callback supplied by
        the individual test case."""

        def __init__(self, custom_run_test=None):

            class ReplayTestDriver(TestDriver):
                def run_test(self, text_input, stop_when_done):
                    return custom_run_test(text_input, stop_when_done) if custom_run_test else None

            self._custom_driver_class = ReplayTestDriver
            # Name the class explicitly: super(self.__class__, ...) recurses
            # infinitely if this port is ever subclassed.
            super(TestReplayPerfTest.ReplayTestPort, self).__init__(host=MockHost())

        def _driver_class(self):
            return self._custom_driver_class

    class MockReplayServer(object):
        """Stand-in for the web-page-replay server whose readiness is chosen
        by the test case."""

        def __init__(self, wait_until_ready=True):
            self.wait_until_ready = lambda: wait_until_ready

        def stop(self):
            pass

    def _add_file(self, port, dirname, filename, content=True):
        # Write |content| into the port's mock filesystem, creating |dirname|
        # first if needed.
        port.host.filesystem.maybe_make_directory(dirname)
        port.host.filesystem.write_binary_file(port.host.filesystem.join(dirname, filename), content)

    def _setup_test(self, run_test=None):
        # Build a ReplayPerfTest over a port whose driver runs |run_test|,
        # with the .replay file on disk and the replay server mocked out.
        test_port = self.ReplayTestPort(run_test)
        self._add_file(test_port, '/path/some-dir', 'some-test.replay', 'http://some-test/')
        test = ReplayPerfTest(test_port, 'some-test.replay', '/path/some-dir/some-test.replay')
        test._start_replay_server = lambda archive, record: self.__class__.MockReplayServer()
        return test, test_port

    def test_run_single(self):
        """A successful replay loads the page once and stores the actual
        image next to the test."""
        output_capture = OutputCapture()
        output_capture.capture_output()

        loaded_pages = []

        def run_test(test_input, stop_when_done):
            if test_input.test_name != "about:blank":
                self.assertEqual(test_input.test_name, 'http://some-test/')
            loaded_pages.append(test_input)
            self._add_file(port, '/path/some-dir', 'some-test.wpr', 'wpr content')
            return DriverOutput('actual text', 'actual image', 'actual checksum',
                audio=None, crash=False, timeout=False, error=False)

        test, port = self._setup_test(run_test)
        test._archive_path = '/path/some-dir/some-test.wpr'
        test._url = 'http://some-test/'

        try:
            driver = port.create_driver(worker_number=1, no_timeout=True)
            self.assertTrue(test.run_single(driver, '/path/some-dir/some-test.replay', time_out_ms=100))
        finally:
            actual_stdout, actual_stderr, actual_logs = output_capture.restore_output()

        self.assertEqual(len(loaded_pages), 1)
        self.assertEqual(loaded_pages[0].test_name, 'http://some-test/')
        self.assertEqual(actual_stdout, '')
        self.assertEqual(actual_stderr, '')
        self.assertEqual(actual_logs, '')
        self.assertEqual(port.host.filesystem.read_binary_file('/path/some-dir/some-test-actual.png'), 'actual image')

    def test_run_single_fails_without_webpagereplay(self):
        """run_single returns None when the replay server fails to start."""
        output_capture = OutputCapture()
        output_capture.capture_output()

        test, port = self._setup_test()
        test._start_replay_server = lambda archive, record: None
        test._archive_path = '/path/some-dir.wpr'
        test._url = 'http://some-test/'

        try:
            driver = port.create_driver(worker_number=1, no_timeout=True)
            self.assertEqual(test.run_single(driver, '/path/some-dir/some-test.replay', time_out_ms=100), None)
        finally:
            actual_stdout, actual_stderr, actual_logs = output_capture.restore_output()
        self.assertEqual(actual_stdout, '')
        self.assertEqual(actual_stderr, '')
        self.assertEqual(actual_logs, "Web page replay didn't start.\n")

    def test_prepare_fails_when_wait_until_ready_fails(self):
        """run_single also fails when the server starts but never becomes
        ready."""
        output_capture = OutputCapture()
        output_capture.capture_output()

        test, port = self._setup_test()
        test._start_replay_server = lambda archive, record: self.__class__.MockReplayServer(wait_until_ready=False)
        test._archive_path = '/path/some-dir.wpr'
        test._url = 'http://some-test/'

        try:
            driver = port.create_driver(worker_number=1, no_timeout=True)
            self.assertEqual(test.run_single(driver, '/path/some-dir/some-test.replay', time_out_ms=100), None)
        finally:
            actual_stdout, actual_stderr, actual_logs = output_capture.restore_output()

        self.assertEqual(actual_stdout, '')
        self.assertEqual(actual_stderr, '')
        self.assertEqual(actual_logs, "Web page replay didn't start.\n")

    def test_run_single_fails_when_output_has_error(self):
        """A driver error during the replayed load makes run_single return
        None and log the error."""
        output_capture = OutputCapture()
        output_capture.capture_output()

        loaded_pages = []

        def run_test(test_input, stop_when_done):
            loaded_pages.append(test_input)
            self._add_file(port, '/path/some-dir', 'some-test.wpr', 'wpr content')
            return DriverOutput('actual text', 'actual image', 'actual checksum',
                audio=None, crash=False, timeout=False, error='some error')

        test, port = self._setup_test(run_test)
        test._archive_path = '/path/some-dir.wpr'
        test._url = 'http://some-test/'

        try:
            driver = port.create_driver(worker_number=1, no_timeout=True)
            self.assertEqual(test.run_single(driver, '/path/some-dir/some-test.replay', time_out_ms=100), None)
        finally:
            actual_stdout, actual_stderr, actual_logs = output_capture.restore_output()

        self.assertEqual(len(loaded_pages), 1)
        self.assertEqual(loaded_pages[0].test_name, 'http://some-test/')
        self.assertEqual(actual_stdout, '')
        self.assertEqual(actual_stderr, '')
        self.assertEqual(actual_logs, 'error: some-test.replay\nsome error\n')

    def test_prepare(self):
        """prepare() records the archive and stores the expected image."""
        output_capture = OutputCapture()
        output_capture.capture_output()

        def run_test(test_input, stop_when_done):
            self._add_file(port, '/path/some-dir', 'some-test.wpr', 'wpr content')
            return DriverOutput('actual text', 'actual image', 'actual checksum',
                audio=None, crash=False, timeout=False, error=False)

        test, port = self._setup_test(run_test)

        try:
            self.assertEqual(test.prepare(time_out_ms=100), True)
        finally:
            actual_stdout, actual_stderr, actual_logs = output_capture.restore_output()

        self.assertEqual(actual_stdout, '')
        self.assertEqual(actual_stderr, '')
        self.assertEqual(actual_logs, 'Preparing replay for some-test.replay\nPrepared replay for some-test.replay\n')
        self.assertEqual(port.host.filesystem.read_binary_file('/path/some-dir/some-test-expected.png'), 'actual image')

    def test_prepare_calls_run_single(self):
        """prepare() delegates to run_single in record mode and fails when
        run_single does."""
        output_capture = OutputCapture()
        output_capture.capture_output()
        called = [False]

        def run_single(driver, url, time_out_ms, record):
            self.assertTrue(record)
            self.assertEqual(url, '/path/some-dir/some-test.wpr')
            called[0] = True
            return False

        test, port = self._setup_test()
        test.run_single = run_single

        try:
            self.assertEqual(test.prepare(time_out_ms=100), False)
        finally:
            actual_stdout, actual_stderr, actual_logs = output_capture.restore_output()
        self.assertTrue(called[0])
        self.assertEqual(test._archive_path, '/path/some-dir/some-test.wpr')
        self.assertEqual(test._url, 'http://some-test/')
        self.assertEqual(actual_stdout, '')
        self.assertEqual(actual_stderr, '')
        self.assertEqual(actual_logs, "Preparing replay for some-test.replay\nFailed to prepare a replay for some-test.replay\n")
316
class TestPerfTestFactory(unittest.TestCase):
    """PerfTestFactory should choose the test class from the test's
    directory name."""

    def test_regular_test(self):
        # A test in an ordinary directory gets the generic PerfTest runner.
        created = PerfTestFactory.create_perf_test(None, 'some-dir/some-test', '/path/some-dir/some-test')
        self.assertEqual(created.__class__, PerfTest)

    def test_inspector_test(self):
        # inspector/ tests use the Chromium-style result parser.
        created = PerfTestFactory.create_perf_test(None, 'inspector/some-test', '/path/inspector/some-test')
        self.assertEqual(created.__class__, ChromiumStylePerfTest)

    def test_page_loading_test(self):
        # PageLoad/ tests are timed page loads.
        created = PerfTestFactory.create_perf_test(None, 'PageLoad/some-test', '/path/PageLoad/some-test')
        self.assertEqual(created.__class__, PageLoadingPerfTest)
329
330
# Allow running this suite directly: python perftest_unittest.py
if __name__ == '__main__':
    unittest.main()