run-perf-tests should generate a JSON file that summarizes the results
Tools/Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py
#!/usr/bin/python
# Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Unit tests for run_perf_tests."""

import json
import unittest

from webkitpy.common import array_stream
from webkitpy.common.host_mock import MockHost
from webkitpy.common.system.filesystem_mock import MockFileSystem
from webkitpy.layout_tests.port.driver import DriverInput, DriverOutput
from webkitpy.layout_tests.port.test import TestPort
from webkitpy.layout_tests.views import printing
from webkitpy.performance_tests.perftestsrunner import PerfTestsRunner


class MainTest(unittest.TestCase):
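    # Stub driver: run_test() fabricates a DriverOutput for each known fixture
    # name, so the runner can be exercised without launching a real browser.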
    class TestDriver:
        def run_test(self, driver_input):
            text = ''
            timeout = False
            crash = False
            if driver_input.test_name.endswith('pass.html'):
                text = 'RESULT group_name: test_name= 42 ms'
            elif driver_input.test_name.endswith('timeout.html'):
                timeout = True
            elif driver_input.test_name.endswith('failed.html'):
                text = None
            elif driver_input.test_name.endswith('tonguey.html'):
                text = 'we are not expecting an output from perf tests but RESULT blablabla'
            elif driver_input.test_name.endswith('crash.html'):
                crash = True
            elif driver_input.test_name.endswith('event-target-wrapper.html'):
                text = """Running 20 times
Ignoring warm-up run (1502)
1504
1505
1510
1504
1507
1509
1510
1487
1488
1472
1472
1488
1473
1472
1475
1487
1486
1486
1475
1471

avg 1489.05
median 1487
stdev 14.46
min 1471
max 1510
"""
            elif driver_input.test_name.endswith('some-parser.html'):
                text = """Running 20 times
Ignoring warm-up run (1115)

avg 1100
median 1101
stdev 11
min 1080
max 1120
"""
            return DriverOutput(text, '', '', '', crash=crash, timeout=timeout)

        def stop(self):
            """do nothing"""

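    # Builds a PerfTestsRunner wired to a MockHost and a TestPort whose
    # create_driver() returns the stub above; nothing touches a real
    # filesystem or browser.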
    def create_runner(self, buildbot_output=None, args=None):
        args = args or []
        buildbot_output = buildbot_output or array_stream.ArrayStream()
        regular_output = array_stream.ArrayStream()

        options, parsed_args = PerfTestsRunner._parse_args(args)
        test_port = TestPort(host=MockHost(), options=options)
        test_port.create_driver = lambda worker_number=None: MainTest.TestDriver()

        runner = PerfTestsRunner(regular_output, buildbot_output, args=args, port=test_port)
        runner._host.filesystem.maybe_make_directory(runner._base_path, 'inspector')
        runner._host.filesystem.maybe_make_directory(runner._base_path, 'Bindings')
        runner._host.filesystem.maybe_make_directory(runner._base_path, 'Parser')
        return runner

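    # Helper: runs a single named test through the stub driver and returns
    # the (test_failed, driver_need_restart) pair from _run_single_test().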
    def run_test(self, test_name):
        runner = self.create_runner()
        driver = MainTest.TestDriver()
        return runner._run_single_test(test_name, driver, is_chromium_style=True)

    def test_run_passing_test(self):
        test_failed, driver_need_restart = self.run_test('pass.html')
        self.assertFalse(test_failed)
        self.assertFalse(driver_need_restart)

    def test_run_silent_test(self):
        test_failed, driver_need_restart = self.run_test('silent.html')
        self.assertTrue(test_failed)
        self.assertFalse(driver_need_restart)

    def test_run_failed_test(self):
        test_failed, driver_need_restart = self.run_test('failed.html')
        self.assertTrue(test_failed)
        self.assertFalse(driver_need_restart)

    def test_run_tonguey_test(self):
        test_failed, driver_need_restart = self.run_test('tonguey.html')
        self.assertTrue(test_failed)
        self.assertFalse(driver_need_restart)

    def test_run_timeout_test(self):
        test_failed, driver_need_restart = self.run_test('timeout.html')
        self.assertTrue(test_failed)
        self.assertTrue(driver_need_restart)

    def test_run_crash_test(self):
        test_failed, driver_need_restart = self.run_test('crash.html')
        self.assertTrue(test_failed)
        self.assertTrue(driver_need_restart)

    def test_run_test_set(self):
        buildbot_output = array_stream.ArrayStream()
        runner = self.create_runner(buildbot_output)
        dirname = runner._base_path + '/inspector/'
        tests = [dirname + 'pass.html', dirname + 'silent.html', dirname + 'failed.html',
            dirname + 'tonguey.html', dirname + 'timeout.html', dirname + 'crash.html']
        unexpected_result_count = runner._run_tests_set(tests, runner._port)
        self.assertEqual(unexpected_result_count, len(tests) - 1)
        self.assertEqual(len(buildbot_output.get()), 1)
        self.assertEqual(buildbot_output.get()[0], 'RESULT group_name: test_name= 42 ms\n')

    def test_run_test_set_for_parser_tests(self):
        buildbot_output = array_stream.ArrayStream()
        runner = self.create_runner(buildbot_output)
        tests = [runner._base_path + '/Bindings/event-target-wrapper.html', runner._base_path + '/Parser/some-parser.html']
        unexpected_result_count = runner._run_tests_set(tests, runner._port)
        self.assertEqual(unexpected_result_count, 0)
        self.assertEqual(buildbot_output.get()[0], 'RESULT Bindings: event-target-wrapper= 1489.05 ms\n')
        self.assertEqual(buildbot_output.get()[1], 'median= 1487 ms, stdev= 14.46 ms, min= 1471 ms, max= 1510 ms\n')
        self.assertEqual(buildbot_output.get()[2], 'RESULT Parser: some-parser= 1100 ms\n')
        self.assertEqual(buildbot_output.get()[3], 'median= 1101 ms, stdev= 11 ms, min= 1080 ms, max= 1120 ms\n')

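    # With --output-json-path, run() should write a JSON summary containing
    # the forced timestamp, the per-test results, and the checkout revision
    # (the mock checkout reports revision 1234).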
    def test_run_test_set_with_json_output(self):
        buildbot_output = array_stream.ArrayStream()
        runner = self.create_runner(buildbot_output, args=['--output-json-path=/test.checkout/output.json'])
        runner._host.filesystem.files[runner._base_path + '/inspector/pass.html'] = True
        runner._host.filesystem.files[runner._base_path + '/Bindings/event-target-wrapper.html'] = True
        runner._timestamp = 123456789
        self.assertEqual(runner.run(), 0)
        self.assertEqual(len(buildbot_output.get()), 3)
        self.assertEqual(buildbot_output.get()[0], 'RESULT Bindings: event-target-wrapper= 1489.05 ms\n')
        self.assertEqual(buildbot_output.get()[1], 'median= 1487 ms, stdev= 14.46 ms, min= 1471 ms, max= 1510 ms\n')
        self.assertEqual(buildbot_output.get()[2], 'RESULT group_name: test_name= 42 ms\n')

        self.assertEqual(json.loads(runner._host.filesystem.files['/test.checkout/output.json']), {
            "timestamp": 123456789, "results":
            {"event-target-wrapper": {"max": "1510", "avg": "1489.05", "median": "1487", "min": "1471", "stdev": "14.46"},
            "group_name:test_name": 42},
            "revision": 1234})

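    # With --source-json-path, the keys of the given JSON file should be
    # merged into the generated output JSON alongside the results.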
    def test_run_test_set_with_json_source(self):
        buildbot_output = array_stream.ArrayStream()
        runner = self.create_runner(buildbot_output, args=['--output-json-path=/test.checkout/output.json',
            '--source-json-path=/test.checkout/source.json'])
        runner._host.filesystem.files['/test.checkout/source.json'] = '{"key": "value"}'
        runner._host.filesystem.files[runner._base_path + '/inspector/pass.html'] = True
        runner._host.filesystem.files[runner._base_path + '/Bindings/event-target-wrapper.html'] = True
        runner._timestamp = 123456789
        self.assertEqual(runner.run(), 0)
        self.assertEqual(len(buildbot_output.get()), 3)
        self.assertEqual(buildbot_output.get()[0], 'RESULT Bindings: event-target-wrapper= 1489.05 ms\n')
        self.assertEqual(buildbot_output.get()[1], 'median= 1487 ms, stdev= 14.46 ms, min= 1471 ms, max= 1510 ms\n')
        self.assertEqual(buildbot_output.get()[2], 'RESULT group_name: test_name= 42 ms\n')

        self.assertEqual(json.loads(runner._host.filesystem.files['/test.checkout/output.json']), {
            "timestamp": 123456789, "results":
            {"event-target-wrapper": {"max": "1510", "avg": "1489.05", "median": "1487", "min": "1471", "stdev": "14.46"},
            "group_name:test_name": 42},
            "revision": 1234,
            "key": "value"})

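    # _collect_tests() should discover test files under the performance test
    # directories; one file is planted, so exactly one test is expected.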
    def test_collect_tests(self):
        runner = self.create_runner()
        filename = runner._host.filesystem.join(runner._base_path, 'inspector', 'a_file.html')
        runner._host.filesystem.files[filename] = 'a content'
        tests = runner._collect_tests()
        self.assertEqual(len(tests), 1)

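    # _parse_args() should map each long option onto the matching option
    # attribute, with --debug selecting the Debug configuration.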
    def test_parse_args(self):
        runner = self.create_runner()
        options, args = PerfTestsRunner._parse_args([
                '--verbose',
                '--build-directory=folder42',
                '--platform=platform42',
                '--time-out-ms=42',
                '--output-json-path=a/output.json',
                '--source-json-path=a/source.json',
                '--debug', 'an_arg'])
        self.assertEqual(options.build, True)
        self.assertEqual(options.verbose, True)
        self.assertEqual(options.help_printing, None)
        self.assertEqual(options.build_directory, 'folder42')
        self.assertEqual(options.platform, 'platform42')
        self.assertEqual(options.time_out_ms, '42')
        self.assertEqual(options.configuration, 'Debug')
        self.assertEqual(options.print_options, None)
        self.assertEqual(options.output_json_path, 'a/output.json')
        self.assertEqual(options.source_json_path, 'a/source.json')


if __name__ == '__main__':
    unittest.main()