6cb355fcf14f18d83469cd66751a93bfbd346183
[WebKit-https.git] / Tools / Scripts / webkitpy / performance_tests / perftestsrunner_unittest.py
1 #!/usr/bin/python
2 # Copyright (C) 2011 Google Inc. All rights reserved.
3 #
4 # Redistribution and use in source and binary forms, with or without
5 # modification, are permitted provided that the following conditions are
6 # met:
7 #
8 #     * Redistributions of source code must retain the above copyright
9 # notice, this list of conditions and the following disclaimer.
10 #     * Redistributions in binary form must reproduce the above
11 # copyright notice, this list of conditions and the following disclaimer
12 # in the documentation and/or other materials provided with the
13 # distribution.
14 #     * Neither the name of Google Inc. nor the names of its
15 # contributors may be used to endorse or promote products derived from
16 # this software without specific prior written permission.
17 #
18 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
19 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
20 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
21 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
22 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
23 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
24 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
25 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
26 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
28 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29
30 """Unit tests for run_perf_tests."""
31
32 import json
33 import unittest
34
35 from webkitpy.common import array_stream
36 from webkitpy.common.host_mock import MockHost
37 from webkitpy.common.system.filesystem_mock import MockFileSystem
38 from webkitpy.layout_tests.port.driver import DriverInput, DriverOutput
39 from webkitpy.layout_tests.port.test import TestPort
40 from webkitpy.layout_tests.views import printing
41 from webkitpy.performance_tests.perftestsrunner import PerfTestsRunner
42
43
class MainTest(unittest.TestCase):
    """Unit tests for PerfTestsRunner, driven by a fake DumpRenderTree driver."""

    class TestDriver:
        """Stub driver that fabricates DriverOutput based on the test file name."""

        def run_test(self, driver_input):
            """Return canned output keyed off the suffix of driver_input.test_name."""
            text = ''
            timeout = False
            crash = False
            if driver_input.test_name.endswith('pass.html'):
                text = 'RESULT group_name: test_name= 42 ms'
            elif driver_input.test_name.endswith('timeout.html'):
                timeout = True
            elif driver_input.test_name.endswith('failed.html'):
                text = None
            elif driver_input.test_name.endswith('tonguey.html'):
                text = 'we are not expecting an output from perf tests but RESULT blablabla'
            elif driver_input.test_name.endswith('crash.html'):
                crash = True
            elif driver_input.test_name.endswith('event-target-wrapper.html'):
                text = """Running 20 times
Ignoring warm-up run (1502)
1504
1505
1510
1504
1507
1509
1510
1487
1488
1472
1472
1488
1473
1472
1475
1487
1486
1486
1475
1471

avg 1489.05
median 1487
stdev 14.46
min 1471
max 1510
"""
            elif driver_input.test_name.endswith('some-parser.html'):
                text = """Running 20 times
Ignoring warm-up run (1115)

avg 1100
median 1101
stdev 11
min 1080
max 1120
"""
            return DriverOutput(text, '', '', '', crash=crash, timeout=timeout)

        def stop(self):
            """do nothing"""

    def create_runner(self, buildbot_output=None, args=None, regular_output=None):
        """Build a PerfTestsRunner wired to a TestPort and the stub TestDriver.

        args previously defaulted to a mutable list ([]), which is shared
        across calls; use None as the sentinel and substitute a fresh list.
        """
        args = args or []
        buildbot_output = buildbot_output or array_stream.ArrayStream()
        regular_output = regular_output or array_stream.ArrayStream()

        options, parsed_args = PerfTestsRunner._parse_args(args)
        test_port = TestPort(host=MockHost(), options=options)
        test_port.create_driver = lambda worker_number=None, no_timeout=False: MainTest.TestDriver()

        runner = PerfTestsRunner(regular_output, buildbot_output, args=args, port=test_port)
        # Pre-create the directories _collect_tests scans for perf tests.
        runner._host.filesystem.maybe_make_directory(runner._base_path, 'inspector')
        runner._host.filesystem.maybe_make_directory(runner._base_path, 'Bindings')
        runner._host.filesystem.maybe_make_directory(runner._base_path, 'Parser')
        return runner

    def run_test(self, test_name):
        """Run a single named test through the stub driver; return its (failed, restart) pair."""
        runner = self.create_runner()
        driver = MainTest.TestDriver()
        return runner._run_single_test(test_name, driver, is_chromium_style=True)

    def test_run_passing_test(self):
        test_failed, driver_need_restart = self.run_test('pass.html')
        self.assertFalse(test_failed)
        self.assertFalse(driver_need_restart)

    def test_run_silent_test(self):
        # No output at all from the driver counts as a failure.
        test_failed, driver_need_restart = self.run_test('silent.html')
        self.assertTrue(test_failed)
        self.assertFalse(driver_need_restart)

    def test_run_failed_test(self):
        test_failed, driver_need_restart = self.run_test('failed.html')
        self.assertTrue(test_failed)
        self.assertFalse(driver_need_restart)

    def test_run_tonguey_test(self):
        # Unexpected chatter around a RESULT line counts as a failure.
        test_failed, driver_need_restart = self.run_test('tonguey.html')
        self.assertTrue(test_failed)
        self.assertFalse(driver_need_restart)

    def test_run_timeout_test(self):
        # A timeout both fails the test and requires a driver restart.
        test_failed, driver_need_restart = self.run_test('timeout.html')
        self.assertTrue(test_failed)
        self.assertTrue(driver_need_restart)

    def test_run_crash_test(self):
        # A crash both fails the test and requires a driver restart.
        test_failed, driver_need_restart = self.run_test('crash.html')
        self.assertTrue(test_failed)
        self.assertTrue(driver_need_restart)

    def test_run_test_set(self):
        buildbot_output = array_stream.ArrayStream()
        runner = self.create_runner(buildbot_output)
        dirname = runner._base_path + '/inspector/'
        tests = [dirname + 'pass.html', dirname + 'silent.html', dirname + 'failed.html',
            dirname + 'tonguey.html', dirname + 'timeout.html', dirname + 'crash.html']
        unexpected_result_count = runner._run_tests_set(tests, runner._port)
        # All but pass.html should be unexpected results.
        self.assertEqual(unexpected_result_count, len(tests) - 1)
        self.assertEqual(len(buildbot_output.get()), 1)
        self.assertEqual(buildbot_output.get()[0], 'RESULT group_name: test_name= 42 ms\n')

    def test_run_test_set_for_parser_tests(self):
        buildbot_output = array_stream.ArrayStream()
        runner = self.create_runner(buildbot_output)
        tests = [runner._base_path + '/Bindings/event-target-wrapper.html', runner._base_path + '/Parser/some-parser.html']
        unexpected_result_count = runner._run_tests_set(tests, runner._port)
        self.assertEqual(unexpected_result_count, 0)
        self.assertEqual(buildbot_output.get()[0], 'RESULT Bindings: event-target-wrapper= 1489.05 ms\n')
        self.assertEqual(buildbot_output.get()[1], 'median= 1487 ms, stdev= 14.46 ms, min= 1471 ms, max= 1510 ms\n')
        self.assertEqual(buildbot_output.get()[2], 'RESULT Parser: some-parser= 1100 ms\n')
        self.assertEqual(buildbot_output.get()[3], 'median= 1101 ms, stdev= 11 ms, min= 1080 ms, max= 1120 ms\n')

    def test_run_test_set_with_json_output(self):
        buildbot_output = array_stream.ArrayStream()
        runner = self.create_runner(buildbot_output, args=['--output-json-path=/mock-checkout/output.json'])
        runner._host.filesystem.files[runner._base_path + '/inspector/pass.html'] = True
        runner._host.filesystem.files[runner._base_path + '/Bindings/event-target-wrapper.html'] = True
        runner._timestamp = 123456789
        self.assertEqual(runner.run(), 0)
        self.assertEqual(len(buildbot_output.get()), 3)
        self.assertEqual(buildbot_output.get()[0], 'RESULT Bindings: event-target-wrapper= 1489.05 ms\n')
        self.assertEqual(buildbot_output.get()[1], 'median= 1487 ms, stdev= 14.46 ms, min= 1471 ms, max= 1510 ms\n')
        self.assertEqual(buildbot_output.get()[2], 'RESULT group_name: test_name= 42 ms\n')

        self.assertEqual(json.loads(runner._host.filesystem.files['/mock-checkout/output.json']), {
            "timestamp": 123456789, "results":
            {"event-target-wrapper": {"max": "1510", "avg": "1489.05", "median": "1487", "min": "1471", "stdev": "14.46"},
            "group_name:test_name": 42},
            "revision": 1234})

    def test_run_test_set_with_json_source(self):
        buildbot_output = array_stream.ArrayStream()
        runner = self.create_runner(buildbot_output, args=['--output-json-path=/mock-checkout/output.json',
            '--source-json-path=/mock-checkout/source.json'])
        runner._host.filesystem.files['/mock-checkout/source.json'] = '{"key": "value"}'
        runner._host.filesystem.files[runner._base_path + '/inspector/pass.html'] = True
        runner._host.filesystem.files[runner._base_path + '/Bindings/event-target-wrapper.html'] = True
        runner._timestamp = 123456789
        self.assertEqual(runner.run(), 0)
        self.assertEqual(len(buildbot_output.get()), 3)
        self.assertEqual(buildbot_output.get()[0], 'RESULT Bindings: event-target-wrapper= 1489.05 ms\n')
        self.assertEqual(buildbot_output.get()[1], 'median= 1487 ms, stdev= 14.46 ms, min= 1471 ms, max= 1510 ms\n')
        self.assertEqual(buildbot_output.get()[2], 'RESULT group_name: test_name= 42 ms\n')

        # Keys from the source JSON are merged into the generated output JSON.
        self.assertEqual(json.loads(runner._host.filesystem.files['/mock-checkout/output.json']), {
            "timestamp": 123456789, "results":
            {"event-target-wrapper": {"max": "1510", "avg": "1489.05", "median": "1487", "min": "1471", "stdev": "14.46"},
            "group_name:test_name": 42},
            "revision": 1234,
            "key": "value"})

    def test_run_with_upload_json(self):
        runner = self.create_runner(args=['--output-json-path=/mock-checkout/output.json',
            '--test-results-server', 'some.host', '--platform', 'platform1', '--builder-name', 'builder1', '--build-number', '123'])
        # Single-element list so the nested function can mutate it (no
        # "nonlocal" in Python 2).
        upload_json_is_called = [False]
        upload_json_returns_true = True

        def mock_upload_json(hostname, json_path):
            self.assertEqual(hostname, 'some.host')
            self.assertEqual(json_path, '/mock-checkout/output.json')
            upload_json_is_called[0] = True
            return upload_json_returns_true

        runner._upload_json = mock_upload_json
        runner._host.filesystem.files['/mock-checkout/source.json'] = '{"key": "value"}'
        runner._host.filesystem.files[runner._base_path + '/inspector/pass.html'] = True
        runner._host.filesystem.files[runner._base_path + '/Bindings/event-target-wrapper.html'] = True
        runner._timestamp = 123456789
        self.assertEqual(runner.run(), 0)
        self.assertEqual(upload_json_is_called[0], True)
        generated_json = json.loads(runner._host.filesystem.files['/mock-checkout/output.json'])
        self.assertEqual(generated_json['platform'], 'platform1')
        self.assertEqual(generated_json['builder-name'], 'builder1')
        self.assertEqual(generated_json['build-number'], 123)
        # A failed upload should make run() return the upload-error code.
        upload_json_returns_true = False
        self.assertEqual(runner.run(), -3)

    def test_upload_json(self):
        regular_output = array_stream.ArrayStream()
        runner = self.create_runner(buildbot_output=regular_output)
        runner._host.filesystem.files['/mock-checkout/some.json'] = 'some content'

        called = []
        upload_single_text_file_throws = False

        class MockFileUploader:
            # Uses 'mock' instead of 'self' so that 'self' still refers to
            # the enclosing MainTest instance for assertions.
            def __init__(mock, url, timeout):
                self.assertEqual(url, 'https://some.host/api/test/report')
                self.assertTrue(isinstance(timeout, int) and timeout)
                called.append('FileUploader')

            def upload_single_text_file(mock, filesystem, content_type, filename):
                self.assertEqual(filesystem, runner._host.filesystem)
                self.assertEqual(content_type, 'application/json')
                self.assertEqual(filename, 'some.json')
                called.append('upload_single_text_file')
                if upload_single_text_file_throws:
                    # String exceptions are illegal in Python >= 2.6 (they
                    # raise TypeError instead); raise a real exception object.
                    raise Exception("Some exception")

        runner._upload_json('some.host', 'some.json', MockFileUploader)
        self.assertEqual(called, ['FileUploader', 'upload_single_text_file'])

        # Throwing an exception upload_single_text_file shouldn't blow up _upload_json
        called = []
        upload_single_text_file_throws = True
        runner._upload_json('some.host', 'some.json', MockFileUploader)
        self.assertEqual(called, ['FileUploader', 'upload_single_text_file'])

    def test_collect_tests(self):
        runner = self.create_runner()
        filename = runner._host.filesystem.join(runner._base_path, 'inspector', 'a_file.html')
        runner._host.filesystem.files[filename] = 'a content'
        tests = runner._collect_tests()
        self.assertEqual(len(tests), 1)

    def test_collect_tests_with_skipped_list(self):
        runner = self.create_runner()

        def add_file(dirname, filename, content=True):
            dirname = runner._host.filesystem.join(runner._base_path, dirname) if dirname else runner._base_path
            runner._host.filesystem.maybe_make_directory(dirname)
            runner._host.filesystem.files[runner._host.filesystem.join(dirname, filename)] = content

        add_file('inspector', 'test1.html')
        add_file('inspector', 'unsupported_test1.html')
        add_file('inspector', 'test2.html')
        add_file('inspector/resources', 'resource_file.html')
        add_file('unsupported', 'unsupported_test2.html')
        runner._port.skipped_perf_tests = lambda: ['inspector/unsupported_test1.html', 'unsupported']
        tests = [runner._port.relative_perf_test_filename(test) for test in runner._collect_tests()]
        self.assertEqual(sorted(tests), ['inspector/test1.html', 'inspector/test2.html'])

    def test_parse_args(self):
        runner = self.create_runner()
        options, args = PerfTestsRunner._parse_args([
                '--verbose',
                '--build-directory=folder42',
                '--platform=platform42',
                '--builder-name', 'webkit-mac-1',
                '--build-number=56',
                '--time-out-ms=42',
                '--output-json-path=a/output.json',
                '--source-json-path=a/source.json',
                '--test-results-server=somehost',
                '--debug', 'an_arg'])
        self.assertEqual(options.build, True)
        self.assertEqual(options.verbose, True)
        self.assertEqual(options.help_printing, None)
        self.assertEqual(options.build_directory, 'folder42')
        self.assertEqual(options.platform, 'platform42')
        self.assertEqual(options.builder_name, 'webkit-mac-1')
        self.assertEqual(options.build_number, '56')
        self.assertEqual(options.time_out_ms, '42')
        self.assertEqual(options.configuration, 'Debug')
        self.assertEqual(options.print_options, None)
        self.assertEqual(options.output_json_path, 'a/output.json')
        self.assertEqual(options.source_json_path, 'a/source.json')
        self.assertEqual(options.test_results_server, 'somehost')
323
def _main():
    """Run all unit tests in this module."""
    unittest.main()


if __name__ == '__main__':
    _main()