Unreviewed, rolling out r112014.
[WebKit-https.git] / Tools / Scripts / webkitpy / performance_tests / perftestsrunner_unittest.py
1 #!/usr/bin/python
2 # Copyright (C) 2012 Google Inc. All rights reserved.
3 #
4 # Redistribution and use in source and binary forms, with or without
5 # modification, are permitted provided that the following conditions are
6 # met:
7 #
8 #     * Redistributions of source code must retain the above copyright
9 # notice, this list of conditions and the following disclaimer.
10 #     * Redistributions in binary form must reproduce the above
11 # copyright notice, this list of conditions and the following disclaimer
12 # in the documentation and/or other materials provided with the
13 # distribution.
14 #     * Neither the name of Google Inc. nor the names of its
15 # contributors may be used to endorse or promote products derived from
16 # this software without specific prior written permission.
17 #
18 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
19 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
20 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
21 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
22 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
23 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
24 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
25 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
26 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
28 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29
30 """Unit tests for run_perf_tests."""
31
32 import StringIO
33 import json
34 import unittest
35
36 from webkitpy.common.host_mock import MockHost
37 from webkitpy.common.system.filesystem_mock import MockFileSystem
38 from webkitpy.common.system.outputcapture import OutputCapture
39 from webkitpy.layout_tests.port.driver import DriverInput, DriverOutput
40 from webkitpy.layout_tests.port.test import TestPort
41 from webkitpy.layout_tests.views import printing
42 from webkitpy.performance_tests.perftestsrunner import PerfTestsRunner
43
44
class MainTest(unittest.TestCase):
    """Unit tests for PerfTestsRunner: running individual tests, test sets,
    output parsing, JSON result generation, uploading, test collection, and
    command-line argument parsing."""

    def assertWritten(self, stream, contents):
        """Assert that the StringIO stream received exactly the expected lines."""
        # assertEquals is a deprecated alias of assertEqual; use the canonical name.
        self.assertEqual(stream.buflist, contents)

    class TestDriver:
        """Fake DumpRenderTree driver returning canned output keyed off the test name."""

        def run_test(self, driver_input):
            """Return a DriverOutput whose text/crash/timeout depend on the test name suffix."""
            text = ''
            timeout = False
            crash = False
            if driver_input.test_name.endswith('pass.html'):
                text = 'RESULT group_name: test_name= 42 ms'
            elif driver_input.test_name.endswith('timeout.html'):
                timeout = True
            elif driver_input.test_name.endswith('failed.html'):
                text = None
            elif driver_input.test_name.endswith('tonguey.html'):
                text = 'we are not expecting an output from perf tests but RESULT blablabla'
            elif driver_input.test_name.endswith('crash.html'):
                crash = True
            elif driver_input.test_name.endswith('event-target-wrapper.html'):
                text = """Running 20 times
Ignoring warm-up run (1502)
1504
1505
1510
1504
1507
1509
1510
1487
1488
1472
1472
1488
1473
1472
1475
1487
1486
1486
1475
1471

avg 1489.05
median 1487
stdev 14.46
min 1471
max 1510
"""
            elif driver_input.test_name.endswith('some-parser.html'):
                text = """Running 20 times
Ignoring warm-up run (1115)

avg 1100
median 1101
stdev 11
min 1080
max 1120
"""
            return DriverOutput(text, '', '', '', crash=crash, timeout=timeout)

        def start(self):
            """do nothing"""

        def stop(self):
            """do nothing"""

    def create_runner(self, buildbot_output=None, args=None, regular_output=None, driver_class=TestDriver):
        """Create a PerfTestsRunner backed by a TestPort and mock filesystem.

        args defaults to None (not a mutable []) so the default list is not
        shared across calls.
        """
        args = args if args is not None else []
        buildbot_output = buildbot_output or StringIO.StringIO()
        regular_output = regular_output or StringIO.StringIO()

        options, parsed_args = PerfTestsRunner._parse_args(args)
        test_port = TestPort(host=MockHost(), options=options)
        test_port.create_driver = lambda worker_number=None, no_timeout=False: driver_class()

        runner = PerfTestsRunner(regular_output, buildbot_output, args=args, port=test_port)
        # Pre-create the standard PerformanceTests subdirectories.
        runner._host.filesystem.maybe_make_directory(runner._base_path, 'inspector')
        runner._host.filesystem.maybe_make_directory(runner._base_path, 'Bindings')
        runner._host.filesystem.maybe_make_directory(runner._base_path, 'Parser')
        return runner

    def run_test(self, test_name):
        """Run a single named test through a fresh runner and return its result."""
        runner = self.create_runner()
        driver = MainTest.TestDriver()
        return runner._run_single_test(test_name, driver, is_chromium_style=True)

    def test_run_passing_test(self):
        self.assertTrue(self.run_test('pass.html'))

    def test_run_silent_test(self):
        self.assertFalse(self.run_test('silent.html'))

    def test_run_failed_test(self):
        self.assertFalse(self.run_test('failed.html'))

    def test_run_tonguey_test(self):
        self.assertFalse(self.run_test('tonguey.html'))

    def test_run_timeout_test(self):
        self.assertFalse(self.run_test('timeout.html'))

    def test_run_crash_test(self):
        self.assertFalse(self.run_test('crash.html'))

    def test_run_test_set(self):
        """Only the passing test should produce buildbot output; the rest are unexpected."""
        buildbot_output = StringIO.StringIO()
        runner = self.create_runner(buildbot_output)
        dirname = runner._base_path + '/inspector/'
        tests = [dirname + 'pass.html', dirname + 'silent.html', dirname + 'failed.html',
            dirname + 'tonguey.html', dirname + 'timeout.html', dirname + 'crash.html']
        unexpected_result_count = runner._run_tests_set(tests, runner._port)
        self.assertEqual(unexpected_result_count, len(tests) - 1)
        self.assertWritten(buildbot_output, ['RESULT group_name: test_name= 42 ms\n'])

    def test_run_test_set_kills_drt_per_run(self):
        """The driver must be stopped once per test, even for failing tests."""

        class TestDriverWithStopCount(MainTest.TestDriver):
            stop_count = 0

            def stop(self):
                TestDriverWithStopCount.stop_count += 1

        buildbot_output = StringIO.StringIO()
        runner = self.create_runner(buildbot_output, driver_class=TestDriverWithStopCount)

        dirname = runner._base_path + '/inspector/'
        tests = [dirname + 'pass.html', dirname + 'silent.html', dirname + 'failed.html',
            dirname + 'tonguey.html', dirname + 'timeout.html', dirname + 'crash.html']

        unexpected_result_count = runner._run_tests_set(tests, runner._port)
        self.assertEqual(TestDriverWithStopCount.stop_count, 6)

    def test_run_test_pause_before_testing(self):
        """--pause-before-testing should start the driver once and prompt on stderr."""
        class TestDriverWithStartCount(MainTest.TestDriver):
            start_count = 0

            def start(self):
                TestDriverWithStartCount.start_count += 1

        buildbot_output = StringIO.StringIO()
        runner = self.create_runner(buildbot_output, args=["--pause-before-testing"], driver_class=TestDriverWithStartCount)

        dirname = runner._base_path + '/inspector/'
        tests = [dirname + 'pass.html']

        try:
            output = OutputCapture()
            output.capture_output()
            unexpected_result_count = runner._run_tests_set(tests, runner._port)
            self.assertEqual(TestDriverWithStartCount.start_count, 1)
        finally:
            _, stderr, logs = output.restore_output()
            self.assertEqual(stderr, "Ready to run test?\n")
            self.assertTrue("Running inspector/pass.html (1 of 1)" in logs)

    def test_run_test_set_for_parser_tests(self):
        """Chromium-style perf output (avg/median/stdev/min/max) is parsed and reported."""
        buildbot_output = StringIO.StringIO()
        runner = self.create_runner(buildbot_output)
        tests = [runner._base_path + '/Bindings/event-target-wrapper.html', runner._base_path + '/Parser/some-parser.html']
        unexpected_result_count = runner._run_tests_set(tests, runner._port)
        self.assertEqual(unexpected_result_count, 0)
        self.assertWritten(buildbot_output, ['RESULT Bindings: event-target-wrapper= 1489.05 ms\n',
                                             'median= 1487.0 ms, stdev= 14.46 ms, min= 1471.0 ms, max= 1510.0 ms\n',
                                             'RESULT Parser: some-parser= 1100.0 ms\n',
                                             'median= 1101.0 ms, stdev= 11.0 ms, min= 1080.0 ms, max= 1120.0 ms\n'])

    def test_run_test_set_with_json_output(self):
        """--output-json-path writes results plus timestamp and webkit-revision."""
        buildbot_output = StringIO.StringIO()
        runner = self.create_runner(buildbot_output, args=['--output-json-path=/mock-checkout/output.json'])
        runner._host.filesystem.files[runner._base_path + '/inspector/pass.html'] = True
        runner._host.filesystem.files[runner._base_path + '/Bindings/event-target-wrapper.html'] = True
        runner._timestamp = 123456789
        self.assertEqual(runner.run(), 0)
        self.assertWritten(buildbot_output, ['RESULT Bindings: event-target-wrapper= 1489.05 ms\n',
                                             'median= 1487.0 ms, stdev= 14.46 ms, min= 1471.0 ms, max= 1510.0 ms\n',
                                             'RESULT group_name: test_name= 42 ms\n'])

        self.assertEqual(json.loads(runner._host.filesystem.files['/mock-checkout/output.json']), {
            "timestamp": 123456789, "results":
            {"Bindings/event-target-wrapper": {"max": 1510, "avg": 1489.05, "median": 1487, "min": 1471, "stdev": 14.46},
            "group_name:test_name": 42},
            "webkit-revision": 5678})

    def test_run_test_set_with_json_source(self):
        """--source-json-path merges the source JSON's keys into the output JSON."""
        buildbot_output = StringIO.StringIO()
        runner = self.create_runner(buildbot_output, args=['--output-json-path=/mock-checkout/output.json',
            '--source-json-path=/mock-checkout/source.json'])
        runner._host.filesystem.files['/mock-checkout/source.json'] = '{"key": "value"}'
        runner._host.filesystem.files[runner._base_path + '/inspector/pass.html'] = True
        runner._host.filesystem.files[runner._base_path + '/Bindings/event-target-wrapper.html'] = True
        runner._timestamp = 123456789
        self.assertEqual(runner.run(), 0)
        self.assertWritten(buildbot_output, ['RESULT Bindings: event-target-wrapper= 1489.05 ms\n',
                                             'median= 1487.0 ms, stdev= 14.46 ms, min= 1471.0 ms, max= 1510.0 ms\n',
                                             'RESULT group_name: test_name= 42 ms\n'])

        self.assertEqual(json.loads(runner._host.filesystem.files['/mock-checkout/output.json']), {
            "timestamp": 123456789, "results":
            {"Bindings/event-target-wrapper": {"max": 1510, "avg": 1489.05, "median": 1487, "min": 1471, "stdev": 14.46},
            "group_name:test_name": 42},
            "webkit-revision": 5678,
            "key": "value"})

    def test_run_test_set_with_multiple_repositories(self):
        """Each repository reported by the port contributes a <name>-revision key."""
        buildbot_output = StringIO.StringIO()
        runner = self.create_runner(buildbot_output, args=['--output-json-path=/mock-checkout/output.json'])
        runner._host.filesystem.files[runner._base_path + '/inspector/pass.html'] = True
        runner._timestamp = 123456789
        runner._port.repository_paths = lambda: [('webkit', '/mock-checkout'), ('some', '/mock-checkout/some')]
        self.assertEqual(runner.run(), 0)

        self.assertEqual(json.loads(runner._host.filesystem.files['/mock-checkout/output.json']), {
            "timestamp": 123456789, "results": {"group_name:test_name": 42.0}, "webkit-revision": 5678, "some-revision": 5678})

    def test_run_with_upload_json(self):
        """--test-results-server uploads the JSON; a failed upload returns -3."""
        runner = self.create_runner(args=['--output-json-path=/mock-checkout/output.json',
            '--test-results-server', 'some.host', '--platform', 'platform1', '--builder-name', 'builder1', '--build-number', '123'])
        # Use a one-element list so the closure can mutate it (Python 2 has no nonlocal).
        upload_json_is_called = [False]
        upload_json_returns_true = True

        def mock_upload_json(hostname, json_path):
            self.assertEqual(hostname, 'some.host')
            self.assertEqual(json_path, '/mock-checkout/output.json')
            upload_json_is_called[0] = True
            return upload_json_returns_true

        runner._upload_json = mock_upload_json
        runner._host.filesystem.files['/mock-checkout/source.json'] = '{"key": "value"}'
        runner._host.filesystem.files[runner._base_path + '/inspector/pass.html'] = True
        runner._host.filesystem.files[runner._base_path + '/Bindings/event-target-wrapper.html'] = True
        runner._timestamp = 123456789
        self.assertEqual(runner.run(), 0)
        self.assertEqual(upload_json_is_called[0], True)
        generated_json = json.loads(runner._host.filesystem.files['/mock-checkout/output.json'])
        self.assertEqual(generated_json['platform'], 'platform1')
        self.assertEqual(generated_json['builder-name'], 'builder1')
        self.assertEqual(generated_json['build-number'], 123)
        upload_json_returns_true = False

        runner = self.create_runner(args=['--output-json-path=/mock-checkout/output.json',
            '--test-results-server', 'some.host', '--platform', 'platform1', '--builder-name', 'builder1', '--build-number', '123'])
        runner._upload_json = mock_upload_json
        self.assertEqual(runner.run(), -3)

    def test_upload_json(self):
        """_upload_json posts the file, logs bad responses, and survives uploader exceptions."""
        regular_output = StringIO.StringIO()
        runner = self.create_runner(regular_output=regular_output)
        runner._host.filesystem.files['/mock-checkout/some.json'] = 'some content'

        called = []
        upload_single_text_file_throws = False
        upload_single_text_file_return_value = StringIO.StringIO('OK')

        class MockFileUploader:
            def __init__(mock, url, timeout):
                self.assertEqual(url, 'https://some.host/api/test/report')
                self.assertTrue(isinstance(timeout, int) and timeout)
                called.append('FileUploader')

            def upload_single_text_file(mock, filesystem, content_type, filename):
                self.assertEqual(filesystem, runner._host.filesystem)
                self.assertEqual(content_type, 'application/json')
                self.assertEqual(filename, 'some.json')
                called.append('upload_single_text_file')
                if upload_single_text_file_throws:
                    # String exceptions are invalid; raise a real Exception instance.
                    raise Exception("Some exception")
                return upload_single_text_file_return_value

        runner._upload_json('some.host', 'some.json', MockFileUploader)
        self.assertEqual(called, ['FileUploader', 'upload_single_text_file'])

        output = OutputCapture()
        output.capture_output()
        upload_single_text_file_return_value = StringIO.StringIO('Some error')
        runner._upload_json('some.host', 'some.json', MockFileUploader)
        _, _, logs = output.restore_output()
        self.assertEqual(logs, 'Uploaded JSON but got a bad response:\nSome error\n')

        # Throwing an exception upload_single_text_file shouldn't blow up _upload_json
        called = []
        upload_single_text_file_throws = True
        runner._upload_json('some.host', 'some.json', MockFileUploader)
        self.assertEqual(called, ['FileUploader', 'upload_single_text_file'])

    def test_collect_tests(self):
        """A single test file under a known directory is collected."""
        runner = self.create_runner()
        filename = runner._host.filesystem.join(runner._base_path, 'inspector', 'a_file.html')
        runner._host.filesystem.files[filename] = 'a content'
        tests = runner._collect_tests()
        self.assertEqual(len(tests), 1)

    # Renamed from a second "test_collect_tests" definition, which silently
    # shadowed the method above so it never ran.
    def test_collect_tests_with_paths(self):
        """Only the tests named on the command line are collected."""
        runner = self.create_runner(args=['PerformanceTests/test1.html', 'test2.html'])

        def add_file(filename):
            runner._host.filesystem.files[runner._host.filesystem.join(runner._base_path, filename)] = 'some content'

        add_file('test1.html')
        add_file('test2.html')
        add_file('test3.html')
        runner._host.filesystem.chdir(runner._port.perf_tests_dir()[:runner._port.perf_tests_dir().rfind(runner._host.filesystem.sep)])
        tests = [runner._port.relative_perf_test_filename(test) for test in runner._collect_tests()]
        self.assertEqual(sorted(tests), ['test1.html', 'test2.html'])

    def test_collect_tests_with_skipped_list(self):
        """Tests and directories on the port's skipped list are excluded."""
        runner = self.create_runner()

        def add_file(dirname, filename, content=True):
            dirname = runner._host.filesystem.join(runner._base_path, dirname) if dirname else runner._base_path
            runner._host.filesystem.maybe_make_directory(dirname)
            runner._host.filesystem.files[runner._host.filesystem.join(dirname, filename)] = content

        add_file('inspector', 'test1.html')
        add_file('inspector', 'unsupported_test1.html')
        add_file('inspector', 'test2.html')
        add_file('inspector/resources', 'resource_file.html')
        add_file('unsupported', 'unsupported_test2.html')
        runner._port.skipped_perf_tests = lambda: ['inspector/unsupported_test1.html', 'unsupported']
        tests = [runner._port.relative_perf_test_filename(test) for test in runner._collect_tests()]
        self.assertEqual(sorted(tests), ['inspector/test1.html', 'inspector/test2.html'])

    def test_parse_args(self):
        """All supported command-line options parse into the expected values."""
        runner = self.create_runner()
        options, args = PerfTestsRunner._parse_args([
                '--verbose',
                '--build-directory=folder42',
                '--platform=platform42',
                '--builder-name', 'webkit-mac-1',
                '--build-number=56',
                '--time-out-ms=42',
                '--output-json-path=a/output.json',
                '--source-json-path=a/source.json',
                '--test-results-server=somehost',
                '--debug', 'an_arg'])
        self.assertEqual(options.build, True)
        self.assertEqual(options.verbose, True)
        self.assertEqual(options.help_printing, None)
        self.assertEqual(options.build_directory, 'folder42')
        self.assertEqual(options.platform, 'platform42')
        self.assertEqual(options.builder_name, 'webkit-mac-1')
        self.assertEqual(options.build_number, '56')
        self.assertEqual(options.time_out_ms, '42')
        self.assertEqual(options.configuration, 'Debug')
        self.assertEqual(options.print_options, None)
        self.assertEqual(options.output_json_path, 'a/output.json')
        self.assertEqual(options.source_json_path, 'a/source.json')
        self.assertEqual(options.test_results_server, 'somehost')
392
393
# Allow running this test file directly from the command line.
if __name__ == '__main__':
    unittest.main()