be925c953763b7389b984c7ca8ffbfc8cefaeaaa
[WebKit-https.git] / Tools / Scripts / webkitpy / performance_tests / perftestsrunner_unittest.py
1 #!/usr/bin/python
2 # Copyright (C) 2012 Google Inc. All rights reserved.
3 #
4 # Redistribution and use in source and binary forms, with or without
5 # modification, are permitted provided that the following conditions are
6 # met:
7 #
8 #     * Redistributions of source code must retain the above copyright
9 # notice, this list of conditions and the following disclaimer.
10 #     * Redistributions in binary form must reproduce the above
11 # copyright notice, this list of conditions and the following disclaimer
12 # in the documentation and/or other materials provided with the
13 # distribution.
14 #     * Neither the name of Google Inc. nor the names of its
15 # contributors may be used to endorse or promote products derived from
16 # this software without specific prior written permission.
17 #
18 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
19 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
20 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
21 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
22 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
23 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
24 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
25 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
26 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
28 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29
30 """Unit tests for run_perf_tests."""
31
32 import StringIO
33 import json
34 import unittest
35
36 from webkitpy.common.host_mock import MockHost
37 from webkitpy.common.system.filesystem_mock import MockFileSystem
38 from webkitpy.common.system.outputcapture import OutputCapture
39 from webkitpy.layout_tests.port.driver import DriverInput, DriverOutput
40 from webkitpy.layout_tests.port.test import TestPort
41 from webkitpy.layout_tests.views import printing
42 from webkitpy.performance_tests.perftest import ChromiumStylePerfTest
43 from webkitpy.performance_tests.perftest import PerfTest
44 from webkitpy.performance_tests.perftestsrunner import PerfTestsRunner
45
46
class MainTest(unittest.TestCase):
    """Unit tests for PerfTestsRunner.

    Covers single-test execution, whole-test-set runs, JSON result output,
    JSON upload, test collection/skipping, and command-line parsing. All
    filesystem and driver interaction goes through mocks, so no real
    DumpRenderTree is launched.
    """

    def assertWritten(self, stream, contents):
        """Assert that the mock stream's buffered writes equal contents.

        Uses assertEqual; assertEquals is a deprecated alias and the rest of
        this file already uses assertEqual.
        """
        self.assertEqual(stream.buflist, contents)

    class TestDriver:
        """Fake driver that fabricates DriverOutput based on the test name.

        The suffix of the requested test name selects the canned behavior:
        pass/timeout/failed/tonguey/crash, plus two realistic perf-test
        transcripts for the parser-style tests.
        """

        def run_test(self, driver_input):
            # Defaults describe a test that produced no output and neither
            # timed out nor crashed; the branches below override as needed.
            text = ''
            timeout = False
            crash = False
            if driver_input.test_name.endswith('pass.html'):
                text = 'RESULT group_name: test_name= 42 ms'
            elif driver_input.test_name.endswith('timeout.html'):
                timeout = True
            elif driver_input.test_name.endswith('failed.html'):
                # None (as opposed to '') simulates a driver read failure.
                text = None
            elif driver_input.test_name.endswith('tonguey.html'):
                text = 'we are not expecting an output from perf tests but RESULT blablabla'
            elif driver_input.test_name.endswith('crash.html'):
                crash = True
            elif driver_input.test_name.endswith('event-target-wrapper.html'):
                # A full 20-sample transcript, as emitted by WebKit-style
                # perf tests; the runner must parse the trailing statistics.
                text = """Running 20 times
Ignoring warm-up run (1502)
1504
1505
1510
1504
1507
1509
1510
1487
1488
1472
1472
1488
1473
1472
1475
1487
1486
1486
1475
1471

avg 1489.05
median 1487
stdev 14.46
min 1471
max 1510
"""
            elif driver_input.test_name.endswith('some-parser.html'):
                text = """Running 20 times
Ignoring warm-up run (1115)

avg 1100
median 1101
stdev 11
min 1080
max 1120
"""
            return DriverOutput(text, '', '', '', crash=crash, timeout=timeout)

        def start(self):
            """do nothing"""

        def stop(self):
            """do nothing"""

    def create_runner(self, args=None, driver_class=TestDriver):
        """Build a PerfTestsRunner wired to a TestPort and mock filesystem.

        args: optional list of command-line arguments (defaults to none;
            a None sentinel avoids the mutable-default-argument pitfall).
        driver_class: driver factory used by the port; defaults to TestDriver.
        """
        if args is None:
            args = []
        options, parsed_args = PerfTestsRunner._parse_args(args)
        test_port = TestPort(host=MockHost(), options=options)
        test_port.create_driver = lambda worker_number=None, no_timeout=False: driver_class()

        runner = PerfTestsRunner(args=args, port=test_port)
        # Pre-create the standard perf-test directories in the mock filesystem.
        runner._host.filesystem.maybe_make_directory(runner._base_path, 'inspector')
        runner._host.filesystem.maybe_make_directory(runner._base_path, 'Bindings')
        runner._host.filesystem.maybe_make_directory(runner._base_path, 'Parser')
        return runner

    def run_test(self, test_name):
        """Run a single Chromium-style perf test through the mock driver."""
        runner = self.create_runner()
        driver = MainTest.TestDriver()
        return runner._run_single_test(ChromiumStylePerfTest(test_name, runner._host.filesystem.join('some-dir', test_name)), driver)

    def test_run_passing_test(self):
        self.assertTrue(self.run_test('pass.html'))

    def test_run_silent_test(self):
        self.assertFalse(self.run_test('silent.html'))

    def test_run_failed_test(self):
        self.assertFalse(self.run_test('failed.html'))

    def test_run_tonguey_test(self):
        self.assertFalse(self.run_test('tonguey.html'))

    def test_run_timeout_test(self):
        self.assertFalse(self.run_test('timeout.html'))

    def test_run_crash_test(self):
        self.assertFalse(self.run_test('crash.html'))

    def _tests_for_runner(self, runner, test_names):
        """Build PerfTest objects for test_names rooted at the runner's base path.

        Tests under inspector/ become ChromiumStylePerfTest; everything else
        becomes a plain PerfTest.
        """
        filesystem = runner._host.filesystem
        tests = []
        for test in test_names:
            path = filesystem.join(runner._base_path, test)
            if test.startswith('inspector/'):
                tests.append(ChromiumStylePerfTest(test, path))
            else:
                tests.append(PerfTest(test, path))
        return tests

    def test_run_test_set(self):
        runner = self.create_runner()
        tests = self._tests_for_runner(runner, ['inspector/pass.html', 'inspector/silent.html', 'inspector/failed.html',
            'inspector/tonguey.html', 'inspector/timeout.html', 'inspector/crash.html'])
        output = OutputCapture()
        output.capture_output()
        try:
            unexpected_result_count = runner._run_tests_set(tests, runner._port)
        finally:
            stdout, stderr, log = output.restore_output()
        # Only pass.html succeeds; every other test is an unexpected result.
        self.assertEqual(unexpected_result_count, len(tests) - 1)
        self.assertTrue('\nRESULT group_name: test_name= 42 ms\n' in log)

    def test_run_test_set_kills_drt_per_run(self):
        """The driver must be stopped once per test, even for failing tests."""

        class TestDriverWithStopCount(MainTest.TestDriver):
            stop_count = 0

            def stop(self):
                TestDriverWithStopCount.stop_count += 1

        runner = self.create_runner(driver_class=TestDriverWithStopCount)

        tests = self._tests_for_runner(runner, ['inspector/pass.html', 'inspector/silent.html', 'inspector/failed.html',
            'inspector/tonguey.html', 'inspector/timeout.html', 'inspector/crash.html'])
        runner._run_tests_set(tests, runner._port)

        self.assertEqual(TestDriverWithStopCount.stop_count, 6)

    def test_run_test_pause_before_testing(self):
        """--pause-before-testing prompts once and starts the driver once."""

        class TestDriverWithStartCount(MainTest.TestDriver):
            start_count = 0

            def start(self):
                TestDriverWithStartCount.start_count += 1

        runner = self.create_runner(args=["--pause-before-testing"], driver_class=TestDriverWithStartCount)
        tests = self._tests_for_runner(runner, ['inspector/pass.html'])

        output = OutputCapture()
        output.capture_output()
        try:
            unexpected_result_count = runner._run_tests_set(tests, runner._port)
            self.assertEqual(TestDriverWithStartCount.start_count, 1)
        finally:
            stdout, stderr, log = output.restore_output()
        self.assertEqual(stderr, "Ready to run test?\n")
        self.assertEqual(log, "Running inspector/pass.html (1 of 1)\nRESULT group_name: test_name= 42 ms\n\n")

    def test_run_test_set_for_parser_tests(self):
        runner = self.create_runner()
        tests = self._tests_for_runner(runner, ['Bindings/event-target-wrapper.html', 'Parser/some-parser.html'])
        output = OutputCapture()
        output.capture_output()
        try:
            unexpected_result_count = runner._run_tests_set(tests, runner._port)
        finally:
            stdout, stderr, log = output.restore_output()
        self.assertEqual(unexpected_result_count, 0)
        self.assertEqual(log, '\n'.join(['Running Bindings/event-target-wrapper.html (1 of 2)',
        'RESULT Bindings: event-target-wrapper= 1489.05 ms',
        'median= 1487.0 ms, stdev= 14.46 ms, min= 1471.0 ms, max= 1510.0 ms',
        '',
        'Running Parser/some-parser.html (2 of 2)',
        'RESULT Parser: some-parser= 1100.0 ms',
        'median= 1101.0 ms, stdev= 11.0 ms, min= 1080.0 ms, max= 1120.0 ms',
        '', '']))

    def test_run_test_set_with_json_output(self):
        runner = self.create_runner(args=['--output-json-path=/mock-checkout/output.json'])
        runner._host.filesystem.files[runner._base_path + '/inspector/pass.html'] = True
        runner._host.filesystem.files[runner._base_path + '/Bindings/event-target-wrapper.html'] = True
        runner._timestamp = 123456789
        output_capture = OutputCapture()
        output_capture.capture_output()
        try:
            self.assertEqual(runner.run(), 0)
        finally:
            stdout, stderr, logs = output_capture.restore_output()

        self.assertEqual(logs,
            '\n'.join(['Running Bindings/event-target-wrapper.html (1 of 2)',
                       'RESULT Bindings: event-target-wrapper= 1489.05 ms',
                       'median= 1487.0 ms, stdev= 14.46 ms, min= 1471.0 ms, max= 1510.0 ms',
                       '',
                       'Running inspector/pass.html (2 of 2)',
                       'RESULT group_name: test_name= 42 ms',
                       '', '']))

        # The serialized JSON must round-trip and include the mock revision.
        self.assertEqual(json.loads(runner._host.filesystem.files['/mock-checkout/output.json']), {
            "timestamp": 123456789, "results":
            {"Bindings/event-target-wrapper": {"max": 1510, "avg": 1489.05, "median": 1487, "min": 1471, "stdev": 14.46, "unit": "ms"},
            "inspector/pass.html:group_name:test_name": 42},
            "webkit-revision": 5678})

    def test_run_test_set_with_json_source(self):
        runner = self.create_runner(args=['--output-json-path=/mock-checkout/output.json', '--source-json-path=/mock-checkout/source.json'])
        runner._host.filesystem.files['/mock-checkout/source.json'] = '{"key": "value"}'
        runner._host.filesystem.files[runner._base_path + '/inspector/pass.html'] = True
        runner._host.filesystem.files[runner._base_path + '/Bindings/event-target-wrapper.html'] = True
        runner._timestamp = 123456789
        output_capture = OutputCapture()
        output_capture.capture_output()
        try:
            self.assertEqual(runner.run(), 0)
        finally:
            stdout, stderr, logs = output_capture.restore_output()

        self.assertEqual(logs, '\n'.join(['Running Bindings/event-target-wrapper.html (1 of 2)',
            'RESULT Bindings: event-target-wrapper= 1489.05 ms',
            'median= 1487.0 ms, stdev= 14.46 ms, min= 1471.0 ms, max= 1510.0 ms',
            '',
            'Running inspector/pass.html (2 of 2)',
            'RESULT group_name: test_name= 42 ms',
            '', '']))

        # Keys from source.json (here "key") must be merged into the output.
        self.assertEqual(json.loads(runner._host.filesystem.files['/mock-checkout/output.json']), {
            "timestamp": 123456789, "results":
            {"Bindings/event-target-wrapper": {"max": 1510, "avg": 1489.05, "median": 1487, "min": 1471, "stdev": 14.46, "unit": "ms"},
            "inspector/pass.html:group_name:test_name": 42},
            "webkit-revision": 5678,
            "key": "value"})

    def test_run_test_set_with_multiple_repositories(self):
        runner = self.create_runner(args=['--output-json-path=/mock-checkout/output.json'])
        runner._host.filesystem.files[runner._base_path + '/inspector/pass.html'] = True
        runner._timestamp = 123456789
        # Two repositories should each contribute a "<name>-revision" key.
        runner._port.repository_paths = lambda: [('webkit', '/mock-checkout'), ('some', '/mock-checkout/some')]
        self.assertEqual(runner.run(), 0)
        self.assertEqual(json.loads(runner._host.filesystem.files['/mock-checkout/output.json']), {
            "timestamp": 123456789, "results": {"inspector/pass.html:group_name:test_name": 42.0}, "webkit-revision": 5678, "some-revision": 5678})

    def test_run_with_upload_json(self):
        runner = self.create_runner(args=['--output-json-path=/mock-checkout/output.json',
            '--test-results-server', 'some.host', '--platform', 'platform1', '--builder-name', 'builder1', '--build-number', '123'])
        # A one-element list so the closure below can mutate the flag
        # (Python 2 has no "nonlocal").
        upload_json_is_called = [False]
        upload_json_returns_true = True

        def mock_upload_json(hostname, json_path):
            self.assertEqual(hostname, 'some.host')
            self.assertEqual(json_path, '/mock-checkout/output.json')
            upload_json_is_called[0] = True
            return upload_json_returns_true

        runner._upload_json = mock_upload_json
        runner._host.filesystem.files['/mock-checkout/source.json'] = '{"key": "value"}'
        runner._host.filesystem.files[runner._base_path + '/inspector/pass.html'] = True
        runner._host.filesystem.files[runner._base_path + '/Bindings/event-target-wrapper.html'] = True
        runner._timestamp = 123456789
        self.assertEqual(runner.run(), 0)
        self.assertEqual(upload_json_is_called[0], True)
        generated_json = json.loads(runner._host.filesystem.files['/mock-checkout/output.json'])
        self.assertEqual(generated_json['platform'], 'platform1')
        self.assertEqual(generated_json['builder-name'], 'builder1')
        self.assertEqual(generated_json['build-number'], 123)
        upload_json_returns_true = False

        # A failed upload must surface as the runner's -3 exit code.
        runner = self.create_runner(args=['--output-json-path=/mock-checkout/output.json',
            '--test-results-server', 'some.host', '--platform', 'platform1', '--builder-name', 'builder1', '--build-number', '123'])
        runner._upload_json = mock_upload_json
        self.assertEqual(runner.run(), -3)

    def test_upload_json(self):
        runner = self.create_runner()
        runner._host.filesystem.files['/mock-checkout/some.json'] = 'some content'

        called = []
        upload_single_text_file_throws = False
        upload_single_text_file_return_value = StringIO.StringIO('OK')

        class MockFileUploader:
            def __init__(mock, url, timeout):
                self.assertEqual(url, 'https://some.host/api/test/report')
                self.assertTrue(isinstance(timeout, int) and timeout)
                called.append('FileUploader')

            def upload_single_text_file(mock, filesystem, content_type, filename):
                self.assertEqual(filesystem, runner._host.filesystem)
                self.assertEqual(content_type, 'application/json')
                self.assertEqual(filename, 'some.json')
                called.append('upload_single_text_file')
                if upload_single_text_file_throws:
                    # String exceptions are illegal in Python >= 2.6;
                    # raise an exception instance instead.
                    raise Exception("Some exception")
                return upload_single_text_file_return_value

        runner._upload_json('some.host', 'some.json', MockFileUploader)
        self.assertEqual(called, ['FileUploader', 'upload_single_text_file'])

        output = OutputCapture()
        output.capture_output()
        upload_single_text_file_return_value = StringIO.StringIO('Some error')
        runner._upload_json('some.host', 'some.json', MockFileUploader)
        _, _, logs = output.restore_output()
        self.assertEqual(logs, 'Uploaded JSON but got a bad response:\nSome error\n')

        # Throwing an exception upload_single_text_file shouldn't blow up _upload_json
        called = []
        upload_single_text_file_throws = True
        runner._upload_json('some.host', 'some.json', MockFileUploader)
        self.assertEqual(called, ['FileUploader', 'upload_single_text_file'])

    def test_collect_tests_with_single_file(self):
        # NOTE: this test was previously also named test_collect_tests, which
        # silently shadowed it so it never ran; renamed so both tests execute.
        runner = self.create_runner()
        filename = runner._host.filesystem.join(runner._base_path, 'inspector', 'a_file.html')
        runner._host.filesystem.files[filename] = 'a content'
        tests = runner._collect_tests()
        self.assertEqual(len(tests), 1)

    def _collect_tests_and_sort_test_name(self, runner):
        """Collect the runner's tests and return their names, sorted."""
        return sorted([test.test_name() for test in runner._collect_tests()])

    def test_collect_tests(self):
        # Paths given on the command line restrict which tests are collected.
        runner = self.create_runner(args=['PerformanceTests/test1.html', 'test2.html'])

        def add_file(filename):
            runner._host.filesystem.files[runner._host.filesystem.join(runner._base_path, filename)] = 'some content'

        add_file('test1.html')
        add_file('test2.html')
        add_file('test3.html')
        # chdir to the parent of the perf-tests directory so relative
        # command-line paths resolve the same way they do for real users.
        runner._host.filesystem.chdir(runner._port.perf_tests_dir()[:runner._port.perf_tests_dir().rfind(runner._host.filesystem.sep)])
        self.assertEqual(self._collect_tests_and_sort_test_name(runner), ['test1.html', 'test2.html'])

    def test_collect_tests_with_skipped_list(self):
        runner = self.create_runner()

        def add_file(dirname, filename, content=True):
            dirname = runner._host.filesystem.join(runner._base_path, dirname) if dirname else runner._base_path
            runner._host.filesystem.maybe_make_directory(dirname)
            runner._host.filesystem.files[runner._host.filesystem.join(dirname, filename)] = content

        add_file('inspector', 'test1.html')
        add_file('inspector', 'unsupported_test1.html')
        add_file('inspector', 'test2.html')
        add_file('inspector/resources', 'resource_file.html')
        add_file('unsupported', 'unsupported_test2.html')
        # Skipped entries may be individual files or whole directories.
        runner._port.skipped_perf_tests = lambda: ['inspector/unsupported_test1.html', 'unsupported']
        self.assertEqual(self._collect_tests_and_sort_test_name(runner), ['inspector/test1.html', 'inspector/test2.html'])

    def test_collect_tests_with_page_load_svg(self):
        runner = self.create_runner()

        def add_file(dirname, filename, content=True):
            dirname = runner._host.filesystem.join(runner._base_path, dirname) if dirname else runner._base_path
            runner._host.filesystem.maybe_make_directory(dirname)
            runner._host.filesystem.files[runner._host.filesystem.join(dirname, filename)] = content

        # SVG files under PageLoad/ should be classified as page-loading tests.
        add_file('PageLoad', 'some-svg-test.svg')
        tests = runner._collect_tests()
        self.assertEqual(len(tests), 1)
        self.assertEqual(tests[0].__class__.__name__, 'PageLoadingPerfTest')

    def test_parse_args(self):
        options, args = PerfTestsRunner._parse_args([
                '--build-directory=folder42',
                '--platform=platform42',
                '--builder-name', 'webkit-mac-1',
                '--build-number=56',
                '--time-out-ms=42',
                '--output-json-path=a/output.json',
                '--source-json-path=a/source.json',
                '--test-results-server=somehost',
                '--debug'])
        self.assertEqual(options.build, True)
        self.assertEqual(options.build_directory, 'folder42')
        self.assertEqual(options.platform, 'platform42')
        self.assertEqual(options.builder_name, 'webkit-mac-1')
        # Numeric-looking options stay strings; parsing leaves them untouched.
        self.assertEqual(options.build_number, '56')
        self.assertEqual(options.time_out_ms, '42')
        self.assertEqual(options.configuration, 'Debug')
        self.assertEqual(options.output_json_path, 'a/output.json')
        self.assertEqual(options.source_json_path, 'a/source.json')
        self.assertEqual(options.test_results_server, 'somehost')
435
# Allow running this test file directly from the command line.
if __name__ == '__main__':
    unittest.main()