de3528cb178d78278694caf083c00a18472fe8be
[WebKit-https.git] / Tools / Scripts / webkitpy / performance_tests / perftestsrunner_unittest.py
1 #!/usr/bin/python
2 # Copyright (C) 2012 Google Inc. All rights reserved.
3 #
4 # Redistribution and use in source and binary forms, with or without
5 # modification, are permitted provided that the following conditions are
6 # met:
7 #
8 #     * Redistributions of source code must retain the above copyright
9 # notice, this list of conditions and the following disclaimer.
10 #     * Redistributions in binary form must reproduce the above
11 # copyright notice, this list of conditions and the following disclaimer
12 # in the documentation and/or other materials provided with the
13 # distribution.
14 #     * Neither the name of Google Inc. nor the names of its
15 # contributors may be used to endorse or promote products derived from
16 # this software without specific prior written permission.
17 #
18 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
19 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
20 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
21 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
22 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
23 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
24 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
25 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
26 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
28 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29
30 """Unit tests for run_perf_tests."""
31
32 import StringIO
33 import json
34 import unittest
35
36 from webkitpy.common.host_mock import MockHost
37 from webkitpy.common.system.filesystem_mock import MockFileSystem
38 from webkitpy.common.system.outputcapture import OutputCapture
39 from webkitpy.layout_tests.port.driver import DriverInput, DriverOutput
40 from webkitpy.layout_tests.port.test import TestPort
41 from webkitpy.layout_tests.views import printing
42 from webkitpy.performance_tests.perftest import ChromiumStylePerfTest
43 from webkitpy.performance_tests.perftest import PerfTest
44 from webkitpy.performance_tests.perftestsrunner import PerfTestsRunner
45
46
class MainTest(unittest.TestCase):
    """Tests for PerfTestsRunner: single-test runs, test-set runs, test
    collection, and JSON output/upload behavior."""

    def assertWritten(self, stream, contents):
        # assertEquals is a deprecated alias of assertEqual; use the
        # canonical spelling.
        self.assertEqual(stream.buflist, contents)

    class TestDriver:
        """Fake DumpRenderTree driver that returns canned output keyed off
        the test file name (pass/timeout/failed/crash/perf-style output)."""

        def run_test(self, driver_input):
            text = ''
            timeout = False
            crash = False
            if driver_input.test_name.endswith('pass.html'):
                text = 'RESULT group_name: test_name= 42 ms'
            elif driver_input.test_name.endswith('timeout.html'):
                timeout = True
            elif driver_input.test_name.endswith('failed.html'):
                text = None
            elif driver_input.test_name.endswith('tonguey.html'):
                text = 'we are not expecting an output from perf tests but RESULT blablabla'
            elif driver_input.test_name.endswith('crash.html'):
                crash = True
            elif driver_input.test_name.endswith('event-target-wrapper.html'):
                text = """Running 20 times
Ignoring warm-up run (1502)
1504
1505
1510
1504
1507
1509
1510
1487
1488
1472
1472
1488
1473
1472
1475
1487
1486
1486
1475
1471

avg 1489.05
median 1487
stdev 14.46
min 1471
max 1510
"""
            elif driver_input.test_name.endswith('some-parser.html'):
                text = """Running 20 times
Ignoring warm-up run (1115)

avg 1100
median 1101
stdev 11
min 1080
max 1120
"""
            return DriverOutput(text, '', '', '', crash=crash, timeout=timeout)

        def start(self):
            """do nothing"""

        def stop(self):
            """do nothing"""
114     def create_runner(self, args=[], driver_class=TestDriver):
115         options, parsed_args = PerfTestsRunner._parse_args(args)
116         test_port = TestPort(host=MockHost(), options=options)
117         test_port.create_driver = lambda worker_number=None, no_timeout=False: driver_class()
118
119         runner = PerfTestsRunner(args=args, port=test_port)
120         runner._host.filesystem.maybe_make_directory(runner._base_path, 'inspector')
121         runner._host.filesystem.maybe_make_directory(runner._base_path, 'Bindings')
122         runner._host.filesystem.maybe_make_directory(runner._base_path, 'Parser')
123         return runner, test_port
124
125     def run_test(self, test_name):
126         runner, port = self.create_runner()
127         driver = MainTest.TestDriver()
128         return runner._run_single_test(ChromiumStylePerfTest(port, test_name, runner._host.filesystem.join('some-dir', test_name)), driver)
129
    # Each of the following drives one ChromiumStylePerfTest through the fake
    # driver (via run_test above) and checks whether it is reported as passing.

    def test_run_passing_test(self):
        # Emits a well-formed 'RESULT ...' line, so the run succeeds.
        self.assertTrue(self.run_test('pass.html'))

    def test_run_silent_test(self):
        # No branch in TestDriver matches, so output text is '' -> failure.
        self.assertFalse(self.run_test('silent.html'))

    def test_run_failed_test(self):
        # Driver returns text=None -> failure.
        self.assertFalse(self.run_test('failed.html'))

    def test_run_tonguey_test(self):
        # Driver returns unexpected extra output -> failure.
        self.assertFalse(self.run_test('tonguey.html'))

    def test_run_timeout_test(self):
        # Driver reports a timeout -> failure.
        self.assertFalse(self.run_test('timeout.html'))

    def test_run_crash_test(self):
        # Driver reports a crash -> failure.
        self.assertFalse(self.run_test('crash.html'))
147
148     def _tests_for_runner(self, runner, test_names):
149         filesystem = runner._host.filesystem
150         tests = []
151         for test in test_names:
152             path = filesystem.join(runner._base_path, test)
153             dirname = filesystem.dirname(path)
154             if test.startswith('inspector/'):
155                 tests.append(ChromiumStylePerfTest(runner._port, test, path))
156             else:
157                 tests.append(PerfTest(runner._port, test, path))
158         return tests
159
160     def test_run_test_set(self):
161         runner, port = self.create_runner()
162         tests = self._tests_for_runner(runner, ['inspector/pass.html', 'inspector/silent.html', 'inspector/failed.html',
163             'inspector/tonguey.html', 'inspector/timeout.html', 'inspector/crash.html'])
164         output = OutputCapture()
165         output.capture_output()
166         try:
167             unexpected_result_count = runner._run_tests_set(tests, port)
168         finally:
169             stdout, stderr, log = output.restore_output()
170         self.assertEqual(unexpected_result_count, len(tests) - 1)
171         self.assertTrue('\nRESULT group_name: test_name= 42 ms\n' in log)
172
173     def test_run_test_set_kills_drt_per_run(self):
174
175         class TestDriverWithStopCount(MainTest.TestDriver):
176             stop_count = 0
177
178             def stop(self):
179                 TestDriverWithStopCount.stop_count += 1
180
181         runner, port = self.create_runner(driver_class=TestDriverWithStopCount)
182
183         tests = self._tests_for_runner(runner, ['inspector/pass.html', 'inspector/silent.html', 'inspector/failed.html',
184             'inspector/tonguey.html', 'inspector/timeout.html', 'inspector/crash.html'])
185         unexpected_result_count = runner._run_tests_set(tests, port)
186
187         self.assertEqual(TestDriverWithStopCount.stop_count, 6)
188
189     def test_run_test_pause_before_testing(self):
190         class TestDriverWithStartCount(MainTest.TestDriver):
191             start_count = 0
192
193             def start(self):
194                 TestDriverWithStartCount.start_count += 1
195
196         runner, port = self.create_runner(args=["--pause-before-testing"], driver_class=TestDriverWithStartCount)
197         tests = self._tests_for_runner(runner, ['inspector/pass.html'])
198
199         output = OutputCapture()
200         output.capture_output()
201         try:
202             unexpected_result_count = runner._run_tests_set(tests, port)
203             self.assertEqual(TestDriverWithStartCount.start_count, 1)
204         finally:
205             stdout, stderr, log = output.restore_output()
206         self.assertEqual(stderr, "Ready to run test?\n")
207         self.assertEqual(log, "Running inspector/pass.html (1 of 1)\nRESULT group_name: test_name= 42 ms\n\n")
208
209     def test_run_test_set_for_parser_tests(self):
210         runner, port = self.create_runner()
211         tests = self._tests_for_runner(runner, ['Bindings/event-target-wrapper.html', 'Parser/some-parser.html'])
212         output = OutputCapture()
213         output.capture_output()
214         try:
215             unexpected_result_count = runner._run_tests_set(tests, port)
216         finally:
217             stdout, stderr, log = output.restore_output()
218         self.assertEqual(unexpected_result_count, 0)
219         self.assertEqual(log, '\n'.join(['Running Bindings/event-target-wrapper.html (1 of 2)',
220         'RESULT Bindings: event-target-wrapper= 1489.05 ms',
221         'median= 1487.0 ms, stdev= 14.46 ms, min= 1471.0 ms, max= 1510.0 ms',
222         '',
223         'Running Parser/some-parser.html (2 of 2)',
224         'RESULT Parser: some-parser= 1100.0 ms',
225         'median= 1101.0 ms, stdev= 11.0 ms, min= 1080.0 ms, max= 1120.0 ms',
226         '', '']))
227
228     def test_run_test_set_with_json_output(self):
229         runner, port = self.create_runner(args=['--output-json-path=/mock-checkout/output.json'])
230         port.host.filesystem.files[runner._base_path + '/inspector/pass.html'] = True
231         port.host.filesystem.files[runner._base_path + '/Bindings/event-target-wrapper.html'] = True
232         runner._timestamp = 123456789
233         output_capture = OutputCapture()
234         output_capture.capture_output()
235         try:
236             self.assertEqual(runner.run(), 0)
237         finally:
238             stdout, stderr, logs = output_capture.restore_output()
239
240         self.assertEqual(logs,
241             '\n'.join(['Running 2 tests',
242                        'Running Bindings/event-target-wrapper.html (1 of 2)',
243                        'RESULT Bindings: event-target-wrapper= 1489.05 ms',
244                        'median= 1487.0 ms, stdev= 14.46 ms, min= 1471.0 ms, max= 1510.0 ms',
245                        '',
246                        'Running inspector/pass.html (2 of 2)',
247                        'RESULT group_name: test_name= 42 ms',
248                        '', '']))
249
250         self.assertEqual(json.loads(port.host.filesystem.files['/mock-checkout/output.json']), {
251             "timestamp": 123456789, "results":
252             {"Bindings/event-target-wrapper": {"max": 1510, "avg": 1489.05, "median": 1487, "min": 1471, "stdev": 14.46, "unit": "ms"},
253             "inspector/pass.html:group_name:test_name": 42},
254             "webkit-revision": 5678})
255
256     def test_run_test_set_with_json_source(self):
257         runner, port = self.create_runner(args=['--output-json-path=/mock-checkout/output.json', '--source-json-path=/mock-checkout/source.json'])
258         port.host.filesystem.files['/mock-checkout/source.json'] = '{"key": "value"}'
259         port.host.filesystem.files[runner._base_path + '/inspector/pass.html'] = True
260         port.host.filesystem.files[runner._base_path + '/Bindings/event-target-wrapper.html'] = True
261         runner._timestamp = 123456789
262         output_capture = OutputCapture()
263         output_capture.capture_output()
264         try:
265             self.assertEqual(runner.run(), 0)
266         finally:
267             stdout, stderr, logs = output_capture.restore_output()
268
269         self.assertEqual(logs, '\n'.join(['Running 2 tests',
270             'Running Bindings/event-target-wrapper.html (1 of 2)',
271             'RESULT Bindings: event-target-wrapper= 1489.05 ms',
272             'median= 1487.0 ms, stdev= 14.46 ms, min= 1471.0 ms, max= 1510.0 ms',
273             '',
274             'Running inspector/pass.html (2 of 2)',
275             'RESULT group_name: test_name= 42 ms',
276             '', '']))
277
278         self.assertEqual(json.loads(port.host.filesystem.files['/mock-checkout/output.json']), {
279             "timestamp": 123456789, "results":
280             {"Bindings/event-target-wrapper": {"max": 1510, "avg": 1489.05, "median": 1487, "min": 1471, "stdev": 14.46, "unit": "ms"},
281             "inspector/pass.html:group_name:test_name": 42},
282             "webkit-revision": 5678,
283             "key": "value"})
284
285     def test_run_test_set_with_multiple_repositories(self):
286         runner, port = self.create_runner(args=['--output-json-path=/mock-checkout/output.json'])
287         port.host.filesystem.files[runner._base_path + '/inspector/pass.html'] = True
288         runner._timestamp = 123456789
289         port.repository_paths = lambda: [('webkit', '/mock-checkout'), ('some', '/mock-checkout/some')]
290         self.assertEqual(runner.run(), 0)
291         self.assertEqual(json.loads(port.host.filesystem.files['/mock-checkout/output.json']), {
292             "timestamp": 123456789, "results": {"inspector/pass.html:group_name:test_name": 42.0}, "webkit-revision": 5678, "some-revision": 5678})
293
294     def test_run_with_upload_json(self):
295         runner, port = self.create_runner(args=['--output-json-path=/mock-checkout/output.json',
296             '--test-results-server', 'some.host', '--platform', 'platform1', '--builder-name', 'builder1', '--build-number', '123'])
297         upload_json_is_called = [False]
298         upload_json_returns_true = True
299
300         def mock_upload_json(hostname, json_path):
301             self.assertEqual(hostname, 'some.host')
302             self.assertEqual(json_path, '/mock-checkout/output.json')
303             upload_json_is_called[0] = True
304             return upload_json_returns_true
305
306         runner._upload_json = mock_upload_json
307         port.host.filesystem.files['/mock-checkout/source.json'] = '{"key": "value"}'
308         port.host.filesystem.files[runner._base_path + '/inspector/pass.html'] = True
309         port.host.filesystem.files[runner._base_path + '/Bindings/event-target-wrapper.html'] = True
310         runner._timestamp = 123456789
311         self.assertEqual(runner.run(), 0)
312         self.assertEqual(upload_json_is_called[0], True)
313         generated_json = json.loads(port.host.filesystem.files['/mock-checkout/output.json'])
314         self.assertEqual(generated_json['platform'], 'platform1')
315         self.assertEqual(generated_json['builder-name'], 'builder1')
316         self.assertEqual(generated_json['build-number'], 123)
317         upload_json_returns_true = False
318
319         runner, port = self.create_runner(args=['--output-json-path=/mock-checkout/output.json',
320             '--test-results-server', 'some.host', '--platform', 'platform1', '--builder-name', 'builder1', '--build-number', '123'])
321         runner._upload_json = mock_upload_json
322         self.assertEqual(runner.run(), -3)
323
324     def test_upload_json(self):
325         runner, port = self.create_runner()
326         port.host.filesystem.files['/mock-checkout/some.json'] = 'some content'
327
328         called = []
329         upload_single_text_file_throws = False
330         upload_single_text_file_return_value = StringIO.StringIO('OK')
331
332         class MockFileUploader:
333             def __init__(mock, url, timeout):
334                 self.assertEqual(url, 'https://some.host/api/test/report')
335                 self.assertTrue(isinstance(timeout, int) and timeout)
336                 called.append('FileUploader')
337
338             def upload_single_text_file(mock, filesystem, content_type, filename):
339                 self.assertEqual(filesystem, port.host.filesystem)
340                 self.assertEqual(content_type, 'application/json')
341                 self.assertEqual(filename, 'some.json')
342                 called.append('upload_single_text_file')
343                 if upload_single_text_file_throws:
344                     raise "Some exception"
345                 return upload_single_text_file_return_value
346
347         runner._upload_json('some.host', 'some.json', MockFileUploader)
348         self.assertEqual(called, ['FileUploader', 'upload_single_text_file'])
349
350         output = OutputCapture()
351         output.capture_output()
352         upload_single_text_file_return_value = StringIO.StringIO('Some error')
353         runner._upload_json('some.host', 'some.json', MockFileUploader)
354         _, _, logs = output.restore_output()
355         self.assertEqual(logs, 'Uploaded JSON but got a bad response:\nSome error\n')
356
357         # Throwing an exception upload_single_text_file shouldn't blow up _upload_json
358         called = []
359         upload_single_text_file_throws = True
360         runner._upload_json('some.host', 'some.json', MockFileUploader)
361         self.assertEqual(called, ['FileUploader', 'upload_single_text_file'])
362
363     def _add_file(self, runner, dirname, filename, content=True):
364         dirname = runner._host.filesystem.join(runner._base_path, dirname) if dirname else runner._base_path
365         runner._host.filesystem.maybe_make_directory(dirname)
366         runner._host.filesystem.files[runner._host.filesystem.join(dirname, filename)] = content
367
368     def test_collect_tests(self):
369         runner, port = self.create_runner()
370         self._add_file(runner, 'inspector', 'a_file.html', 'a content')
371         tests = runner._collect_tests()
372         self.assertEqual(len(tests), 1)
373
374     def _collect_tests_and_sort_test_name(self, runner):
375         return sorted([test.test_name() for test in runner._collect_tests()])
376
377     def test_collect_tests_with_multile_files(self):
378         runner, port = self.create_runner(args=['PerformanceTests/test1.html', 'test2.html'])
379
380         def add_file(filename):
381             port.host.filesystem.files[runner._host.filesystem.join(runner._base_path, filename)] = 'some content'
382
383         add_file('test1.html')
384         add_file('test2.html')
385         add_file('test3.html')
386         port.host.filesystem.chdir(runner._port.perf_tests_dir()[:runner._port.perf_tests_dir().rfind(runner._host.filesystem.sep)])
387         self.assertEqual(self._collect_tests_and_sort_test_name(runner), ['test1.html', 'test2.html'])
388
389     def test_collect_tests_with_skipped_list(self):
390         runner, port = self.create_runner()
391
392         self._add_file(runner, 'inspector', 'test1.html')
393         self._add_file(runner, 'inspector', 'unsupported_test1.html')
394         self._add_file(runner, 'inspector', 'test2.html')
395         self._add_file(runner, 'inspector/resources', 'resource_file.html')
396         self._add_file(runner, 'unsupported', 'unsupported_test2.html')
397         port.skipped_perf_tests = lambda: ['inspector/unsupported_test1.html', 'unsupported']
398         self.assertEqual(self._collect_tests_and_sort_test_name(runner), ['inspector/test1.html', 'inspector/test2.html'])
399
400     def test_collect_tests_with_skipped_list(self):
401         runner, port = self.create_runner(args=['--force'])
402
403         self._add_file(runner, 'inspector', 'test1.html')
404         self._add_file(runner, 'inspector', 'unsupported_test1.html')
405         self._add_file(runner, 'inspector', 'test2.html')
406         self._add_file(runner, 'inspector/resources', 'resource_file.html')
407         self._add_file(runner, 'unsupported', 'unsupported_test2.html')
408         port.skipped_perf_tests = lambda: ['inspector/unsupported_test1.html', 'unsupported']
409         self.assertEqual(self._collect_tests_and_sort_test_name(runner), ['inspector/test1.html', 'inspector/test2.html', 'inspector/unsupported_test1.html', 'unsupported/unsupported_test2.html'])
410
    def test_collect_tests_with_page_load_svg(self):
        # SVG files under PageLoad/ are collected as PageLoadingPerfTest.
        runner, port = self.create_runner()
        self._add_file(runner, 'PageLoad', 'some-svg-test.svg')
        tests = runner._collect_tests()
        self.assertEqual(len(tests), 1)
        self.assertEqual(tests[0].__class__.__name__, 'PageLoadingPerfTest')

    def test_collect_tests_should_ignore_replay_tests_by_default(self):
        # Without --replay, .replay files are not collected at all.
        runner, port = self.create_runner()
        self._add_file(runner, 'Replay', 'www.webkit.org.replay')
        self.assertEqual(runner._collect_tests(), [])

    def test_collect_tests_with_replay_tests(self):
        # With --replay, .replay files become ReplayPerfTest instances.
        runner, port = self.create_runner(args=['--replay'])
        self._add_file(runner, 'Replay', 'www.webkit.org.replay')
        tests = runner._collect_tests()
        self.assertEqual(len(tests), 1)
        self.assertEqual(tests[0].__class__.__name__, 'ReplayPerfTest')
429
430     def test_parse_args(self):
431         runner, port = self.create_runner()
432         options, args = PerfTestsRunner._parse_args([
433                 '--build-directory=folder42',
434                 '--platform=platform42',
435                 '--builder-name', 'webkit-mac-1',
436                 '--build-number=56',
437                 '--time-out-ms=42',
438                 '--output-json-path=a/output.json',
439                 '--source-json-path=a/source.json',
440                 '--test-results-server=somehost',
441                 '--debug'])
442         self.assertEqual(options.build, True)
443         self.assertEqual(options.build_directory, 'folder42')
444         self.assertEqual(options.platform, 'platform42')
445         self.assertEqual(options.builder_name, 'webkit-mac-1')
446         self.assertEqual(options.build_number, '56')
447         self.assertEqual(options.time_out_ms, '42')
448         self.assertEqual(options.configuration, 'Debug')
449         self.assertEqual(options.output_json_path, 'a/output.json')
450         self.assertEqual(options.source_json_path, 'a/source.json')
451         self.assertEqual(options.test_results_server, 'somehost')
452
453
# Allow running this test file directly.
if __name__ == '__main__':
    unittest.main()