[PerformanceTests] tests have dependencies
[WebKit-https.git] / Tools / Scripts / webkitpy / performance_tests / perftestsrunner_unittest.py
1 #!/usr/bin/python
2 # Copyright (C) 2012 Google Inc. All rights reserved.
3 #
4 # Redistribution and use in source and binary forms, with or without
5 # modification, are permitted provided that the following conditions are
6 # met:
7 #
8 #     * Redistributions of source code must retain the above copyright
9 # notice, this list of conditions and the following disclaimer.
10 #     * Redistributions in binary form must reproduce the above
11 # copyright notice, this list of conditions and the following disclaimer
12 # in the documentation and/or other materials provided with the
13 # distribution.
14 #     * Neither the name of Google Inc. nor the names of its
15 # contributors may be used to endorse or promote products derived from
16 # this software without specific prior written permission.
17 #
18 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
19 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
20 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
21 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
22 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
23 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
24 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
25 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
26 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
28 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29
30 """Unit tests for run_perf_tests."""
31
32 import StringIO
33 import json
34 import unittest
35
36 from webkitpy.common import array_stream
37 from webkitpy.common.host_mock import MockHost
38 from webkitpy.common.system.filesystem_mock import MockFileSystem
39 from webkitpy.common.system.outputcapture import OutputCapture
40 from webkitpy.layout_tests.port.driver import DriverInput, DriverOutput
41 from webkitpy.layout_tests.port.test import TestPort
42 from webkitpy.layout_tests.views import printing
43 from webkitpy.performance_tests.perftestsrunner import PerfTestsRunner
44
45
class MainTest(unittest.TestCase):
    """Unit tests for PerfTestsRunner.

    Covers single-test execution, running whole test sets, JSON result
    output, uploading results to a test-results server, test collection
    (including skipped lists), and command-line argument parsing.  No real
    DumpRenderTree is launched; a fake driver fabricates output instead.
    """

    class TestDriver:
        """Fake driver that synthesizes DriverOutput from the test name.

        The suffix of driver_input.test_name selects the canned behavior:
        pass/fail/timeout/crash/garbage output, plus two realistic
        multi-line perf-test outputs (Chromium-style and WebKit-style).
        """

        def run_test(self, driver_input):
            text = ''
            timeout = False
            crash = False
            if driver_input.test_name.endswith('init.html'):
                # The warm-up page loaded before each real test.
                text = 'PASS\n'
            elif driver_input.test_name.endswith('pass.html'):
                text = 'RESULT group_name: test_name= 42 ms'
            elif driver_input.test_name.endswith('timeout.html'):
                timeout = True
            elif driver_input.test_name.endswith('failed.html'):
                # None stands for "the driver produced no output at all".
                text = None
            elif driver_input.test_name.endswith('tonguey.html'):
                # Output that superficially contains "RESULT" but is not a
                # valid perf-test result line; the runner must reject it.
                text = 'we are not expecting an output from perf tests but RESULT blablabla'
            elif driver_input.test_name.endswith('crash.html'):
                crash = True
            elif driver_input.test_name.endswith('event-target-wrapper.html'):
                # Full WebKit-style perf-test output with per-run samples.
                text = """Running 20 times
Ignoring warm-up run (1502)
1504
1505
1510
1504
1507
1509
1510
1487
1488
1472
1472
1488
1473
1472
1475
1487
1486
1486
1475
1471

avg 1489.05
median 1487
stdev 14.46
min 1471
max 1510
"""
            elif driver_input.test_name.endswith('some-parser.html'):
                # Summary-only WebKit-style output (no per-run samples).
                text = """Running 20 times
Ignoring warm-up run (1115)

avg 1100
median 1101
stdev 11
min 1080
max 1120
"""
            return DriverOutput(text, '', '', '', crash=crash, timeout=timeout)

        def stop(self):
            """do nothing"""

    def create_runner(self, buildbot_output=None, args=None, regular_output=None, driver_class=TestDriver):
        """Build a PerfTestsRunner wired to a TestPort and a fake driver.

        args defaults to None (not a mutable []) to avoid the shared
        mutable-default pitfall; None is treated as "no arguments".
        Returns the runner; its mock filesystem already contains the
        inspector/, Bindings/ and Parser/ perf-test directories.
        """
        args = args or []
        buildbot_output = buildbot_output or array_stream.ArrayStream()
        regular_output = regular_output or array_stream.ArrayStream()

        options, parsed_args = PerfTestsRunner._parse_args(args)
        test_port = TestPort(host=MockHost(), options=options)
        # Ignore the worker/timeout arguments and always hand out the fake.
        test_port.create_driver = lambda worker_number=None, no_timeout=False: driver_class()

        runner = PerfTestsRunner(regular_output, buildbot_output, args=args, port=test_port)
        runner._host.filesystem.maybe_make_directory(runner._base_path, 'inspector')
        runner._host.filesystem.maybe_make_directory(runner._base_path, 'Bindings')
        runner._host.filesystem.maybe_make_directory(runner._base_path, 'Parser')
        return runner

    def run_test(self, test_name):
        """Run a single named test through a fresh runner; return success."""
        runner = self.create_runner()
        driver = MainTest.TestDriver()
        return runner._run_single_test(test_name, driver, is_chromium_style=True)

    def test_initial_page_loaded(self):
        """The runner must load resources/init.html before the real test."""
        runner = self.create_runner()
        driver = MainTest.TestDriver()
        inputs = []

        # Record every DriverInput so we can assert the load order below.
        def run_test(driver_input):
            inputs.append(driver_input)
            if driver_input.test_name.endswith('init.html'):
                return DriverOutput('PASS\n', 'image output', 'some hash', None)
            else:
                return DriverOutput('RESULT group_name: test_name= 42 ms\n', 'image output', 'some hash', None)

        driver.run_test = run_test
        self.assertTrue(runner._run_single_test('pass.html', driver, is_chromium_style=True))
        self.assertEqual(len(inputs), 2)
        self.assertEqual(inputs[0].test_name, runner._base_path + '/resources/init.html')
        self.assertEqual(inputs[1].test_name, 'pass.html')

    def test_run_passing_test(self):
        self.assertTrue(self.run_test('pass.html'))

    def test_run_silent_test(self):
        self.assertFalse(self.run_test('silent.html'))

    def test_run_failed_test(self):
        self.assertFalse(self.run_test('failed.html'))

    def test_run_tonguey_test(self):
        self.assertFalse(self.run_test('tonguey.html'))

    def test_run_timeout_test(self):
        self.assertFalse(self.run_test('timeout.html'))

    def test_run_crash_test(self):
        self.assertFalse(self.run_test('crash.html'))

    def test_run_test_set(self):
        """Only the passing test produces buildbot output; the other five
        (silent/failed/tonguey/timeout/crash) count as unexpected results."""
        buildbot_output = array_stream.ArrayStream()
        runner = self.create_runner(buildbot_output)
        dirname = runner._base_path + '/inspector/'
        tests = [dirname + 'pass.html', dirname + 'silent.html', dirname + 'failed.html',
            dirname + 'tonguey.html', dirname + 'timeout.html', dirname + 'crash.html']
        unexpected_result_count = runner._run_tests_set(tests, runner._port)
        self.assertEqual(unexpected_result_count, len(tests) - 1)
        self.assertEqual(len(buildbot_output.get()), 1)
        self.assertEqual(buildbot_output.get()[0], 'RESULT group_name: test_name= 42 ms\n')

    def test_run_test_set_kills_drt_per_run(self):
        """Each of the six tests must get a fresh driver; stop() is
        therefore called exactly once per test."""

        class TestDriverWithStopCount(MainTest.TestDriver):
            # Class-level counter accumulates across all driver instances.
            # (The original code had an __init__ that assigned to a typo'd
            # attribute name, 'sotp_count'; it was dead code and is removed —
            # resetting the real counter per instance would break the
            # accumulation this test asserts.)
            stop_count = 0

            def stop(self):
                TestDriverWithStopCount.stop_count += 1

        buildbot_output = array_stream.ArrayStream()
        runner = self.create_runner(buildbot_output, driver_class=TestDriverWithStopCount)

        dirname = runner._base_path + '/inspector/'
        tests = [dirname + 'pass.html', dirname + 'silent.html', dirname + 'failed.html',
            dirname + 'tonguey.html', dirname + 'timeout.html', dirname + 'crash.html']

        runner._run_tests_set(tests, runner._port)
        self.assertEqual(TestDriverWithStopCount.stop_count, 6)

    def test_run_test_set_for_parser_tests(self):
        """WebKit-style outputs are re-emitted as RESULT + statistics lines."""
        buildbot_output = array_stream.ArrayStream()
        runner = self.create_runner(buildbot_output)
        tests = [runner._base_path + '/Bindings/event-target-wrapper.html', runner._base_path + '/Parser/some-parser.html']
        unexpected_result_count = runner._run_tests_set(tests, runner._port)
        self.assertEqual(unexpected_result_count, 0)
        self.assertEqual(buildbot_output.get()[0], 'RESULT Bindings: event-target-wrapper= 1489.05 ms\n')
        self.assertEqual(buildbot_output.get()[1], 'median= 1487.0 ms, stdev= 14.46 ms, min= 1471.0 ms, max= 1510.0 ms\n')
        self.assertEqual(buildbot_output.get()[2], 'RESULT Parser: some-parser= 1100.0 ms\n')
        self.assertEqual(buildbot_output.get()[3], 'median= 1101.0 ms, stdev= 11.0 ms, min= 1080.0 ms, max= 1120.0 ms\n')

    def test_run_test_set_with_json_output(self):
        """--output-json-path writes timestamp, per-test results and revision."""
        buildbot_output = array_stream.ArrayStream()
        runner = self.create_runner(buildbot_output, args=['--output-json-path=/mock-checkout/output.json'])
        runner._host.filesystem.files[runner._base_path + '/inspector/pass.html'] = True
        runner._host.filesystem.files[runner._base_path + '/Bindings/event-target-wrapper.html'] = True
        runner._timestamp = 123456789
        self.assertEqual(runner.run(), 0)
        self.assertEqual(len(buildbot_output.get()), 3)
        self.assertEqual(buildbot_output.get()[0], 'RESULT Bindings: event-target-wrapper= 1489.05 ms\n')
        self.assertEqual(buildbot_output.get()[1], 'median= 1487.0 ms, stdev= 14.46 ms, min= 1471.0 ms, max= 1510.0 ms\n')
        self.assertEqual(buildbot_output.get()[2], 'RESULT group_name: test_name= 42 ms\n')

        self.assertEqual(json.loads(runner._host.filesystem.files['/mock-checkout/output.json']), {
            "timestamp": 123456789, "results":
            {"Bindings/event-target-wrapper": {"max": 1510, "avg": 1489.05, "median": 1487, "min": 1471, "stdev": 14.46},
            "group_name:test_name": 42},
            "revision": 1234})

    def test_run_test_set_with_json_source(self):
        """--source-json-path keys are merged into the output JSON."""
        buildbot_output = array_stream.ArrayStream()
        runner = self.create_runner(buildbot_output, args=['--output-json-path=/mock-checkout/output.json',
            '--source-json-path=/mock-checkout/source.json'])
        runner._host.filesystem.files['/mock-checkout/source.json'] = '{"key": "value"}'
        runner._host.filesystem.files[runner._base_path + '/inspector/pass.html'] = True
        runner._host.filesystem.files[runner._base_path + '/Bindings/event-target-wrapper.html'] = True
        runner._timestamp = 123456789
        self.assertEqual(runner.run(), 0)
        self.assertEqual(len(buildbot_output.get()), 3)
        self.assertEqual(buildbot_output.get()[0], 'RESULT Bindings: event-target-wrapper= 1489.05 ms\n')
        self.assertEqual(buildbot_output.get()[1], 'median= 1487.0 ms, stdev= 14.46 ms, min= 1471.0 ms, max= 1510.0 ms\n')
        self.assertEqual(buildbot_output.get()[2], 'RESULT group_name: test_name= 42 ms\n')

        self.assertEqual(json.loads(runner._host.filesystem.files['/mock-checkout/output.json']), {
            "timestamp": 123456789, "results":
            {"Bindings/event-target-wrapper": {"max": 1510, "avg": 1489.05, "median": 1487, "min": 1471, "stdev": 14.46},
            "group_name:test_name": 42},
            "revision": 1234,
            "key": "value"})

    def test_run_with_upload_json(self):
        """With --test-results-server, run() uploads the JSON and annotates
        it with platform/builder metadata; a failed upload returns -3."""
        runner = self.create_runner(args=['--output-json-path=/mock-checkout/output.json',
            '--test-results-server', 'some.host', '--platform', 'platform1', '--builder-name', 'builder1', '--build-number', '123'])
        # One-element list so the closure can flag the call (Python 2 has
        # no 'nonlocal' for rebinding outer names).
        upload_json_is_called = [False]
        upload_json_returns_true = True

        def mock_upload_json(hostname, json_path):
            self.assertEqual(hostname, 'some.host')
            self.assertEqual(json_path, '/mock-checkout/output.json')
            upload_json_is_called[0] = True
            return upload_json_returns_true

        runner._upload_json = mock_upload_json
        runner._host.filesystem.files['/mock-checkout/source.json'] = '{"key": "value"}'
        runner._host.filesystem.files[runner._base_path + '/inspector/pass.html'] = True
        runner._host.filesystem.files[runner._base_path + '/Bindings/event-target-wrapper.html'] = True
        runner._timestamp = 123456789
        self.assertEqual(runner.run(), 0)
        self.assertEqual(upload_json_is_called[0], True)
        generated_json = json.loads(runner._host.filesystem.files['/mock-checkout/output.json'])
        self.assertEqual(generated_json['platform'], 'platform1')
        self.assertEqual(generated_json['builder-name'], 'builder1')
        self.assertEqual(generated_json['build-number'], 123)
        upload_json_returns_true = False
        self.assertEqual(runner.run(), -3)

    def test_upload_json(self):
        """_upload_json posts via the injected uploader class, logs a bad
        response, and survives an uploader exception."""
        regular_output = array_stream.ArrayStream()
        runner = self.create_runner(regular_output=regular_output)
        runner._host.filesystem.files['/mock-checkout/some.json'] = 'some content'

        called = []
        upload_single_text_file_throws = False
        upload_single_text_file_return_value = StringIO.StringIO('OK')

        class MockFileUploader:
            # 'mock' (not 'self') so the enclosing test's 'self' stays visible.
            def __init__(mock, url, timeout):
                self.assertEqual(url, 'https://some.host/api/test/report')
                self.assertTrue(isinstance(timeout, int) and timeout)
                called.append('FileUploader')

            def upload_single_text_file(mock, filesystem, content_type, filename):
                self.assertEqual(filesystem, runner._host.filesystem)
                self.assertEqual(content_type, 'application/json')
                self.assertEqual(filename, 'some.json')
                called.append('upload_single_text_file')
                if upload_single_text_file_throws:
                    # Was 'raise "Some exception"' — string exceptions are a
                    # TypeError since Python 2.6; raise a real Exception.
                    raise Exception("Some exception")
                return upload_single_text_file_return_value

        runner._upload_json('some.host', 'some.json', MockFileUploader)
        self.assertEqual(called, ['FileUploader', 'upload_single_text_file'])

        output = OutputCapture()
        output.capture_output()
        upload_single_text_file_return_value = StringIO.StringIO('Some error')
        runner._upload_json('some.host', 'some.json', MockFileUploader)
        _, _, logs = output.restore_output()
        self.assertEqual(logs, 'Uploaded JSON but got a bad response:\nSome error\n')

        # Throwing an exception in upload_single_text_file shouldn't blow up _upload_json
        called = []
        upload_single_text_file_throws = True
        runner._upload_json('some.host', 'some.json', MockFileUploader)
        self.assertEqual(called, ['FileUploader', 'upload_single_text_file'])

    def test_collect_tests(self):
        """A .html file under inspector/ is picked up as one test."""
        runner = self.create_runner()
        filename = runner._host.filesystem.join(runner._base_path, 'inspector', 'a_file.html')
        runner._host.filesystem.files[filename] = 'a content'
        tests = runner._collect_tests()
        self.assertEqual(len(tests), 1)

    def test_collect_tests_with_skipped_list(self):
        """Skipped entries (single files or whole directories) and anything
        under a resources/ directory are excluded from collection."""
        runner = self.create_runner()

        def add_file(dirname, filename, content=True):
            dirname = runner._host.filesystem.join(runner._base_path, dirname) if dirname else runner._base_path
            runner._host.filesystem.maybe_make_directory(dirname)
            runner._host.filesystem.files[runner._host.filesystem.join(dirname, filename)] = content

        add_file('inspector', 'test1.html')
        add_file('inspector', 'unsupported_test1.html')
        add_file('inspector', 'test2.html')
        add_file('inspector/resources', 'resource_file.html')
        add_file('unsupported', 'unsupported_test2.html')
        runner._port.skipped_perf_tests = lambda: ['inspector/unsupported_test1.html', 'unsupported']
        tests = [runner._port.relative_perf_test_filename(test) for test in runner._collect_tests()]
        self.assertEqual(sorted(tests), ['inspector/test1.html', 'inspector/test2.html'])

    def test_parse_args(self):
        """All recognized command-line options land on the options object."""
        runner = self.create_runner()
        options, args = PerfTestsRunner._parse_args([
                '--verbose',
                '--build-directory=folder42',
                '--platform=platform42',
                '--builder-name', 'webkit-mac-1',
                '--build-number=56',
                '--time-out-ms=42',
                '--output-json-path=a/output.json',
                '--source-json-path=a/source.json',
                '--test-results-server=somehost',
                '--debug', 'an_arg'])
        self.assertEqual(options.build, True)
        self.assertEqual(options.verbose, True)
        self.assertEqual(options.help_printing, None)
        self.assertEqual(options.build_directory, 'folder42')
        self.assertEqual(options.platform, 'platform42')
        self.assertEqual(options.builder_name, 'webkit-mac-1')
        # Note: build_number and time_out_ms stay strings at parse time.
        self.assertEqual(options.build_number, '56')
        self.assertEqual(options.time_out_ms, '42')
        self.assertEqual(options.configuration, 'Debug')
        self.assertEqual(options.print_options, None)
        self.assertEqual(options.output_json_path, 'a/output.json')
        self.assertEqual(options.source_json_path, 'a/source.json')
        self.assertEqual(options.test_results_server, 'somehost')
362
363
# Allow running this test file directly (python perftestsrunner_unittest.py).
if __name__ == '__main__':
    unittest.main()