clean up NRWT logging/metering, be less verbose
[WebKit-https.git] / Tools / Scripts / webkitpy / performance_tests / perftestsrunner_unittest.py
1 #!/usr/bin/python
2 # Copyright (C) 2012 Google Inc. All rights reserved.
3 #
4 # Redistribution and use in source and binary forms, with or without
5 # modification, are permitted provided that the following conditions are
6 # met:
7 #
8 #     * Redistributions of source code must retain the above copyright
9 # notice, this list of conditions and the following disclaimer.
10 #     * Redistributions in binary form must reproduce the above
11 # copyright notice, this list of conditions and the following disclaimer
12 # in the documentation and/or other materials provided with the
13 # distribution.
14 #     * Neither the name of Google Inc. nor the names of its
15 # contributors may be used to endorse or promote products derived from
16 # this software without specific prior written permission.
17 #
18 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
19 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
20 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
21 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
22 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
23 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
24 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
25 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
26 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
28 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29
30 """Unit tests for run_perf_tests."""
31
32 import StringIO
33 import json
34 import unittest
35
36 from webkitpy.common.host_mock import MockHost
37 from webkitpy.common.system.filesystem_mock import MockFileSystem
38 from webkitpy.common.system.outputcapture import OutputCapture
39 from webkitpy.layout_tests.port.driver import DriverInput, DriverOutput
40 from webkitpy.layout_tests.port.test import TestPort
41 from webkitpy.layout_tests.views import printing
42 from webkitpy.performance_tests.perftestsrunner import PerfTestsRunner
43
44
class MainTest(unittest.TestCase):
    """Unit tests for PerfTestsRunner.

    Covers single-test runs, test-set runs, JSON output generation and
    upload, test collection/skipping, and command-line argument parsing.
    """

    def assertWritten(self, stream, contents):
        # buflist holds each string written to the StringIO, in order.
        # (assertEquals is a deprecated alias; use assertEqual.)
        self.assertEqual(stream.buflist, contents)

    class TestDriver:
        """Fake driver returning canned DriverOutput keyed off the test name."""

        def run_test(self, driver_input):
            text = ''
            timeout = False
            crash = False
            if driver_input.test_name.endswith('pass.html'):
                text = 'RESULT group_name: test_name= 42 ms'
            elif driver_input.test_name.endswith('timeout.html'):
                timeout = True
            elif driver_input.test_name.endswith('failed.html'):
                # None stands in for a driver that produced no output at all.
                text = None
            elif driver_input.test_name.endswith('tonguey.html'):
                text = 'we are not expecting an output from perf tests but RESULT blablabla'
            elif driver_input.test_name.endswith('crash.html'):
                crash = True
            elif driver_input.test_name.endswith('event-target-wrapper.html'):
                # Canned output mimicking a WebKit-style perf test run.
                text = """Running 20 times
Ignoring warm-up run (1502)
1504
1505
1510
1504
1507
1509
1510
1487
1488
1472
1472
1488
1473
1472
1475
1487
1486
1486
1475
1471

avg 1489.05
median 1487
stdev 14.46
min 1471
max 1510
"""
            elif driver_input.test_name.endswith('some-parser.html'):
                text = """Running 20 times
Ignoring warm-up run (1115)

avg 1100
median 1101
stdev 11
min 1080
max 1120
"""
            return DriverOutput(text, '', '', '', crash=crash, timeout=timeout)

        def start(self):
            """do nothing"""

        def stop(self):
            """do nothing"""

    def create_runner(self, buildbot_output=None, args=None, regular_output=None, driver_class=TestDriver):
        """Build a PerfTestsRunner wired to a TestPort and a fake driver.

        args defaults to None (not a shared mutable [] default) and is
        normalized to a fresh list per call.
        """
        args = args if args is not None else []
        buildbot_output = buildbot_output or StringIO.StringIO()
        regular_output = regular_output or StringIO.StringIO()

        options, parsed_args = PerfTestsRunner._parse_args(args)
        test_port = TestPort(host=MockHost(), options=options)
        # Always hand back the stubbed driver regardless of worker/timeout.
        test_port.create_driver = lambda worker_number=None, no_timeout=False: driver_class()

        runner = PerfTestsRunner(regular_output, buildbot_output, args=args, port=test_port)
        runner._host.filesystem.maybe_make_directory(runner._base_path, 'inspector')
        runner._host.filesystem.maybe_make_directory(runner._base_path, 'Bindings')
        runner._host.filesystem.maybe_make_directory(runner._base_path, 'Parser')
        return runner

    def run_test(self, test_name):
        """Run a single named test through the fake driver; returns pass/fail."""
        runner = self.create_runner()
        driver = MainTest.TestDriver()
        return runner._run_single_test(test_name, driver, is_chromium_style=True)

    def test_run_passing_test(self):
        self.assertTrue(self.run_test('pass.html'))

    def test_run_silent_test(self):
        self.assertFalse(self.run_test('silent.html'))

    def test_run_failed_test(self):
        self.assertFalse(self.run_test('failed.html'))

    def test_run_tonguey_test(self):
        self.assertFalse(self.run_test('tonguey.html'))

    def test_run_timeout_test(self):
        self.assertFalse(self.run_test('timeout.html'))

    def test_run_crash_test(self):
        self.assertFalse(self.run_test('crash.html'))

    def test_run_test_set(self):
        buildbot_output = StringIO.StringIO()
        runner = self.create_runner(buildbot_output)
        dirname = runner._base_path + '/inspector/'
        tests = [dirname + 'pass.html', dirname + 'silent.html', dirname + 'failed.html',
            dirname + 'tonguey.html', dirname + 'timeout.html', dirname + 'crash.html']
        unexpected_result_count = runner._run_tests_set(tests, runner._port)
        # Only pass.html succeeds; the other five are unexpected results.
        self.assertEqual(unexpected_result_count, len(tests) - 1)
        self.assertWritten(buildbot_output, ['RESULT group_name: test_name= 42 ms\n'])

    def test_run_test_set_kills_drt_per_run(self):

        class TestDriverWithStopCount(MainTest.TestDriver):
            stop_count = 0

            def stop(self):
                TestDriverWithStopCount.stop_count += 1

        buildbot_output = StringIO.StringIO()
        runner = self.create_runner(buildbot_output, driver_class=TestDriverWithStopCount)

        dirname = runner._base_path + '/inspector/'
        tests = [dirname + 'pass.html', dirname + 'silent.html', dirname + 'failed.html',
            dirname + 'tonguey.html', dirname + 'timeout.html', dirname + 'crash.html']

        # The driver must be stopped once per test, even for failures.
        runner._run_tests_set(tests, runner._port)
        self.assertEqual(TestDriverWithStopCount.stop_count, 6)

    def test_run_test_pause_before_testing(self):
        class TestDriverWithStartCount(MainTest.TestDriver):
            start_count = 0

            def start(self):
                TestDriverWithStartCount.start_count += 1

        buildbot_output = StringIO.StringIO()
        regular_output = StringIO.StringIO()
        runner = self.create_runner(buildbot_output, args=["--pause-before-testing"], regular_output=regular_output, driver_class=TestDriverWithStartCount)

        dirname = runner._base_path + '/inspector/'
        tests = [dirname + 'pass.html']

        # Create/capture outside the try: if OutputCapture() itself raised,
        # the finally clause would hit a NameError referencing 'output'.
        output = OutputCapture()
        output.capture_output()
        try:
            runner._run_tests_set(tests, runner._port)
            self.assertEqual(TestDriverWithStartCount.start_count, 1)
        finally:
            _, stderr, _ = output.restore_output()
            self.assertEqual(stderr, "Ready to run test?\n")
            self.assertTrue("Running inspector/pass.html (1 of 1)" in regular_output.getvalue())

    def test_run_test_set_for_parser_tests(self):
        buildbot_output = StringIO.StringIO()
        runner = self.create_runner(buildbot_output)
        tests = [runner._base_path + '/Bindings/event-target-wrapper.html', runner._base_path + '/Parser/some-parser.html']
        unexpected_result_count = runner._run_tests_set(tests, runner._port)
        self.assertEqual(unexpected_result_count, 0)
        self.assertWritten(buildbot_output, ['RESULT Bindings: event-target-wrapper= 1489.05 ms\n',
                                             'median= 1487.0 ms, stdev= 14.46 ms, min= 1471.0 ms, max= 1510.0 ms\n',
                                             'RESULT Parser: some-parser= 1100.0 ms\n',
                                             'median= 1101.0 ms, stdev= 11.0 ms, min= 1080.0 ms, max= 1120.0 ms\n'])

    def test_run_test_set_with_json_output(self):
        buildbot_output = StringIO.StringIO()
        runner = self.create_runner(buildbot_output, args=['--output-json-path=/mock-checkout/output.json'])
        runner._host.filesystem.files[runner._base_path + '/inspector/pass.html'] = True
        runner._host.filesystem.files[runner._base_path + '/Bindings/event-target-wrapper.html'] = True
        runner._timestamp = 123456789
        self.assertEqual(runner.run(), 0)
        self.assertWritten(buildbot_output, ['RESULT Bindings: event-target-wrapper= 1489.05 ms\n',
                                             'median= 1487.0 ms, stdev= 14.46 ms, min= 1471.0 ms, max= 1510.0 ms\n',
                                             'RESULT group_name: test_name= 42 ms\n'])

        self.assertEqual(json.loads(runner._host.filesystem.files['/mock-checkout/output.json']), {
            "timestamp": 123456789, "results":
            {"Bindings/event-target-wrapper": {"max": 1510, "avg": 1489.05, "median": 1487, "min": 1471, "stdev": 14.46},
            "group_name:test_name": 42},
            "webkit-revision": 5678})

    def test_run_test_set_with_json_source(self):
        buildbot_output = StringIO.StringIO()
        runner = self.create_runner(buildbot_output, args=['--output-json-path=/mock-checkout/output.json',
            '--source-json-path=/mock-checkout/source.json'])
        runner._host.filesystem.files['/mock-checkout/source.json'] = '{"key": "value"}'
        runner._host.filesystem.files[runner._base_path + '/inspector/pass.html'] = True
        runner._host.filesystem.files[runner._base_path + '/Bindings/event-target-wrapper.html'] = True
        runner._timestamp = 123456789
        self.assertEqual(runner.run(), 0)
        self.assertWritten(buildbot_output, ['RESULT Bindings: event-target-wrapper= 1489.05 ms\n',
                                             'median= 1487.0 ms, stdev= 14.46 ms, min= 1471.0 ms, max= 1510.0 ms\n',
                                             'RESULT group_name: test_name= 42 ms\n'])

        # The key/value pairs from source.json are merged into the output JSON.
        self.assertEqual(json.loads(runner._host.filesystem.files['/mock-checkout/output.json']), {
            "timestamp": 123456789, "results":
            {"Bindings/event-target-wrapper": {"max": 1510, "avg": 1489.05, "median": 1487, "min": 1471, "stdev": 14.46},
            "group_name:test_name": 42},
            "webkit-revision": 5678,
            "key": "value"})

    def test_run_test_set_with_multiple_repositories(self):
        buildbot_output = StringIO.StringIO()
        runner = self.create_runner(buildbot_output, args=['--output-json-path=/mock-checkout/output.json'])
        runner._host.filesystem.files[runner._base_path + '/inspector/pass.html'] = True
        runner._timestamp = 123456789
        runner._port.repository_paths = lambda: [('webkit', '/mock-checkout'), ('some', '/mock-checkout/some')]
        self.assertEqual(runner.run(), 0)

        # One "<name>-revision" entry per repository path.
        self.assertEqual(json.loads(runner._host.filesystem.files['/mock-checkout/output.json']), {
            "timestamp": 123456789, "results": {"group_name:test_name": 42.0}, "webkit-revision": 5678, "some-revision": 5678})

    def test_run_with_upload_json(self):
        runner = self.create_runner(args=['--output-json-path=/mock-checkout/output.json',
            '--test-results-server', 'some.host', '--platform', 'platform1', '--builder-name', 'builder1', '--build-number', '123'])
        # One-element list so the closure can mutate it (Python 2 has no nonlocal).
        upload_json_is_called = [False]
        upload_json_returns_true = True

        def mock_upload_json(hostname, json_path):
            self.assertEqual(hostname, 'some.host')
            self.assertEqual(json_path, '/mock-checkout/output.json')
            upload_json_is_called[0] = True
            return upload_json_returns_true

        runner._upload_json = mock_upload_json
        runner._host.filesystem.files['/mock-checkout/source.json'] = '{"key": "value"}'
        runner._host.filesystem.files[runner._base_path + '/inspector/pass.html'] = True
        runner._host.filesystem.files[runner._base_path + '/Bindings/event-target-wrapper.html'] = True
        runner._timestamp = 123456789
        self.assertEqual(runner.run(), 0)
        self.assertEqual(upload_json_is_called[0], True)
        generated_json = json.loads(runner._host.filesystem.files['/mock-checkout/output.json'])
        self.assertEqual(generated_json['platform'], 'platform1')
        self.assertEqual(generated_json['builder-name'], 'builder1')
        self.assertEqual(generated_json['build-number'], 123)
        upload_json_returns_true = False

        # A failed upload should surface as exit code -3.
        runner = self.create_runner(args=['--output-json-path=/mock-checkout/output.json',
            '--test-results-server', 'some.host', '--platform', 'platform1', '--builder-name', 'builder1', '--build-number', '123'])
        runner._upload_json = mock_upload_json
        self.assertEqual(runner.run(), -3)

    def test_upload_json(self):
        regular_output = StringIO.StringIO()
        runner = self.create_runner(regular_output=regular_output)
        runner._host.filesystem.files['/mock-checkout/some.json'] = 'some content'

        called = []
        upload_single_text_file_throws = False
        upload_single_text_file_return_value = StringIO.StringIO('OK')

        class MockFileUploader:
            # 'mock' plays the role of self; 'self' is the enclosing test case.
            def __init__(mock, url, timeout):
                self.assertEqual(url, 'https://some.host/api/test/report')
                self.assertTrue(isinstance(timeout, int) and timeout)
                called.append('FileUploader')

            def upload_single_text_file(mock, filesystem, content_type, filename):
                self.assertEqual(filesystem, runner._host.filesystem)
                self.assertEqual(content_type, 'application/json')
                self.assertEqual(filename, 'some.json')
                called.append('upload_single_text_file')
                if upload_single_text_file_throws:
                    # String exceptions are invalid since Python 2.6; raise a
                    # real Exception so _upload_json's handler sees the intended
                    # error rather than a TypeError from the raise itself.
                    raise Exception("Some exception")
                return upload_single_text_file_return_value

        runner._upload_json('some.host', 'some.json', MockFileUploader)
        self.assertEqual(called, ['FileUploader', 'upload_single_text_file'])

        output = OutputCapture()
        output.capture_output()
        upload_single_text_file_return_value = StringIO.StringIO('Some error')
        runner._upload_json('some.host', 'some.json', MockFileUploader)
        _, _, logs = output.restore_output()
        self.assertEqual(logs, 'Uploaded JSON but got a bad response:\nSome error\n')

        # Throwing an exception upload_single_text_file shouldn't blow up _upload_json
        called = []
        upload_single_text_file_throws = True
        runner._upload_json('some.host', 'some.json', MockFileUploader)
        self.assertEqual(called, ['FileUploader', 'upload_single_text_file'])

    def test_collect_tests(self):
        runner = self.create_runner()
        filename = runner._host.filesystem.join(runner._base_path, 'inspector', 'a_file.html')
        runner._host.filesystem.files[filename] = 'a content'
        tests = runner._collect_tests()
        self.assertEqual(len(tests), 1)

    # Renamed: this was a second definition of test_collect_tests, which
    # silently shadowed the one above so it never ran.
    def test_collect_tests_with_explicit_test_paths(self):
        runner = self.create_runner(args=['PerformanceTests/test1.html', 'test2.html'])

        def add_file(filename):
            runner._host.filesystem.files[runner._host.filesystem.join(runner._base_path, filename)] = 'some content'

        add_file('test1.html')
        add_file('test2.html')
        add_file('test3.html')
        # chdir to the parent of the perf-tests dir so relative args resolve.
        runner._host.filesystem.chdir(runner._port.perf_tests_dir()[:runner._port.perf_tests_dir().rfind(runner._host.filesystem.sep)])
        tests = [runner._port.relative_perf_test_filename(test) for test in runner._collect_tests()]
        self.assertEqual(sorted(tests), ['test1.html', 'test2.html'])

    def test_collect_tests_with_skipped_list(self):
        runner = self.create_runner()

        def add_file(dirname, filename, content=True):
            dirname = runner._host.filesystem.join(runner._base_path, dirname) if dirname else runner._base_path
            runner._host.filesystem.maybe_make_directory(dirname)
            runner._host.filesystem.files[runner._host.filesystem.join(dirname, filename)] = content

        add_file('inspector', 'test1.html')
        add_file('inspector', 'unsupported_test1.html')
        add_file('inspector', 'test2.html')
        add_file('inspector/resources', 'resource_file.html')
        add_file('unsupported', 'unsupported_test2.html')
        runner._port.skipped_perf_tests = lambda: ['inspector/unsupported_test1.html', 'unsupported']
        tests = [runner._port.relative_perf_test_filename(test) for test in runner._collect_tests()]
        self.assertEqual(sorted(tests), ['inspector/test1.html', 'inspector/test2.html'])

    def test_parse_args(self):
        runner = self.create_runner()
        options, args = PerfTestsRunner._parse_args([
                '--verbose',
                '--build-directory=folder42',
                '--platform=platform42',
                '--builder-name', 'webkit-mac-1',
                '--build-number=56',
                '--time-out-ms=42',
                '--output-json-path=a/output.json',
                '--source-json-path=a/source.json',
                '--test-results-server=somehost',
                '--debug', 'an_arg'])
        self.assertEqual(options.build, True)
        self.assertEqual(options.verbose, True)
        self.assertEqual(options.help_printing, None)
        self.assertEqual(options.build_directory, 'folder42')
        self.assertEqual(options.platform, 'platform42')
        self.assertEqual(options.builder_name, 'webkit-mac-1')
        # build_number and time_out_ms are parsed as strings, not ints.
        self.assertEqual(options.build_number, '56')
        self.assertEqual(options.time_out_ms, '42')
        self.assertEqual(options.configuration, 'Debug')
        self.assertEqual(options.print_options, None)
        self.assertEqual(options.output_json_path, 'a/output.json')
        self.assertEqual(options.source_json_path, 'a/source.json')
        self.assertEqual(options.test_results_server, 'somehost')
393
394
# Allow running this test file directly from the command line.
if __name__ == '__main__':
    unittest.main()