#!/usr/bin/python
# Copyright (C) 2012 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Unit tests for run_perf_tests."""

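# These tests can be run on their own through webkitpy's harness, e.g.:
#   Tools/Scripts/test-webkitpy webkitpy.performance_tests.perftestsrunner_unittest
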
import StringIO
import json
import unittest

from webkitpy.common.host_mock import MockHost
from webkitpy.common.system.outputcapture import OutputCapture
from webkitpy.layout_tests.port.driver import DriverOutput
from webkitpy.layout_tests.port.test import TestPort
from webkitpy.performance_tests.perftest import ChromiumStylePerfTest
from webkitpy.performance_tests.perftest import PerfTest
from webkitpy.performance_tests.perftestsrunner import PerfTestsRunner


class MainTest(unittest.TestCase):
    def assertWritten(self, stream, contents):
        self.assertEqual(stream.buflist, contents)

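    # Stub driver: run_test() returns canned DriverOutput (including timeouts
    # and crashes) keyed off the test file name, instead of launching
    # DumpRenderTree and loading a real page.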
    class TestDriver:
        def run_test(self, driver_input, stop_when_done):
            text = ''
            timeout = False
            crash = False
            if driver_input.test_name.endswith('pass.html'):
                text = 'RESULT group_name: test_name= 42 ms'
            elif driver_input.test_name.endswith('timeout.html'):
                timeout = True
            elif driver_input.test_name.endswith('failed.html'):
                text = None
            elif driver_input.test_name.endswith('tonguey.html'):
                text = 'we are not expecting an output from perf tests but RESULT blablabla'
            elif driver_input.test_name.endswith('crash.html'):
                crash = True
            elif driver_input.test_name.endswith('event-target-wrapper.html'):
                text = """Running 20 times
Ignoring warm-up run (1502)
1504
1505
1510
1504
1507
1509
1510
1487
1488
1472
1472
1488
1473
1472
1475
1487
1486
1486
1475
1471

Time:
avg 1489.05 ms
median 1487 ms
stdev 14.46 ms
min 1471 ms
max 1510 ms
"""
            elif driver_input.test_name.endswith('some-parser.html'):
                text = """Running 20 times
Ignoring warm-up run (1115)

Time:
avg 1100 ms
median 1101 ms
stdev 11 ms
min 1080 ms
max 1120 ms
"""
            elif driver_input.test_name.endswith('memory-test.html'):
                text = """Running 20 times
Ignoring warm-up run (1115)

Time:
avg 1100 ms
median 1101 ms
stdev 11 ms
min 1080 ms
max 1120 ms

JS Heap:
avg 832000 bytes
median 829000 bytes
stdev 15000 bytes
min 811000 bytes
max 848000 bytes

Malloc:
avg 532000 bytes
median 529000 bytes
stdev 13000 bytes
min 511000 bytes
max 548000 bytes
"""
            return DriverOutput(text, '', '', '', crash=crash, timeout=timeout)

        def start(self):
            """do nothing"""

        def stop(self):
            """do nothing"""

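    # Builds a PerfTestsRunner against a TestPort whose create_driver() yields
    # the given stub driver class, with the standard PerformanceTests
    # subdirectories pre-created on the mock filesystem.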
    def create_runner(self, args=[], driver_class=TestDriver):
        options, parsed_args = PerfTestsRunner._parse_args(args)
        test_port = TestPort(host=MockHost(), options=options)
        test_port.create_driver = lambda worker_number=None, no_timeout=False: driver_class()

        runner = PerfTestsRunner(args=args, port=test_port)
        runner._host.filesystem.maybe_make_directory(runner._base_path, 'inspector')
        runner._host.filesystem.maybe_make_directory(runner._base_path, 'Bindings')
        runner._host.filesystem.maybe_make_directory(runner._base_path, 'Parser')

        filesystem = runner._host.filesystem
        runner.load_output_json = lambda: json.loads(filesystem.read_text_file(runner._output_json_path()))
        return runner, test_port

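    # Runs a single ChromiumStylePerfTest through the stub driver and returns
    # whether it passed.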
    def run_test(self, test_name):
        runner, port = self.create_runner()
        driver = MainTest.TestDriver()
        return runner._run_single_test(ChromiumStylePerfTest(port, test_name, runner._host.filesystem.join('some-dir', test_name)), driver)

    def test_run_passing_test(self):
        self.assertTrue(self.run_test('pass.html'))

    def test_run_silent_test(self):
        self.assertFalse(self.run_test('silent.html'))

    def test_run_failed_test(self):
        self.assertFalse(self.run_test('failed.html'))

    def test_run_tonguey_test(self):
        self.assertFalse(self.run_test('tonguey.html'))

    def test_run_timeout_test(self):
        self.assertFalse(self.run_test('timeout.html'))

    def test_run_crash_test(self):
        self.assertFalse(self.run_test('crash.html'))

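    # Creates PerfTest objects (Chromium-style ones for inspector/ tests) for
    # the given test names relative to the PerformanceTests directory.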
    def _tests_for_runner(self, runner, test_names):
        filesystem = runner._host.filesystem
        tests = []
        for test in test_names:
            path = filesystem.join(runner._base_path, test)
            if test.startswith('inspector/'):
                tests.append(ChromiumStylePerfTest(runner._port, test, path))
            else:
                tests.append(PerfTest(runner._port, test, path))
        return tests

    def test_run_test_set(self):
        runner, port = self.create_runner()
        tests = self._tests_for_runner(runner, ['inspector/pass.html', 'inspector/silent.html', 'inspector/failed.html',
            'inspector/tonguey.html', 'inspector/timeout.html', 'inspector/crash.html'])
        output = OutputCapture()
        output.capture_output()
        try:
            unexpected_result_count = runner._run_tests_set(tests, port)
        finally:
            stdout, stderr, log = output.restore_output()
        self.assertEqual(unexpected_result_count, len(tests) - 1)
        self.assertTrue('\nRESULT group_name: test_name= 42 ms\n' in log)

    def test_run_test_set_kills_drt_per_run(self):
        class TestDriverWithStopCount(MainTest.TestDriver):
            stop_count = 0

            def stop(self):
                TestDriverWithStopCount.stop_count += 1

        runner, port = self.create_runner(driver_class=TestDriverWithStopCount)

        tests = self._tests_for_runner(runner, ['inspector/pass.html', 'inspector/silent.html', 'inspector/failed.html',
            'inspector/tonguey.html', 'inspector/timeout.html', 'inspector/crash.html'])
        unexpected_result_count = runner._run_tests_set(tests, port)

        self.assertEqual(TestDriverWithStopCount.stop_count, 6)

    def test_run_test_pause_before_testing(self):
        class TestDriverWithStartCount(MainTest.TestDriver):
            start_count = 0

            def start(self):
                TestDriverWithStartCount.start_count += 1

        runner, port = self.create_runner(args=["--pause-before-testing"], driver_class=TestDriverWithStartCount)
        tests = self._tests_for_runner(runner, ['inspector/pass.html'])

        output = OutputCapture()
        output.capture_output()
        try:
            unexpected_result_count = runner._run_tests_set(tests, port)
            self.assertEqual(TestDriverWithStartCount.start_count, 1)
        finally:
            stdout, stderr, log = output.restore_output()
        self.assertEqual(stderr, "Ready to run test?\n")
        self.assertEqual(log, "Running inspector/pass.html (1 of 1)\nRESULT group_name: test_name= 42 ms\n\n")

    def test_run_test_set_for_parser_tests(self):
        runner, port = self.create_runner()
        tests = self._tests_for_runner(runner, ['Bindings/event-target-wrapper.html', 'Parser/some-parser.html'])
        output = OutputCapture()
        output.capture_output()
        try:
            unexpected_result_count = runner._run_tests_set(tests, port)
        finally:
            stdout, stderr, log = output.restore_output()
        self.assertEqual(unexpected_result_count, 0)
        self.assertEqual(log, '\n'.join(['Running Bindings/event-target-wrapper.html (1 of 2)',
        'RESULT Bindings: event-target-wrapper= 1489.05 ms',
        'median= 1487.0 ms, stdev= 14.46 ms, min= 1471.0 ms, max= 1510.0 ms',
        '',
        'Running Parser/some-parser.html (2 of 2)',
        'RESULT Parser: some-parser= 1100.0 ms',
        'median= 1101.0 ms, stdev= 11.0 ms, min= 1080.0 ms, max= 1120.0 ms',
        '', '']))

    def test_run_memory_test(self):
        runner, port = self.create_runner_and_setup_results_template()
        runner._timestamp = 123456789
        port.host.filesystem.write_text_file(runner._base_path + '/Parser/memory-test.html', 'some content')

        output = OutputCapture()
        output.capture_output()
        try:
            unexpected_result_count = runner.run()
        finally:
            stdout, stderr, log = output.restore_output()
        self.assertEqual(unexpected_result_count, 0)
        self.assertEqual(log, '\n'.join([
            'Running 1 tests',
            'Running Parser/memory-test.html (1 of 1)',
            'RESULT Parser: memory-test= 1100.0 ms',
            'median= 1101.0 ms, stdev= 11.0 ms, min= 1080.0 ms, max= 1120.0 ms',
            'RESULT Parser: memory-test: JSHeap= 832000.0 bytes',
            'median= 829000.0 bytes, stdev= 15000.0 bytes, min= 811000.0 bytes, max= 848000.0 bytes',
            'RESULT Parser: memory-test: Malloc= 532000.0 bytes',
            'median= 529000.0 bytes, stdev= 13000.0 bytes, min= 511000.0 bytes, max= 548000.0 bytes',
            '', '']))
        results = runner.load_output_json()[0]['results']
        self.assertEqual(results['Parser/memory-test'], {'min': 1080.0, 'max': 1120.0, 'median': 1101.0, 'stdev': 11.0, 'avg': 1100.0, 'unit': 'ms'})
        self.assertEqual(results['Parser/memory-test:JSHeap'], {'min': 811000.0, 'max': 848000.0, 'median': 829000.0, 'stdev': 15000.0, 'avg': 832000.0, 'unit': 'bytes'})
        self.assertEqual(results['Parser/memory-test:Malloc'], {'min': 511000.0, 'max': 548000.0, 'median': 529000.0, 'stdev': 13000.0, 'avg': 532000.0, 'unit': 'bytes'})

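    # Shared fixture: runs two tests end-to-end with JSON output configured,
    # stubbing out _upload_json, and returns the captured log.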
    def _test_run_with_json_output(self, runner, filesystem, upload_succeeds=False, expected_exit_code=0):
        filesystem.write_text_file(runner._base_path + '/inspector/pass.html', 'some content')
        filesystem.write_text_file(runner._base_path + '/Bindings/event-target-wrapper.html', 'some content')

        uploaded = [False]

        def mock_upload_json(hostname, json_path):
            self.assertEqual(hostname, 'some.host')
            self.assertEqual(json_path, '/mock-checkout/output.json')
            uploaded[0] = upload_succeeds
            return upload_succeeds

        runner._upload_json = mock_upload_json
        runner._timestamp = 123456789
        output_capture = OutputCapture()
        output_capture.capture_output()
        try:
            self.assertEqual(runner.run(), expected_exit_code)
        finally:
            stdout, stderr, logs = output_capture.restore_output()

        if not expected_exit_code:
            self.assertEqual(logs, '\n'.join([
                'Running 2 tests',
                'Running Bindings/event-target-wrapper.html (1 of 2)',
                'RESULT Bindings: event-target-wrapper= 1489.05 ms',
                'median= 1487.0 ms, stdev= 14.46 ms, min= 1471.0 ms, max= 1510.0 ms',
                '',
                'Running inspector/pass.html (2 of 2)',
                'RESULT group_name: test_name= 42 ms',
                '',
                '']))

        self.assertEqual(uploaded[0], upload_succeeds)

        return logs

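    # Expected parsed results for the two tests run by
    # _test_run_with_json_output.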
    _event_target_wrapper_and_inspector_results = {
        "Bindings/event-target-wrapper": {"max": 1510, "avg": 1489.05, "median": 1487, "min": 1471, "stdev": 14.46, "unit": "ms"},
        "inspector/pass.html:group_name:test_name": 42}

    def test_run_with_json_output(self):
        runner, port = self.create_runner(args=['--output-json-path=/mock-checkout/output.json',
            '--test-results-server=some.host'])
        self._test_run_with_json_output(runner, port.host.filesystem, upload_succeeds=True)
        self.assertEqual(runner.load_output_json(), {
            "timestamp": 123456789, "results": self._event_target_wrapper_and_inspector_results,
            "webkit-revision": "5678", "branch": "webkit-trunk"})

    def test_run_with_description(self):
        runner, port = self.create_runner(args=['--output-json-path=/mock-checkout/output.json',
            '--test-results-server=some.host', '--description', 'some description'])
        self._test_run_with_json_output(runner, port.host.filesystem, upload_succeeds=True)
        self.assertEqual(runner.load_output_json(), {
            "timestamp": 123456789, "description": "some description",
            "results": self._event_target_wrapper_and_inspector_results,
            "webkit-revision": "5678", "branch": "webkit-trunk"})

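    # Like create_runner(), but also writes the results-template HTML and a
    # jquery resource file that generating a results page depends on.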
    def create_runner_and_setup_results_template(self, args=[]):
        runner, port = self.create_runner(args)
        filesystem = port.host.filesystem
        filesystem.write_text_file(runner._base_path + '/resources/results-template.html',
            'BEGIN<script src="%AbsolutePathToWebKitTrunk%/some.js"></script>'
            '<script src="%AbsolutePathToWebKitTrunk%/other.js"></script><script>%PeformanceTestsResultsJSON%</script>END')
        filesystem.write_text_file(runner._base_path + '/Dromaeo/resources/dromaeo/web/lib/jquery-1.6.4.js', 'jquery content')
        return runner, port

    def test_run_respects_no_results(self):
        runner, port = self.create_runner(args=['--output-json-path=/mock-checkout/output.json',
            '--test-results-server=some.host', '--no-results'])
        self._test_run_with_json_output(runner, port.host.filesystem, upload_succeeds=False)
        self.assertFalse(port.host.filesystem.isfile('/mock-checkout/output.json'))

    def test_run_generates_json_by_default(self):
        runner, port = self.create_runner_and_setup_results_template()
        filesystem = port.host.filesystem
        output_json_path = filesystem.join(port.perf_results_directory(), runner._DEFAULT_JSON_FILENAME)
        results_page_path = filesystem.splitext(output_json_path)[0] + '.html'

        self.assertFalse(filesystem.isfile(output_json_path))
        self.assertFalse(filesystem.isfile(results_page_path))

        self._test_run_with_json_output(runner, port.host.filesystem)

        self.assertEqual(json.loads(port.host.filesystem.read_text_file(output_json_path)), [{
            "timestamp": 123456789, "results": self._event_target_wrapper_and_inspector_results,
            "webkit-revision": "5678", "branch": "webkit-trunk"}])

        self.assertTrue(filesystem.isfile(output_json_path))
        self.assertTrue(filesystem.isfile(results_page_path))

    def test_run_generates_and_shows_results_page(self):
        runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json'])
        page_shown = []
        port.show_results_html_file = lambda path: page_shown.append(path)
        filesystem = port.host.filesystem
        self._test_run_with_json_output(runner, filesystem)

        expected_entry = {"timestamp": 123456789, "results": self._event_target_wrapper_and_inspector_results,
            "webkit-revision": "5678", "branch": "webkit-trunk"}

        self.maxDiff = None
        json_output = port.host.filesystem.read_text_file('/mock-checkout/output.json')
        self.assertEqual(json.loads(json_output), [expected_entry])
        self.assertEqual(filesystem.read_text_file('/mock-checkout/output.html'),
            'BEGIN<script src="/test.checkout/some.js"></script><script src="/test.checkout/other.js"></script>'
            '<script>%s</script>END' % json_output)
        self.assertEqual(page_shown[0], '/mock-checkout/output.html')

        self._test_run_with_json_output(runner, filesystem)
        json_output = port.host.filesystem.read_text_file('/mock-checkout/output.json')
        self.assertEqual(json.loads(json_output), [expected_entry, expected_entry])
        self.assertEqual(filesystem.read_text_file('/mock-checkout/output.html'),
            'BEGIN<script src="/test.checkout/some.js"></script><script src="/test.checkout/other.js"></script>'
            '<script>%s</script>END' % json_output)

    def test_run_respects_no_show_results(self):
        show_results_html_file = lambda path: page_shown.append(path)

        runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json'])
        page_shown = []
        port.show_results_html_file = show_results_html_file
        self._test_run_with_json_output(runner, port.host.filesystem)
        self.assertEqual(page_shown[0], '/mock-checkout/output.html')

        runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json',
            '--no-show-results'])
        page_shown = []
        port.show_results_html_file = show_results_html_file
        self._test_run_with_json_output(runner, port.host.filesystem)
        self.assertEqual(page_shown, [])

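    # Merging new results into a corrupt output.json (or one that isn't a list
    # of past results) should exit with EXIT_CODE_BAD_MERGE rather than crash.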
    def test_run_with_bad_output_json(self):
        runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json'])
        port.host.filesystem.write_text_file('/mock-checkout/output.json', 'bad json')
        self._test_run_with_json_output(runner, port.host.filesystem, expected_exit_code=PerfTestsRunner.EXIT_CODE_BAD_MERGE)
        port.host.filesystem.write_text_file('/mock-checkout/output.json', '{"another bad json": "1"}')
        self._test_run_with_json_output(runner, port.host.filesystem, expected_exit_code=PerfTestsRunner.EXIT_CODE_BAD_MERGE)

    def test_run_with_slave_config_json(self):
        runner, port = self.create_runner(args=['--output-json-path=/mock-checkout/output.json',
            '--source-json-path=/mock-checkout/slave-config.json', '--test-results-server=some.host'])
        port.host.filesystem.write_text_file('/mock-checkout/slave-config.json', '{"key": "value"}')
        self._test_run_with_json_output(runner, port.host.filesystem, upload_succeeds=True)
        self.assertEqual(runner.load_output_json(), {
            "timestamp": 123456789, "results": self._event_target_wrapper_and_inspector_results,
            "webkit-revision": "5678", "branch": "webkit-trunk", "key": "value"})

    def test_run_with_bad_slave_config_json(self):
        runner, port = self.create_runner(args=['--output-json-path=/mock-checkout/output.json',
            '--source-json-path=/mock-checkout/slave-config.json', '--test-results-server=some.host'])
        logs = self._test_run_with_json_output(runner, port.host.filesystem, expected_exit_code=PerfTestsRunner.EXIT_CODE_BAD_SOURCE_JSON)
        self.assertTrue('Missing slave configuration JSON file: /mock-checkout/slave-config.json' in logs)
        port.host.filesystem.write_text_file('/mock-checkout/slave-config.json', 'bad json')
        self._test_run_with_json_output(runner, port.host.filesystem, expected_exit_code=PerfTestsRunner.EXIT_CODE_BAD_SOURCE_JSON)
        port.host.filesystem.write_text_file('/mock-checkout/slave-config.json', '["another bad json"]')
        self._test_run_with_json_output(runner, port.host.filesystem, expected_exit_code=PerfTestsRunner.EXIT_CODE_BAD_SOURCE_JSON)

    def test_run_with_multiple_repositories(self):
        runner, port = self.create_runner(args=['--output-json-path=/mock-checkout/output.json',
            '--test-results-server=some.host'])
        port.repository_paths = lambda: [('webkit', '/mock-checkout'), ('some', '/mock-checkout/some')]
        self._test_run_with_json_output(runner, port.host.filesystem, upload_succeeds=True)
        self.assertEqual(runner.load_output_json(), {
            "timestamp": 123456789, "results": self._event_target_wrapper_and_inspector_results,
            "webkit-revision": "5678", "some-revision": "5678", "branch": "webkit-trunk"})

    def test_run_with_upload_json(self):
        runner, port = self.create_runner(args=['--output-json-path=/mock-checkout/output.json',
            '--test-results-server', 'some.host', '--platform', 'platform1', '--builder-name', 'builder1', '--build-number', '123'])

        self._test_run_with_json_output(runner, port.host.filesystem, upload_succeeds=True)
        generated_json = json.loads(port.host.filesystem.files['/mock-checkout/output.json'])
        self.assertEqual(generated_json['platform'], 'platform1')
        self.assertEqual(generated_json['builder-name'], 'builder1')
        self.assertEqual(generated_json['build-number'], 123)

        self._test_run_with_json_output(runner, port.host.filesystem, upload_succeeds=False, expected_exit_code=PerfTestsRunner.EXIT_CODE_FAILED_UPLOADING)

    def test_upload_json(self):
        runner, port = self.create_runner()
        port.host.filesystem.files['/mock-checkout/some.json'] = 'some content'

        called = []
        upload_single_text_file_throws = False
        upload_single_text_file_return_value = StringIO.StringIO('OK')

        class MockFileUploader:
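            # Methods take 'mock' as their instance argument so that 'self'
            # keeps referring to the enclosing test case for assertions.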
            def __init__(mock, url, timeout):
                self.assertEqual(url, 'https://some.host/api/test/report')
                self.assertTrue(isinstance(timeout, int) and timeout)
                called.append('FileUploader')

            def upload_single_text_file(mock, filesystem, content_type, filename):
                self.assertEqual(filesystem, port.host.filesystem)
                self.assertEqual(content_type, 'application/json')
                self.assertEqual(filename, 'some.json')
                called.append('upload_single_text_file')
                if upload_single_text_file_throws:
                    raise Exception("Some exception")
                return upload_single_text_file_return_value

        runner._upload_json('some.host', 'some.json', MockFileUploader)
        self.assertEqual(called, ['FileUploader', 'upload_single_text_file'])

        output = OutputCapture()
        output.capture_output()
        upload_single_text_file_return_value = StringIO.StringIO('Some error')
        runner._upload_json('some.host', 'some.json', MockFileUploader)
        _, _, logs = output.restore_output()
        self.assertEqual(logs, 'Uploaded JSON but got a bad response:\nSome error\n')

        # An exception raised by upload_single_text_file shouldn't blow up _upload_json.
        called = []
        upload_single_text_file_throws = True
        runner._upload_json('some.host', 'some.json', MockFileUploader)
        self.assertEqual(called, ['FileUploader', 'upload_single_text_file'])

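    # Writes a file into the mock PerformanceTests tree, creating the
    # directory if needed, so that _collect_tests() can discover it.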
    def _add_file(self, runner, dirname, filename, content=True):
        dirname = runner._host.filesystem.join(runner._base_path, dirname) if dirname else runner._base_path
        runner._host.filesystem.maybe_make_directory(dirname)
        runner._host.filesystem.files[runner._host.filesystem.join(dirname, filename)] = content

    def test_collect_tests(self):
        runner, port = self.create_runner()
        self._add_file(runner, 'inspector', 'a_file.html', 'a content')
        tests = runner._collect_tests()
        self.assertEqual(len(tests), 1)

    def _collect_tests_and_sort_test_name(self, runner):
        return sorted([test.test_name() for test in runner._collect_tests()])

    def test_collect_tests_with_multiple_files(self):
        runner, port = self.create_runner(args=['PerformanceTests/test1.html', 'test2.html'])

        def add_file(filename):
            port.host.filesystem.files[runner._host.filesystem.join(runner._base_path, filename)] = 'some content'

        add_file('test1.html')
        add_file('test2.html')
        add_file('test3.html')
        port.host.filesystem.chdir(runner._port.perf_tests_dir()[:runner._port.perf_tests_dir().rfind(runner._host.filesystem.sep)])
        self.assertEqual(self._collect_tests_and_sort_test_name(runner), ['test1.html', 'test2.html'])

    def test_collect_tests_with_skipped_list(self):
        runner, port = self.create_runner()

        self._add_file(runner, 'inspector', 'test1.html')
        self._add_file(runner, 'inspector', 'unsupported_test1.html')
        self._add_file(runner, 'inspector', 'test2.html')
        self._add_file(runner, 'inspector/resources', 'resource_file.html')
        self._add_file(runner, 'unsupported', 'unsupported_test2.html')
        port.skipped_perf_tests = lambda: ['inspector/unsupported_test1.html', 'unsupported']
        self.assertEqual(self._collect_tests_and_sort_test_name(runner), ['inspector/test1.html', 'inspector/test2.html'])

    def test_collect_tests_with_skipped_list_and_force(self):
        runner, port = self.create_runner(args=['--force'])

        self._add_file(runner, 'inspector', 'test1.html')
        self._add_file(runner, 'inspector', 'unsupported_test1.html')
        self._add_file(runner, 'inspector', 'test2.html')
        self._add_file(runner, 'inspector/resources', 'resource_file.html')
        self._add_file(runner, 'unsupported', 'unsupported_test2.html')
        port.skipped_perf_tests = lambda: ['inspector/unsupported_test1.html', 'unsupported']
        self.assertEqual(self._collect_tests_and_sort_test_name(runner), ['inspector/test1.html', 'inspector/test2.html', 'inspector/unsupported_test1.html', 'unsupported/unsupported_test2.html'])

    def test_collect_tests_with_page_load_svg(self):
        runner, port = self.create_runner()
        self._add_file(runner, 'PageLoad', 'some-svg-test.svg')
        tests = runner._collect_tests()
        self.assertEqual(len(tests), 1)
        self.assertEqual(tests[0].__class__.__name__, 'PageLoadingPerfTest')

    def test_collect_tests_should_ignore_replay_tests_by_default(self):
        runner, port = self.create_runner()
        self._add_file(runner, 'Replay', 'www.webkit.org.replay')
        self.assertEqual(runner._collect_tests(), [])

    def test_collect_tests_with_replay_tests(self):
        runner, port = self.create_runner(args=['--replay'])
        self._add_file(runner, 'Replay', 'www.webkit.org.replay')
        tests = runner._collect_tests()
        self.assertEqual(len(tests), 1)
        self.assertEqual(tests[0].__class__.__name__, 'ReplayPerfTest')

    def test_parse_args(self):
        runner, port = self.create_runner()
        options, args = PerfTestsRunner._parse_args([
                '--build-directory=folder42',
                '--platform=platform42',
                '--builder-name', 'webkit-mac-1',
                '--build-number=56',
                '--time-out-ms=42',
                '--output-json-path=a/output.json',
                '--source-json-path=a/source.json',
                '--test-results-server=somehost',
                '--debug'])
        self.assertEqual(options.build, True)
        self.assertEqual(options.build_directory, 'folder42')
        self.assertEqual(options.platform, 'platform42')
        self.assertEqual(options.builder_name, 'webkit-mac-1')
        self.assertEqual(options.build_number, '56')
        self.assertEqual(options.time_out_ms, '42')
        self.assertEqual(options.configuration, 'Debug')
        self.assertEqual(options.output_json_path, 'a/output.json')
        self.assertEqual(options.source_json_path, 'a/source.json')
        self.assertEqual(options.test_results_server, 'somehost')


if __name__ == '__main__':
    unittest.main()