95e6cc4eee09bb5ff16c548b34784cac1b697c25
[WebKit-https.git] / Tools / Scripts / webkitpy / performance_tests / perftestsrunner_unittest.py
1 #!/usr/bin/python
2 # Copyright (C) 2012 Google Inc. All rights reserved.
3 #
4 # Redistribution and use in source and binary forms, with or without
5 # modification, are permitted provided that the following conditions are
6 # met:
7 #
8 #     * Redistributions of source code must retain the above copyright
9 # notice, this list of conditions and the following disclaimer.
10 #     * Redistributions in binary form must reproduce the above
11 # copyright notice, this list of conditions and the following disclaimer
12 # in the documentation and/or other materials provided with the
13 # distribution.
14 #     * Neither the name of Google Inc. nor the names of its
15 # contributors may be used to endorse or promote products derived from
16 # this software without specific prior written permission.
17 #
18 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
19 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
20 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
21 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
22 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
23 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
24 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
25 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
26 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
28 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29
30 """Unit tests for run_perf_tests."""
31
32 import StringIO
33 import json
34 import unittest
35
36 from webkitpy.common.host_mock import MockHost
37 from webkitpy.common.system.filesystem_mock import MockFileSystem
38 from webkitpy.common.system.outputcapture import OutputCapture
39 from webkitpy.layout_tests.port.driver import DriverInput, DriverOutput
40 from webkitpy.layout_tests.port.test import TestPort
41 from webkitpy.layout_tests.views import printing
42 from webkitpy.performance_tests.perftest import ChromiumStylePerfTest
43 from webkitpy.performance_tests.perftest import PerfTest
44 from webkitpy.performance_tests.perftestsrunner import PerfTestsRunner
45
46
class MainTest(unittest.TestCase):
    """Unit tests for PerfTestsRunner.

    Covers single-test runs, test-set runs, JSON output generation and
    upload, results-page generation, and test collection/skipping.  A fake
    driver (TestDriver) fabricates DumpRenderTree output per test name so
    no real port or browser is needed.
    """

    def assertWritten(self, stream, contents):
        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(stream.buflist, contents)

    class TestDriver:
        """Fake driver that synthesizes DriverOutput based on the test name."""

        def run_test(self, driver_input):
            text = ''
            timeout = False
            crash = False
            if driver_input.test_name.endswith('pass.html'):
                text = 'RESULT group_name: test_name= 42 ms'
            elif driver_input.test_name.endswith('timeout.html'):
                timeout = True
            elif driver_input.test_name.endswith('failed.html'):
                # None output simulates a test that produced nothing.
                text = None
            elif driver_input.test_name.endswith('tonguey.html'):
                text = 'we are not expecting an output from perf tests but RESULT blablabla'
            elif driver_input.test_name.endswith('crash.html'):
                crash = True
            elif driver_input.test_name.endswith('event-target-wrapper.html'):
                text = """Running 20 times
Ignoring warm-up run (1502)
1504
1505
1510
1504
1507
1509
1510
1487
1488
1472
1472
1488
1473
1472
1475
1487
1486
1486
1475
1471

avg 1489.05
median 1487
stdev 14.46
min 1471
max 1510
"""
            elif driver_input.test_name.endswith('some-parser.html'):
                text = """Running 20 times
Ignoring warm-up run (1115)

avg 1100
median 1101
stdev 11
min 1080
max 1120
"""
            return DriverOutput(text, '', '', '', crash=crash, timeout=timeout)

        def start(self):
            """do nothing"""

        def stop(self):
            """do nothing"""

    def create_runner(self, args=None, driver_class=TestDriver):
        """Build a PerfTestsRunner wired to a TestPort and a fake driver.

        Returns (runner, test_port).  `args` defaults to no arguments; a
        mutable default list is deliberately avoided here.
        """
        args = args or []
        options, parsed_args = PerfTestsRunner._parse_args(args)
        test_port = TestPort(host=MockHost(), options=options)
        test_port.create_driver = lambda worker_number=None, no_timeout=False: driver_class()

        runner = PerfTestsRunner(args=args, port=test_port)
        runner._host.filesystem.maybe_make_directory(runner._base_path, 'inspector')
        runner._host.filesystem.maybe_make_directory(runner._base_path, 'Bindings')
        runner._host.filesystem.maybe_make_directory(runner._base_path, 'Parser')
        return runner, test_port

    def run_test(self, test_name):
        """Run a single Chromium-style perf test through the fake driver."""
        runner, port = self.create_runner()
        driver = MainTest.TestDriver()
        return runner._run_single_test(ChromiumStylePerfTest(port, test_name, runner._host.filesystem.join('some-dir', test_name)), driver)

    def test_run_passing_test(self):
        self.assertTrue(self.run_test('pass.html'))

    def test_run_silent_test(self):
        self.assertFalse(self.run_test('silent.html'))

    def test_run_failed_test(self):
        self.assertFalse(self.run_test('failed.html'))

    def test_run_tonguey_test(self):
        self.assertFalse(self.run_test('tonguey.html'))

    def test_run_timeout_test(self):
        self.assertFalse(self.run_test('timeout.html'))

    def test_run_crash_test(self):
        self.assertFalse(self.run_test('crash.html'))

    def _tests_for_runner(self, runner, test_names):
        """Map test names onto PerfTest/ChromiumStylePerfTest instances."""
        filesystem = runner._host.filesystem
        tests = []
        for test in test_names:
            path = filesystem.join(runner._base_path, test)
            if test.startswith('inspector/'):
                tests.append(ChromiumStylePerfTest(runner._port, test, path))
            else:
                tests.append(PerfTest(runner._port, test, path))
        return tests

    def test_run_test_set(self):
        runner, port = self.create_runner()
        tests = self._tests_for_runner(runner, ['inspector/pass.html', 'inspector/silent.html', 'inspector/failed.html',
            'inspector/tonguey.html', 'inspector/timeout.html', 'inspector/crash.html'])
        output = OutputCapture()
        output.capture_output()
        try:
            unexpected_result_count = runner._run_tests_set(tests, port)
        finally:
            stdout, stderr, log = output.restore_output()
        # Only pass.html succeeds; the other five are unexpected results.
        self.assertEqual(unexpected_result_count, len(tests) - 1)
        self.assertTrue('\nRESULT group_name: test_name= 42 ms\n' in log)

    def test_run_test_set_kills_drt_per_run(self):

        class TestDriverWithStopCount(MainTest.TestDriver):
            stop_count = 0

            def stop(self):
                TestDriverWithStopCount.stop_count += 1

        runner, port = self.create_runner(driver_class=TestDriverWithStopCount)

        tests = self._tests_for_runner(runner, ['inspector/pass.html', 'inspector/silent.html', 'inspector/failed.html',
            'inspector/tonguey.html', 'inspector/timeout.html', 'inspector/crash.html'])
        unexpected_result_count = runner._run_tests_set(tests, port)

        # The driver must be stopped once per test, even for failures.
        self.assertEqual(TestDriverWithStopCount.stop_count, 6)

    def test_run_test_pause_before_testing(self):
        class TestDriverWithStartCount(MainTest.TestDriver):
            start_count = 0

            def start(self):
                TestDriverWithStartCount.start_count += 1

        runner, port = self.create_runner(args=["--pause-before-testing"], driver_class=TestDriverWithStartCount)
        tests = self._tests_for_runner(runner, ['inspector/pass.html'])

        output = OutputCapture()
        output.capture_output()
        try:
            unexpected_result_count = runner._run_tests_set(tests, port)
            self.assertEqual(TestDriverWithStartCount.start_count, 1)
        finally:
            stdout, stderr, log = output.restore_output()
        self.assertEqual(stderr, "Ready to run test?\n")
        self.assertEqual(log, "Running inspector/pass.html (1 of 1)\nRESULT group_name: test_name= 42 ms\n\n")

    def test_run_test_set_for_parser_tests(self):
        runner, port = self.create_runner()
        tests = self._tests_for_runner(runner, ['Bindings/event-target-wrapper.html', 'Parser/some-parser.html'])
        output = OutputCapture()
        output.capture_output()
        try:
            unexpected_result_count = runner._run_tests_set(tests, port)
        finally:
            stdout, stderr, log = output.restore_output()
        self.assertEqual(unexpected_result_count, 0)
        self.assertEqual(log, '\n'.join(['Running Bindings/event-target-wrapper.html (1 of 2)',
        'RESULT Bindings: event-target-wrapper= 1489.05 ms',
        'median= 1487.0 ms, stdev= 14.46 ms, min= 1471.0 ms, max= 1510.0 ms',
        '',
        'Running Parser/some-parser.html (2 of 2)',
        'RESULT Parser: some-parser= 1100.0 ms',
        'median= 1101.0 ms, stdev= 11.0 ms, min= 1080.0 ms, max= 1120.0 ms',
        '', '']))

    def _test_run_with_json_output(self, runner, filesystem, upload_succeeds=True, expected_exit_code=0):
        """Run the runner end to end with a mocked JSON uploader.

        Returns True if the uploader was invoked.  `upload_succeeds`
        (spelling fixed) controls the mocked upload result.
        """
        filesystem.write_text_file(runner._base_path + '/inspector/pass.html', 'some content')
        filesystem.write_text_file(runner._base_path + '/Bindings/event-target-wrapper.html', 'some content')

        # A one-element list so the closure can record the call.
        uploaded = [False]

        def mock_upload_json(hostname, json_path):
            self.assertEqual(hostname, 'some.host')
            self.assertEqual(json_path, '/mock-checkout/output.json')
            uploaded[0] = True
            return upload_succeeds

        runner._upload_json = mock_upload_json
        runner._timestamp = 123456789
        output_capture = OutputCapture()
        output_capture.capture_output()
        try:
            self.assertEqual(runner.run(), expected_exit_code)
        finally:
            stdout, stderr, logs = output_capture.restore_output()

        if not expected_exit_code:
            self.assertEqual(logs, '\n'.join([
                'Running 2 tests',
                'Running Bindings/event-target-wrapper.html (1 of 2)',
                'RESULT Bindings: event-target-wrapper= 1489.05 ms',
                'median= 1487.0 ms, stdev= 14.46 ms, min= 1471.0 ms, max= 1510.0 ms',
                '',
                'Running inspector/pass.html (2 of 2)',
                'RESULT group_name: test_name= 42 ms',
                '',
                '']))

        return uploaded[0]

    def test_run_with_json_output(self):
        runner, port = self.create_runner(args=['--output-json-path=/mock-checkout/output.json',
            '--test-results-server=some.host'])
        self._test_run_with_json_output(runner, port.host.filesystem)
        self.assertEqual(json.loads(port.host.filesystem.read_text_file('/mock-checkout/output.json')), {
            "timestamp": 123456789, "results":
            {"Bindings/event-target-wrapper": {"max": 1510, "avg": 1489.05, "median": 1487, "min": 1471, "stdev": 14.46, "unit": "ms"},
            "inspector/pass.html:group_name:test_name": 42},
            "webkit-revision": 5678, "branch": "webkit-trunk"})

    def create_runner_and_setup_results_template(self, args=None):
        """Like create_runner, but also installs the results-page template."""
        runner, port = self.create_runner(args or [])
        filesystem = port.host.filesystem
        filesystem.write_text_file(runner._base_path + '/resources/results-template.html', 'BEGIN<?WebKitPerfTestRunnerInsertionPoint?>END')
        filesystem.write_text_file(runner._base_path + '/Dromaeo/resources/dromaeo/web/lib/jquery-1.6.4.js', 'jquery content')
        return runner, port

    def test_run_respects_no_results(self):
        runner, port = self.create_runner(args=['--output-json-path=/mock-checkout/output.json',
            '--test-results-server=some.host', '--no-results'])
        self.assertFalse(self._test_run_with_json_output(runner, port.host.filesystem))
        self.assertFalse(port.host.filesystem.isfile('/mock-checkout/output.json'))

    def test_run_generates_json_by_default(self):
        runner, port = self.create_runner_and_setup_results_template()
        filesystem = port.host.filesystem
        output_json_path = filesystem.join(port.perf_results_directory(), runner._DEFAULT_JSON_FILENAME)
        results_page_path = filesystem.splitext(output_json_path)[0] + '.html'

        self.assertFalse(filesystem.isfile(output_json_path))
        self.assertFalse(filesystem.isfile(results_page_path))

        self._test_run_with_json_output(runner, port.host.filesystem)

        self.assertEqual(json.loads(port.host.filesystem.read_text_file(output_json_path)), [{
            "timestamp": 123456789, "results":
            {"Bindings/event-target-wrapper": {"max": 1510, "avg": 1489.05, "median": 1487, "min": 1471, "stdev": 14.46, "unit": "ms"},
            "inspector/pass.html:group_name:test_name": 42},
            "webkit-revision": 5678, "branch": "webkit-trunk"}])

        self.assertTrue(filesystem.isfile(output_json_path))
        self.assertTrue(filesystem.isfile(results_page_path))

    def test_run_generates_and_show_results_page(self):
        runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json'])
        page_shown = []
        port.show_results_html_file = lambda path: page_shown.append(path)
        filesystem = port.host.filesystem
        self._test_run_with_json_output(runner, filesystem)

        expected_entry = {"timestamp": 123456789, "results": {"Bindings/event-target-wrapper":
            {"max": 1510, "avg": 1489.05, "median": 1487, "min": 1471, "stdev": 14.46, "unit": "ms"},
            "inspector/pass.html:group_name:test_name": 42}, "webkit-revision": 5678, "branch": "webkit-trunk"}

        self.maxDiff = None
        json_output = port.host.filesystem.read_text_file('/mock-checkout/output.json')
        self.assertEqual(json.loads(json_output), [expected_entry])
        self.assertEqual(filesystem.read_text_file('/mock-checkout/output.html'),
            'BEGIN<script>jquery content</script><script id="json">' + json_output + '</script>END')
        self.assertEqual(page_shown[0], '/mock-checkout/output.html')

        # Running again must append a second entry to the existing JSON.
        self._test_run_with_json_output(runner, filesystem)
        json_output = port.host.filesystem.read_text_file('/mock-checkout/output.json')
        self.assertEqual(json.loads(json_output), [expected_entry, expected_entry])
        self.assertEqual(filesystem.read_text_file('/mock-checkout/output.html'),
            'BEGIN<script>jquery content</script><script id="json">' + json_output + '</script>END')

    def test_run_with_bad_output_json(self):
        runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json'])
        port.host.filesystem.write_text_file('/mock-checkout/output.json', 'bad json')
        self._test_run_with_json_output(runner, port.host.filesystem, expected_exit_code=PerfTestsRunner.EXIT_CODE_BAD_MERGE)
        port.host.filesystem.write_text_file('/mock-checkout/output.json', '{"another bad json": "1"}')
        self._test_run_with_json_output(runner, port.host.filesystem, expected_exit_code=PerfTestsRunner.EXIT_CODE_BAD_MERGE)

    def test_run_with_json_source(self):
        runner, port = self.create_runner(args=['--output-json-path=/mock-checkout/output.json',
            '--source-json-path=/mock-checkout/source.json', '--test-results-server=some.host'])
        port.host.filesystem.write_text_file('/mock-checkout/source.json', '{"key": "value"}')
        self._test_run_with_json_output(runner, port.host.filesystem)
        self.assertEqual(json.loads(port.host.filesystem.files['/mock-checkout/output.json']), {
            "timestamp": 123456789, "results":
            {"Bindings/event-target-wrapper": {"max": 1510, "avg": 1489.05, "median": 1487, "min": 1471, "stdev": 14.46, "unit": "ms"},
            "inspector/pass.html:group_name:test_name": 42},
            "webkit-revision": 5678, "branch": "webkit-trunk",
            "key": "value"})

    def test_run_with_bad_json_source(self):
        runner, port = self.create_runner(args=['--output-json-path=/mock-checkout/output.json',
            '--source-json-path=/mock-checkout/source.json', '--test-results-server=some.host'])
        self._test_run_with_json_output(runner, port.host.filesystem, expected_exit_code=PerfTestsRunner.EXIT_CODE_BAD_SOURCE_JSON)
        port.host.filesystem.write_text_file('/mock-checkout/source.json', 'bad json')
        self._test_run_with_json_output(runner, port.host.filesystem, expected_exit_code=PerfTestsRunner.EXIT_CODE_BAD_SOURCE_JSON)
        port.host.filesystem.write_text_file('/mock-checkout/source.json', '["another bad json"]')
        self._test_run_with_json_output(runner, port.host.filesystem, expected_exit_code=PerfTestsRunner.EXIT_CODE_BAD_SOURCE_JSON)

    def test_run_with_multiple_repositories(self):
        runner, port = self.create_runner(args=['--output-json-path=/mock-checkout/output.json',
            '--test-results-server=some.host'])
        port.repository_paths = lambda: [('webkit', '/mock-checkout'), ('some', '/mock-checkout/some')]
        self._test_run_with_json_output(runner, port.host.filesystem)
        self.assertEqual(json.loads(port.host.filesystem.files['/mock-checkout/output.json']), {
            "timestamp": 123456789, "results":
            {"Bindings/event-target-wrapper": {"max": 1510, "avg": 1489.05, "median": 1487, "min": 1471, "stdev": 14.46, "unit": "ms"},
            "inspector/pass.html:group_name:test_name": 42.0},
            "webkit-revision": 5678, "some-revision": 5678, "branch": "webkit-trunk"})

    def test_run_with_upload_json(self):
        runner, port = self.create_runner(args=['--output-json-path=/mock-checkout/output.json',
            '--test-results-server', 'some.host', '--platform', 'platform1', '--builder-name', 'builder1', '--build-number', '123'])

        self._test_run_with_json_output(runner, port.host.filesystem, upload_succeeds=True)
        generated_json = json.loads(port.host.filesystem.files['/mock-checkout/output.json'])
        self.assertEqual(generated_json['platform'], 'platform1')
        self.assertEqual(generated_json['builder-name'], 'builder1')
        self.assertEqual(generated_json['build-number'], 123)

        self._test_run_with_json_output(runner, port.host.filesystem, upload_succeeds=False, expected_exit_code=PerfTestsRunner.EXIT_CODE_FAILED_UPLOADING)

    def test_upload_json(self):
        runner, port = self.create_runner()
        port.host.filesystem.files['/mock-checkout/some.json'] = 'some content'

        called = []
        upload_single_text_file_throws = False
        upload_single_text_file_return_value = StringIO.StringIO('OK')

        class MockFileUploader:
            # `mock` plays the role of self; the closed-over outer `self`
            # is the TestCase, used for assertions.
            def __init__(mock, url, timeout):
                self.assertEqual(url, 'https://some.host/api/test/report')
                self.assertTrue(isinstance(timeout, int) and timeout)
                called.append('FileUploader')

            def upload_single_text_file(mock, filesystem, content_type, filename):
                self.assertEqual(filesystem, port.host.filesystem)
                self.assertEqual(content_type, 'application/json')
                self.assertEqual(filename, 'some.json')
                called.append('upload_single_text_file')
                if upload_single_text_file_throws:
                    # String exceptions were removed in Python 2.6; raise a
                    # real exception object instead.
                    raise Exception("Some exception")
                return upload_single_text_file_return_value

        runner._upload_json('some.host', 'some.json', MockFileUploader)
        self.assertEqual(called, ['FileUploader', 'upload_single_text_file'])

        output = OutputCapture()
        output.capture_output()
        upload_single_text_file_return_value = StringIO.StringIO('Some error')
        runner._upload_json('some.host', 'some.json', MockFileUploader)
        _, _, logs = output.restore_output()
        self.assertEqual(logs, 'Uploaded JSON but got a bad response:\nSome error\n')

        # Throwing an exception in upload_single_text_file shouldn't blow up _upload_json
        called = []
        upload_single_text_file_throws = True
        runner._upload_json('some.host', 'some.json', MockFileUploader)
        self.assertEqual(called, ['FileUploader', 'upload_single_text_file'])

    def _add_file(self, runner, dirname, filename, content=True):
        """Create `dirname/filename` under the perf-tests base path."""
        dirname = runner._host.filesystem.join(runner._base_path, dirname) if dirname else runner._base_path
        runner._host.filesystem.maybe_make_directory(dirname)
        runner._host.filesystem.files[runner._host.filesystem.join(dirname, filename)] = content

    def test_collect_tests(self):
        runner, port = self.create_runner()
        self._add_file(runner, 'inspector', 'a_file.html', 'a content')
        tests = runner._collect_tests()
        self.assertEqual(len(tests), 1)

    def _collect_tests_and_sort_test_name(self, runner):
        return sorted([test.test_name() for test in runner._collect_tests()])

    def test_collect_tests_with_multiple_files(self):
        runner, port = self.create_runner(args=['PerformanceTests/test1.html', 'test2.html'])

        def add_file(filename):
            port.host.filesystem.files[runner._host.filesystem.join(runner._base_path, filename)] = 'some content'

        add_file('test1.html')
        add_file('test2.html')
        add_file('test3.html')
        port.host.filesystem.chdir(runner._port.perf_tests_dir()[:runner._port.perf_tests_dir().rfind(runner._host.filesystem.sep)])
        self.assertEqual(self._collect_tests_and_sort_test_name(runner), ['test1.html', 'test2.html'])

    def test_collect_tests_with_skipped_list(self):
        runner, port = self.create_runner()

        self._add_file(runner, 'inspector', 'test1.html')
        self._add_file(runner, 'inspector', 'unsupported_test1.html')
        self._add_file(runner, 'inspector', 'test2.html')
        self._add_file(runner, 'inspector/resources', 'resource_file.html')
        self._add_file(runner, 'unsupported', 'unsupported_test2.html')
        port.skipped_perf_tests = lambda: ['inspector/unsupported_test1.html', 'unsupported']
        self.assertEqual(self._collect_tests_and_sort_test_name(runner), ['inspector/test1.html', 'inspector/test2.html'])

    def test_collect_tests_with_skipped_list_and_force(self):
        # Renamed: this previously redefined (and silently shadowed)
        # test_collect_tests_with_skipped_list above.  With --force the
        # skipped list must be ignored.
        runner, port = self.create_runner(args=['--force'])

        self._add_file(runner, 'inspector', 'test1.html')
        self._add_file(runner, 'inspector', 'unsupported_test1.html')
        self._add_file(runner, 'inspector', 'test2.html')
        self._add_file(runner, 'inspector/resources', 'resource_file.html')
        self._add_file(runner, 'unsupported', 'unsupported_test2.html')
        port.skipped_perf_tests = lambda: ['inspector/unsupported_test1.html', 'unsupported']
        self.assertEqual(self._collect_tests_and_sort_test_name(runner), ['inspector/test1.html', 'inspector/test2.html', 'inspector/unsupported_test1.html', 'unsupported/unsupported_test2.html'])

    def test_collect_tests_with_page_load_svg(self):
        runner, port = self.create_runner()
        self._add_file(runner, 'PageLoad', 'some-svg-test.svg')
        tests = runner._collect_tests()
        self.assertEqual(len(tests), 1)
        self.assertEqual(tests[0].__class__.__name__, 'PageLoadingPerfTest')

    def test_collect_tests_should_ignore_replay_tests_by_default(self):
        runner, port = self.create_runner()
        self._add_file(runner, 'Replay', 'www.webkit.org.replay')
        self.assertEqual(runner._collect_tests(), [])

    def test_collect_tests_with_replay_tests(self):
        runner, port = self.create_runner(args=['--replay'])
        self._add_file(runner, 'Replay', 'www.webkit.org.replay')
        tests = runner._collect_tests()
        self.assertEqual(len(tests), 1)
        self.assertEqual(tests[0].__class__.__name__, 'ReplayPerfTest')

    def test_parse_args(self):
        runner, port = self.create_runner()
        options, args = PerfTestsRunner._parse_args([
                '--build-directory=folder42',
                '--platform=platform42',
                '--builder-name', 'webkit-mac-1',
                '--build-number=56',
                '--time-out-ms=42',
                '--output-json-path=a/output.json',
                '--source-json-path=a/source.json',
                '--test-results-server=somehost',
                '--debug'])
        self.assertEqual(options.build, True)
        self.assertEqual(options.build_directory, 'folder42')
        self.assertEqual(options.platform, 'platform42')
        self.assertEqual(options.builder_name, 'webkit-mac-1')
        self.assertEqual(options.build_number, '56')
        self.assertEqual(options.time_out_ms, '42')
        self.assertEqual(options.configuration, 'Debug')
        self.assertEqual(options.output_json_path, 'a/output.json')
        self.assertEqual(options.source_json_path, 'a/source.json')
        self.assertEqual(options.test_results_server, 'somehost')
510
# Allow running this test file directly from the command line.
if __name__ == '__main__':
    unittest.main()