# Copyright (C) 2012 Google Inc. All rights reserved.
# Copyright (C) 2020 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Integration tests for run_perf_tests."""

import datetime
import json
import logging
import re
import unittest

from webkitpy.common.host_mock import MockHost
from webkitpy.port.driver import DriverOutput
from webkitpy.port.test import TestPort
from webkitpy.performance_tests.perftest import PerfTest
from webkitpy.performance_tests.perftestsrunner import PerfTestsRunner

from webkitcorepy import OutputCapture


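# Each fixture class below pairs the raw text a test page feeds to the driver
# (text) with the log output the runner is expected to print (output) and, where
# applicable, the JSON metrics it is expected to record (results). The `* 4`
# repetition corresponds to one list of iteration values per test-runner
# invocation; these tests exercise the runner's default of four invocations
# (test_run_with_test_runner_count overrides it with --test-runner-count=3).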
class EventTargetWrapperTestData:
    text = """:Time -> [1486, 1471, 1510, 1505, 1478, 1490] ms
"""

    output = """Running Bindings/event-target-wrapper.html (1 of 2)
RESULT Bindings: event-target-wrapper: Time= 1490.0 ms
median= 1488.0 ms, stdev= 14.11751 ms, min= 1471.0 ms, max= 1510.0 ms
Finished: 0.1 s

"""

    results = {'url': 'https://trac.webkit.org/browser/trunk/PerformanceTests/Bindings/event-target-wrapper.html',
        'metrics': {'Time': {'current': [[1486.0, 1471.0, 1510.0, 1505.0, 1478.0, 1490.0]] * 4}}}


class SomeParserTestData:
    text = """:Time -> [1080, 1120, 1095, 1101, 1104] ms
"""

    output = """Running Parser/some-parser.html (2 of 2)
RESULT Parser: some-parser: Time= 1100.0 ms
median= 1101.0 ms, stdev= 13.31402 ms, min= 1080.0 ms, max= 1120.0 ms
Finished: 0.1 s

"""

    results = {'url': 'https://trac.webkit.org/browser/trunk/PerformanceTests/Parser/some-parser.html',
        'metrics': {'Time': {'current': [[1080.0, 1120.0, 1095.0, 1101.0, 1104.0]] * 4}}}


class MemoryTestData:
    text = """:Time -> [1080, 1120, 1095, 1101, 1104] ms

:JSHeap -> [825000, 811000, 848000, 837000, 829000] bytes

:Malloc -> [529000, 511000, 548000, 536000, 521000] bytes
"""

    output = """Running 1 tests
Running Parser/memory-test.html (1 of 1)
RESULT Parser: memory-test: Time= 1100.0 ms
median= 1101.0 ms, stdev= 13.31402 ms, min= 1080.0 ms, max= 1120.0 ms
RESULT Parser: memory-test: JSHeap= 830000.0 bytes
median= 829000.0 bytes, stdev= 12649.11064 bytes, min= 811000.0 bytes, max= 848000.0 bytes
RESULT Parser: memory-test: Malloc= 529000.0 bytes
median= 529000.0 bytes, stdev= 12961.48139 bytes, min= 511000.0 bytes, max= 548000.0 bytes
Finished: 0.1 s
"""

    results = {'current': [[1080, 1120, 1095, 1101, 1104]] * 4}
    js_heap_results = {'current': [[825000, 811000, 848000, 837000, 829000]] * 4}
    malloc_results = {'current': [[529000, 511000, 548000, 536000, 521000]] * 4}


class TestWithSubtestsData:
    text = """subtest:Time -> [1, 2, 3, 4, 5] ms
total-test:Time:Total -> [1, 2, 3, 4, 5] ms
total-test/subsubtest:Time -> [1, 2, 3, 4, 5] ms
:Time -> [1080, 1120, 1095, 1101, 1104] ms
"""

    output = """Running 1 tests
Running Parser/test-with-subtests.html (1 of 1)
RESULT Parser: test-with-subtests: Time= 1100.0 ms
median= 1101.0 ms, stdev= 13.31402 ms, min= 1080.0 ms, max= 1120.0 ms
Finished: 0.1 s
"""

    results = {'url': 'https://trac.webkit.org/browser/trunk/PerformanceTests/Parser/test-with-subtests.html',
        'metrics': {'Time': {'current': [[1080.0, 1120.0, 1095.0, 1101.0, 1104.0]] * 4}},
        'tests': {
            'subtest': {
                'url': 'https://trac.webkit.org/browser/trunk/PerformanceTests/Parser/test-with-subtests.html',
                'metrics': {'Time': {'current': [[1.0, 2.0, 3.0, 4.0, 5.0]] * 4}}},
            'total-test': {
                'url': 'https://trac.webkit.org/browser/trunk/PerformanceTests/Parser/test-with-subtests.html',
                'metrics': {'Time': {'current': [[1.0, 2.0, 3.0, 4.0, 5.0]] * 4, "aggregators": ["Total"]}},
                'tests': {
                    'subsubtest':
                        {'url': 'https://trac.webkit.org/browser/trunk/PerformanceTests/Parser/test-with-subtests.html',
                        'metrics': {'Time': {'current': [[1.0, 2.0, 3.0, 4.0, 5.0]] * 4}}}}}}}


class TestDriver:
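    """Stub driver that returns canned DriverOutput keyed off the requested test name (fixture text, a timeout, or a crash), so no real browser is launched."""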
    def run_test(self, driver_input, stop_when_done):
        text = ''
        timeout = False
        crash = False
        if driver_input.test_name.endswith('pass.html'):
            text = SomeParserTestData.text
        elif driver_input.test_name.endswith('timeout.html'):
            timeout = True
        elif driver_input.test_name.endswith('failed.html'):
            text = None
        elif driver_input.test_name.endswith('tonguey.html'):
            text = 'we are not expecting an output from perf tests but RESULT blablabla'
        elif driver_input.test_name.endswith('crash.html'):
            crash = True
        elif driver_input.test_name.endswith('event-target-wrapper.html'):
            text = EventTargetWrapperTestData.text
        elif driver_input.test_name.endswith('some-parser.html'):
            text = SomeParserTestData.text
        elif driver_input.test_name.endswith('memory-test.html'):
            text = MemoryTestData.text
        elif driver_input.test_name.endswith('test-with-subtests.html'):
            text = TestWithSubtestsData.text
        return DriverOutput(text, '', '', '', crash=crash, timeout=timeout)

    def start(self):
        """do nothing"""

    def stop(self):
        """do nothing"""


class MainTest(unittest.TestCase):
    def _normalize_output(self, log):
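        """Make captured logs deterministic: truncate stdev values to five decimal places and canonicalize the reported run time to 'Finished: 0.1 s'."""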
        return re.sub(r'(stdev=\s+\d+\.\d{5})\d+', r'\1', re.sub(r'Finished: [0-9\.]+ s', 'Finished: 0.1 s', log))

    def _load_output_json(self, runner):
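        """Load the runner's output JSON with "stdev" values truncated to five decimal places, mirroring _normalize_output()."""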
        json_content = runner._host.filesystem.read_text_file(runner._output_json_path())
        return json.loads(re.sub(r'("stdev":\s*\d+\.\d{5})\d+', r'\1', json_content))

    def create_runner(self, args=[], driver_class=TestDriver):
        options, parsed_args = PerfTestsRunner._parse_args(args)
        test_port = TestPort(host=MockHost(), options=options)
        test_port.create_driver = lambda worker_number=None, no_timeout=False: driver_class()

        runner = PerfTestsRunner(args=args, port=test_port)
        runner._host.filesystem.maybe_make_directory(runner._base_path, 'inspector')
        runner._host.filesystem.maybe_make_directory(runner._base_path, 'Bindings')
        runner._host.filesystem.maybe_make_directory(runner._base_path, 'Parser')

        return runner, test_port

    def run_test(self, test_name):
        runner, port = self.create_runner()
        tests = [PerfTest(port, test_name, runner._host.filesystem.join('some-dir', test_name))]
        return runner._run_tests_set(tests) == 0

    def test_run_passing_test(self):
        self.assertTrue(self.run_test('pass.html'))

    def test_run_silent_test(self):
        self.assertFalse(self.run_test('silent.html'))

    def test_run_failed_test(self):
        self.assertFalse(self.run_test('failed.html'))

    def test_run_tonguey_test(self):
        self.assertFalse(self.run_test('tonguey.html'))

    def test_run_timeout_test(self):
        self.assertFalse(self.run_test('timeout.html'))

    def test_run_crash_test(self):
        self.assertFalse(self.run_test('crash.html'))

    def _tests_for_runner(self, runner, test_names):
        filesystem = runner._host.filesystem
        tests = []
        for test in test_names:
            path = filesystem.join(runner._base_path, test)
            tests.append(PerfTest(runner._port, test, path))
        return tests

    def test_run_test_set_kills_drt_per_run(self):
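        """The driver should be stopped after every test run; with the six tests below (including failing, crashing, and timing-out ones) the runner ends up calling stop() nine times."""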

        class TestDriverWithStopCount(TestDriver):
            stop_count = 0

            def stop(self):
                TestDriverWithStopCount.stop_count += 1

        runner, port = self.create_runner(driver_class=TestDriverWithStopCount)

        tests = self._tests_for_runner(runner, ['inspector/pass.html', 'inspector/silent.html', 'inspector/failed.html',
            'inspector/tonguey.html', 'inspector/timeout.html', 'inspector/crash.html'])
        unexpected_result_count = runner._run_tests_set(tests)

        self.assertEqual(TestDriverWithStopCount.stop_count, 9)

    def test_run_test_set_for_parser_tests(self):
        runner, port = self.create_runner()
        tests = self._tests_for_runner(runner, ['Bindings/event-target-wrapper.html', 'Parser/some-parser.html'])
        with OutputCapture(level=logging.INFO) as capturer:
            unexpected_result_count = runner._run_tests_set(tests)

        self.assertEqual(unexpected_result_count, 0)
        self.assertEqual(
            self._normalize_output(capturer.root.log.getvalue()),
            EventTargetWrapperTestData.output + SomeParserTestData.output,
        )

    def test_run_memory_test(self):
        runner, port = self.create_runner_and_setup_results_template()
        runner._timestamp = 123456789
        port.host.filesystem.write_text_file(runner._base_path + '/Parser/memory-test.html', 'some content')

        with OutputCapture(level=logging.INFO) as capturer:
            unexpected_result_count = runner.run()

        self.assertEqual(unexpected_result_count, 0)
        self.assertEqual(
            self._normalize_output(capturer.root.log.getvalue()),
            MemoryTestData.output + '\nMOCK: user.open_url: file://...\n',
        )
        parser_tests = self._load_output_json(runner)[0]['tests']['Parser']['tests']
        self.assertEqual(parser_tests['memory-test']['metrics']['Time'], MemoryTestData.results)
        self.assertEqual(parser_tests['memory-test']['metrics']['JSHeap'], MemoryTestData.js_heap_results)
        self.assertEqual(parser_tests['memory-test']['metrics']['Malloc'], MemoryTestData.malloc_results)

    def test_run_test_with_subtests(self):
        runner, port = self.create_runner_and_setup_results_template()
        runner._timestamp = 123456789
        port.host.filesystem.write_text_file(runner._base_path + '/Parser/test-with-subtests.html', 'some content')

        with OutputCapture(level=logging.INFO) as capturer:
            unexpected_result_count = runner.run()

        self.assertEqual(unexpected_result_count, 0)
        self.assertEqual(
            self._normalize_output(capturer.root.log.getvalue()),
            TestWithSubtestsData.output + '\nMOCK: user.open_url: file://...\n',
        )
        parser_tests = self._load_output_json(runner)[0]['tests']['Parser']['tests']
        self.maxDiff = None
        self.assertEqual(parser_tests['test-with-subtests'], TestWithSubtestsData.results)

    def _test_run_with_json_output(self, runner, filesystem, upload_succeeds=False, results_shown=True, expected_exit_code=0, repeat=1, compare_logs=True):
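        """Shared helper: write two canned test files, stub out _upload_json, run the runner, check its exit code, upload flag, and (optionally) its log output, and return the captured log."""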
        filesystem.write_text_file(runner._base_path + '/Parser/some-parser.html', 'some content')
        filesystem.write_text_file(runner._base_path + '/Bindings/event-target-wrapper.html', 'some content')

        uploaded = [False]

        def mock_upload_json(hostname, json_path, host_path=None):
            # FIXME: Get rid of the hard-coded perf.webkit.org once we've completed the transition.
            self.assertIn(hostname, ['some.host'])
            self.assertIn(json_path, ['/mock-checkout/output.json'])
            self.assertIn(host_path, [None, '/api/report'])
            uploaded[0] = upload_succeeds
            return upload_succeeds

        runner._upload_json = mock_upload_json
        runner._timestamp = 123456789
        runner._utc_timestamp = datetime.datetime(2013, 2, 8, 15, 19, 37, 460000)
        with OutputCapture(level=logging.INFO) as capturer:
            self.assertEqual(runner.run(), expected_exit_code)

        if not expected_exit_code and compare_logs:
            expected_logs = ''
            for i in range(repeat):
                runs = ' (Run %d of %d)' % (i + 1, repeat) if repeat > 1 else ''
                expected_logs += 'Running 2 tests%s\n' % runs + EventTargetWrapperTestData.output + SomeParserTestData.output
            if results_shown:
                expected_logs += 'MOCK: user.open_url: file://...\n'
            self.assertEqual(self._normalize_output(capturer.root.log.getvalue()), expected_logs)

        self.assertEqual(uploaded[0], upload_succeeds)

        return capturer.root.log.getvalue()

    _event_target_wrapper_and_inspector_results = {
        "Bindings":
            {"url": "https://trac.webkit.org/browser/trunk/PerformanceTests/Bindings",
            "tests": {"event-target-wrapper": EventTargetWrapperTestData.results}},
        "Parser":
            {"url": "https://trac.webkit.org/browser/trunk/PerformanceTests/Parser",
            "tests": {"some-parser": SomeParserTestData.results}}}

    def test_run_with_json_output(self):
        runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json',
            '--test-results-server=some.host'])
        self._test_run_with_json_output(runner, port.host.filesystem, upload_succeeds=True)
        self.assertEqual(self._load_output_json(runner), [{
            "buildTime": "2013-02-08T15:19:37.460000", "tests": self._event_target_wrapper_and_inspector_results,
            "revisions": {"WebKit": {"timestamp": "2013-02-01 08:48:05 +0000", "revision": "5678"}}}])

        filesystem = port.host.filesystem
        self.assertTrue(filesystem.isfile(runner._output_json_path()))
        self.assertTrue(filesystem.isfile(filesystem.splitext(runner._output_json_path())[0] + '.html'))

    def test_run_with_description(self):
        runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json',
            '--test-results-server=some.host', '--description', 'some description'])
        self._test_run_with_json_output(runner, port.host.filesystem, upload_succeeds=True)
        self.assertEqual(self._load_output_json(runner), [{
            "buildTime": "2013-02-08T15:19:37.460000", "description": "some description",
            "tests": self._event_target_wrapper_and_inspector_results,
            "revisions": {"WebKit": {"timestamp": "2013-02-01 08:48:05 +0000", "revision": "5678"}}}])

    def create_runner_and_setup_results_template(self, args=[]):
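        """Like create_runner(), but also writes a minimal results-template.html containing the placeholder tokens the runner substitutes, plus a stub jQuery resource."""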
        runner, port = self.create_runner(args)
        filesystem = port.host.filesystem
        filesystem.write_text_file(runner._base_path + '/resources/results-template.html',
            'BEGIN<script src="%AbsolutePathToWebKitTrunk%/some.js"></script>'
            '<script src="%AbsolutePathToWebKitTrunk%/other.js"></script><script>%PeformanceTestsResultsJSON%</script>END')
        filesystem.write_text_file(runner._base_path + '/Dromaeo/resources/dromaeo/web/lib/jquery-1.6.4.js', 'jquery content')
        return runner, port

    def test_run_respects_no_results(self):
        runner, port = self.create_runner(args=['--output-json-path=/mock-checkout/output.json',
            '--test-results-server=some.host', '--no-results'])
        self._test_run_with_json_output(runner, port.host.filesystem, upload_succeeds=False, results_shown=False)
        self.assertFalse(port.host.filesystem.isfile('/mock-checkout/output.json'))

    def test_run_generates_json_by_default(self):
        runner, port = self.create_runner_and_setup_results_template()
        filesystem = port.host.filesystem
        output_json_path = runner._output_json_path()
        results_page_path = filesystem.splitext(output_json_path)[0] + '.html'

        self.assertFalse(filesystem.isfile(output_json_path))
        self.assertFalse(filesystem.isfile(results_page_path))

        self._test_run_with_json_output(runner, port.host.filesystem)

        self.assertEqual(self._load_output_json(runner), [{
            "buildTime": "2013-02-08T15:19:37.460000", "tests": self._event_target_wrapper_and_inspector_results,
            "revisions": {"WebKit": {"timestamp": "2013-02-01 08:48:05 +0000", "revision": "5678"}}}])

        self.assertTrue(filesystem.isfile(output_json_path))
        self.assertTrue(filesystem.isfile(results_page_path))

    def test_run_merges_output_by_default(self):
        runner, port = self.create_runner_and_setup_results_template()
        filesystem = port.host.filesystem
        output_json_path = runner._output_json_path()

        filesystem.write_text_file(output_json_path, '[{"previous": "results"}]')

        self._test_run_with_json_output(runner, port.host.filesystem)

        self.assertEqual(self._load_output_json(runner), [{"previous": "results"}, {
            "buildTime": "2013-02-08T15:19:37.460000", "tests": self._event_target_wrapper_and_inspector_results,
            "revisions": {"WebKit": {"timestamp": "2013-02-01 08:48:05 +0000", "revision": "5678"}}}])
        self.assertTrue(filesystem.isfile(filesystem.splitext(output_json_path)[0] + '.html'))

    def test_run_respects_reset_results(self):
        runner, port = self.create_runner_and_setup_results_template(args=["--reset-results"])
        filesystem = port.host.filesystem
        output_json_path = runner._output_json_path()

        filesystem.write_text_file(output_json_path, '[{"previous": "results"}]')

        self._test_run_with_json_output(runner, port.host.filesystem)

        self.assertEqual(self._load_output_json(runner), [{
            "buildTime": "2013-02-08T15:19:37.460000", "tests": self._event_target_wrapper_and_inspector_results,
            "revisions": {"WebKit": {"timestamp": "2013-02-01 08:48:05 +0000", "revision": "5678"}}}])
        self.assertTrue(filesystem.isfile(filesystem.splitext(output_json_path)[0] + '.html'))

    def test_run_generates_and_show_results_page(self):
        runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json'])
        page_shown = []
        port.show_results_html_file = lambda path: page_shown.append(path)
        filesystem = port.host.filesystem
        self._test_run_with_json_output(runner, filesystem, results_shown=False)

        expected_entry = {"buildTime": "2013-02-08T15:19:37.460000", "tests": self._event_target_wrapper_and_inspector_results,
            "revisions": {"WebKit": {"timestamp": "2013-02-01 08:48:05 +0000", "revision": "5678"}}}

        self.maxDiff = None
        self.assertEqual(runner._output_json_path(), '/mock-checkout/output.json')
        self.assertEqual(self._load_output_json(runner), [expected_entry])
        self.assertEqual(filesystem.read_text_file('/mock-checkout/output.html'),
            'BEGIN<script src="/test.checkout/some.js"></script><script src="/test.checkout/other.js"></script>'
            '<script>%s</script>END' % port.host.filesystem.read_text_file(runner._output_json_path()))
        self.assertEqual(page_shown[0], '/mock-checkout/output.html')

        self._test_run_with_json_output(runner, filesystem, results_shown=False)
        self.assertEqual(runner._output_json_path(), '/mock-checkout/output.json')
        self.assertEqual(self._load_output_json(runner), [expected_entry, expected_entry])
        self.assertEqual(filesystem.read_text_file('/mock-checkout/output.html'),
            'BEGIN<script src="/test.checkout/some.js"></script><script src="/test.checkout/other.js"></script>'
            '<script>%s</script>END' % port.host.filesystem.read_text_file(runner._output_json_path()))

    def test_run_respects_no_show_results(self):
        show_results_html_file = lambda path: page_shown.append(path)

        runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json'])
        page_shown = []
        port.show_results_html_file = show_results_html_file
        self._test_run_with_json_output(runner, port.host.filesystem, results_shown=False)
        self.assertEqual(page_shown[0], '/mock-checkout/output.html')

        runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json',
            '--no-show-results'])
        page_shown = []
        port.show_results_html_file = show_results_html_file
        self._test_run_with_json_output(runner, port.host.filesystem, results_shown=False)
        self.assertEqual(page_shown, [])

    def test_run_with_bad_output_json(self):
        runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json'])
        port.host.filesystem.write_text_file('/mock-checkout/output.json', 'bad json')
        self._test_run_with_json_output(runner, port.host.filesystem, expected_exit_code=PerfTestsRunner.EXIT_CODE_BAD_MERGE)
        port.host.filesystem.write_text_file('/mock-checkout/output.json', '{"another bad json": "1"}')
        self._test_run_with_json_output(runner, port.host.filesystem, expected_exit_code=PerfTestsRunner.EXIT_CODE_BAD_MERGE)

    def test_run_with_worker_config_json(self):
        runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json',
            '--worker-config-json-path=/mock-checkout/worker-config.json', '--test-results-server=some.host'])
        port.host.filesystem.write_text_file('/mock-checkout/worker-config.json', '{"key": "value"}')
        self._test_run_with_json_output(runner, port.host.filesystem, upload_succeeds=True)
        self.assertEqual(self._load_output_json(runner), [{
            "buildTime": "2013-02-08T15:19:37.460000", "tests": self._event_target_wrapper_and_inspector_results,
            "revisions": {"WebKit": {"timestamp": "2013-02-01 08:48:05 +0000", "revision": "5678"}}, "builderKey": "value"}])

    def test_run_with_bad_worker_config_json(self):
        runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json',
            '--worker-config-json-path=/mock-checkout/worker-config.json', '--test-results-server=some.host'])
        logs = self._test_run_with_json_output(runner, port.host.filesystem, expected_exit_code=PerfTestsRunner.EXIT_CODE_BAD_SOURCE_JSON)
        self.assertTrue('Missing worker configuration JSON file: /mock-checkout/worker-config.json' in logs)
        port.host.filesystem.write_text_file('/mock-checkout/worker-config.json', 'bad json')
        self._test_run_with_json_output(runner, port.host.filesystem, expected_exit_code=PerfTestsRunner.EXIT_CODE_BAD_SOURCE_JSON)
        port.host.filesystem.write_text_file('/mock-checkout/worker-config.json', '["another bad json"]')
        self._test_run_with_json_output(runner, port.host.filesystem, expected_exit_code=PerfTestsRunner.EXIT_CODE_BAD_SOURCE_JSON)

    def test_run_with_multiple_repositories(self):
        runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json',
            '--test-results-server=some.host'])
        port.repository_paths = lambda: [('webkit', '/mock-checkout'), ('some', '/mock-checkout/some')]
        self._test_run_with_json_output(runner, port.host.filesystem, upload_succeeds=True)
        self.assertEqual(self._load_output_json(runner), [{
            "buildTime": "2013-02-08T15:19:37.460000", "tests": self._event_target_wrapper_and_inspector_results,
            "revisions": {"webkit": {"timestamp": "2013-02-01 08:48:05 +0000", "revision": "5678"},
            "some": {"timestamp": "2013-02-01 08:48:05 +0000", "revision": "5678"}}}])

    def test_run_with_upload_json(self):
        runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json',
            '--test-results-server', 'some.host', '--platform', 'platform1', '--builder-name', 'builder1', '--build-number', '123'])

        self._test_run_with_json_output(runner, port.host.filesystem, upload_succeeds=True)
        generated_json = json.loads(port.host.filesystem.files['/mock-checkout/output.json'])
        self.assertEqual(generated_json[0]['platform'], 'platform1')
        self.assertEqual(generated_json[0]['builderName'], 'builder1')
        self.assertEqual(generated_json[0]['buildNumber'], 123)

        self._test_run_with_json_output(runner, port.host.filesystem, upload_succeeds=False, expected_exit_code=PerfTestsRunner.EXIT_CODE_FAILED_UPLOADING)

    def test_run_with_upload_json_should_generate_perf_webkit_json(self):
        runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json',
            '--test-results-server', 'some.host', '--platform', 'platform1', '--builder-name', 'builder1', '--build-number', '123',
            '--worker-config-json-path=/mock-checkout/worker-config.json'])
        port.host.filesystem.write_text_file('/mock-checkout/worker-config.json', '{"key": "value1"}')

        self._test_run_with_json_output(runner, port.host.filesystem, upload_succeeds=True)
        generated_json = json.loads(port.host.filesystem.files['/mock-checkout/output.json'])
        self.assertTrue(isinstance(generated_json, list))
        self.assertEqual(len(generated_json), 1)

        output = generated_json[0]
        self.maxDiff = None
        self.assertEqual(output['platform'], 'platform1')
        self.assertEqual(output['buildNumber'], 123)
        self.assertEqual(output['buildTime'], '2013-02-08T15:19:37.460000')
        self.assertEqual(output['builderName'], 'builder1')
        self.assertEqual(output['builderKey'], 'value1')
        self.assertEqual(output['revisions'], {'WebKit': {'revision': '5678', 'timestamp': '2013-02-01 08:48:05 +0000'}})
        self.assertEqual(list(output['tests'].keys()), ['Bindings', 'Parser'])
        self.assertEqual(sorted(output['tests']['Bindings'].keys()), ['tests', 'url'])
        self.assertEqual(output['tests']['Bindings']['url'], 'https://trac.webkit.org/browser/trunk/PerformanceTests/Bindings')
        self.assertEqual(list(output['tests']['Bindings']['tests'].keys()), ['event-target-wrapper'])
        self.assertEqual(output['tests']['Bindings']['tests']['event-target-wrapper'], {
            'url': 'https://trac.webkit.org/browser/trunk/PerformanceTests/Bindings/event-target-wrapper.html',
            'metrics': {'Time': {'current': [[1486.0, 1471.0, 1510.0, 1505.0, 1478.0, 1490.0]] * 4}}})

    def test_run_with_repeat(self):
        runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json',
            '--test-results-server=some.host', '--repeat', '5'])
        self._test_run_with_json_output(runner, port.host.filesystem, upload_succeeds=True, repeat=5)
        self.assertEqual(self._load_output_json(runner), [
            {"buildTime": "2013-02-08T15:19:37.460000",
            "tests": self._event_target_wrapper_and_inspector_results,
            "revisions": {"WebKit": {"timestamp": "2013-02-01 08:48:05 +0000", "revision": "5678"}}},
            {"buildTime": "2013-02-08T15:19:37.460000",
            "tests": self._event_target_wrapper_and_inspector_results,
            "revisions": {"WebKit": {"timestamp": "2013-02-01 08:48:05 +0000", "revision": "5678"}}},
            {"buildTime": "2013-02-08T15:19:37.460000",
            "tests": self._event_target_wrapper_and_inspector_results,
            "revisions": {"WebKit": {"timestamp": "2013-02-01 08:48:05 +0000", "revision": "5678"}}},
            {"buildTime": "2013-02-08T15:19:37.460000",
            "tests": self._event_target_wrapper_and_inspector_results,
            "revisions": {"WebKit": {"timestamp": "2013-02-01 08:48:05 +0000", "revision": "5678"}}},
            {"buildTime": "2013-02-08T15:19:37.460000",
            "tests": self._event_target_wrapper_and_inspector_results,
            "revisions": {"WebKit": {"timestamp": "2013-02-01 08:48:05 +0000", "revision": "5678"}}}])

    def test_run_with_test_runner_count(self):
        runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json',
            '--test-runner-count=3'])
        self._test_run_with_json_output(runner, port.host.filesystem, compare_logs=False)
        generated_json = json.loads(port.host.filesystem.files['/mock-checkout/output.json'])
        self.assertTrue(isinstance(generated_json, list))
        self.assertEqual(len(generated_json), 1)

        output = generated_json[0]['tests']['Bindings']['tests']['event-target-wrapper']['metrics']['Time']['current']
        self.assertEqual(len(output), 3)
        expectedMetrics = EventTargetWrapperTestData.results['metrics']['Time']['current'][0]
        for metrics in output:
            self.assertEqual(metrics, expectedMetrics)