REGRESSION: run-perf-tests no longer reports the total test time
author     rniwa@webkit.org <rniwa@webkit.org@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
           Wed, 19 Sep 2012 22:48:03 +0000 (22:48 +0000)
committer  rniwa@webkit.org <rniwa@webkit.org@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
           Wed, 19 Sep 2012 22:48:03 +0000 (22:48 +0000)
https://bugs.webkit.org/show_bug.cgi?id=97138

Reviewed by Tony Chang.

Report the finished time as an info-level log instead of a debug-level log.
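
For context, a minimal sketch (illustrative only, not WebKit code) of why the level matters: Python's logging module drops records below the logger's effective level, so with the runner's output configured at INFO, a debug-level "Finished" line never reaches the user:

    import logging
    import time

    logging.basicConfig(level=logging.INFO)  # assumed INFO verbosity, as in a normal run
    _log = logging.getLogger(__name__)

    start_time = time.time()
    # ... a performance test would run here ...
    _log.debug("Finished: %f s" % (time.time() - start_time))  # suppressed at INFO level
    _log.info("Finished: %f s" % (time.time() - start_time))   # reported; this is the fix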

* Scripts/webkitpy/performance_tests/perftestsrunner.py:
(PerfTestsRunner._run_single_test):
* Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py:
(MainTest.normalizeFinishedTime): Added. It replaces all finished times with 0.1 seconds.
(test_run_test_pause_before_testing):
(test_run_test_set_for_parser_tests):
(test_run_memory_test):
(_test_run_with_json_output):

git-svn-id: https://svn.webkit.org/repository/webkit/trunk@129055 268f45cc-cd09-0410-ab3c-d52691b4dbfc

Tools/ChangeLog
Tools/Scripts/webkitpy/performance_tests/perftestsrunner.py
Tools/Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py

diff --git a/Tools/ChangeLog b/Tools/ChangeLog
index a8bf585..7b2d851 100644
--- a/Tools/ChangeLog
+++ b/Tools/ChangeLog
@@ -1,3 +1,21 @@
+2012-09-19  Ryosuke Niwa  <rniwa@webkit.org>
+
+        REGRESSION: run-perf-tests no longer reports the total test time
+        https://bugs.webkit.org/show_bug.cgi?id=97138
+
+        Reviewed by Tony Chang.
+
+        Report the finished time as an info-level log instead of a debug-level log.
+
+        * Scripts/webkitpy/performance_tests/perftestsrunner.py:
+        (PerfTestsRunner._run_single_test):
+        * Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py:
+        (MainTest.normalizeFinishedTime): Added. It replaces all finished times with 0.1 seconds.
+        (test_run_test_pause_before_testing):
+        (test_run_test_set_for_parser_tests):
+        (test_run_memory_test):
+        (_test_run_with_json_output):
+
 2012-09-19  Dirk Pranke  <dpranke@chromium.org>
 
         Support new TestExpectations format alongside old one
diff --git a/Tools/Scripts/webkitpy/performance_tests/perftestsrunner.py b/Tools/Scripts/webkitpy/performance_tests/perftestsrunner.py
index 34df002..1ef3cf0 100755
--- a/Tools/Scripts/webkitpy/performance_tests/perftestsrunner.py
+++ b/Tools/Scripts/webkitpy/performance_tests/perftestsrunner.py
@@ -317,6 +317,6 @@ class PerfTestsRunner(object):
         else:
             _log.error('FAILED')
 
-        _log.debug("Finished: %f s" % (time.time() - start_time))
+        _log.info("Finished: %f s" % (time.time() - start_time))
 
         return new_results != None
diff --git a/Tools/Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py b/Tools/Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py
index 6653857..4ca9500 100755
--- a/Tools/Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py
+++ b/Tools/Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py
@@ -31,6 +31,7 @@
 
 import StringIO
 import json
+import re
 import unittest
 
 from webkitpy.common.host_mock import MockHost
@@ -48,6 +49,9 @@ class MainTest(unittest.TestCase):
     def assertWritten(self, stream, contents):
         self.assertEquals(stream.buflist, contents)
 
+    def normalizeFinishedTime(self, log):
+        return re.sub(r'Finished: [0-9\.]+ s', 'Finished: 0.1 s', log)
+
     class TestDriver:
         def run_test(self, driver_input, stop_when_done):
             text = ''
@@ -234,7 +238,8 @@ max 548000 bytes
         finally:
             stdout, stderr, log = output.restore_output()
         self.assertEqual(stderr, "Ready to run test?\n")
-        self.assertEqual(log, "Running inspector/pass.html (1 of 1)\nRESULT group_name: test_name= 42 ms\n\n")
+        self.assertEqual(self.normalizeFinishedTime(log),
+            "Running inspector/pass.html (1 of 1)\nRESULT group_name: test_name= 42 ms\nFinished: 0.1 s\n\n")
 
     def test_run_test_set_for_parser_tests(self):
         runner, port = self.create_runner()
@@ -246,13 +251,15 @@ max 548000 bytes
         finally:
             stdout, stderr, log = output.restore_output()
         self.assertEqual(unexpected_result_count, 0)
-        self.assertEqual(log, '\n'.join(['Running Bindings/event-target-wrapper.html (1 of 2)',
+        self.assertEqual(self.normalizeFinishedTime(log), '\n'.join(['Running Bindings/event-target-wrapper.html (1 of 2)',
         'RESULT Bindings: event-target-wrapper= 1489.05 ms',
         'median= 1487.0 ms, stdev= 14.46 ms, min= 1471.0 ms, max= 1510.0 ms',
+        'Finished: 0.1 s',
         '',
         'Running Parser/some-parser.html (2 of 2)',
         'RESULT Parser: some-parser= 1100.0 ms',
         'median= 1101.0 ms, stdev= 11.0 ms, min= 1080.0 ms, max= 1120.0 ms',
+        'Finished: 0.1 s',
         '', '']))
 
     def test_run_memory_test(self):
@@ -267,7 +274,7 @@ max 548000 bytes
         finally:
             stdout, stderr, log = output.restore_output()
         self.assertEqual(unexpected_result_count, 0)
-        self.assertEqual(log, '\n'.join([
+        self.assertEqual(self.normalizeFinishedTime(log), '\n'.join([
             'Running 1 tests',
             'Running Parser/memory-test.html (1 of 1)',
             'RESULT Parser: memory-test= 1100.0 ms',
@@ -276,6 +283,7 @@ max 548000 bytes
             'median= 829000.0 bytes, stdev= 15000.0 bytes, min= 811000.0 bytes, max= 848000.0 bytes',
             'RESULT Parser: memory-test: Malloc= 532000.0 bytes',
             'median= 529000.0 bytes, stdev= 13000.0 bytes, min= 511000.0 bytes, max= 548000.0 bytes',
+            'Finished: 0.1 s',
             '', '']))
         results = runner.load_output_json()[0]['results']
         self.assertEqual(results['Parser/memory-test'], {'min': 1080.0, 'max': 1120.0, 'median': 1101.0, 'stdev': 11.0, 'avg': 1100.0, 'unit': 'ms'})
@@ -304,14 +312,16 @@ max 548000 bytes
             stdout, stderr, logs = output_capture.restore_output()
 
         if not expected_exit_code:
-            self.assertEqual(logs, '\n'.join([
-                'Running 2 tests',
+            self.assertEqual(self.normalizeFinishedTime(logs),
+                '\n'.join(['Running 2 tests',
                 'Running Bindings/event-target-wrapper.html (1 of 2)',
                 'RESULT Bindings: event-target-wrapper= 1489.05 ms',
                 'median= 1487.0 ms, stdev= 14.46 ms, min= 1471.0 ms, max= 1510.0 ms',
+                'Finished: 0.1 s',
                 '',
                 'Running inspector/pass.html (2 of 2)',
                 'RESULT group_name: test_name= 42 ms',
+                'Finished: 0.1 s',
                 '',
                 '']))
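
To illustrate the normalization approach used by the new test helper, here is a standalone sketch (the function name and sample log are hypothetical, written to mirror MainTest.normalizeFinishedTime above):

    import re

    def normalize_finished_time(log):
        # Collapse the variable elapsed time to a fixed value so that
        # captured logs can be compared against exact expected strings.
        return re.sub(r'Finished: [0-9\.]+ s', 'Finished: 0.1 s', log)

    sample = 'RESULT group_name: test_name= 42 ms\nFinished: 0.002731 s\n'
    assert normalize_finished_time(sample) == 'RESULT group_name: test_name= 42 ms\nFinished: 0.1 s\n'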