run-perf-tests should support --test-results-server option
author    rniwa@webkit.org <rniwa@webkit.org@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
          Fri, 20 Jan 2012 22:17:30 +0000 (22:17 +0000)
committer rniwa@webkit.org <rniwa@webkit.org@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
          Fri, 20 Jan 2012 22:17:30 +0000 (22:17 +0000)
https://bugs.webkit.org/show_bug.cgi?id=76680

Reviewed by Adam Barth.

Add --test-results-server, --builder-name, and --build-number options to run-perf-tests
to be used by perf bots. Also refactor file_uploader as needed.
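
For illustration only (not part of this change), a perf bot would pass something like
--test-results-server=perf.results.example together with --builder-name, --build-number,
and --output-json-path. A minimal sketch of what _parse_args yields for such a command
line (the server and builder names below are placeholders):

    from webkitpy.performance_tests.perftestsrunner import PerfTestsRunner

    options, args = PerfTestsRunner._parse_args([
        '--output-json-path=perf-results.json',
        '--test-results-server=perf.results.example',
        '--builder-name', 'mac-perf-1',
        '--build-number=123'])
    assert options.test_results_server == 'perf.results.example'
    assert options.builder_name == 'mac-perf-1'
    assert options.build_number == '123'  # kept as a string; run() converts it with int()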

* Scripts/webkitpy/common/net/file_uploader.py:
(FileUploader.__init__):
(FileUploader.upload_single_text_file):
(FileUploader.upload_as_multipart_form_data):
(FileUploader):
(FileUploader._upload_data):
(FileUploader._upload_data.callback):
* Scripts/webkitpy/layout_tests/layout_package/json_results_generator.py:
(JSONResultsGeneratorBase.upload_json_files):
* Scripts/webkitpy/performance_tests/perftestsrunner.py:
(PerfTestsRunner):
(PerfTestsRunner._parse_args):
(PerfTestsRunner.run):
(PerfTestsRunner._generate_json):
(PerfTestsRunner._upload_json):
(PerfTestsRunner._run_tests_set):
* Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py:
(create_runner):
(test_run_with_upload_json):
(test_run_with_upload_json.mock_upload_json):
(test_upload_json):
(test_upload_json.MockFileUploader):
(test_upload_json.MockFileUploader.__init__):
(test_upload_json.MockFileUploader.upload_single_text_file):
(test_parse_args):

git-svn-id: https://svn.webkit.org/repository/webkit/trunk@105543 268f45cc-cd09-0410-ab3c-d52691b4dbfc

Tools/ChangeLog
Tools/Scripts/webkitpy/common/net/file_uploader.py
Tools/Scripts/webkitpy/layout_tests/layout_package/json_results_generator.py
Tools/Scripts/webkitpy/performance_tests/perftestsrunner.py
Tools/Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py

diff --git a/Tools/ChangeLog b/Tools/ChangeLog
index 8f94bd4e2ab577178f20f9b635f4da04a93d37c3..d6ead10885aef7003d6bed42580b90866801b5cd 100644 (file)
@@ -1,3 +1,39 @@
+2012-01-19  Ryosuke Niwa  <rniwa@webkit.org>
+
+        run-perf-tests should support --test-results-server option
+        https://bugs.webkit.org/show_bug.cgi?id=76680
+
+        Reviewed by Adam Barth.
+
+        Add --test-results-server, --builder-name, and --build-number options to run-perf-tests
+        to be used by perf bots. Also refactor file_uploader as needed.
+
+        * Scripts/webkitpy/common/net/file_uploader.py:
+        (FileUploader.__init__):
+        (FileUploader.upload_single_text_file):
+        (FileUploader.upload_as_multipart_form_data):
+        (FileUploader):
+        (FileUploader._upload_data):
+        (FileUploader._upload_data.callback):
+        * Scripts/webkitpy/layout_tests/layout_package/json_results_generator.py:
+        (JSONResultsGeneratorBase.upload_json_files):
+        * Scripts/webkitpy/performance_tests/perftestsrunner.py:
+        (PerfTestsRunner):
+        (PerfTestsRunner._parse_args):
+        (PerfTestsRunner.run):
+        (PerfTestsRunner._generate_json):
+        (PerfTestsRunner._upload_json):
+        (PerfTestsRunner._run_tests_set):
+        * Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py:
+        (create_runner):
+        (test_run_with_upload_json):
+        (test_run_with_upload_json.mock_upload_json):
+        (test_upload_json):
+        (test_upload_json.MockFileUploader):
+        (test_upload_json.MockFileUploader.__init__):
+        (test_upload_json.MockFileUploader.upload_single_text_file):
+        (test_parse_args):
+
 2012-01-20  Adam Barth  <abarth@webkit.org>
 
         The party time image overlaps real content!  This patch makes the
diff --git a/Tools/Scripts/webkitpy/common/net/file_uploader.py b/Tools/Scripts/webkitpy/common/net/file_uploader.py
index 66e49e8667b045af47f1275dc5aef9292cafd95d..6278e1c8c4b30277e49de487da199847a28b326b 100644 (file)
@@ -84,31 +84,32 @@ def _encode_multipart_form_data(fields, files):
 
 
 class FileUploader(object):
-    def __init__(self, url):
+    def __init__(self, url, timeout_seconds):
         self._url = url
+        self._timeout_seconds = timeout_seconds
 
-    def _upload_files(self, attrs, file_objs):
-        # FIXME: We should use the same variable names for the formal and actual parameters.
-        content_type, data = _encode_multipart_form_data(attrs, file_objs)
-        headers = {
-            "Content-Type": content_type,
-        }
-        # FIXME: We should talk to the network via a Host object.
-        request = urllib2.Request(self._url, data, headers)
-        urllib2.urlopen(request)
-
-    def upload(self, params, files, timeout_seconds):
+    def upload_single_text_file(self, filesystem, content_type, filename):
+        self._upload_data(content_type, filesystem.read_text_file(filename))
+
+    def upload_as_multipart_form_data(self, filesystem, files, params):
         file_objs = []
         for filename, path in files:
             # FIXME: We should talk to the filesytem via a Host object.
-            with codecs.open(path, "rb") as file:
-                file_objs.append(('file', filename, file.read()))
+            file_objs.append(('file', filename, filesystem.read_text_file(path)))
+
+        # FIXME: We should use the same variable names for the formal and actual parameters.
+        content_type, data = _encode_multipart_form_data(params, file_objs)
+        self._upload_data(content_type, data)
+
+    def _upload_data(self, content_type, data):
+        def callback():
+            request = urllib2.Request(self._url, data, {"Content-Type": content_type})
+            urllib2.urlopen(request)
 
         orig_timeout = socket.getdefaulttimeout()
         try:
             # FIXME: We shouldn't mutate global static state.
-            socket.setdefaulttimeout(timeout_seconds)
-            NetworkTransaction(timeout_seconds=timeout_seconds).run(
-                lambda: self._upload_files(params, file_objs))
+            socket.setdefaulttimeout(self._timeout_seconds)
+            NetworkTransaction(timeout_seconds=self._timeout_seconds).run(callback)
         finally:
             socket.setdefaulttimeout(orig_timeout)
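
For reference, a minimal usage sketch (not part of the patch) of the refactored uploader;
the URL, form field, and file names below are placeholders:

    from webkitpy.common.host import Host
    from webkitpy.common.net.file_uploader import FileUploader

    host = Host()
    # The timeout now lives on the uploader itself instead of being passed per call.
    uploader = FileUploader("https://results.example/api/test/report", 120)

    # Upload one text file as the raw request body.
    uploader.upload_single_text_file(host.filesystem, 'application/json', 'perf-results.json')

    # Or upload a list of (filename, path) pairs as one multipart/form-data request,
    # with extra form fields given as (key, value) pairs.
    uploader.upload_as_multipart_form_data(host.filesystem,
        [('results.json', '/tmp/results.json')], [('builder', 'mac-perf-1')])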
diff --git a/Tools/Scripts/webkitpy/layout_tests/layout_package/json_results_generator.py b/Tools/Scripts/webkitpy/layout_tests/layout_package/json_results_generator.py
index 79022247169ae3c9e16acb9d5f54fe737a46879d..b073d409ecf1719f5822f38db055a9da1c79ac81 100644 (file)
@@ -321,11 +321,11 @@ class JSONResultsGeneratorBase(object):
             for file in json_files]
 
         url = "http://%s/testfile/upload" % self._test_results_server
-        uploader = FileUploader(url)
+        # Set an upload timeout in case the appengine server is having problems.
+        # 120 seconds is more than enough to upload test results.
+        uploader = FileUploader(url, 120)
         try:
-            # Set uploading timeout in case appengine server is having problem.
-            # 120 seconds are more than enough to upload test results.
-            uploader.upload(attrs, files, 120)
+            uploader.upload_as_multipart_form_data(self._filesystem, files, attrs)
         except Exception, err:
             _log.error("Upload failed: %s" % err)
             return
diff --git a/Tools/Scripts/webkitpy/performance_tests/perftestsrunner.py b/Tools/Scripts/webkitpy/performance_tests/perftestsrunner.py
index ceda74e28a4eda2256baf05f70a9c732ad0b76ba..7b2fde35ee936440f66a01aa8b00f622ae8b2921 100644 (file)
@@ -38,6 +38,7 @@ import time
 
 from webkitpy.common import find_files
 from webkitpy.common.host import Host
+from webkitpy.common.net.file_uploader import FileUploader
 from webkitpy.layout_tests.port.driver import DriverInput
 from webkitpy.layout_tests.views import printing
 
@@ -47,6 +48,10 @@ _log = logging.getLogger(__name__)
 class PerfTestsRunner(object):
     _perf_tests_base_dir = 'PerformanceTests'
     _test_directories_for_chromium_style_tests = ['inspector']
+    _default_branch = 'webkit-trunk'
+    _EXIT_CODE_BAD_BUILD = -1
+    _EXIT_CODE_BAD_JSON = -2
+    _EXIT_CODE_FAILED_UPLOADING = -3
 
     def __init__(self, regular_output=sys.stderr, buildbot_output=sys.stdout, args=None, port=None):
         self._buildbot_output = buildbot_output
@@ -70,21 +75,27 @@ class PerfTestsRunner(object):
 
         perf_option_list = [
             optparse.make_option('--debug', action='store_const', const='Debug', dest="configuration",
-                                 help='Set the configuration to Debug'),
+                help='Set the configuration to Debug'),
             optparse.make_option('--release', action='store_const', const='Release', dest="configuration",
-                                 help='Set the configuration to Release'),
+                help='Set the configuration to Release'),
             optparse.make_option("--platform",
-                                 help="Specify port/platform being tested (i.e. chromium-mac)"),
+                help="Specify port/platform being tested (i.e. chromium-mac)"),
+            optparse.make_option("--builder-name",
+                help=("The name of the builder shown on the waterfall running this script e.g. google-mac-2.")),
+            optparse.make_option("--build-number",
+                help=("The build number of the builder running this script.")),
             optparse.make_option("--build", dest="build", action="store_true", default=True,
-                                help="Check to ensure the DumpRenderTree build is up-to-date (default)."),
+                help="Check to ensure the DumpRenderTree build is up-to-date (default)."),
             optparse.make_option("--build-directory",
-                                 help="Path to the directory under which build files are kept (should not include configuration)"),
+                help="Path to the directory under which build files are kept (should not include configuration)"),
             optparse.make_option("--time-out-ms", default=600 * 1000,
-                                 help="Set the timeout for each test"),
+                help="Set the timeout for each test"),
             optparse.make_option("--output-json-path",
-                                 help="Filename of the JSON file that summaries the results"),
+                help="Filename of the JSON file that summaries the results"),
             optparse.make_option("--source-json-path",
-                                 help="Path to a JSON file to be merged into the JSON file when --output-json-path is specified"),
+                help="Path to a JSON file to be merged into the JSON file when --output-json-path is present"),
+            optparse.make_option("--test-results-server",
+                help="Upload the generated JSON file to the specified server when --output-json-path is present"),
             ]
 
         option_list = (perf_option_list + print_options)
@@ -108,7 +119,7 @@ class PerfTestsRunner(object):
 
         if not self._port.check_build(needs_http=False):
             _log.error("Build not up to date for %s" % self._port._path_to_driver())
-            return -1
+            return self._EXIT_CODE_BAD_BUILD
 
         # We wrap any parts of the run that are slow or likely to raise exceptions
         # in a try/finally to ensure that we clean up the logging configuration.
@@ -119,36 +130,59 @@
         finally:
             self._printer.cleanup()
 
-        if not self._generate_json_if_specified(self._timestamp) and not unexpected:
-            return -2
+        options = self._options
+        if self._options.output_json_path:
+            # FIXME: Add --branch or auto-detect the branch we're in
+            test_results_server = options.test_results_server
+            branch = self._default_branch if test_results_server else None
+            build_number = int(options.build_number) if options.build_number else None
+            if not self._generate_json(self._timestamp, options.output_json_path, options.source_json_path,
+                branch, options.platform, options.builder_name, build_number) and not unexpected:
+                return self._EXIT_CODE_BAD_JSON
+            if test_results_server and not self._upload_json(test_results_server, options.output_json_path):
+                return self._EXIT_CODE_FAILED_UPLOADING
 
         return unexpected
 
-    def _generate_json_if_specified(self, timestamp):
-        output_json_path = self._options.output_json_path
-        if not output_json_path:
-            return True
-
+    def _generate_json(self, timestamp, output_json_path, source_json_path, branch, platform, builder_name, build_number):
         revision = self._host.scm().head_svn_revision()
         contents = {'timestamp': int(timestamp), 'revision': revision, 'results': self._results}
 
+        for key, value in {'branch': branch, 'platform': platform, 'builder-name': builder_name, 'build-number': build_number}.items():
+            if value:
+                contents[key] = value
+
         filesystem = self._host.filesystem
-        source_json_path = self._options.source_json_path
         if source_json_path:
+            succeeded = False
             try:
                 source_json_file = filesystem.open_text_file_for_reading(source_json_path)
                 source_json = json.load(source_json_file)
-            except:
-                _log.error("Failed to parse %s" % source_json_path)
-                return False
-            if not isinstance(source_json, dict):
-                _log.error("The source JSON was not a dictionary")
+                contents = dict(source_json.items() + contents.items())
+                succeeded = True
+            except IOError, error:
+                _log.error("Failed to read %s: %s" % (source_json_path, error))
+            except ValueError, error:
+                _log.error("Failed to parse %s: %s" % (source_json_path, error))
+            except TypeError, error:
+                _log.error("Failed to merge JSON files: %s" % error)
+            if not succeeded:
                 return False
-            contents = dict(source_json.items() + contents.items())
 
         filesystem.write_text_file(output_json_path, json.dumps(contents))
         return True
 
+    def _upload_json(self, test_results_server, json_path, file_uploader=FileUploader):
+        uploader = file_uploader("https://%s/api/test/report" % test_results_server, 120)
+        try:
+            uploader.upload_single_text_file(self._host.filesystem, 'application/json', json_path)
+        except Exception, error:
+            _log.error("Failed to upload JSON file in 120s: %s" % error)
+            return False
+
+        self._printer.write("JSON file uploaded.")
+        return True
+
     def _print_status(self, tests, expected, unexpected):
         if len(tests) == expected + unexpected:
             status = "Ran %d tests" % len(tests)
@@ -167,7 +200,7 @@ class PerfTestsRunner(object):
 
         for test in tests:
             if driver_need_restart:
-                _log.debug("%s killing driver" % test)
+                _log.error("%s killing driver" % test)
                 driver.stop()
                 driver = None
             if not driver:
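
For reference, an illustrative sketch (not part of the patch) of the dictionary that
_generate_json serializes when every optional field is supplied; all of the values
below are invented:

    contents = {
        'timestamp': 1327086000,       # int(self._timestamp)
        'revision': 105543,            # self._host.scm().head_svn_revision()
        'results': {},                 # per-test results gathered by _run_tests_set()
        'branch': 'webkit-trunk',      # _default_branch, set only when uploading to a server
        'platform': 'chromium-mac',    # --platform
        'builder-name': 'mac-perf-1',  # --builder-name
        'build-number': 123,           # int(--build-number)
    }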
diff --git a/Tools/Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py b/Tools/Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py
index f6e3e662ec50187a0c98577bf3a69ef204446648..6cb355fcf14f18d83469cd66751a93bfbd346183 100755 (executable)
@@ -102,9 +102,9 @@ max 1120
         def stop(self):
             """do nothing"""
 
-    def create_runner(self, buildbot_output=None, args=[]):
+    def create_runner(self, buildbot_output=None, args=[], regular_output=None):
         buildbot_output = buildbot_output or array_stream.ArrayStream()
-        regular_output = array_stream.ArrayStream()
+        regular_output = regular_output or array_stream.ArrayStream()
 
         options, parsed_args = PerfTestsRunner._parse_args(args)
         test_port = TestPort(host=MockHost(), options=options)
@@ -212,6 +212,63 @@ max 1120
             "revision": 1234,
             "key": "value"})
 
+    def test_run_with_upload_json(self):
+        runner = self.create_runner(args=['--output-json-path=/mock-checkout/output.json',
+            '--test-results-server', 'some.host', '--platform', 'platform1', '--builder-name', 'builder1', '--build-number', '123'])
+        upload_json_is_called = [False]
+        upload_json_returns_true = True
+
+        def mock_upload_json(hostname, json_path):
+            self.assertEqual(hostname, 'some.host')
+            self.assertEqual(json_path, '/mock-checkout/output.json')
+            upload_json_is_called[0] = True
+            return upload_json_returns_true
+
+        runner._upload_json = mock_upload_json
+        runner._host.filesystem.files['/mock-checkout/source.json'] = '{"key": "value"}'
+        runner._host.filesystem.files[runner._base_path + '/inspector/pass.html'] = True
+        runner._host.filesystem.files[runner._base_path + '/Bindings/event-target-wrapper.html'] = True
+        runner._timestamp = 123456789
+        self.assertEqual(runner.run(), 0)
+        self.assertEqual(upload_json_is_called[0], True)
+        generated_json = json.loads(runner._host.filesystem.files['/mock-checkout/output.json'])
+        self.assertEqual(generated_json['platform'], 'platform1')
+        self.assertEqual(generated_json['builder-name'], 'builder1')
+        self.assertEqual(generated_json['build-number'], 123)
+        upload_json_returns_true = False
+        self.assertEqual(runner.run(), -3)
+
+    def test_upload_json(self):
+        regular_output = array_stream.ArrayStream()
+        runner = self.create_runner(regular_output=regular_output)
+        runner._host.filesystem.files['/mock-checkout/some.json'] = 'some content'
+
+        called = []
+        upload_single_text_file_throws = False
+
+        class MockFileUploader:
+            def __init__(mock, url, timeout):
+                self.assertEqual(url, 'https://some.host/api/test/report')
+                self.assertTrue(isinstance(timeout, int) and timeout)
+                called.append('FileUploader')
+
+            def upload_single_text_file(mock, filesystem, content_type, filename):
+                self.assertEqual(filesystem, runner._host.filesystem)
+                self.assertEqual(content_type, 'application/json')
+                self.assertEqual(filename, 'some.json')
+                called.append('upload_single_text_file')
+                if upload_single_text_file_throws:
+                    raise Exception("Some exception")
+
+        runner._upload_json('some.host', 'some.json', MockFileUploader)
+        self.assertEqual(called, ['FileUploader', 'upload_single_text_file'])
+
+        # Throwing an exception in upload_single_text_file shouldn't blow up _upload_json
+        called = []
+        upload_single_text_file_throws = True
+        runner._upload_json('some.host', 'some.json', MockFileUploader)
+        self.assertEqual(called, ['FileUploader', 'upload_single_text_file'])
+
     def test_collect_tests(self):
         runner = self.create_runner()
         filename = runner._host.filesystem.join(runner._base_path, 'inspector', 'a_file.html')
@@ -242,20 +299,26 @@ max 1120
                 '--verbose',
                 '--build-directory=folder42',
                 '--platform=platform42',
+                '--builder-name', 'webkit-mac-1',
+                '--build-number=56',
                 '--time-out-ms=42',
                 '--output-json-path=a/output.json',
                 '--source-json-path=a/source.json',
+                '--test-results-server=somehost',
                 '--debug', 'an_arg'])
         self.assertEqual(options.build, True)
         self.assertEqual(options.verbose, True)
         self.assertEqual(options.help_printing, None)
         self.assertEqual(options.build_directory, 'folder42')
         self.assertEqual(options.platform, 'platform42')
+        self.assertEqual(options.builder_name, 'webkit-mac-1')
+        self.assertEqual(options.build_number, '56')
         self.assertEqual(options.time_out_ms, '42')
         self.assertEqual(options.configuration, 'Debug')
         self.assertEqual(options.print_options, None)
         self.assertEqual(options.output_json_path, 'a/output.json')
         self.assertEqual(options.source_json_path, 'a/source.json')
+        self.assertEqual(options.test_results_server, 'somehost')
 
 
 if __name__ == '__main__':