Run http tests in parallel
author: ap@apple.com <ap@apple.com@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
Fri, 5 Dec 2014 01:37:02 +0000 (01:37 +0000)
committer: ap@apple.com <ap@apple.com@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
Fri, 5 Dec 2014 01:37:02 +0000 (01:37 +0000)
https://bugs.webkit.org/show_bug.cgi?id=138958

Reviewed by Daniel Bates.

Remove the concept of "locked shard". Now http tests are just like any other tests.
We start HTTP and WebSocket servers at the start if we need them, and terminate them
when done with all the tests (not when the last http test runs, which is unnecessarily
unpredictable).
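
In outline (a condensed sketch, not the exact webkitpy code; the method names
match the patch below, while run_shard is a hypothetical stand-in for the
worker dispatch), the runner's control flow becomes:

    # Servers start once, before any shard runs, and stop only after every
    # shard has finished; http shards get no special ordering or grouping.
    if self._needs_http and self._options.http:
        self.start_servers_with_lock()        # acquire the http lock, start HTTP/WebSocket servers
    try:
        for shard in all_shards:
            run_shard(shard)                  # hypothetical stand-in for the worker pool dispatch
    finally:
        if self._has_http_lock:
            self.stop_servers_with_lock()     # release the lock, shut the servers down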

This makes debug tests run in 8 minutes and 12 seconds on my Mac Pro. Without the
patch, they used to take over 15 minutes.

As part of the fix, we no longer pass the number of servers to Apache. I don't
think that these parameters did what we wanted them to do; Apache handles the load
just fine without them.
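
For reference (condensed from the apache_http_server.py hunk further down,
with the quote escaping simplified; this is not new code), this is the
directive block that is no longer emitted, so Apache falls back to its own
process-pool configuration:

    # Removed by this patch: per-run overrides of Apache's worker-pool sizing.
    if self._number_of_servers:
        start_cmd += ['-c', "'StartServers %d'" % self._number_of_servers,
                      '-c', "'MinSpareServers %d'" % self._number_of_servers,
                      '-c', "'MaxSpareServers %d'" % self._number_of_servers]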

The change applies to all platforms. I fixed everything I could find on Mac, and
Ossy told me that he's been running http tests in parallel for a long time. If
there is increased instability for some ports, it will need to be fixed - there is
generally nothing special about http tests at this point, and most code is
cross-platform in WebKit2.
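
The visible interface change for callers is that Sharder.shard_tests now
returns a single flat list of TestShards instead of a (locked, unlocked)
pair. Condensed from the patch below (slightly simplified, not verbatim):

    def shard_tests(self, test_inputs, num_workers, fully_parallel):
        if num_workers == 1:
            return [TestShard('all_tests', test_inputs)]            # one shard, minimal overhead
        if fully_parallel:
            return self._shard_every_file(test_inputs)              # one shard per test file
        return self._shard_by_directory(test_inputs, num_workers)   # default: one shard per directory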

* Scripts/webkitpy/layout_tests/controllers/layout_test_runner.py:
(LayoutTestRunner.__init__):
(LayoutTestRunner.run_tests):
(LayoutTestRunner.start_servers_with_lock):
(LayoutTestRunner._handle_started_test):
(Worker.handle):
(Sharder.__init__):
(Sharder.shard_tests):
(Sharder._shard_every_file):
(Sharder._shard_by_directory):
(LayoutTestRunner._handle_finished_test_list): Deleted.
(LayoutTestRunner._handle_finished_test_list.find): Deleted.
(Sharder._shard_in_two): Deleted.
(Sharder): Deleted.
(Sharder._resize_shards): Deleted.
(Sharder._resize_shards.divide_and_round_up): Deleted.
(Sharder._resize_shards.extract_and_flatten): Deleted.
(Sharder._resize_shards.split_at): Deleted.
* Scripts/webkitpy/layout_tests/controllers/layout_test_runner_unittest.py:
(FakePrinter.print_workers_and_shards):
(LayoutTestRunnerTests.test_servers_started.start_http_server):
(LayoutTestRunnerTests.test_servers_started):
(SharderTests.get_shards):
(SharderTests.test_shard_by_dir):
(SharderTests.test_shard_every_file):
(SharderTests): Deleted.
(SharderTests.test_shard_in_two): Deleted.
(SharderTests.test_shard_in_two_has_no_locked_shards): Deleted.
(SharderTests.test_shard_in_two_has_no_unlocked_shards): Deleted.
(SharderTests.test_multiple_locked_shards): Deleted.
* Scripts/webkitpy/layout_tests/controllers/manager_unittest.py:
(ManagerTest.test_needs_servers.get_manager):
(ManagerTest.integration_test_needs_servers.get_manager):
(ManagerTest.test_look_for_new_crash_logs.get_manager):
(ManagerTest):
* Scripts/webkitpy/layout_tests/run_webkit_tests.py:
(_set_up_derived_options):
* Scripts/webkitpy/layout_tests/run_webkit_tests_integrationtest.py:
(RunTest.test_batch_size):
(RunTest.test_max_locked_shards): Deleted.
* Scripts/webkitpy/layout_tests/servers/apache_http_server.py:
(LayoutTestApacheHttpd.__init__):
* Scripts/webkitpy/layout_tests/servers/apache_http_server_unittest.py:
(TestLayoutTestApacheHttpd.test_start_cmd):
* Scripts/webkitpy/layout_tests/servers/http_server.py:
(Lighttpd.__init__):
* Scripts/webkitpy/layout_tests/servers/http_server_base.py:
(HttpServerBase.__init__):
* Scripts/webkitpy/layout_tests/views/printing.py:
(Printer.print_workers_and_shards):
* Scripts/webkitpy/port/base.py:
(Port.default_child_processes):
(Port.to.start_http_server):
(Port.default_max_locked_shards): Deleted.
* Scripts/webkitpy/port/port_testcase.py:
(PortTestCase.make_port):
(PortTestCase.test_default_max_locked_shards): Deleted.
* Scripts/webkitpy/port/test.py:
(TestPort.start_http_server):

git-svn-id: https://svn.webkit.org/repository/webkit/trunk@176830 268f45cc-cd09-0410-ab3c-d52691b4dbfc

14 files changed:
Tools/ChangeLog
Tools/Scripts/webkitpy/layout_tests/controllers/layout_test_runner.py
Tools/Scripts/webkitpy/layout_tests/controllers/layout_test_runner_unittest.py
Tools/Scripts/webkitpy/layout_tests/controllers/manager_unittest.py
Tools/Scripts/webkitpy/layout_tests/run_webkit_tests.py
Tools/Scripts/webkitpy/layout_tests/run_webkit_tests_integrationtest.py
Tools/Scripts/webkitpy/layout_tests/servers/apache_http_server.py
Tools/Scripts/webkitpy/layout_tests/servers/apache_http_server_unittest.py
Tools/Scripts/webkitpy/layout_tests/servers/http_server.py
Tools/Scripts/webkitpy/layout_tests/servers/http_server_base.py
Tools/Scripts/webkitpy/layout_tests/views/printing.py
Tools/Scripts/webkitpy/port/base.py
Tools/Scripts/webkitpy/port/port_testcase.py
Tools/Scripts/webkitpy/port/test.py

diff --git a/Tools/ChangeLog b/Tools/ChangeLog
index 25e9e0708ec9bb90ac47a1f97f9ad03f6fc05b9c..62f3cd86f9d68a9dd6780b6f24c3c8267d2430ed 100644
@@ -1,3 +1,88 @@
+2014-12-04  Alexey Proskuryakov  <ap@apple.com>
+
+        Run http tests in parallel
+        https://bugs.webkit.org/show_bug.cgi?id=138958
+
+        Reviewed by Daniel Bates.
+
+        Remove the concept of "locked shard". Now http tests are just like any other tests.
+        We start HTTP and WebSocket servers at the start if we need them, and terminate them
+        when done with all the tests (not when the last http test runs, which is unnecessarily
+        unpredictable).
+
+        This makes debug tests run in 8 minutes and 12 seconds on my Mac Pro. Without the
+        patch, they used to take over 15 minutes.
+
+        As part of the fix, we no longer pass the number of servers to Apache. I don't
+        think that these parameters did what we wanted them to do; Apache handles the load
+        just fine without them.
+
+        The change applies to all platforms. I fixed everything I could find on Mac, and
+        Ossy told me that he's been running http tests in parallel for a long time. If
+        there is increased instability for some ports, it will need to be fixed - there is
+        generally nothing special about http tests at this point, and most code is
+        cross-platform in WebKit2.
+
+        * Scripts/webkitpy/layout_tests/controllers/layout_test_runner.py:
+        (LayoutTestRunner.__init__):
+        (LayoutTestRunner.run_tests):
+        (LayoutTestRunner.start_servers_with_lock):
+        (LayoutTestRunner._handle_started_test):
+        (Worker.handle):
+        (Sharder.__init__):
+        (Sharder.shard_tests):
+        (Sharder._shard_every_file):
+        (Sharder._shard_by_directory):
+        (LayoutTestRunner._handle_finished_test_list): Deleted.
+        (LayoutTestRunner._handle_finished_test_list.find): Deleted.
+        (Sharder._shard_in_two): Deleted.
+        (Sharder): Deleted.
+        (Sharder._resize_shards): Deleted.
+        (Sharder._resize_shards.divide_and_round_up): Deleted.
+        (Sharder._resize_shards.extract_and_flatten): Deleted.
+        (Sharder._resize_shards.split_at): Deleted.
+        * Scripts/webkitpy/layout_tests/controllers/layout_test_runner_unittest.py:
+        (FakePrinter.print_workers_and_shards):
+        (LayoutTestRunnerTests.test_servers_started.start_http_server):
+        (LayoutTestRunnerTests.test_servers_started):
+        (SharderTests.get_shards):
+        (SharderTests.test_shard_by_dir):
+        (SharderTests.test_shard_every_file):
+        (SharderTests): Deleted.
+        (SharderTests.test_shard_in_two): Deleted.
+        (SharderTests.test_shard_in_two_has_no_locked_shards): Deleted.
+        (SharderTests.test_shard_in_two_has_no_unlocked_shards): Deleted.
+        (SharderTests.test_multiple_locked_shards): Deleted.
+        * Scripts/webkitpy/layout_tests/controllers/manager_unittest.py:
+        (ManagerTest.test_needs_servers.get_manager):
+        (ManagerTest.integration_test_needs_servers.get_manager):
+        (ManagerTest.test_look_for_new_crash_logs.get_manager):
+        (ManagerTest):
+        * Scripts/webkitpy/layout_tests/run_webkit_tests.py:
+        (_set_up_derived_options):
+        * Scripts/webkitpy/layout_tests/run_webkit_tests_integrationtest.py:
+        (RunTest.test_batch_size):
+        (RunTest.test_max_locked_shards): Deleted.
+        * Scripts/webkitpy/layout_tests/servers/apache_http_server.py:
+        (LayoutTestApacheHttpd.__init__):
+        * Scripts/webkitpy/layout_tests/servers/apache_http_server_unittest.py:
+        (TestLayoutTestApacheHttpd.test_start_cmd):
+        * Scripts/webkitpy/layout_tests/servers/http_server.py:
+        (Lighttpd.__init__):
+        * Scripts/webkitpy/layout_tests/servers/http_server_base.py:
+        (HttpServerBase.__init__):
+        * Scripts/webkitpy/layout_tests/views/printing.py:
+        (Printer.print_workers_and_shards):
+        * Scripts/webkitpy/port/base.py:
+        (Port.default_child_processes):
+        (Port.to.start_http_server):
+        (Port.default_max_locked_shards): Deleted.
+        * Scripts/webkitpy/port/port_testcase.py:
+        (PortTestCase.make_port):
+        (PortTestCase.test_default_max_locked_shards): Deleted.
+        * Scripts/webkitpy/port/test.py:
+        (TestPort.start_http_server):
+
 2014-12-04  Alexey Proskuryakov  <ap@apple.com>
 
         Don't lock perf tests in run-webkit-tests
diff --git a/Tools/Scripts/webkitpy/layout_tests/controllers/layout_test_runner.py b/Tools/Scripts/webkitpy/layout_tests/controllers/layout_test_runner.py
index 1355a15f8daf4a7bbd8e7665d58eb0038b09c28c..99a28e7c073183df002265c5eca186446a0275ce 100644
@@ -67,7 +67,7 @@ class LayoutTestRunner(object):
         self._printer = printer
         self._results_directory = results_directory
         self._test_is_slow = test_is_slow_fn
-        self._sharder = Sharder(self._port.split_test, self._options.max_locked_shards)
+        self._sharder = Sharder(self._port.split_test)
         self._filesystem = self._port.host.filesystem
 
         self._expectations = None
@@ -77,7 +77,6 @@ class LayoutTestRunner(object):
         self._retrying = False
 
         self._current_run_results = None
-        self._remaining_locked_shards = []
         self._has_http_lock = False
 
     def run_tests(self, expectations, test_inputs, tests_to_skip, num_workers, needs_http, needs_websockets, retrying):
@@ -90,7 +89,6 @@ class LayoutTestRunner(object):
         # FIXME: rename all variables to test_run_results or some such ...
         run_results = TestRunResults(self._expectations, len(test_inputs) + len(tests_to_skip))
         self._current_run_results = run_results
-        self._remaining_locked_shards = []
         self._has_http_lock = False
         self._printer.num_tests = len(test_inputs)
         self._printer.num_started = 0
@@ -104,23 +102,13 @@ class LayoutTestRunner(object):
             run_results.add(result, expected=True, test_is_slow=self._test_is_slow(test_name))
 
         self._printer.write_update('Sharding tests ...')
-        locked_shards, unlocked_shards = self._sharder.shard_tests(test_inputs, int(self._options.child_processes), self._options.fully_parallel)
+        all_shards = self._sharder.shard_tests(test_inputs, int(self._options.child_processes), self._options.fully_parallel)
 
-        # FIXME: We don't have a good way to coordinate the workers so that
-        # they don't try to run the shards that need a lock if we don't actually
-        # have the lock. The easiest solution at the moment is to grab the
-        # lock at the beginning of the run, and then run all of the locked
-        # shards first. This minimizes the time spent holding the lock, but
-        # means that we won't be running tests while we're waiting for the lock.
-        # If this becomes a problem in practice we'll need to change this.
-
-        all_shards = locked_shards + unlocked_shards
-        self._remaining_locked_shards = locked_shards
-        if locked_shards and self._options.http:
-            self.start_servers_with_lock(2 * min(num_workers, len(locked_shards)))
+        if self._needs_http and self._options.http:
+            self.start_servers_with_lock()
 
         num_workers = min(num_workers, len(all_shards))
-        self._printer.print_workers_and_shards(num_workers, len(all_shards), len(locked_shards))
+        self._printer.print_workers_and_shards(num_workers, len(all_shards))
 
         if self._options.dry_run:
             return run_results
@@ -197,12 +185,12 @@ class LayoutTestRunner(object):
 
         self._interrupt_if_at_failure_limits(run_results)
 
-    def start_servers_with_lock(self, number_of_servers):
+    def start_servers_with_lock(self):
         self._printer.write_update('Acquiring http lock ...')
         self._port.acquire_http_lock()
         if self._needs_http:
             self._printer.write_update('Starting HTTP server ...')
-            self._port.start_http_server(number_of_servers=number_of_servers)
+            self._port.start_http_server()
         if self._needs_websockets:
             self._printer.write_update('Starting WebSocket server ...')
             self._port.start_websocket_server()
@@ -229,19 +217,6 @@ class LayoutTestRunner(object):
     def _handle_started_test(self, worker_name, test_input, test_timeout_sec):
         self._printer.print_started_test(test_input.test_name)
 
-    def _handle_finished_test_list(self, worker_name, list_name):
-        def find(name, test_lists):
-            for i in range(len(test_lists)):
-                if test_lists[i].name == name:
-                    return i
-            return -1
-
-        index = find(list_name, self._remaining_locked_shards)
-        if index >= 0:
-            self._remaining_locked_shards.pop(index)
-            if not self._remaining_locked_shards:
-                self.stop_servers_with_lock()
-
     def _handle_finished_test(self, worker_name, result, log_messages=[]):
         self._update_summary_with_result(self._current_run_results, result)
 
@@ -281,7 +256,6 @@ class Worker(object):
         assert name == 'test_list'
         for test_input in test_inputs:
             self._run_test(test_input, test_list_name)
-        self._caller.post('finished_test_list', test_list_name)
 
     def _update_test_input(self, test_input):
         if test_input.reference_files is None:
@@ -452,9 +426,8 @@ class TestShard(object):
 
 
 class Sharder(object):
-    def __init__(self, test_split_fn, max_locked_shards):
+    def __init__(self, test_split_fn):
         self._split = test_split_fn
-        self._max_locked_shards = max_locked_shards
 
     def shard_tests(self, test_inputs, num_workers, fully_parallel):
         """Groups tests into batches.
@@ -462,64 +435,37 @@ class Sharder(object):
         continue to run together as most cross-tests dependencies tend to
         occur within the same directory.
         Return:
-            Two list of TestShards. The first contains tests that must only be
-            run under the server lock, the second can be run whenever.
+            A list of TestShards.
         """
 
         # FIXME: Move all of the sharding logic out of manager into its
         # own class or module. Consider grouping it with the chunking logic
         # in prepare_lists as well.
         if num_workers == 1:
-            return self._shard_in_two(test_inputs)
+            return [TestShard('all_tests', test_inputs)]
         elif fully_parallel:
             return self._shard_every_file(test_inputs)
         return self._shard_by_directory(test_inputs, num_workers)
 
-    def _shard_in_two(self, test_inputs):
-        """Returns two lists of shards, one with all the tests requiring a lock and one with the rest.
-
-        This is used when there's only one worker, to minimize the per-shard overhead."""
-        locked_inputs = []
-        unlocked_inputs = []
-        for test_input in test_inputs:
-            if test_input.needs_servers:
-                locked_inputs.append(test_input)
-            else:
-                unlocked_inputs.append(test_input)
-
-        locked_shards = []
-        unlocked_shards = []
-        if locked_inputs:
-            locked_shards = [TestShard('locked_tests', locked_inputs)]
-        if unlocked_inputs:
-            unlocked_shards = [TestShard('unlocked_tests', unlocked_inputs)]
-
-        return locked_shards, unlocked_shards
-
     def _shard_every_file(self, test_inputs):
-        """Returns two lists of shards, each shard containing a single test file.
+        """Returns a list of shards, each shard containing a single test file.
 
         This mode gets maximal parallelism at the cost of much higher flakiness."""
-        locked_shards = []
-        unlocked_shards = []
+        shards = []
         for test_input in test_inputs:
             # Note that we use a '.' for the shard name; the name doesn't really
             # matter, and the only other meaningful value would be the filename,
             # which would be really redundant.
-            if test_input.needs_servers:
-                locked_shards.append(TestShard('.', [test_input]))
-            else:
-                unlocked_shards.append(TestShard('.', [test_input]))
+            shards.append(TestShard('.', [test_input]))
 
-        return locked_shards, unlocked_shards
+        return shards
 
     def _shard_by_directory(self, test_inputs, num_workers):
-        """Returns two lists of shards, each shard containing all the files in a directory.
+        """Returns a lists of shards, each shard containing all the files in a directory.
 
         This is the default mode, and gets as much parallelism as we can while
         minimizing flakiness caused by inter-test dependencies."""
-        locked_shards = []
-        unlocked_shards = []
+        shards = []
         tests_by_dir = {}
         # FIXME: Given that the tests are already sorted by directory,
         # we can probably rewrite this to be clearer and faster.
@@ -530,53 +476,9 @@ class Sharder(object):
 
         for directory, test_inputs in tests_by_dir.iteritems():
             shard = TestShard(directory, test_inputs)
-            if test_inputs[0].needs_servers:
-                locked_shards.append(shard)
-            else:
-                unlocked_shards.append(shard)
+            shards.append(shard)
 
         # Sort the shards by directory name.
-        locked_shards.sort(key=lambda shard: shard.name)
-        unlocked_shards.sort(key=lambda shard: shard.name)
-
-        # Put a ceiling on the number of locked shards, so that we
-        # don't hammer the servers too badly.
-
-        # FIXME: For now, limit to one shard or set it
-        # with the --max-locked-shards. After testing to make sure we
-        # can handle multiple shards, we should probably do something like
-        # limit this to no more than a quarter of all workers, e.g.:
-        # return max(math.ceil(num_workers / 4.0), 1)
-        return (self._resize_shards(locked_shards, self._max_locked_shards, 'locked_shard'),
-                unlocked_shards)
-
-    def _resize_shards(self, old_shards, max_new_shards, shard_name_prefix):
-        """Takes a list of shards and redistributes the tests into no more
-        than |max_new_shards| new shards."""
-
-        # This implementation assumes that each input shard only contains tests from a
-        # single directory, and that tests in each shard must remain together; as a
-        # result, a given input shard is never split between output shards.
-        #
-        # Each output shard contains the tests from one or more input shards and
-        # hence may contain tests from multiple directories.
-
-        def divide_and_round_up(numerator, divisor):
-            return int(math.ceil(float(numerator) / divisor))
-
-        def extract_and_flatten(shards):
-            test_inputs = []
-            for shard in shards:
-                test_inputs.extend(shard.test_inputs)
-            return test_inputs
-
-        def split_at(seq, index):
-            return (seq[:index], seq[index:])
-
-        num_old_per_new = divide_and_round_up(len(old_shards), max_new_shards)
-        new_shards = []
-        remaining_shards = old_shards
-        while remaining_shards:
-            some_shards, remaining_shards = split_at(remaining_shards, num_old_per_new)
-            new_shards.append(TestShard('%s_%d' % (shard_name_prefix, len(new_shards) + 1), extract_and_flatten(some_shards)))
-        return new_shards
+        shards.sort(key=lambda shard: shard.name)
+
+        return shards
diff --git a/Tools/Scripts/webkitpy/layout_tests/controllers/layout_test_runner_unittest.py b/Tools/Scripts/webkitpy/layout_tests/controllers/layout_test_runner_unittest.py
index ada144ad1a319996b8adc82835db57e54326ccf7..ccaefe4d43a2dd5893210b3b5fc2ffe1eafbd2b3 100644
@@ -51,7 +51,7 @@ class FakePrinter(object):
     def print_expected(self, run_results, get_tests_with_result_type):
         pass
 
-    def print_workers_and_shards(self, num_workers, num_shards, num_locked_shards):
+    def print_workers_and_shards(self, num_workers, num_shards):
         pass
 
     def print_started_test(self, test_name):
@@ -166,7 +166,7 @@ class LayoutTestRunnerTests(unittest.TestCase):
 
     def test_servers_started(self):
 
-        def start_http_server(number_of_servers=None):
+        def start_http_server():
             self.http_started = True
 
         def start_websocket_server():
@@ -189,7 +189,7 @@ class LayoutTestRunnerTests(unittest.TestCase):
         runner = self._runner(port=port)
         runner._needs_http = True
         runner._needs_websockets = False
-        runner.start_servers_with_lock(number_of_servers=4)
+        runner.start_servers_with_lock()
         self.assertEqual(self.http_started, True)
         self.assertEqual(self.websocket_started, False)
         runner.stop_servers_with_lock()
@@ -199,7 +199,7 @@ class LayoutTestRunnerTests(unittest.TestCase):
         self.http_started = self.http_stopped = self.websocket_started = self.websocket_stopped = False
         runner._needs_http = True
         runner._needs_websockets = True
-        runner.start_servers_with_lock(number_of_servers=4)
+        runner.start_servers_with_lock()
         self.assertEqual(self.http_started, True)
         self.assertEqual(self.websocket_started, True)
         runner.stop_servers_with_lock()
@@ -209,7 +209,7 @@ class LayoutTestRunnerTests(unittest.TestCase):
         self.http_started = self.http_stopped = self.websocket_started = self.websocket_stopped = False
         runner._needs_http = False
         runner._needs_websockets = False
-        runner.start_servers_with_lock(number_of_servers=4)
+        runner.start_servers_with_lock()
         self.assertEqual(self.http_started, False)
         self.assertEqual(self.websocket_started, False)
         runner.stop_servers_with_lock()
@@ -234,9 +234,9 @@ class SharderTests(unittest.TestCase):
     def get_test_input(self, test_file):
         return TestInput(test_file, needs_servers=(test_file.startswith('http')))
 
-    def get_shards(self, num_workers, fully_parallel, test_list=None, max_locked_shards=1):
+    def get_shards(self, num_workers, fully_parallel, test_list=None):
         port = TestPort(MockSystemHost())
-        self.sharder = Sharder(port.split_test, max_locked_shards)
+        self.sharder = Sharder(port.split_test)
         test_list = test_list or self.test_list
         return self.sharder.shard_tests([self.get_test_input(test) for test in test_list], num_workers, fully_parallel)
 
@@ -249,80 +249,27 @@ class SharderTests(unittest.TestCase):
                               expected_test_names)
 
     def test_shard_by_dir(self):
-        locked, unlocked = self.get_shards(num_workers=2, fully_parallel=False)
-
-        # Note that although there are tests in multiple dirs that need locks,
-        # they are crammed into a single shard in order to reduce the # of
-        # workers hitting the server at once.
-        self.assert_shards(locked,
-             [('locked_shard_1',
-               ['http/tests/security/view-source-no-refresh.html',
-                'http/tests/websocket/tests/unicode.htm',
-                'http/tests/websocket/tests/websocket-protocol-ignored.html',
-                'http/tests/xmlhttprequest/supported-xml-content-types.html'])])
-        self.assert_shards(unlocked,
+        result = self.get_shards(num_workers=2, fully_parallel=False)
+
+        self.assert_shards(result,
             [('animations', ['animations/keyframes.html']),
              ('dom/html/level2/html', ['dom/html/level2/html/HTMLAnchorElement03.html',
                                       'dom/html/level2/html/HTMLAnchorElement06.html']),
              ('fast/css', ['fast/css/display-none-inline-style-change-crash.html']),
+             ('http/tests/security', ['http/tests/security/view-source-no-refresh.html']),
+             ('http/tests/websocket/tests', ['http/tests/websocket/tests/unicode.htm', 'http/tests/websocket/tests/websocket-protocol-ignored.html']),
+             ('http/tests/xmlhttprequest', ['http/tests/xmlhttprequest/supported-xml-content-types.html']),
              ('ietestcenter/Javascript', ['ietestcenter/Javascript/11.1.5_4-4-c-1.html'])])
 
     def test_shard_every_file(self):
-        locked, unlocked = self.get_shards(num_workers=2, fully_parallel=True)
-        self.assert_shards(locked,
+        result = self.get_shards(num_workers=2, fully_parallel=True)
+        self.assert_shards(result,
             [('.', ['http/tests/websocket/tests/unicode.htm']),
+             ('.', ['animations/keyframes.html']),
              ('.', ['http/tests/security/view-source-no-refresh.html']),
              ('.', ['http/tests/websocket/tests/websocket-protocol-ignored.html']),
-             ('.', ['http/tests/xmlhttprequest/supported-xml-content-types.html'])]),
-        self.assert_shards(unlocked,
-            [('.', ['animations/keyframes.html']),
              ('.', ['fast/css/display-none-inline-style-change-crash.html']),
+             ('.', ['http/tests/xmlhttprequest/supported-xml-content-types.html']),
              ('.', ['dom/html/level2/html/HTMLAnchorElement03.html']),
              ('.', ['ietestcenter/Javascript/11.1.5_4-4-c-1.html']),
              ('.', ['dom/html/level2/html/HTMLAnchorElement06.html'])])
-
-    def test_shard_in_two(self):
-        locked, unlocked = self.get_shards(num_workers=1, fully_parallel=False)
-        self.assert_shards(locked,
-            [('locked_tests',
-              ['http/tests/websocket/tests/unicode.htm',
-               'http/tests/security/view-source-no-refresh.html',
-               'http/tests/websocket/tests/websocket-protocol-ignored.html',
-               'http/tests/xmlhttprequest/supported-xml-content-types.html'])])
-        self.assert_shards(unlocked,
-            [('unlocked_tests',
-              ['animations/keyframes.html',
-               'fast/css/display-none-inline-style-change-crash.html',
-               'dom/html/level2/html/HTMLAnchorElement03.html',
-               'ietestcenter/Javascript/11.1.5_4-4-c-1.html',
-               'dom/html/level2/html/HTMLAnchorElement06.html'])])
-
-    def test_shard_in_two_has_no_locked_shards(self):
-        locked, unlocked = self.get_shards(num_workers=1, fully_parallel=False,
-             test_list=['animations/keyframe.html'])
-        self.assertEqual(len(locked), 0)
-        self.assertEqual(len(unlocked), 1)
-
-    def test_shard_in_two_has_no_unlocked_shards(self):
-        locked, unlocked = self.get_shards(num_workers=1, fully_parallel=False,
-             test_list=['http/tests/websocket/tests/unicode.htm'])
-        self.assertEqual(len(locked), 1)
-        self.assertEqual(len(unlocked), 0)
-
-    def test_multiple_locked_shards(self):
-        locked, unlocked = self.get_shards(num_workers=4, fully_parallel=False, max_locked_shards=2)
-        self.assert_shards(locked,
-            [('locked_shard_1',
-              ['http/tests/security/view-source-no-refresh.html',
-               'http/tests/websocket/tests/unicode.htm',
-               'http/tests/websocket/tests/websocket-protocol-ignored.html']),
-             ('locked_shard_2',
-              ['http/tests/xmlhttprequest/supported-xml-content-types.html'])])
-
-        locked, unlocked = self.get_shards(num_workers=4, fully_parallel=False)
-        self.assert_shards(locked,
-            [('locked_shard_1',
-              ['http/tests/security/view-source-no-refresh.html',
-               'http/tests/websocket/tests/unicode.htm',
-               'http/tests/websocket/tests/websocket-protocol-ignored.html',
-               'http/tests/xmlhttprequest/supported-xml-content-types.html'])])
diff --git a/Tools/Scripts/webkitpy/layout_tests/controllers/manager_unittest.py b/Tools/Scripts/webkitpy/layout_tests/controllers/manager_unittest.py
index 1be0e465e37697208a38c241ad3c73de01599974..d4afb6097c92764600fa822b10e9e4db4a5b31bc 100644
@@ -46,7 +46,7 @@ class ManagerTest(unittest.TestCase):
         def get_manager():
             port = Mock()  # FIXME: Use a tighter mock.
             port.TEST_PATH_SEPARATOR = '/'
-            manager = Manager(port, options=MockOptions(http=True, max_locked_shards=1), printer=Mock())
+            manager = Manager(port, options=MockOptions(http=True), printer=Mock())
             return manager
 
         manager = get_manager()
@@ -59,7 +59,7 @@ class ManagerTest(unittest.TestCase):
         def get_manager():
             host = MockHost()
             port = host.port_factory.get()
-            manager = Manager(port, options=MockOptions(test_list=None, http=True, max_locked_shards=1), printer=Mock())
+            manager = Manager(port, options=MockOptions(test_list=None, http=True), printer=Mock())
             return manager
 
         manager = get_manager()
@@ -79,7 +79,7 @@ class ManagerTest(unittest.TestCase):
         def get_manager():
             host = MockHost()
             port = host.port_factory.get('test-mac-leopard')
-            manager = Manager(port, options=MockOptions(test_list=None, http=True, max_locked_shards=1), printer=Mock())
+            manager = Manager(port, options=MockOptions(test_list=None, http=True), printer=Mock())
             return manager
         host = MockHost()
         port = host.port_factory.get('test-mac-leopard')
diff --git a/Tools/Scripts/webkitpy/layout_tests/run_webkit_tests.py b/Tools/Scripts/webkitpy/layout_tests/run_webkit_tests.py
index c143056aa8e3c708f916b450100947e19863370d..fa29a3088608633191e731228c186ee90b290d04 100644
@@ -331,9 +331,6 @@ def _set_up_derived_options(port, options):
     if not options.child_processes:
         options.child_processes = os.environ.get("WEBKIT_TEST_CHILD_PROCESSES",
                                                  str(port.default_child_processes()))
-    if not options.max_locked_shards:
-        options.max_locked_shards = int(os.environ.get("WEBKIT_TEST_MAX_LOCKED_SHARDS",
-                                                       str(port.default_max_locked_shards())))
 
     if not options.configuration:
         options.configuration = port.default_configuration()
diff --git a/Tools/Scripts/webkitpy/layout_tests/run_webkit_tests_integrationtest.py b/Tools/Scripts/webkitpy/layout_tests/run_webkit_tests_integrationtest.py
index 795693547946f7caf56beff63615b5de25f75e67..0e12afd3c5c132a4a834b6cd224f6c8bb8c29a85 100644
@@ -224,21 +224,6 @@ class RunTest(unittest.TestCase, StreamTestingMixin):
         for batch in batch_tests_run:
             self.assertTrue(len(batch) <= 2, '%s had too many tests' % ', '.join(batch))
 
-    def test_max_locked_shards(self):
-        # Tests for the default of using one locked shard even in the case of more than one child process.
-        if not self.should_test_processes:
-            return
-        save_env_webkit_test_max_locked_shards = None
-        if "WEBKIT_TEST_MAX_LOCKED_SHARDS" in os.environ:
-            save_env_webkit_test_max_locked_shards = os.environ["WEBKIT_TEST_MAX_LOCKED_SHARDS"]
-            del os.environ["WEBKIT_TEST_MAX_LOCKED_SHARDS"]
-        _, regular_output, _ = logging_run(['--debug-rwt-logging', '--child-processes', '2'], shared_port=False)
-        try:
-            self.assertTrue(any(['1 locked' in line for line in regular_output.buflist]))
-        finally:
-            if save_env_webkit_test_max_locked_shards:
-                os.environ["WEBKIT_TEST_MAX_LOCKED_SHARDS"] = save_env_webkit_test_max_locked_shards
-
     def test_child_processes_2(self):
         if self.should_test_processes:
             _, regular_output, _ = logging_run(
diff --git a/Tools/Scripts/webkitpy/layout_tests/servers/apache_http_server.py b/Tools/Scripts/webkitpy/layout_tests/servers/apache_http_server.py
index e00a9a0cb449621a3de48b68f0cd8ed625da83e9..2c40d46c2ce5a9d4d2e80d657ff975acfe6909a3 100644
@@ -42,12 +42,12 @@ _log = logging.getLogger(__name__)
 
 
 class LayoutTestApacheHttpd(http_server_base.HttpServerBase):
-    def __init__(self, port_obj, output_dir, additional_dirs=None, number_of_servers=None):
+    def __init__(self, port_obj, output_dir, additional_dirs=None):
         """Args:
           port_obj: handle to the platform-specific routines
           output_dir: the absolute path to the layout test result directory
         """
-        http_server_base.HttpServerBase.__init__(self, port_obj, number_of_servers)
+        http_server_base.HttpServerBase.__init__(self, port_obj)
         # We use the name "httpd" instead of "apache" to make our paths (e.g. the pid file: /tmp/WebKit/httpd.pid)
         # match old-run-webkit-tests: https://bugs.webkit.org/show_bug.cgi?id=63956
         self._name = 'httpd'
@@ -115,11 +115,6 @@ class LayoutTestApacheHttpd(http_server_base.HttpServerBase):
                         '-c', "\'RemoveHandler .cgi .pl\'",
                         '-c', "\'</Location>\'"]
 
-        if self._number_of_servers:
-            start_cmd += ['-c', "\'StartServers %d\'" % self._number_of_servers,
-                          '-c', "\'MinSpareServers %d\'" % self._number_of_servers,
-                          '-c', "\'MaxSpareServers %d\'" % self._number_of_servers]
-
         stop_cmd = [executable,
             '-f', "\"%s\"" % self._get_apache_config_file_path(test_dir, output_dir),
             '-c', "\'PidFile %s'" % self._pid_file,
diff --git a/Tools/Scripts/webkitpy/layout_tests/servers/apache_http_server_unittest.py b/Tools/Scripts/webkitpy/layout_tests/servers/apache_http_server_unittest.py
index bfc9bc2395c4072f039da98ef164278add9ed8c1..f2d9b5e42241a9e3f7375da90ba7c0f492a2bf01 100644
@@ -53,7 +53,7 @@ class TestLayoutTestApacheHttpd(unittest.TestCase):
         test_port = test.TestPort(host)
         host.filesystem.write_text_file(test_port._path_to_apache_config_file(), '')
 
-        server = LayoutTestApacheHttpd(test_port, "/mock/output_dir", number_of_servers=4)
+        server = LayoutTestApacheHttpd(test_port, "/mock/output_dir")
         server._check_that_all_ports_are_available = lambda: True
         server._is_server_running_on_all_ports = lambda: True
         server._wait_for_action = fake_pid
@@ -64,7 +64,4 @@ class TestLayoutTestApacheHttpd(unittest.TestCase):
             server.stop()
         finally:
             _, _, logs = oc.restore_output()
-        self.assertIn("StartServers 4", logs)
-        self.assertIn("MinSpareServers 4", logs)
-        self.assertIn("MaxSpareServers 4", logs)
         self.assertTrue(host.filesystem.exists("/mock/output_dir/httpd.conf"))
diff --git a/Tools/Scripts/webkitpy/layout_tests/servers/http_server.py b/Tools/Scripts/webkitpy/layout_tests/servers/http_server.py
index 1fbf1321231976527a3ecb22c72d7a312fe225d2..441940b0a6da92b60c0257971daa06f241013196 100644
@@ -42,12 +42,12 @@ class Lighttpd(http_server_base.HttpServerBase):
 
     def __init__(self, port_obj, output_dir, background=False, port=None,
                  root=None, run_background=None, additional_dirs=None,
-                 layout_tests_dir=None, number_of_servers=None):
+                 layout_tests_dir=None):
         """Args:
           output_dir: the absolute path to the layout test result directory
         """
         # Webkit tests
-        http_server_base.HttpServerBase.__init__(self, port_obj, number_of_servers)
+        http_server_base.HttpServerBase.__init__(self, port_obj)
         self._name = 'lighttpd'
         self._output_dir = output_dir
         self._port = port
diff --git a/Tools/Scripts/webkitpy/layout_tests/servers/http_server_base.py b/Tools/Scripts/webkitpy/layout_tests/servers/http_server_base.py
index 3ce15a5683caad4423e152dcf02ac7718b4d150d..5f4301a8698d580ea56e692d30e38bdf44f88e18 100644
@@ -46,7 +46,7 @@ class ServerError(Exception):
 class HttpServerBase(object):
     """A skeleton class for starting and stopping servers used by the layout tests."""
 
-    def __init__(self, port_obj, number_of_servers=None):
+    def __init__(self, port_obj):
         self._executive = port_obj._executive
         self._filesystem = port_obj._filesystem
         self._name = '<virtual>'
@@ -54,7 +54,6 @@ class HttpServerBase(object):
         self._pid = None
         self._pid_file = None
         self._port_obj = port_obj
-        self._number_of_servers = number_of_servers
 
         # We need a non-checkout-dependent place to put lock files, etc. We
         # don't use the Python default on the Mac because it defaults to a
diff --git a/Tools/Scripts/webkitpy/layout_tests/views/printing.py b/Tools/Scripts/webkitpy/layout_tests/views/printing.py
index f5c42e262b8c893858bbbf7f4fecaea1fbd18444..2e9ac9474207ec706e8a0b0a968c2731b50d3d20 100644
@@ -110,14 +110,14 @@ class Printer(object):
         self._print_expected_results_of_type(run_results, test_expectations.FLAKY, "flaky", tests_with_result_type_callback)
         self._print_debug('')
 
-    def print_workers_and_shards(self, num_workers, num_shards, num_locked_shards):
+    def print_workers_and_shards(self, num_workers, num_shards):
         driver_name = self._port.driver_name()
         if num_workers == 1:
             self._print_default("Running 1 %s." % driver_name)
             self._print_debug("(%s)." % grammar.pluralize(num_shards, "shard"))
         else:
             self._print_default("Running %s in parallel." % (grammar.pluralize(num_workers, driver_name)))
-            self._print_debug("(%d shards; %d locked)." % (num_shards, num_locked_shards))
+            self._print_debug("(%d shards)." % num_shards)
         self._print_default('')
 
     def _print_expected_results_of_type(self, run_results, result_type, result_type_str, tests_with_result_type_callback):
diff --git a/Tools/Scripts/webkitpy/port/base.py b/Tools/Scripts/webkitpy/port/base.py
index 9d47fee03b7316eebb0145c705dee089ab91a55c..ee4142c2451da365b12710eda6a7896f27a2d74f 100644
@@ -159,10 +159,6 @@ class Port(object):
         """Return the number of DumpRenderTree instances to use for this port."""
         return self._executive.cpu_count()
 
-    def default_max_locked_shards(self):
-        """Return the number of "locked" shards to run in parallel (like the http tests)."""
-        return 1
-
     def worker_startup_delay_secs(self):
         # FIXME: If we start workers up too quickly, DumpRenderTree appears
         # to thrash on something and time out its first few tests. Until
@@ -877,16 +873,16 @@ class Port(object):
         storage, it should override this method."""
         pass
 
-    def start_http_server(self, additional_dirs=None, number_of_servers=None):
+    def start_http_server(self, additional_dirs=None):
         """Start a web server. Raise an error if it can't start or is already running.
 
         Ports can stub this out if they don't need a web server to be running."""
         assert not self._http_server, 'Already running an http server.'
 
         if self._uses_apache():
-            server = apache_http_server.LayoutTestApacheHttpd(self, self.results_directory(), additional_dirs=additional_dirs, number_of_servers=number_of_servers)
+            server = apache_http_server.LayoutTestApacheHttpd(self, self.results_directory(), additional_dirs=additional_dirs)
         else:
-            server = http_server.Lighttpd(self, self.results_directory(), additional_dirs=additional_dirs, number_of_servers=number_of_servers)
+            server = http_server.Lighttpd(self, self.results_directory(), additional_dirs=additional_dirs)
 
         server.start()
         self._http_server = server
diff --git a/Tools/Scripts/webkitpy/port/port_testcase.py b/Tools/Scripts/webkitpy/port/port_testcase.py
index 33597e9182dd924e84f044039e72e1f721169134..b235114426296700a8b5f19e99003c184a8e1046 100644
@@ -91,13 +91,6 @@ class PortTestCase(unittest.TestCase):
         port._config.build_directory = lambda configuration: '/mock-build'
         return port
 
-    def test_default_max_locked_shards(self):
-        port = self.make_port()
-        port.default_child_processes = lambda: 16
-        self.assertEqual(port.default_max_locked_shards(), 1)
-        port.default_child_processes = lambda: 2
-        self.assertEqual(port.default_max_locked_shards(), 1)
-
     def test_default_timeout_ms(self):
         self.assertEqual(self.make_port(options=MockOptions(configuration='Release')).default_timeout_ms(), 35000)
         self.assertEqual(self.make_port(options=MockOptions(configuration='Debug')).default_timeout_ms(), 35000)
diff --git a/Tools/Scripts/webkitpy/port/test.py b/Tools/Scripts/webkitpy/port/test.py
index 42fe6898ad3e014f6ee964c4e98eca6d7eac7aba..105fd6fad7100fe5610b3ff73d8c41fa4cb6674b 100644
@@ -463,7 +463,7 @@ class TestPort(Port):
     def _driver_class(self):
         return TestDriver
 
-    def start_http_server(self, additional_dirs=None, number_of_servers=None):
+    def start_http_server(self, additional_dirs=None):
         pass
 
     def start_websocket_server(self):