Add proper filtering to WprProxySimulatorTestRunner tests.
ios-wpr-simulator is currently timing out due to a lack of filtering.
Each site tested by autofill automation is reported as its own suite,
e.g.
ios_costco.test.AutofillAutomationTestCase/testActions
ios_ebay.test.AutofillAutomationTestCase/testActions
ios_westelm.test.AutofillAutomationTestCase/testActions
The test_runner will automatically rerun any suite that fails,
filtering for just that suite.
However, filters were not implemented for WprProxySimulatorTestRunner,
meaning that if ios_ebay failed, every site would be retested,
resulting in a polynomial amount of test rerunning and a timeout.
This change implements filtering properly, along with tests for the
filtering, which should reduce the bot runtime to a reasonable level.
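
As an illustration only (not the actual test_runner code), here is a
minimal sketch of the suite-level filtering described above. The
function name apply_suite_filter and the helper logic are hypothetical;
only the test-name format comes from the examples above.

    def apply_suite_filter(tests, test_filter=None, invert=False):
      """Keeps tests whose suite matches an entry in test_filter (or,
      when invert is True, those that do not). An empty filter keeps
      everything."""
      if not test_filter:
        return list(tests)

      def matches(test_name):
        # The suite is the prefix before the first '.', e.g. 'ios_ebay'.
        suite = test_name.split('.', 1)[0]
        return any(f in suite for f in test_filter)

      return [t for t in tests if matches(t) != invert]

    all_tests = [
        'ios_costco.test.AutofillAutomationTestCase/testActions',
        'ios_ebay.test.AutofillAutomationTestCase/testActions',
        'ios_westelm.test.AutofillAutomationTestCase/testActions',
    ]

    # Rerun only the failed ios_ebay suite.
    assert apply_suite_filter(all_tests, ['ios_ebay']) == [
        'ios_ebay.test.AutofillAutomationTestCase/testActions']

    # Inverted filter: everything except ios_ebay.
    assert apply_suite_filter(all_tests, ['ios_ebay'], invert=True) == [
        'ios_costco.test.AutofillAutomationTestCase/testActions',
        'ios_westelm.test.AutofillAutomationTestCase/testActions']

Without such a filter, the rerun of a single failed suite re-executes
every recorded site, which is the timeout behavior described above.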
Change-Id: Ib614a5902be3ffd9d41720173bdf51d140705f83
Reviewed-on: https://chromium-review.googlesource.com/c/1374453
Reviewed-by: Sergey Berezin <[email protected]>
Commit-Queue: ericale <[email protected]>
Cr-Commit-Position: refs/heads/master@{#616065}
diff --git a/ios/build/bots/scripts/test_runner_test.py b/ios/build/bots/scripts/test_runner_test.py
index 7cb988a..a6d71075 100755
--- a/ios/build/bots/scripts/test_runner_test.py
+++ b/ios/build/bots/scripts/test_runner_test.py
@@ -419,9 +419,8 @@
self.assertTrue(tr)
- def test_run(self):
- """Ensures the _run method can handle passed and failed tests."""
-
+ def run_wpr_test(self, test_filter=[], invert=False):
+ """Wrapper that mocks the _run method and returns its result."""
class FakeStdout:
def __init__(self):
self.line_index = 0
@@ -477,8 +476,12 @@
self.mock(subprocess, 'Popen', popen)
tr.xctest_path = 'fake.xctest'
- cmd = tr.get_launch_command()
- result = tr._run(cmd=cmd, shards=1)
+ cmd = tr.get_launch_command(test_filter=test_filter, invert=invert)
+ return tr._run(cmd=cmd, shards=1)
+
+ def test_run_no_filter(self):
+ """Ensures the _run method can handle passed and failed tests."""
+ result = self.run_wpr_test()
self.assertIn('file1.a/1', result.passed_tests)
self.assertIn('file1.b/2', result.passed_tests)
self.assertIn('file1.c/3', result.failed_tests)
@@ -486,6 +489,25 @@
self.assertIn('file2.b/2', result.passed_tests)
self.assertIn('file2.c/3', result.failed_tests)
+ def test_run_with_filter(self):
+ """Ensures the _run method works with a filter."""
+ result = self.run_wpr_test(test_filter=["file1"], invert=False)
+ self.assertIn('file1.a/1', result.passed_tests)
+ self.assertIn('file1.b/2', result.passed_tests)
+ self.assertIn('file1.c/3', result.failed_tests)
+ self.assertNotIn('file2.a/1', result.passed_tests)
+ self.assertNotIn('file2.b/2', result.passed_tests)
+ self.assertNotIn('file2.c/3', result.failed_tests)
+
+ def test_run_with_inverted_filter(self):
+ """Ensures the _run method works with an inverted filter."""
+ result = self.run_wpr_test(test_filter=["file1"], invert=True)
+ self.assertNotIn('file1.a/1', result.passed_tests)
+ self.assertNotIn('file1.b/2', result.passed_tests)
+ self.assertNotIn('file1.c/3', result.failed_tests)
+ self.assertIn('file2.a/1', result.passed_tests)
+ self.assertIn('file2.b/2', result.passed_tests)
+ self.assertIn('file2.c/3', result.failed_tests)
class DeviceTestRunnerTest(TestCase):
def setUp(self):