// Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// This is a gTest-based test that runs the Selenium Core testsuite in Chrome
// using the UITest automation. The number of total and failed tests are
// written to stdout.
//
// TODO(darin): output the names of the failed tests so we can easily track
// deviations from the expected output.
| 11 | |
#include <list>
#include <set>
#include <string>
#include <vector>

#include "base/file_util.h"
#include "base/path_service.h"
#include "base/string_util.h"
#include "chrome/common/chrome_paths.h"
#include "chrome/common/rand_util.h"
#include "chrome/test/automation/tab_proxy.h"
#include "chrome/test/automation/window_proxy.h"
#include "chrome/test/ui/ui_test.h"
#include "net/base/net_util.h"
| 24 | |
| 25 | // Uncomment this to exercise this test without actually running the selenium |
| 26 | // test, which can take a while to run. This define is useful when modifying |
| 27 | // the analysis code. |
| 28 | //#define SIMULATE_RUN 1 |
| 29 | |
| 30 | namespace { |
| 31 | |
| 32 | // This file is a comma separated list of tests that are currently failing. |
| 33 | const wchar_t kExpectedFailuresFileName[] = L"expected_failures.txt"; |
| 34 | |
| 35 | class SeleniumTest : public UITest { |
| 36 | public: |
| 37 | SeleniumTest() { |
| 38 | show_window_ = true; |
| 39 | } |
| 40 | typedef std::list<std::string> ResultsList; |
| 41 | typedef std::set<std::string> ResultsSet; |
| 42 | |
| 43 | // Parses a selenium results string, which is of the form: |
| 44 | // "5.selectFrame,6.click,24.selectAndWait,24.verifyTitle" |
| 45 | void ParseResults(const std::string& input, ResultsSet* output) { |
| 46 | if (input.empty()) |
| 47 | return; |
| 48 | |
| 49 | std::vector<std::string> tokens; |
| 50 | SplitString(input, ',', &tokens); |
| 51 | for (size_t i = 0; i < tokens.size(); ++i) { |
| 52 | TrimWhitespace(tokens[i], TRIM_ALL, &tokens[i]); |
| 53 | output->insert(tokens[i]); |
| 54 | } |
| 55 | } |
| 56 | |
| 57 | // Find the elements of "b" that are not in "a" |
| 58 | void CompareSets(const ResultsSet& a, const ResultsSet& b, |
| 59 | ResultsList* only_in_b) { |
| 60 | ResultsSet::const_iterator it = b.begin(); |
| 61 | for (; it != b.end(); ++it) { |
| 62 | if (a.find(*it) == a.end()) |
| 63 | only_in_b->push_back(*it); |
| 64 | } |
| 65 | } |
| 66 | |
| 67 | // The results file is in trunk/chrome/test/selenium/ |
| 68 | std::wstring GetResultsFilePath() { |
| 69 | std::wstring results_path; |
| 70 | PathService::Get(chrome::DIR_TEST_DATA, &results_path); |
| 71 | file_util::UpOneDirectory(&results_path); |
| 72 | file_util::AppendToPath(&results_path, L"selenium"); |
| 73 | |
| 74 | file_util::AppendToPath(&results_path, kExpectedFailuresFileName); |
| 75 | return results_path; |
| 76 | } |
| 77 | |
| 78 | bool ReadExpectedResults(std::string* results) { |
| 79 | std::wstring results_path = GetResultsFilePath(); |
| 80 | return file_util::ReadFileToString(results_path, results); |
| 81 | } |
| 82 | |
| 83 | void RunSelenium(std::wstring* total, std::wstring* failed) { |
| 84 | #ifdef SIMULATE_RUN |
| 85 | *total = L"100"; |
| 86 | const wchar_t* kBogusFailures[] = { |
| 87 | L"5.selectFrame,6.click,24.selectAndWait,24.verifyTitle", |
| 88 | L"5.selectFrame,6.click,13.verifyLocation,13.verifyLocation,13.click,24.selectAndWait,24.verifyTitle", |
| 89 | L"5.selectFrame,6.click,24.selectAndWait" |
| 90 | }; |
| 91 | *failed = kBogusFailures[rand_util::RandInt(0, 2)]; |
| 92 | #else |
| 93 | std::wstring test_path; |
| 94 | PathService::Get(chrome::DIR_TEST_DATA, &test_path); |
| 95 | file_util::UpOneDirectory(&test_path); |
| 96 | file_util::UpOneDirectory(&test_path); |
| 97 | file_util::UpOneDirectory(&test_path); |
| 98 | file_util::AppendToPath(&test_path, L"data"); |
| 99 | file_util::AppendToPath(&test_path, L"selenium_core"); |
| 100 | file_util::AppendToPath(&test_path, L"core"); |
| 101 | file_util::AppendToPath(&test_path, L"TestRunner.html"); |
| 102 | |
[email protected] | 8ac1a75 | 2008-07-31 19:40:37 | [diff] [blame] | 103 | GURL test_url(net::FilePathToFileURL(test_path)); |
initial.commit | 09911bf | 2008-07-26 23:55:29 | [diff] [blame] | 104 | scoped_ptr<TabProxy> tab(GetActiveTab()); |
| 105 | tab->NavigateToURL(test_url); |
| 106 | |
| 107 | // Wait for the test to finish. |
| 108 | ASSERT_TRUE(WaitUntilCookieValue(tab.get(), test_url, "__tests_finished", |
| 109 | 3000, UITest::test_timeout_ms(), "1")); |
| 110 | |
| 111 | std::string cookie; |
| 112 | ASSERT_TRUE(tab->GetCookieByName(test_url, "__num_tests_total", &cookie)); |
| 113 | total->swap(UTF8ToWide(cookie)); |
| 114 | ASSERT_FALSE(total->empty()); |
| 115 | ASSERT_TRUE(tab->GetCookieByName(test_url, "__tests_failed", &cookie)); |
| 116 | failed->swap(UTF8ToWide(cookie)); |
| 117 | // The __tests_failed cookie will be empty if all the tests pass. |
| 118 | #endif |
| 119 | } |
| 120 | |
| 121 | void RunTest(ResultsList* new_passes_list, ResultsList* new_failures_list) { |
| 122 | std::string expected_failures; |
| 123 | bool have_expected_results = ReadExpectedResults(&expected_failures); |
| 124 | ASSERT_TRUE(have_expected_results); |
| 125 | |
| 126 | std::wstring total, failed; |
| 127 | RunSelenium(&total, &failed); |
| 128 | if (total.empty()) |
| 129 | return; |
| 130 | |
| 131 | printf("\n"); |
| 132 | wprintf(L"__num_tests_total = [%s]\n", total.c_str()); |
| 133 | wprintf(L"__tests_failed = [%s]\n", failed.c_str()); |
| 134 | |
| 135 | std::string cur_failures = WideToUTF8(failed); |
| 136 | |
| 137 | ResultsSet expected_failures_set; |
| 138 | ParseResults(expected_failures, &expected_failures_set); |
| 139 | |
| 140 | ResultsSet cur_failures_set; |
| 141 | ParseResults(cur_failures, &cur_failures_set); |
| 142 | |
| 143 | // Compute the list of new passes and failures |
| 144 | CompareSets(cur_failures_set, expected_failures_set, new_passes_list); |
| 145 | CompareSets(expected_failures_set, cur_failures_set, new_failures_list); |
| 146 | } |
| 147 | }; |
| 148 | |
| 149 | } // namespace |
| 150 | |
| 151 | TEST_F(SeleniumTest, Core) { |
| 152 | ResultsList new_passes_list, new_failures_list; |
| 153 | RunTest(&new_passes_list, &new_failures_list); |
| 154 | |
| 155 | if (!new_failures_list.empty()) { |
| 156 | ADD_FAILURE(); |
| 157 | printf("new tests failing:\n"); |
| 158 | ResultsList::const_iterator it = new_failures_list.begin(); |
| 159 | for (; it != new_failures_list.end(); ++it) |
| 160 | printf(" %s\n", it->c_str()); |
| 161 | printf("\n"); |
| 162 | } |
| 163 | |
| 164 | if (!new_passes_list.empty()) { |
| 165 | printf("new tests passing:\n"); |
| 166 | ResultsList::const_iterator it = new_passes_list.begin(); |
| 167 | for (; it != new_passes_list.end(); ++it) |
| 168 | printf(" %s\n", it->c_str()); |
| 169 | printf("\n"); |
| 170 | } |
| 171 | } |
license.bot | bf09a50 | 2008-08-24 00:55:55 | [diff] [blame] | 172 | |