// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/public/test/test_launcher.h"

#include <set>
#include <string>
#include <vector>

#include "base/at_exit.h"
#include "base/command_line.h"
#include "base/environment.h"
#include "base/file_util.h"
#include "base/hash_tables.h"
#include "base/logging.h"
#include "base/memory/linked_ptr.h"
#include "base/memory/scoped_ptr.h"
#include "base/process_util.h"
#include "base/scoped_temp_dir.h"
#include "base/string_number_conversions.h"
#include "base/string_util.h"
#include "base/test/test_suite.h"
#include "base/test/test_timeouts.h"
#include "base/time.h"
#include "base/utf_string_conversions.h"
#include "content/public/app/startup_helper_win.h"
#include "content/public/common/sandbox_init.h"
#include "content/public/test/browser_test.h"
#include "net/base/escape.h"
#include "testing/gtest/include/gtest/gtest.h"

#if defined(OS_WIN)
#include "base/base_switches.h"
#include "content/common/sandbox_policy.h"
#include "sandbox/win/src/dep.h"
#include "sandbox/win/src/sandbox_factory.h"
#include "sandbox/win/src/sandbox_types.h"
#elif defined(OS_MACOSX)
#include "base/mac/scoped_nsautorelease_pool.h"
#endif

namespace test_launcher {

namespace {

// A multiplier for slow tests. We generally avoid multiplying
// test timeouts by any constants; this is used only as a last resort
// to implement the SLOW_ test prefix.
const int kSlowTestTimeoutMultiplier = 5;

// Tests with this prefix have a longer timeout, see above.
const char kSlowTestPrefix[] = "SLOW_";
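// For example (illustrative name), a test named Foo.SLOW_Bar would be allowed
// kSlowTestTimeoutMultiplier times the default timeout before being killed;
// see GetTestTerminationTimeout() below.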

// Tests with this prefix run before the same test without it, and use the
// same profile, i.e. Foo.PRE_Test runs and then Foo.Test. This allows writing
// tests that span browser restarts.
const char kPreTestPrefix[] = "PRE_";

// Manual tests only run when --run-manual is specified. This allows writing
// tests that don't run automatically but are still in the same test binary.
// This is useful so that a team that wants to run a few tests doesn't have to
// add a new binary that must be compiled on all builds.
const char kManualTestPrefix[] = "MANUAL_";
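// For example (illustrative name), a test declared as
// IN_PROC_BROWSER_TEST_F(FooTest, MANUAL_HeavyBar) only runs when the binary
// is invoked with --run-manual.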

TestLauncherDelegate* g_launcher_delegate;
}  // namespace

// The environment variable name for the total number of test shards.
const char kTestTotalShards[] = "GTEST_TOTAL_SHARDS";
// The environment variable name for the test shard index.
const char kTestShardIndex[] = "GTEST_SHARD_INDEX";
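// For example (binary name illustrative), a bot could split the suite across
// three machines by running:
//   GTEST_TOTAL_SHARDS=3 GTEST_SHARD_INDEX=0 ./browser_tests   # machine 0
//   GTEST_TOTAL_SHARDS=3 GTEST_SHARD_INDEX=1 ./browser_tests   # machine 1
//   GTEST_TOTAL_SHARDS=3 GTEST_SHARD_INDEX=2 ./browser_tests   # machine 2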

// The default output file for XML output.
const FilePath::CharType kDefaultOutputFile[] = FILE_PATH_LITERAL(
    "test_detail.xml");

// Quit test execution after this number of tests has timed out.
const int kMaxTimeouts = 5;  // 45s timeout * (5 + 1) = 270s max run time.

namespace {

// Parses the environment variable |var| as an Int32. If it is unset, returns
// |default_val|. If it is set, unsets it and then converts it to an Int32
// before returning it. If unsetting or converting fails, prints an error and
// exits with failure.
int32 Int32FromEnvOrDie(const char* const var, int32 default_val) {
  scoped_ptr<base::Environment> env(base::Environment::Create());
  std::string str_val;
  int32 result;
  if (!env->GetVar(var, &str_val))
    return default_val;
  if (!env->UnSetVar(var)) {
    LOG(ERROR) << "Invalid environment: we could not unset " << var << ".\n";
    exit(EXIT_FAILURE);
  }
  if (!base::StringToInt(str_val, &result)) {
    LOG(ERROR) << "Invalid environment: " << var << " is not an integer.\n";
    exit(EXIT_FAILURE);
  }
  return result;
}

// Checks whether sharding is enabled by examining the relevant
// environment variable values. If the variables are present,
// but inconsistent (i.e., shard_index >= total_shards), prints
// an error and exits.
bool ShouldShard(int32* total_shards, int32* shard_index) {
  *total_shards = Int32FromEnvOrDie(kTestTotalShards, -1);
  *shard_index = Int32FromEnvOrDie(kTestShardIndex, -1);

  if (*total_shards == -1 && *shard_index == -1) {
    return false;
  } else if (*total_shards == -1 && *shard_index != -1) {
    LOG(ERROR) << "Invalid environment variables: you have "
               << kTestShardIndex << " = " << *shard_index
               << ", but have left " << kTestTotalShards << " unset.\n";
    exit(EXIT_FAILURE);
  } else if (*total_shards != -1 && *shard_index == -1) {
    LOG(ERROR) << "Invalid environment variables: you have "
               << kTestTotalShards << " = " << *total_shards
               << ", but have left " << kTestShardIndex << " unset.\n";
    exit(EXIT_FAILURE);
  } else if (*shard_index < 0 || *shard_index >= *total_shards) {
    LOG(ERROR) << "Invalid environment variables: we require 0 <= "
               << kTestShardIndex << " < " << kTestTotalShards
               << ", but you have " << kTestShardIndex << "=" << *shard_index
               << ", " << kTestTotalShards << "=" << *total_shards << ".\n";
    exit(EXIT_FAILURE);
  }

  return *total_shards > 1;
}

// Given the total number of shards, the shard index, and the test id, returns
// true iff the test should be run on this shard. The test id is some arbitrary
// but unique non-negative integer assigned by this launcher to each test
// method. Assumes that 0 <= shard_index < total_shards, which is first
// verified in ShouldShard().
bool ShouldRunTestOnShard(int total_shards, int shard_index, int test_id) {
  return (test_id % total_shards) == shard_index;
}
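
// In other words, tests are assigned to shards round-robin by position: with
// total_shards == 3 (an illustrative value), shard 0 runs test ids 0, 3, 6,
// ..., shard 1 runs 1, 4, 7, ..., and shard 2 runs 2, 5, 8, ...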

// A helper class to output results.
// Note: as XML is currently the only output format supported by gtest, we
// don't check the output format (e.g. the "xml:" prefix) here and
// unconditionally output an XML file.
// Note: we don't output per-test-case or total summary info like
// total failed_test_count, disabled_test_count, elapsed_time and so on.
// Only each test (the testcase element in the XML) will have the correct
// failed/disabled/elapsed_time information. Each test won't include
// detailed failure messages either.
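// For reference, the file written below has roughly this shape (attributes
// elided with "..."; the test case, test name, and time are illustrative):
//   <testsuites name="AllTests" ...>
//     <testsuite name="FooTest" tests="2" ...>
//       <testcase name="Bar" status="run" time="0.042" classname="FooTest" />
//     </testsuite>
//   </testsuites>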
class ResultsPrinter {
 public:
  explicit ResultsPrinter(const CommandLine& command_line);
  ~ResultsPrinter();
  void OnTestCaseStart(const char* name, int test_count) const;
  void OnTestCaseEnd() const;

  void OnTestEnd(const char* name, const char* case_name, bool run,
                 bool failed, bool failure_ignored, double elapsed_time) const;
 private:
  FILE* out_;

  DISALLOW_COPY_AND_ASSIGN(ResultsPrinter);
};

ResultsPrinter::ResultsPrinter(const CommandLine& command_line) : out_(NULL) {
  if (!command_line.HasSwitch(kGTestOutputFlag))
    return;
  std::string flag = command_line.GetSwitchValueASCII(kGTestOutputFlag);
  size_t colon_pos = flag.find(':');
  FilePath path;
  if (colon_pos != std::string::npos) {
    FilePath flag_path = command_line.GetSwitchValuePath(kGTestOutputFlag);
    FilePath::StringType path_string = flag_path.value();
    path = FilePath(path_string.substr(colon_pos + 1));
    // If the given path ends with '/', consider it a directory.
    // Note: this does NOT check that the directory (or file) actually exists
    // (the behavior is the same as gtest's).
    if (file_util::EndsWithSeparator(path)) {
      FilePath executable = command_line.GetProgram().BaseName();
      path = path.Append(executable.ReplaceExtension(
          FilePath::StringType(FILE_PATH_LITERAL("xml"))));
    }
  }
  if (path.value().empty())
    path = FilePath(kDefaultOutputFile);
  FilePath dir_name = path.DirName();
  if (!file_util::DirectoryExists(dir_name)) {
    LOG(WARNING) << "The output directory does not exist. "
                 << "Creating the directory: " << dir_name.value();
    // Create the directory if necessary (because gtest does the same).
    file_util::CreateDirectory(dir_name);
  }
  out_ = file_util::OpenFile(path, "w");
  if (!out_) {
    LOG(ERROR) << "Cannot open output file: "
               << path.value() << ".";
    return;
  }
  fprintf(out_, "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n");
  fprintf(out_, "<testsuites name=\"AllTests\" tests=\"\" failures=\"\""
          " disabled=\"\" errors=\"\" time=\"\">\n");
}

ResultsPrinter::~ResultsPrinter() {
  if (!out_)
    return;
  fprintf(out_, "</testsuites>\n");
  fclose(out_);
}

void ResultsPrinter::OnTestCaseStart(const char* name, int test_count) const {
  if (!out_)
    return;
  fprintf(out_, "  <testsuite name=\"%s\" tests=\"%d\" failures=\"\""
          " disabled=\"\" errors=\"\" time=\"\">\n", name, test_count);
}

void ResultsPrinter::OnTestCaseEnd() const {
  if (!out_)
    return;
  fprintf(out_, "  </testsuite>\n");
}

void ResultsPrinter::OnTestEnd(const char* name,
                               const char* case_name,
                               bool run,
                               bool failed,
                               bool failure_ignored,
                               double elapsed_time) const {
  if (!out_)
    return;
  fprintf(out_, "    <testcase name=\"%s\" status=\"%s\" time=\"%.3f\""
          " classname=\"%s\"",
          name, run ? "run" : "notrun", elapsed_time / 1000.0, case_name);
  if (!failed) {
    fprintf(out_, " />\n");
    return;
  }
  fprintf(out_, ">\n");
  fprintf(out_, "      <failure message=\"\" type=\"\"%s></failure>\n",
          failure_ignored ? " ignored=\"true\"" : "");
  fprintf(out_, "    </testcase>\n");
}

class TestCasePrinterHelper {
 public:
  TestCasePrinterHelper(const ResultsPrinter& printer,
                        const char* name,
                        int total_test_count)
      : printer_(printer) {
    printer_.OnTestCaseStart(name, total_test_count);
  }
  ~TestCasePrinterHelper() {
    printer_.OnTestCaseEnd();
  }
 private:
  const ResultsPrinter& printer_;

  DISALLOW_COPY_AND_ASSIGN(TestCasePrinterHelper);
};

// Basic pattern matching for the --gtest_filter option. (Copied from
// gtest.cc; see the comment below and http://crbug.com/44497.)
bool PatternMatchesString(const char* pattern, const char* str) {
  switch (*pattern) {
    case '\0':
    case ':':  // Either ':' or '\0' marks the end of the pattern.
      return *str == '\0';
    case '?':  // Matches any single character.
      return *str != '\0' && PatternMatchesString(pattern + 1, str + 1);
    case '*':  // Matches any string (possibly empty) of characters.
      return (*str != '\0' && PatternMatchesString(pattern, str + 1)) ||
             PatternMatchesString(pattern + 1, str);
    default:  // Non-special character. Matches itself.
      return *pattern == *str &&
             PatternMatchesString(pattern + 1, str + 1);
  }
}

// TODO(phajdan.jr): Avoid duplicating gtest code. (http://crbug.com/44497)
// Returns true if |name| matches any of the ':'-separated patterns in
// |filter|. (Copied from gtest.cc.)
bool MatchesFilter(const std::string& name, const std::string& filter) {
  const char* cur_pattern = filter.c_str();
  for (;;) {
    if (PatternMatchesString(cur_pattern, name.c_str())) {
      return true;
    }

    // Finds the next pattern in the filter.
    cur_pattern = strchr(cur_pattern, ':');

    // Returns if no more patterns can be found.
    if (cur_pattern == NULL) {
      return false;
    }

    // Skips the pattern separator (the ':' character).
    cur_pattern++;
  }
}
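
// For example (an illustrative filter), --gtest_filter=FooTest.*-FooTest.Bar
// splits at '-' into the positive pattern list "FooTest.*" and the negative
// list "FooTest.Bar", so every FooTest test except FooTest.Bar runs. The
// split itself happens in RunTests() below; MatchesFilter() only matches a
// name against one ':'-separated pattern list.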

base::TimeDelta GetTestTerminationTimeout(const std::string& test_name,
                                          base::TimeDelta default_timeout) {
  base::TimeDelta timeout = default_timeout;

  // Make it possible for selected tests to request a longer timeout.
  // Tests should generally avoid doing too much; splitting a test is
  // strongly preferred over using the SLOW_ prefix.
  if (test_name.find(kSlowTestPrefix) != std::string::npos)
    timeout *= kSlowTestTimeoutMultiplier;

  return timeout;
}

int RunTestInternal(const testing::TestCase* test_case,
                    const std::string& test_name,
                    CommandLine* command_line,
                    base::TimeDelta default_timeout,
                    bool* was_timeout) {
  if (test_case) {
    std::string pre_test_name = test_name;
    std::string replace_string = std::string(".") + kPreTestPrefix;
    ReplaceFirstSubstringAfterOffset(&pre_test_name, 0, ".", replace_string);
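    // For example (illustrative names), Foo.Test becomes Foo.PRE_Test here.
    // The recursive call below applies the same rewrite again, so a chain
    // like Foo.PRE_PRE_Test is also found and runs before Foo.PRE_Test.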
    for (int i = 0; i < test_case->total_test_count(); ++i) {
      const testing::TestInfo* test_info = test_case->GetTestInfo(i);
      std::string cur_test_name = test_info->test_case_name();
      cur_test_name.append(".");
      cur_test_name.append(test_info->name());
      if (cur_test_name == pre_test_name) {
        int exit_code = RunTestInternal(test_case, pre_test_name, command_line,
                                        default_timeout, was_timeout);
        if (exit_code != 0)
          return exit_code;
      }
    }
  }

  CommandLine new_cmd_line(*command_line);

  // Always enable disabled tests. This method is not called with disabled
  // tests unless this flag was specified to the browser test executable.
  new_cmd_line.AppendSwitch("gtest_also_run_disabled_tests");
  new_cmd_line.AppendSwitchASCII("gtest_filter", test_name);
  new_cmd_line.AppendSwitch(kSingleProcessTestsFlag);
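  // At this point the child command line looks something like this (binary
  // and test names illustrative, other inherited switches omitted):
  //   browser_tests --gtest_also_run_disabled_tests \
  //       --gtest_filter=Foo.Test --single_process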

  const char* browser_wrapper = getenv("BROWSER_WRAPPER");
  if (browser_wrapper) {
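    // BROWSER_WRAPPER lets the child run under another program, e.g. a
    // debugger; one illustrative value would be
    // BROWSER_WRAPPER="xterm -e gdb --args".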
#if defined(OS_WIN)
    new_cmd_line.PrependWrapper(ASCIIToWide(browser_wrapper));
#elif defined(OS_POSIX)
    new_cmd_line.PrependWrapper(browser_wrapper);
#endif
    VLOG(1) << "BROWSER_WRAPPER was set, prefixing command_line with "
            << browser_wrapper;
  }

  base::ProcessHandle process_handle;
  base::LaunchOptions options;

#if defined(OS_POSIX)
  // On POSIX, we launch the test in a new process group with pgid equal to
  // its pid. Any child processes that the test may create will inherit the
  // same pgid. This way, if the test is abruptly terminated, we can clean up
  // any orphaned child processes it may have left behind.
  options.new_process_group = true;
#endif

  if (!base::LaunchProcess(new_cmd_line, options, &process_handle))
    return -1;

  base::TimeDelta timeout = GetTestTerminationTimeout(
      test_name, default_timeout);

  int exit_code = 0;
  if (!base::WaitForExitCodeWithTimeout(process_handle, &exit_code, timeout)) {
    LOG(ERROR) << "Test timeout (" << timeout.InMilliseconds()
               << " ms) exceeded for " << test_name;

    if (was_timeout)
      *was_timeout = true;
    exit_code = -1;  // Set a non-zero exit code to signal a failure.

    // Ensure that the process terminates.
    base::KillProcess(process_handle, -1, true);
  }

#if defined(OS_POSIX)
  if (exit_code != 0) {
    // On POSIX, in case the test does not exit cleanly, either due to a crash
    // or due to it timing out, we need to clean up any child processes that
    // it might have created. On Windows, child processes are automatically
    // cleaned up using JobObjects.
    base::KillProcessGroup(process_handle);
  }
#endif

  base::CloseProcessHandle(process_handle);

  return exit_code;
}

// Runs the test specified by |test_name| in a child process,
// and returns the exit code.
int RunTest(TestLauncherDelegate* launcher_delegate,
            const testing::TestCase* test_case,
            const std::string& test_name,
            base::TimeDelta default_timeout,
            bool* was_timeout) {
  if (was_timeout)
    *was_timeout = false;

#if defined(OS_MACOSX)
  // Some of the below method calls will leak objects if there is no
  // autorelease pool in place.
  base::mac::ScopedNSAutoreleasePool pool;
#endif

  const CommandLine* cmd_line = CommandLine::ForCurrentProcess();
  CommandLine new_cmd_line(cmd_line->GetProgram());
  CommandLine::SwitchMap switches = cmd_line->GetSwitches();

  // Strip out the gtest_output flag, because otherwise we would overwrite the
  // results of the previous test. We will generate the final output file
  // later in RunTests().
  switches.erase(kGTestOutputFlag);

  // Strip out the gtest_repeat flag, because we can only run one test in the
  // child process (restarting the browser in the same process is illegal
  // after it has been shut down and will actually crash).
  switches.erase(kGTestRepeatFlag);

  for (CommandLine::SwitchMap::const_iterator iter = switches.begin();
       iter != switches.end(); ++iter) {
    new_cmd_line.AppendSwitchNative((*iter).first, (*iter).second);
  }

  // Do not let the child ignore failures. We need to propagate the
  // failure status back to the parent.
  new_cmd_line.AppendSwitch(base::TestSuite::kStrictFailureHandling);

  if (!launcher_delegate->AdjustChildProcessCommandLine(&new_cmd_line))
    return -1;

  return RunTestInternal(
      test_case, test_name, &new_cmd_line, default_timeout, was_timeout);
}

bool RunTests(TestLauncherDelegate* launcher_delegate,
              bool should_shard,
              int total_shards,
              int shard_index) {
  const CommandLine* command_line = CommandLine::ForCurrentProcess();

  DCHECK(!command_line->HasSwitch(kGTestListTestsFlag));

  testing::UnitTest* const unit_test = testing::UnitTest::GetInstance();

  std::string filter = command_line->GetSwitchValueASCII(kGTestFilterFlag);

  // Split --gtest_filter at '-', if there is one, to separate into
  // positive filter and negative filter portions.
  std::string positive_filter = filter;
  std::string negative_filter = "";
  size_t dash_pos = filter.find('-');
  if (dash_pos != std::string::npos) {
    positive_filter = filter.substr(0, dash_pos);  // Everything up to the dash.
    negative_filter = filter.substr(dash_pos + 1);  // Everything after the dash.
  }

  int num_runnable_tests = 0;
  int test_run_count = 0;
  int timeout_count = 0;
  std::vector<std::string> failed_tests;
  std::set<std::string> ignored_tests;

  ResultsPrinter printer(*command_line);
  for (int i = 0; i < unit_test->total_test_case_count(); ++i) {
    const testing::TestCase* test_case = unit_test->GetTestCase(i);
    TestCasePrinterHelper helper(printer, test_case->name(),
                                 test_case->total_test_count());
    for (int j = 0; j < test_case->total_test_count(); ++j) {
      const testing::TestInfo* test_info = test_case->GetTestInfo(j);
      std::string test_name = test_info->test_case_name();
      test_name.append(".");
      test_name.append(test_info->name());

      // Skip our special test so it's not run twice; running it twice
      // confuses the log parser.
      if (test_name == launcher_delegate->GetEmptyTestName())
        continue;

      // Skip disabled tests.
      if (test_name.find("DISABLED") != std::string::npos &&
          !command_line->HasSwitch(kGTestRunDisabledTestsFlag)) {
        printer.OnTestEnd(test_info->name(), test_case->name(),
                          false, false, false, 0);
        continue;
      }

      if (StartsWithASCII(test_info->name(), kPreTestPrefix, true))
        continue;

      if (StartsWithASCII(test_info->name(), kManualTestPrefix, true) &&
          !command_line->HasSwitch(kRunManualTestsFlag)) {
        continue;
      }

      // Skip tests that don't match the filter strings (if given).
      if ((!positive_filter.empty() &&
           !MatchesFilter(test_name, positive_filter)) ||
          MatchesFilter(test_name, negative_filter)) {
        printer.OnTestEnd(test_info->name(), test_case->name(),
                          false, false, false, 0);
        continue;
      }

      // Decide if this test should be run.
      bool should_run = true;
      if (should_shard) {
        should_run = ShouldRunTestOnShard(total_shards, shard_index,
                                          num_runnable_tests);
      }
      num_runnable_tests += 1;
      // If sharding is enabled and the test should not be run, skip it.
      if (!should_run) {
        continue;
      }

      base::Time start_time = base::Time::Now();
      ++test_run_count;
      bool was_timeout = false;
      int exit_code = RunTest(launcher_delegate,
                              test_case,
                              test_name,
                              TestTimeouts::action_max_timeout(),
                              &was_timeout);
      if (exit_code == 0) {
        // Test passed.
        printer.OnTestEnd(test_info->name(), test_case->name(), true, false,
                          false,
                          (base::Time::Now() - start_time).InMillisecondsF());
      } else {
        failed_tests.push_back(test_name);

        bool ignore_failure = false;

        // Never ignore crashes or hangs/timeouts; they are serious and should
        // always be visible.
        if (exit_code != -1 && !was_timeout)
          ignore_failure = base::TestSuite::ShouldIgnoreFailure(*test_info);

        printer.OnTestEnd(test_info->name(), test_case->name(), true, true,
                          ignore_failure,
                          (base::Time::Now() - start_time).InMillisecondsF());
        if (ignore_failure)
          ignored_tests.insert(test_name);

        if (was_timeout)
          ++timeout_count;
      }

      if (timeout_count > kMaxTimeouts) {
        printf("More than %d timeouts, aborting test case\n", kMaxTimeouts);
        break;
      }
    }
    if (timeout_count > kMaxTimeouts) {
      printf("More than %d timeouts, aborting test\n", kMaxTimeouts);
      break;
    }
  }

  printf("%d test%s run\n", test_run_count, test_run_count != 1 ? "s" : "");
  printf("%d test%s failed (%d ignored)\n",
         static_cast<int>(failed_tests.size()),
         failed_tests.size() != 1 ? "s" : "",
         static_cast<int>(ignored_tests.size()));
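  // |ignored_tests| only ever receives names already pushed onto
  // |failed_tests|, so equal sizes mean every failure was ignored and the
  // run counts as a success.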
  if (failed_tests.size() == ignored_tests.size())
    return true;

  printf("Failing tests:\n");
  for (std::vector<std::string>::const_iterator iter = failed_tests.begin();
       iter != failed_tests.end(); ++iter) {
    bool is_ignored = ignored_tests.find(*iter) != ignored_tests.end();
    printf("%s%s\n", iter->c_str(), is_ignored ? " (ignored)" : "");
  }

  return false;
}

void PrintUsage() {
  fprintf(stdout,
      "Runs tests using the gtest framework, each test being run in its own\n"
      "process. Any gtest flags can be specified.\n"
      "  --single_process\n"
      "    Runs the tests and the launcher in the same process. Useful for\n"
      "    debugging a specific test in a debugger.\n"
      "  --single-process\n"
      "    Same as above, and also runs Chrome in single-process mode.\n"
      "  --help\n"
      "    Shows this message.\n"
      "  --gtest_help\n"
      "    Shows the gtest help message.\n");
}

}  // namespace

const char kGTestFilterFlag[] = "gtest_filter";
const char kGTestHelpFlag[] = "gtest_help";
const char kGTestListTestsFlag[] = "gtest_list_tests";
const char kGTestRepeatFlag[] = "gtest_repeat";
const char kGTestRunDisabledTestsFlag[] = "gtest_also_run_disabled_tests";
const char kGTestOutputFlag[] = "gtest_output";

const char kSingleProcessTestsFlag[] = "single_process";
const char kSingleProcessTestsAndChromeFlag[] = "single-process";

// See kManualTestPrefix above.
const char kRunManualTestsFlag[] = "run-manual";

// The following is kept for historical reasons (so people who are used to
// using it don't get surprised).
const char kChildProcessFlag[] = "child";

const char kHelpFlag[] = "help";

const char kWarmupFlag[] = "warmup";

TestLauncherDelegate::~TestLauncherDelegate() {
}

int LaunchTests(TestLauncherDelegate* launcher_delegate,
                int argc,
                char** argv) {
  DCHECK(!g_launcher_delegate);
  g_launcher_delegate = launcher_delegate;

  CommandLine::Init(argc, argv);
  const CommandLine* command_line = CommandLine::ForCurrentProcess();

  if (command_line->HasSwitch(kHelpFlag)) {
    PrintUsage();
    return 0;
  }

  // TODO(pkasting): This "single_process vs. single-process" design is
  // terrible UI. Instead, there should be some sort of signal flag on the
  // command line, with all subsequent arguments passed through to the
  // underlying browser.
  if (command_line->HasSwitch(kSingleProcessTestsFlag) ||
      command_line->HasSwitch(kSingleProcessTestsAndChromeFlag) ||
      command_line->HasSwitch(kGTestListTestsFlag) ||
      command_line->HasSwitch(kGTestHelpFlag)) {
#if defined(OS_WIN)
    if (command_line->HasSwitch(kSingleProcessTestsFlag)) {
      sandbox::SandboxInterfaceInfo sandbox_info;
      content::InitializeSandboxInfo(&sandbox_info);
      content::InitializeSandbox(&sandbox_info);
    }
#endif
    return launcher_delegate->RunTestSuite(argc, argv);
  }

  int return_code = 0;
  if (launcher_delegate->Run(argc, argv, &return_code))
    return return_code;

  base::AtExitManager at_exit;

  int32 total_shards;
  int32 shard_index;
  bool should_shard = ShouldShard(&total_shards, &shard_index);

  fprintf(stdout,
      "Starting tests...\n"
      "IMPORTANT DEBUGGING NOTE: each test is run inside its own process.\n"
      "For debugging a test inside a debugger, use the\n"
      "--gtest_filter=<your_test_name> flag along with either\n"
      "--single_process (to run all tests in one launcher/browser process) or\n"
      "--single-process (to do the above, and also run Chrome in single-\n"
      "process mode).\n");

  testing::InitGoogleTest(&argc, argv);
  TestTimeouts::Initialize();
  int exit_code = 0;

  std::string empty_test = launcher_delegate->GetEmptyTestName();
  if (!empty_test.empty()) {
    // Make sure the entire browser code is loaded into memory. Reading it
    // from disk may be slow on a busy bot, and can easily exceed the default
    // timeout, causing flaky test failures. Use an empty test that only
    // starts and closes a browser with a long timeout to avoid those
    // problems.
    // NOTE: We don't do this when specifying a filter, because it slows down
    // the common case of running one test locally, and on sharded trybots
    // this one test would run ~200 times and waste a few minutes.
    bool warmup = command_line->HasSwitch(kWarmupFlag);
    bool has_filter = command_line->HasSwitch(kGTestFilterFlag);
    if (warmup || (!should_shard && !has_filter)) {
      exit_code = RunTest(launcher_delegate,
                          NULL,
                          empty_test,
                          TestTimeouts::large_test_timeout(),
                          NULL);
      if (exit_code != 0 || warmup)
        return exit_code;
    }
  }

  int cycles = 1;
  if (command_line->HasSwitch(kGTestRepeatFlag)) {
    base::StringToInt(command_line->GetSwitchValueASCII(kGTestRepeatFlag),
                      &cycles);
  }

  while (cycles != 0) {
    if (!RunTests(launcher_delegate,
                  should_shard,
                  total_shards,
                  shard_index)) {
      exit_code = 1;
      break;
    }

    // The special value "-1" means "repeat indefinitely".
    if (cycles != -1)
      cycles--;
  }
  return exit_code;
}

TestLauncherDelegate* GetCurrentTestLauncherDelegate() {
  return g_launcher_delegate;
}

}  // namespace test_launcher