| OLD | NEW |
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
| 2 # | 2 # |
| 3 # Copyright (c) 2012 The Chromium Authors. All rights reserved. | 3 # Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 4 # Use of this source code is governed by a BSD-style license that can be | 4 # Use of this source code is governed by a BSD-style license that can be |
| 5 # found in the LICENSE file. | 5 # found in the LICENSE file. |
| 6 | 6 |
| 7 """Runs all the native unit tests. | 7 """Runs all the native unit tests. |
| 8 | 8 |
| 9 1. Copy over test binary to /data/local on device. | 9 1. Copy over test binary to /data/local on device. |
| 10 2. Resources: chrome/unit_tests requires resources (chrome.pak and en-US.pak) | 10 2. Resources: chrome/unit_tests requires resources (chrome.pak and en-US.pak) |
| (...skipping 37 matching lines...) |
| 48 import emulator | 48 import emulator |
| 49 from pylib import android_commands | 49 from pylib import android_commands |
| 50 from pylib import buildbot_report | 50 from pylib import buildbot_report |
| 51 from pylib import cmd_helper | 51 from pylib import cmd_helper |
| 52 from pylib import debug_info | 52 from pylib import debug_info |
| 53 from pylib import ports | 53 from pylib import ports |
| 54 from pylib import run_tests_helper | 54 from pylib import run_tests_helper |
| 55 from pylib import test_options_parser | 55 from pylib import test_options_parser |
| 56 from pylib.base_test_sharder import BaseTestSharder | 56 from pylib.base_test_sharder import BaseTestSharder |
| 57 from pylib.single_test_runner import SingleTestRunner | 57 from pylib.single_test_runner import SingleTestRunner |
| 58 from pylib.utils import time_profile |
| 59 from pylib.utils import xvfb |
| 58 | 60 |
| 59 | 61 |
| 60 _TEST_SUITES = ['base_unittests', | 62 _TEST_SUITES = ['base_unittests', |
| 61 'cc_unittests', | 63 'cc_unittests', |
| 62 'content_unittests', | 64 'content_unittests', |
| 63 'gpu_unittests', | 65 'gpu_unittests', |
| 64 'ipc_tests', | 66 'ipc_tests', |
| 65 'media_unittests', | 67 'media_unittests', |
| 66 'net_unittests', | 68 'net_unittests', |
| 67 'sql_unittests', | 69 'sql_unittests', |
| (...skipping 30 matching lines...) |
| 98 for t in all_test_suites] | 100 for t in all_test_suites] |
| 99 for t, q in zip(all_test_suites, qualified_test_suites): | 101 for t, q in zip(all_test_suites, qualified_test_suites): |
| 100 if not os.path.exists(q): | 102 if not os.path.exists(q): |
| 101 raise Exception('Test suite %s not found in %s.\n' | 103 raise Exception('Test suite %s not found in %s.\n' |
| 102 'Supported test suites:\n %s\n' | 104 'Supported test suites:\n %s\n' |
| 103 'Ensure it has been built.\n' % | 105 'Ensure it has been built.\n' % |
| 104 (t, q, _TEST_SUITES)) | 106 (t, q, _TEST_SUITES)) |
| 105 return qualified_test_suites | 107 return qualified_test_suites |
| 106 | 108 |
| 107 | 109 |
| 108 class TimeProfile(object): | |
| 109 """Class for simple profiling of action, with logging of cost.""" | |
| 110 | |
| 111 def __init__(self, description): | |
| 112 self._description = description | |
| 113 self.Start() | |
| 114 | |
| 115 def Start(self): | |
| 116 self._starttime = time.time() | |
| 117 | |
| 118 def Stop(self): | |
| 119 """Stop profiling and dump a log.""" | |
| 120 if self._starttime: | |
| 121 stoptime = time.time() | |
| 122 logging.info('%fsec to perform %s', | |
| 123 stoptime - self._starttime, self._description) | |
| 124 self._starttime = None | |
| 125 | |
| 126 | |
| 127 class Xvfb(object): | |
| 128 """Class to start and stop Xvfb if relevant. Nop if not Linux.""" | |
| 129 | |
| 130 def __init__(self): | |
| 131 self._pid = 0 | |
| 132 | |
| 133 def _IsLinux(self): | |
| 134 """Return True if on Linux; else False.""" | |
| 135 return sys.platform.startswith('linux') | |
| 136 | |
| 137 def Start(self): | |
| 138 """Start Xvfb and set an appropriate DISPLAY environment. Linux only. | |
| 139 | |
| 140 Copied from tools/code_coverage/coverage_posix.py | |
| 141 """ | |
| 142 if not self._IsLinux(): | |
| 143 return | |
| 144 proc = subprocess.Popen(['Xvfb', ':9', '-screen', '0', '1024x768x24', | |
| 145 '-ac'], | |
| 146 stdout=subprocess.PIPE, stderr=subprocess.STDOUT) | |
| 147 self._pid = proc.pid | |
| 148 if not self._pid: | |
| 149 raise Exception('Could not start Xvfb') | |
| 150 os.environ['DISPLAY'] = ':9' | |
| 151 | |
| 152 # Now confirm, giving a chance for it to start if needed. | |
| 153 for _ in range(10): | |
| 154 proc = subprocess.Popen('xdpyinfo >/dev/null', shell=True) | |
| 155 _, retcode = os.waitpid(proc.pid, 0) | |
| 156 if retcode == 0: | |
| 157 break | |
| 158 time.sleep(0.25) | |
| 159 if retcode != 0: | |
| 160 raise Exception('Could not confirm Xvfb happiness') | |
| 161 | |
| 162 def Stop(self): | |
| 163 """Stop Xvfb if needed. Linux only.""" | |
| 164 if self._pid: | |
| 165 try: | |
| 166 os.kill(self._pid, signal.SIGKILL) | |
| 167 except: | |
| 168 pass | |
| 169 del os.environ['DISPLAY'] | |
| 170 self._pid = 0 | |
| 171 | |
| 172 | |
| 173 class TestSharder(BaseTestSharder): | 110 class TestSharder(BaseTestSharder): |
| 174 """Responsible for sharding the tests on the connected devices.""" | 111 """Responsible for sharding the tests on the connected devices.""" |
| 175 | 112 |
| 176 def __init__(self, attached_devices, test_suite, gtest_filter, | 113 def __init__(self, attached_devices, test_suite, gtest_filter, |
| 177 test_arguments, timeout, cleanup_test_files, tool, | 114 test_arguments, timeout, cleanup_test_files, tool, |
| 178 log_dump_name, fast_and_loose, build_type, in_webkit_checkout, | 115 log_dump_name, fast_and_loose, build_type, in_webkit_checkout, |
| 179 flakiness_server=None): | 116 flakiness_server=None): |
| 180 BaseTestSharder.__init__(self, attached_devices, build_type) | 117 BaseTestSharder.__init__(self, attached_devices, build_type) |
| 181 self.test_suite = test_suite | 118 self.test_suite = test_suite |
| 182 self.gtest_filter = gtest_filter or '' | 119 self.gtest_filter = gtest_filter or '' |
| (...skipping 121 matching lines...) |
| 304 Returns: | 241 Returns: |
| 305 0 if successful, number of failing tests otherwise. | 242 0 if successful, number of failing tests otherwise. |
| 306 """ | 243 """ |
| 307 step_name = os.path.basename(options.test_suite).replace('-debug.apk', '') | 244 step_name = os.path.basename(options.test_suite).replace('-debug.apk', '') |
| 308 buildbot_report.PrintNamedStep(step_name) | 245 buildbot_report.PrintNamedStep(step_name) |
| 309 attached_devices = [] | 246 attached_devices = [] |
| 310 buildbot_emulators = [] | 247 buildbot_emulators = [] |
| 311 | 248 |
| 312 if options.use_emulator: | 249 if options.use_emulator: |
| 313 for n in range(options.emulator_count): | 250 for n in range(options.emulator_count): |
| 314 t = TimeProfile('Emulator launch %d' % n) | 251 t = time_profile.TimeProfile('Emulator launch %d' % n) |
| 315 avd_name = None | 252 avd_name = None |
| 316 if n > 0: | 253 if n > 0: |
| 317 # Creates a temporary AVD for the extra emulators. | 254 # Creates a temporary AVD for the extra emulators. |
| 318 avd_name = 'run_tests_avd_%d' % n | 255 avd_name = 'run_tests_avd_%d' % n |
| 319 buildbot_emulator = emulator.Emulator(avd_name, options.fast_and_loose) | 256 buildbot_emulator = emulator.Emulator(avd_name, options.fast_and_loose) |
| 320 buildbot_emulator.Launch(kill_all_emulators=n == 0) | 257 buildbot_emulator.Launch(kill_all_emulators=n == 0) |
| 321 t.Stop() | 258 t.Stop() |
| 322 buildbot_emulators.append(buildbot_emulator) | 259 buildbot_emulators.append(buildbot_emulator) |
| 323 attached_devices.append(buildbot_emulator.device) | 260 attached_devices.append(buildbot_emulator.device) |
| 324 # Wait for all emulators to finish booting. | 261 # Wait for all emulators to finish booting. |
| (...skipping 49 matching lines...) |
| 374 options: options for running the tests. | 311 options: options for running the tests. |
| 375 | 312 |
| 376 Returns: | 313 Returns: |
| 377 0 if successful, number of failing tests otherwise. | 314 0 if successful, number of failing tests otherwise. |
| 378 """ | 315 """ |
| 379 if options.test_suite == 'help': | 316 if options.test_suite == 'help': |
| 380 ListTestSuites() | 317 ListTestSuites() |
| 381 return 0 | 318 return 0 |
| 382 | 319 |
| 383 if options.use_xvfb: | 320 if options.use_xvfb: |
| 384 xvfb = Xvfb() | 321 framebuffer = xvfb.Xvfb() |
| 385 xvfb.Start() | 322 framebuffer.Start() |
| 386 | 323 |
| 387 all_test_suites = FullyQualifiedTestSuites(options.exe, options.test_suite, | 324 all_test_suites = FullyQualifiedTestSuites(options.exe, options.test_suite, |
| 388 options.build_type) | 325 options.build_type) |
| 389 failures = 0 | 326 failures = 0 |
| 390 for suite in all_test_suites: | 327 for suite in all_test_suites: |
| 391 # Give each test suite its own copy of options. | 328 # Give each test suite its own copy of options. |
| 392 test_options = copy.deepcopy(options) | 329 test_options = copy.deepcopy(options) |
| 393 test_options.test_suite = suite | 330 test_options.test_suite = suite |
| 394 failures += _RunATestSuite(test_options) | 331 failures += _RunATestSuite(test_options) |
| 395 | 332 |
| 396 if options.use_xvfb: | 333 if options.use_xvfb: |
| 397 xvfb.Stop() | 334 framebuffer.Stop() |
| 398 return failures | 335 return failures |
| 399 | 336 |
| 400 | 337 |
| 401 def ListTestSuites(): | 338 def ListTestSuites(): |
| 402 """Display a list of available test suites.""" | 339 """Display a list of available test suites.""" |
| 403 print 'Available test suites are:' | 340 print 'Available test suites are:' |
| 404 for test_suite in _TEST_SUITES: | 341 for test_suite in _TEST_SUITES: |
| 405 print test_suite | 342 print test_suite |
| 406 | 343 |
| 407 | 344 |
| (...skipping 70 matching lines...) |
| 478 # the batch (this happens because the exit status is a sum of all failures | 415 # the batch (this happens because the exit status is a sum of all failures |
| 479 # from all suites, but the buildbot associates the exit status only with the | 416 # from all suites, but the buildbot associates the exit status only with the |
| 480 # most recent step). | 417 # most recent step). |
| 481 if options.exit_code: | 418 if options.exit_code: |
| 482 return failed_tests_count | 419 return failed_tests_count |
| 483 return 0 | 420 return 0 |
| 484 | 421 |
| 485 | 422 |
| 486 if __name__ == '__main__': | 423 if __name__ == '__main__': |
| 487 sys.exit(main(sys.argv)) | 424 sys.exit(main(sys.argv)) |
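
Note: this hunk only shows the `TimeProfile` and `Xvfb` helpers being deleted from `run_tests.py` and re-imported via `from pylib.utils import time_profile` / `from pylib.utils import xvfb`; the new module files themselves are not part of the hunk. Below is a minimal sketch of what `pylib/utils/time_profile.py` and `pylib/utils/xvfb.py` presumably contain, reconstructed from the class bodies deleted above. The file paths are inferred from the new imports, and two small cleanups are assumptions of this sketch rather than part of the CL: the `xdpyinfo` confirmation loop uses `subprocess.call` instead of the original `Popen`/`os.waitpid` pair, and the bare `except:` in `Stop()` is narrowed to `OSError`.

```python
# pylib/utils/time_profile.py -- assumed contents, reconstructed from the
# TimeProfile class deleted from run_tests.py above; the new file itself is
# not shown in this diff.
import logging
import time


class TimeProfile(object):
  """Class for simple profiling of action, with logging of cost."""

  def __init__(self, description):
    self._description = description
    self.Start()

  def Start(self):
    self._starttime = time.time()

  def Stop(self):
    """Stop profiling and dump a log."""
    if self._starttime:
      stoptime = time.time()
      logging.info('%fsec to perform %s',
                   stoptime - self._starttime, self._description)
      self._starttime = None
```

```python
# pylib/utils/xvfb.py -- assumed contents, reconstructed from the Xvfb class
# deleted from run_tests.py above; the new file itself is not shown in this
# diff.
import os
import signal
import subprocess
import sys
import time


class Xvfb(object):
  """Class to start and stop Xvfb if relevant. Nop if not Linux."""

  def __init__(self):
    self._pid = 0

  def _IsLinux(self):
    """Return True if on Linux; else False."""
    return sys.platform.startswith('linux')

  def Start(self):
    """Start Xvfb and set an appropriate DISPLAY environment. Linux only."""
    if not self._IsLinux():
      return
    proc = subprocess.Popen(
        ['Xvfb', ':9', '-screen', '0', '1024x768x24', '-ac'],
        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    self._pid = proc.pid
    if not self._pid:
      raise Exception('Could not start Xvfb')
    os.environ['DISPLAY'] = ':9'
    # Confirm the server is accepting connections, giving it time to start.
    for _ in range(10):
      if subprocess.call('xdpyinfo >/dev/null', shell=True) == 0:
        break
      time.sleep(0.25)
    else:
      raise Exception('Could not confirm Xvfb happiness')

  def Stop(self):
    """Stop Xvfb if needed. Linux only."""
    if self._pid:
      try:
        os.kill(self._pid, signal.SIGKILL)
      except OSError:  # Narrowed from a bare except in the deleted code.
        pass
      del os.environ['DISPLAY']
      self._pid = 0
```

The right-hand column above already shows the intended call sites, so no separate usage example is needed: `time_profile.TimeProfile('Emulator launch %d' % n)` in `_RunATestSuite`, and `framebuffer = xvfb.Xvfb()` with `Start()`/`Stop()` around the suite loop in `RunTests`.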