OLD | NEW |
1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. | 2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
5 | 5 |
6 """Runs each test cases as a single shard, single process execution. | 6 """Runs each test cases as a single shard, single process execution. |
7 | 7 |
8 Similar to sharding_supervisor.py but finer grained. Runs multiple instances in | 8 Similar to sharding_supervisor.py but finer grained. Runs multiple instances in |
9 parallel. | 9 parallel. |
10 """ | 10 """ |
(...skipping 99 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
110 ] | 110 ] |
111 if whitelist: | 111 if whitelist: |
112 tests = [ | 112 tests = [ |
113 t for t in tests if any(fnmatch.fnmatch(t, s) for s in whitelist) | 113 t for t in tests if any(fnmatch.fnmatch(t, s) for s in whitelist) |
114 ] | 114 ] |
115 logging.info( | 115 logging.info( |
116 'Found %d test cases in %s' % (len(tests), os.path.basename(executable))) | 116 'Found %d test cases in %s' % (len(tests), os.path.basename(executable))) |
117 return tests | 117 return tests |
118 | 118 |
119 | 119 |
120 def run_test_cases(executable, whitelist, blacklist, jobs, timeout, stats_only): | 120 def run_test_cases( |
| 121 executable, whitelist, blacklist, jobs, timeout, stats_only, no_dump): |
121 """Traces test cases one by one.""" | 122 """Traces test cases one by one.""" |
122 test_cases = get_test_cases(executable, whitelist, blacklist) | 123 test_cases = get_test_cases(executable, whitelist, blacklist) |
123 if not test_cases: | 124 if not test_cases: |
124 return | 125 return |
125 | 126 |
126 progress = worker_pool.Progress(len(test_cases)) | 127 progress = worker_pool.Progress(len(test_cases)) |
127 with worker_pool.ThreadPool(jobs or multiprocessing.cpu_count()) as pool: | 128 with worker_pool.ThreadPool(jobs or multiprocessing.cpu_count()) as pool: |
128 function = Runner(executable, os.getcwd(), timeout, progress).map | 129 function = Runner(executable, os.getcwd(), timeout, progress).map |
129 for test_case in test_cases: | 130 for test_case in test_cases: |
130 pool.add_task(function, test_case) | 131 pool.add_task(function, test_case) |
131 results = pool.join(progress, 0.1) | 132 results = pool.join(progress, 0.1) |
132 duration = time.time() - progress.start | 133 duration = time.time() - progress.start |
133 results = dict((item[0]['test_case'], item) for item in results) | 134 results = dict((item[0]['test_case'], item) for item in results) |
134 trace_inputs.write_json('%s.run_test_cases' % executable, results, False) | 135 if not no_dump: |
135 print '' | 136 trace_inputs.write_json('%s.run_test_cases' % executable, results, False) |
| 137 sys.stderr.write('\n') |
136 total = len(results) | 138 total = len(results) |
137 if not total: | 139 if not total: |
138 return 1 | 140 return 1 |
139 | 141 |
140 # Classify the results | 142 # Classify the results |
141 success = [] | 143 success = [] |
142 flaky = [] | 144 flaky = [] |
143 fail = [] | 145 fail = [] |
144 nb_runs = 0 | 146 nb_runs = 0 |
145 for test_case in sorted(results): | 147 for test_case in sorted(results): |
(...skipping 54 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
200 help='Timeout for a single test case, in seconds default:%default') | 202 help='Timeout for a single test case, in seconds default:%default') |
201 parser.add_option( | 203 parser.add_option( |
202 '-s', '--stats', | 204 '-s', '--stats', |
203 action='store_true', | 205 action='store_true', |
204 help='Only prints stats, not output') | 206 help='Only prints stats, not output') |
205 parser.add_option( | 207 parser.add_option( |
206 '-v', '--verbose', | 208 '-v', '--verbose', |
207 action='count', | 209 action='count', |
208 default=int(os.environ.get('ISOLATE_DEBUG', 0)), | 210 default=int(os.environ.get('ISOLATE_DEBUG', 0)), |
209 help='Use multiple times') | 211 help='Use multiple times') |
| 212 parser.add_option( |
| 213 '--no-dump', |
| 214 action='store_true', |
 | 215 help='Do not generate a .run_test_cases file') |
210 options, args = parser.parse_args() | 216 options, args = parser.parse_args() |
211 levels = [logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG] | 217 levels = [logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG] |
212 logging.basicConfig( | 218 logging.basicConfig( |
213 level=levels[min(len(levels)-1, options.verbose)], | 219 level=levels[min(len(levels)-1, options.verbose)], |
214 format='%(levelname)5s %(module)15s(%(lineno)3d): %(message)s') | 220 format='%(levelname)5s %(module)15s(%(lineno)3d): %(message)s') |
215 | 221 |
216 if len(args) != 1: | 222 if len(args) != 1: |
217 parser.error( | 223 parser.error( |
218 'Please provide the executable line to run, if you need fancy things ' | 224 'Please provide the executable line to run, if you need fancy things ' |
219 'like xvfb, start this script from *inside* xvfb, it\'ll be much faster' | 225 'like xvfb, start this script from *inside* xvfb, it\'ll be much faster' |
220 '.') | 226 '.') |
221 return run_test_cases( | 227 return run_test_cases( |
222 args[0], | 228 args[0], |
223 options.whitelist, | 229 options.whitelist, |
224 options.blacklist, | 230 options.blacklist, |
225 options.jobs, | 231 options.jobs, |
226 options.timeout, | 232 options.timeout, |
227 options.stats) | 233 options.stats, |
| 234 options.no_dump) |
228 | 235 |
229 | 236 |
230 if __name__ == '__main__': | 237 if __name__ == '__main__': |
231 sys.exit(main()) | 238 sys.exit(main()) |
OLD | NEW |