OLD | NEW |
1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. | 2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
5 | 5 |
6 """Traces each test cases of a google-test executable individually. | 6 """Traces each test cases of a google-test executable individually. |
7 | 7 |
8 Gives detailed information about each test case. The logs can be read afterward | 8 Gives detailed information about each test case. The logs can be read afterward |
9 with ./trace_inputs.py read -l /path/to/executable.logs | 9 with ./trace_inputs.py read -l /path/to/executable.logs |
10 """ | 10 """ |
(...skipping 117 matching lines...)
128 # Strips to root_dir. | 128 # Strips to root_dir. |
129 results_processed = {} | 129 results_processed = {} |
130 for item in results: | 130 for item in results: |
131 if 'results' in item: | 131 if 'results' in item: |
132 item = item.copy() | 132 item = item.copy() |
133 item['results'] = item['results'].strip_root(root_dir) | 133 item['results'] = item['results'].strip_root(root_dir) |
134 results_processed[item['trace']] = item | 134 results_processed[item['trace']] = item |
135 else: | 135 else: |
136 print >> sys.stderr, 'Got exception while tracing %s: %s' % ( | 136 print >> sys.stderr, 'Got exception while tracing %s: %s' % ( |
137 item['trace'], item['exception']) | 137 item['trace'], item['exception']) |
| 138 print '%.1fs Done stripping root.' % ( |
| 139 time.time() - progress.start) |
138 | 140 |
139 # Flatten. | 141 # Flatten. |
140 flattened = {} | 142 flattened = {} |
141 for item_list in values: | 143 for item_list in values: |
142 for item in item_list: | 144 for item in item_list: |
143 if item['valid']: | 145 if item['valid']: |
144 test_case = item['test_case'] | 146 test_case = item['test_case'] |
145 tracename = test_case.replace('/', '-') | 147 tracename = test_case.replace('/', '-') |
146 flattened[test_case] = results_processed[tracename].copy() | 148 flattened[test_case] = results_processed[tracename].copy() |
147 item_results = flattened[test_case]['results'] | 149 item_results = flattened[test_case]['results'] |
148 flattened[test_case].update({ | 150 flattened[test_case].update({ |
149 'processes': len(list(item_results.process.all)), | 151 'processes': len(list(item_results.process.all)), |
150 'results': item_results.flatten(), | 152 'results': item_results.flatten(), |
151 'duration': item['duration'], | 153 'duration': item['duration'], |
152 'returncode': item['returncode'], | 154 'returncode': item['returncode'], |
153 'valid': item['valid'], | 155 'valid': item['valid'], |
154 'variables': | 156 'variables': |
155 isolate_common.generate_dict( | 157 isolate_common.generate_simplified( |
156 sorted(f.path for f in item_results.files), | 158 item_results.existent, |
157 cwd_dir, | 159 root_dir, |
158 variables['<(PRODUCT_DIR)']), | 160 variables, |
| 161 cwd_dir), |
159 }) | 162 }) |
160 del flattened[test_case]['trace'] | 163 del flattened[test_case]['trace'] |
| 164 print '%.1fs Done flattening.' % ( |
| 165 time.time() - progress.start) |
| 166 |
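The switch from generate_dict to generate_simplified above hands the helper the list of existent traced files plus root_dir, the variables dict, and cwd_dir, instead of pre-sorted paths and a single PRODUCT_DIR value. The sketch below is a hypothetical stand-in for that kind of helper (simplify_paths is not the real isolate_common API); it only illustrates making traced paths root-relative and folding known variable values back into them, in the same Python 2 idiom as the script.

    # Hypothetical sketch, not isolate_common.generate_simplified() itself.
    import posixpath

    def simplify_paths(files, root_dir, variables, cwd_dir):
      """Returns sorted root-relative paths with variable values folded in."""
      simplified = []
      for f in files:
        rel = posixpath.relpath(f.full_path, root_dir)
        for name, value in variables.iteritems():
          # e.g. 'out/Release/unit_tests' -> '<(PRODUCT_DIR)/unit_tests'.
          if value and rel.startswith(value + '/'):
            rel = '<(%s)/%s' % (name, rel[len(value) + 1:])
            break
        if cwd_dir:
          # Express the path relative to the directory holding the .gyp file.
          rel = posixpath.relpath(rel, cwd_dir)
        simplified.append(rel)
      return sorted(simplified)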
161 # Make it dense if there are more than 20 results. | 167 # Make it dense if there are more than 20 results. |
162 trace_inputs.write_json( | 168 trace_inputs.write_json( |
163 output_file, | 169 output_file, |
164 flattened, | 170 flattened, |
165 False) | 171 False) |
166 | 172 |
167 # Also write the .isolate file. | 173 # Also write the .isolate file. |
168 # First, get all the files from all results. Use a map to remove dupes. | 174 # First, get all the files from all results. Use a map to remove dupes. |
169 files = {} | 175 files = {} |
170 for item in results_processed.itervalues(): | 176 for item in results_processed.itervalues(): |
171 files.update((f.full_path, f) for f in item['results'].existent) | 177 files.update((f.full_path, f) for f in item['results'].existent) |
172 # Convert back to a list, discard the keys. | 178 # Convert back to a list, discard the keys. |
173 files = files.values() | 179 files = files.values() |
174 | 180 |
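The dedupe step above keys a dict on full_path so each file appears only once no matter how many test cases touched it. A minimal standalone illustration of the same pattern (the File class is a hypothetical stand-in for the traced-file objects):

    class File(object):
      def __init__(self, full_path):
        self.full_path = full_path

    results = [
        [File('/src/out/Release/unit_tests'), File('/src/chrome/app.cc')],
        [File('/src/out/Release/unit_tests')],
    ]
    unique = {}
    for existent in results:
      unique.update((f.full_path, f) for f in existent)
    files = unique.values()
    print len(files)  # 2: the duplicate unit_tests entry was discarded.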
175 # TODO(maruel): Have isolate_common process a dict of variables. | 181 value = isolate_common.generate_isolate( |
176 value = isolate_common.generate_dict( | 182 files, |
177 sorted(f.path for f in files), cwd_dir, variables['<(PRODUCT_DIR)']) | 183 root_dir, |
| 184 variables, |
| 185 cwd_dir) |
178 with open('%s.isolate' % output_file, 'wb') as f: | 186 with open('%s.isolate' % output_file, 'wb') as f: |
179 isolate_common.pretty_print(value, f) | 187 isolate_common.pretty_print(value, f) |
180 return 0 | 188 return 0 |
181 | 189 |
182 | 190 |
183 def main(): | 191 def main(): |
184 """CLI frontend to validate arguments.""" | 192 """CLI frontend to validate arguments.""" |
| 193 default_variables = [('OS', isolate_common.get_flavor())] |
| 194 if sys.platform in ('win32', 'cygwin'): |
| 195 default_variables.append(('EXECUTABLE_SUFFIX', '.exe')) |
| 196 else: |
| 197 default_variables.append(('EXECUTABLE_SUFFIX', '')) |
185 parser = optparse.OptionParser( | 198 parser = optparse.OptionParser( |
186 usage='%prog <options> [gtest]', | 199 usage='%prog <options> [gtest]', |
187 description=sys.modules['__main__'].__doc__) | 200 description=sys.modules['__main__'].__doc__) |
188 parser.format_description = lambda *_: parser.description | 201 parser.format_description = lambda *_: parser.description |
189 parser.add_option( | 202 parser.add_option( |
190 '-c', '--cwd', | 203 '-c', '--cwd', |
191 default='chrome', | 204 default='chrome', |
192 help='Signal to start the process from this relative directory. When ' | 205 help='Signal to start the process from this relative directory. When ' |
193 'specified, outputs the input files in a way compatible with ' | 206 'specified, outputs the input files in a way compatible with ' |
194 'gyp processing. Should be set to the relative path containing the ' | 207 'gyp processing. Should be set to the relative path containing the ' |
195 'gyp file, e.g. \'chrome\' or \'net\'') | 208 'gyp file, e.g. \'chrome\' or \'net\'') |
196 parser.add_option( | 209 parser.add_option( |
197 '-p', '--product-dir', | 210 '-V', '--variable', |
198 default='out/Release', | 211 nargs=2, |
199 help='Directory for PRODUCT_DIR. Default: %default') | 212 action='append', |
| 213 default=default_variables, |
| 214 dest='variables', |
| 215 metavar='FOO BAR', |
| 216 help='Variables to process in the .isolate file, default: %default') |
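Because -V/--variable is declared with nargs=2 and action='append', optparse appends each flag's two arguments as a (name, value) tuple on top of the defaults, which is why main() can later call dict(options.variables). A small self-contained check of that behavior; the 'linux' default here is only a stand-in for whatever isolate_common.get_flavor() returns on the host:

    import optparse

    parser = optparse.OptionParser()
    parser.add_option(
        '-V', '--variable', nargs=2, action='append',
        default=[('OS', 'linux'), ('EXECUTABLE_SUFFIX', '')], dest='variables')
    options, _ = parser.parse_args(['-V', 'PRODUCT_DIR', 'out/Release'])
    print dict(options.variables)
    # {'OS': 'linux', 'EXECUTABLE_SUFFIX': '', 'PRODUCT_DIR': 'out/Release'}
    # (key order may vary)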
200 parser.add_option( | 217 parser.add_option( |
201 '--root-dir', | 218 '--root-dir', |
202 default=ROOT_DIR, | 219 default=ROOT_DIR, |
203 help='Root directory to base everything off. Default: %default') | 220 help='Root directory to base everything off. Default: %default') |
204 parser.add_option( | 221 parser.add_option( |
205 '-o', '--out', | 222 '-o', '--out', |
206 help='output file, defaults to <executable>.test_cases') | 223 help='output file, defaults to <executable>.test_cases') |
207 parser.add_option( | 224 parser.add_option( |
208 '-w', '--whitelist', | 225 '-w', '--whitelist', |
209 default=[], | 226 default=[], |
210 action='append', | 227 action='append', |
211 help='filter to apply to test cases to run, wildcard-style, defaults to ' | 228 help='filter to apply to test cases to run, wildcard-style, defaults to ' |
212 'all tests') | 229 'all tests') |
213 parser.add_option( | 230 parser.add_option( |
214 '-b', '--blacklist', | 231 '-b', '--blacklist', |
215 default=[], | 232 default=[], |
216 action='append', | 233 action='append', |
217 help='filter to apply to test cases to skip, wildcard-style, defaults to ' | 234 help='filter to apply to test cases to skip, wildcard-style, defaults to ' |
218 'no tests') | 235 'no tests') |
219 parser.add_option( | 236 parser.add_option( |
220 '-j', '--jobs', | 237 '-j', '--jobs', |
221 type='int', | 238 type='int', |
222 help='number of parallel jobs') | 239 help='number of parallel jobs') |
223 parser.add_option( | 240 parser.add_option( |
224 '-t', '--timeout', | 241 '-t', '--timeout', |
225 default=120, | 242 default=120, |
226 type='int', | 243 type='int', |
227 help='timeout for each test case, in seconds') | 244 help='timeout for each test case, in seconds') |
| 245 parser.add_option( |
| 246 '-v', '--verbose', |
| 247 action='count', |
| 248 default=0, |
| 249 help='Use multiple times to increase verbosity') |
228 options, args = parser.parse_args() | 250 options, args = parser.parse_args() |
229 | 251 |
| 252 levels = [logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG] |
| 253 logging.basicConfig( |
| 254 level=levels[min(len(levels)-1, options.verbose)], |
| 255 format='%(levelname)5s %(module)15s(%(lineno)3d): %(message)s') |
| 256 |
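The logging setup indexes the levels list with min(), so repeated -v flags walk from ERROR up to DEBUG and anything past -vvv is clamped to DEBUG. A quick standalone check of that clamping:

    import logging

    levels = [logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG]
    for verbose in (0, 1, 2, 3, 10):
      level = levels[min(len(levels) - 1, verbose)]
      print verbose, logging.getLevelName(level)
    # 0 ERROR / 1 WARNING / 2 INFO / 3 DEBUG / 10 DEBUG (clamped)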
230 if len(args) != 1: | 257 if len(args) != 1: |
231 parser.error( | 258 parser.error( |
232 'Please provide the executable to run. If you need fancy things ' | 259 'Please provide the executable to run. If you need fancy things ' |
233 'like xvfb, start this script from *inside* xvfb; it\'ll be much ' | 260 'like xvfb, start this script from *inside* xvfb; it\'ll be much ' |
234 'faster.') | 261 'faster.') |
235 executable = args[0] | 262 executable = args[0] |
236 if not os.path.isabs(executable): | 263 if not os.path.isabs(executable): |
237 executable = os.path.join(options.root_dir, options.product_dir, args[0]) | 264 executable = os.path.abspath(os.path.join(options.root_dir, executable)) |
238 if not options.out: | 265 if not options.out: |
239 options.out = '%s.test_cases' % executable | 266 options.out = '%s.test_cases' % executable |
240 variables = {'<(PRODUCT_DIR)': options.product_dir} | |
241 return trace_test_cases( | 267 return trace_test_cases( |
242 executable, | 268 executable, |
243 options.root_dir, | 269 options.root_dir, |
244 options.cwd, | 270 options.cwd, |
245 variables, | 271 dict(options.variables), |
246 options.whitelist, | 272 options.whitelist, |
247 options.blacklist, | 273 options.blacklist, |
248 options.jobs, | 274 options.jobs, |
249 # TODO(maruel): options.timeout, | 275 # TODO(maruel): options.timeout, |
250 options.out) | 276 options.out) |
251 | 277 |
252 | 278 |
253 if __name__ == '__main__': | 279 if __name__ == '__main__': |
254 sys.exit(main()) | 280 sys.exit(main()) |