OLD | NEW |
---|---|
1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. | 2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
5 | 5 |
6 """Does one of the following depending on the --mode argument: | 6 """Does one of the following depending on the --mode argument: |
7 check Verifies all the inputs exist, touches the file specified with | 7 check Verifies all the inputs exist, touches the file specified with |
8 --result and exits. | 8 --result and exits. |
9 hashtable Puts a manifest file and hard links each of the inputs into the | 9 hashtable Puts a manifest file and hard links each of the inputs into the |
10 output directory. | 10 output directory. |
(...skipping 211 matching lines...) | |
222 | 222 |
223 for relfile in infiles: | 223 for relfile in infiles: |
224 infile = os.path.join(indir, relfile) | 224 infile = os.path.join(indir, relfile) |
225 outfile = os.path.join(outdir, relfile) | 225 outfile = os.path.join(outdir, relfile) |
226 outsubdir = os.path.dirname(outfile) | 226 outsubdir = os.path.dirname(outfile) |
227 if not os.path.isdir(outsubdir): | 227 if not os.path.isdir(outsubdir): |
228 os.makedirs(outsubdir) | 228 os.makedirs(outsubdir) |
229 run_test_from_archive.link_file(outfile, infile, action) | 229 run_test_from_archive.link_file(outfile, infile, action) |
230 | 230 |
231 | 231 |
232 def isolate( | 232 def load_results(resultfile): |
233 outdir, indir, infiles, mode, read_only, cmd, relative_cwd, resultfile): | 233 """Loads the previous results as an optimization.""" |
234 """Main function to isolate a target with its dependencies. | 234 data = {} |
235 | |
236 Arguments: | |
237 - outdir: Output directory where the result is stored. Depends on |mode|. | |
238 - indir: Root directory to be used as the base directory for infiles. | |
239 - infiles: List of files, with relative path, to process. | |
240 - mode: Action to do. See file level docstring. | |
241 - read_only: Makes the temporary directory read only. | |
242 - cmd: Command to execute. | |
243 - relative_cwd: Directory relative to the base directory where to start the | |
244 command from. In general, this path will be the path | |
245 containing the gyp file where the target was defined. This | |
246 relative directory may be created implicitly if a file from |
247 this directory is needed to run the test. Otherwise it won't | |
248 be created and the process creation will fail. It's up to the | |
249 caller to create this directory manually before starting the | |
250 test. | |
251 - resultfile: Path where to read and write the metadata. | |
252 | |
253 Some arguments are optional, depending on |mode|. See the corresponding |
254 MODE<mode> function for the exact behavior. | |
255 """ | |
256 mode_fn = getattr(sys.modules[__name__], 'MODE' + mode) | |
257 assert mode_fn | |
258 | |
259 # Load the previous results as an optimization. | |
260 prevdict = {} | |
261 if resultfile and os.path.isfile(resultfile): | 235 if resultfile and os.path.isfile(resultfile): |
262 resultfile = os.path.abspath(resultfile) | 236 resultfile = os.path.abspath(resultfile) |
263 with open(resultfile, 'rb') as f: | 237 with open(resultfile, 'r') as f: |
264 prevdict = json.load(f) | 238 data = json.load(f) |
265 else: | 239 else: |
266 resultfile = os.path.abspath(resultfile) | 240 resultfile = os.path.abspath(resultfile) |
241 | |
267 # Works with native os.path.sep but stores as '/'. | 242 # Works with native os.path.sep but stores as '/'. |
268 if 'files' in prevdict and os.path.sep != '/': | 243 if 'files' in data and os.path.sep != '/': |
269 prevdict['files'] = dict( | 244 data['files'] = dict( |
270 (k.replace('/', os.path.sep), v) | 245 (k.replace('/', os.path.sep), v) |
271 for k, v in prevdict['files'].iteritems()) | 246 for k, v in data['files'].iteritems()) |
247 return data | |
272 | 248 |
273 | 249 |
274 infiles = expand_directories( | 250 def save_results(resultfile, data): |
275 indir, infiles, lambda x: re.match(r'.*\.(svn|pyc)$', x)) | 251 data = data.copy() |
276 | |
277 # Only hashtable mode really needs the sha-1. | |
278 level = { | |
279 'check': NO_INFO, | |
280 'hashtable': WITH_HASH, | |
281 'remap': STATS_ONLY, | |
282 'run': STATS_ONLY, | |
283 'trace': STATS_ONLY, | |
284 } | |
285 dictfiles = process_inputs( | |
286 prevdict.get('files', {}), indir, infiles, level[mode], read_only) | |
287 | |
288 result = mode_fn( | |
289 outdir, indir, dictfiles, read_only, cmd, relative_cwd, resultfile) | |
290 out = { | |
291 'command': cmd, | |
292 'relative_cwd': relative_cwd, | |
293 'files': dictfiles, | |
294 # Makes the directories read-only in addition to the files. | |
295 'read_only': read_only, | |
296 } | |
297 | 252 |
298 # Works with native os.path.sep but stores as '/'. | 253 # Works with native os.path.sep but stores as '/'. |
299 if os.path.sep != '/': | 254 if os.path.sep != '/': |
300 out['files'] = dict( | 255 data['files'] = dict( |
301 (k.replace(os.path.sep, '/'), v) for k, v in out['files'].iteritems()) | 256 (k.replace(os.path.sep, '/'), v) for k, v in data['files'].iteritems()) |
302 | 257 |
303 f = None | 258 f = None |
304 try: | 259 try: |
305 if resultfile: | 260 if resultfile: |
306 f = open(resultfile, 'wb') | 261 f = open(resultfile, 'wb') |
307 else: | 262 else: |
308 f = sys.stdout | 263 f = sys.stdout |
309 json.dump(out, f, indent=2, sort_keys=True) | 264 json.dump(data, f, indent=2, sort_keys=True) |
310 f.write('\n') | 265 f.write('\n') |
311 finally: | 266 finally: |
312 if resultfile and f: | 267 if resultfile and f: |
313 f.close() | 268 f.close() |
314 | 269 |
315 total_bytes = sum(i.get('size', 0) for i in out['files'].itervalues()) | 270 total_bytes = sum(i.get('size', 0) for i in data['files'].itervalues()) |
316 if total_bytes: | 271 if total_bytes: |
317 logging.debug('Total size: %d bytes' % total_bytes) | 272 logging.debug('Total size: %d bytes' % total_bytes) |
318 return result | |
319 | 273 |
320 | 274 |
321 def MODEcheck( | 275 def isolate(outdir, mode, indir, infiles, data): |
322 _outdir, _indir, _dictfiles, _read_only, _cmd, _relative_cwd, _resultfile): | 276 """Main function to isolate a target with its dependencies. |
277 | |
278 Arguments: | |
279 - outdir: Output directory where the result is stored. Depends on |mode|. | |
280 - indir: Root directory to be used as the base directory for infiles. | |
281 - infiles: List of files, with relative path, to process. | |
282 - mode: Action to do. See file level docstring. | |
283 - data: Contains all the command specific meta-data. | |
284 | |
285 Some arguments are optional, depending on |mode|. See the corresponding |
286 MODE<mode> function for the exact behavior. | |
287 """ | |
288 # The code below is functionally equivalent to this line: |
289 #mode_fn = getattr(sys.modules[__name__], 'MODE' + mode) | |
nsylvain 2012/04/16 15:51:43: remove? | |
290 modes = { | |
291 'check': MODEcheck, | |
292 'hashtable': MODEhashtable, | |
293 'remap': MODEremap, | |
294 'run': MODErun, | |
295 'trace': MODEtrace, | |
296 } | |
297 mode_fn = modes[mode] | |
298 | |
299 infiles = expand_directories( | |
300 indir, infiles, lambda x: re.match(r'.*\.(git|svn|pyc)$', x)) | |
301 | |
302 # Only hashtable mode really needs the sha-1. | |
303 level = { | |
304 'check': NO_INFO, | |
305 'hashtable': WITH_HASH, | |
306 'remap': STATS_ONLY, | |
307 'run': STATS_ONLY, | |
308 'trace': STATS_ONLY, | |
309 } | |
310 # Regenerate data['files'] from infiles. | |
311 data['files'] = process_inputs( | |
312 data.get('files', {}), indir, infiles, level[mode], data.get('read_only')) | |
313 | |
314 result = mode_fn(outdir, indir, data) | |
315 return result, data | |
316 | |
317 | |
318 def MODEcheck(_outdir, _indir, _data): | |
323 """No-op.""" | 319 """No-op.""" |
324 return 0 | 320 return 0 |
325 | 321 |
326 | 322 |
327 def MODEhashtable( | 323 def MODEhashtable(outdir, indir, data): |
328 outdir, indir, dictfiles, _read_only, _cmd, _relative_cwd, resultfile): | 324 outdir = ( |
329 outdir = outdir or os.path.join(os.path.dirname(resultfile), 'hashtable') | 325 outdir or os.path.join(os.path.dirname(data['resultdir']), 'hashtable')) |
330 if not os.path.isdir(outdir): | 326 if not os.path.isdir(outdir): |
331 os.makedirs(outdir) | 327 os.makedirs(outdir) |
332 for relfile, properties in dictfiles.iteritems(): | 328 for relfile, properties in data['files'].iteritems(): |
333 infile = os.path.join(indir, relfile) | 329 infile = os.path.join(indir, relfile) |
334 outfile = os.path.join(outdir, properties['sha-1']) | 330 outfile = os.path.join(outdir, properties['sha-1']) |
335 if os.path.isfile(outfile): | 331 if os.path.isfile(outfile): |
336 # Just do a quick check that the file size matches. No need to stat() | 332 # Just do a quick check that the file size matches. No need to stat() |
337 # the input file again; grab the value from the dict. | 333 # the input file again; grab the value from the dict. |
338 out_size = os.stat(outfile).st_size | 334 out_size = os.stat(outfile).st_size |
339 in_size = dictfiles.get(infile, {}).get('size') or os.stat(infile).st_size | 335 in_size = ( |
336 data.get('files', {}).get(infile, {}).get('size') or | |
337 os.stat(infile).st_size) | |
340 if in_size == out_size: | 338 if in_size == out_size: |
341 continue | 339 continue |
342 # Otherwise, an exception will be raised. | 340 # Otherwise, an exception will be raised. |
343 run_test_from_archive.link_file( | 341 run_test_from_archive.link_file( |
344 outfile, infile, run_test_from_archive.HARDLINK) | 342 outfile, infile, run_test_from_archive.HARDLINK) |
345 return 0 | 343 return 0 |
346 | 344 |
347 | 345 |
348 def MODEremap( | 346 def MODEremap(outdir, indir, data): |
349 outdir, indir, dictfiles, read_only, _cmd, _relative_cwd, _resultfile): | |
350 if not outdir: | 347 if not outdir: |
351 outdir = tempfile.mkdtemp(prefix='isolate') | 348 outdir = tempfile.mkdtemp(prefix='isolate') |
352 else: | 349 else: |
353 if not os.path.isdir(outdir): | 350 if not os.path.isdir(outdir): |
354 os.makedirs(outdir) | 351 os.makedirs(outdir) |
355 print 'Remapping into %s' % outdir | 352 print 'Remapping into %s' % outdir |
356 if len(os.listdir(outdir)): | 353 if len(os.listdir(outdir)): |
357 print 'Can\'t remap in a non-empty directory' | 354 print 'Can\'t remap in a non-empty directory' |
358 return 1 | 355 return 1 |
359 recreate_tree(outdir, indir, dictfiles.keys(), run_test_from_archive.HARDLINK) | 356 recreate_tree( |
360 if read_only: | 357 outdir, indir, data['files'].keys(), run_test_from_archive.HARDLINK) |
358 if data['read_only']: | |
361 run_test_from_archive.make_writable(outdir, True) | 359 run_test_from_archive.make_writable(outdir, True) |
362 return 0 | 360 return 0 |
363 | 361 |
364 | 362 |
365 def MODErun( | 363 def MODErun(_outdir, indir, data): |
366 _outdir, indir, dictfiles, read_only, cmd, relative_cwd, _resultfile): | |
367 """Always uses a temporary directory.""" | 364 """Always uses a temporary directory.""" |
368 try: | 365 try: |
369 outdir = tempfile.mkdtemp(prefix='isolate') | 366 outdir = tempfile.mkdtemp(prefix='isolate') |
370 recreate_tree( | 367 recreate_tree( |
371 outdir, indir, dictfiles.keys(), run_test_from_archive.HARDLINK) | 368 outdir, indir, data['files'].keys(), run_test_from_archive.HARDLINK) |
372 cwd = os.path.join(outdir, relative_cwd) | 369 cwd = os.path.join(outdir, data['relative_cwd']) |
373 if not os.path.isdir(cwd): | 370 if not os.path.isdir(cwd): |
374 os.makedirs(cwd) | 371 os.makedirs(cwd) |
375 if read_only: | 372 if data['read_only']: |
376 run_test_from_archive.make_writable(outdir, True) | 373 run_test_from_archive.make_writable(outdir, True) |
377 if not cmd: | 374 if not data['command']: |
378 print 'No command to run' | 375 print 'No command to run' |
379 return 1 | 376 return 1 |
380 cmd = trace_inputs.fix_python_path(cmd) | 377 cmd = trace_inputs.fix_python_path(data['command']) |
381 logging.info('Running %s, cwd=%s' % (cmd, cwd)) | 378 logging.info('Running %s, cwd=%s' % (cmd, cwd)) |
382 return subprocess.call(cmd, cwd=cwd) | 379 return subprocess.call(cmd, cwd=cwd) |
383 finally: | 380 finally: |
384 run_test_from_archive.rmtree(outdir) | 381 run_test_from_archive.rmtree(outdir) |
385 | 382 |
386 | 383 |
387 def MODEtrace( | 384 def MODEtrace(_outdir, indir, data): |
388 _outdir, indir, _dictfiles, _read_only, cmd, relative_cwd, resultfile): | |
389 """Shortcut to use trace_inputs.py properly. | 385 """Shortcut to use trace_inputs.py properly. |
390 | 386 |
391 It constructs the equivalent of dictfiles. It is hardcoded to base the | 387 It constructs the equivalent of dictfiles. It is hardcoded to base the |
392 checkout at src/. | 388 checkout at src/. |
393 """ | 389 """ |
394 logging.info('Running %s, cwd=%s' % (cmd, os.path.join(indir, relative_cwd))) | 390 logging.info( |
395 if resultfile: | 391 'Running %s, cwd=%s' % ( |
396 # Guesswork here. | 392 data['command'], os.path.join(indir, data['relative_cwd']))) |
397 product_dir = os.path.dirname(resultfile) | 393 product_dir = None |
398 if product_dir and indir: | 394 if data['resultdir'] and indir: |
399 product_dir = os.path.relpath(product_dir, indir) | 395 # Defaults to None if both are the same directory. |
400 else: | 396 product_dir = os.path.relpath(data['resultdir'], indir) or None |
401 product_dir = None | 397 if not data['command']: |
402 if not cmd: | |
403 print 'No command to run' | 398 print 'No command to run' |
404 return 1 | 399 return 1 |
405 return trace_inputs.trace_inputs( | 400 return trace_inputs.trace_inputs( |
406 '%s.log' % resultfile, | 401 data['resultfile'], |
407 cmd, | 402 data['command'], |
408 indir, | 403 indir, |
409 relative_cwd, | 404 data['relative_cwd'], |
410 product_dir, | 405 product_dir, |
411 False) | 406 False) |
412 | 407 |
413 | 408 |
414 def get_valid_modes(): | 409 def get_valid_modes(): |
415 """Returns the modes that can be used.""" | 410 """Returns the modes that can be used.""" |
416 return sorted( | 411 return sorted( |
417 i[4:] for i in dir(sys.modules[__name__]) if i.startswith('MODE')) | 412 i[4:] for i in dir(sys.modules[__name__]) if i.startswith('MODE')) |
418 | 413 |
419 | 414 |
415 def process_options(variables_as_list, resultfile, input_file, error): | |
416 """Processes the options and loads the input file. Returns the processed | |
417 values. | |
418 """ | |
419 input_file = os.path.abspath(input_file) | |
420 isolate_dir = os.path.dirname(input_file) | |
421 resultfile = os.path.abspath(resultfile) | |
422 | |
423 # Extract the variables. | |
424 variables = dict(i.split('=', 1) for i in variables_as_list) | |
425 # Process path variables as a special case. First normalize it, verify it |
426 # exists, convert it to an absolute path, then set it relative to |
427 # isolate_dir. | |
428 for i in ('DEPTH', 'PRODUCT_DIR'): | |
429 if i not in variables: | |
430 continue | |
431 variable = os.path.normpath(variables[i]) | |
432 if not os.path.isdir(variable): | |
433 error('%s=%s is not a directory' % (i, variable)) | |
434 variable = os.path.abspath(variable) | |
435 # All variables are relative to the input file. | |
436 variables[i] = os.path.relpath(isolate_dir, variable) | |
437 | |
438 command, infiles, read_only = load_isolate( | |
439 open(input_file, 'r').read(), variables, error) | |
440 | |
441 # The trick used to determine the root directory is to look at how far up |
442 # the '../' prefixes in the input files reach. |
443 root_dir = isolate_dir | |
444 for i in infiles: | |
445 x = isolate_dir | |
446 while i.startswith('../'): | |
447 i = i[3:] | |
448 assert not i.startswith('/') | |
449 x = posixpath.dirname(x) | |
450 if root_dir.startswith(x): | |
451 root_dir = x | |
452 # The relative directory is automatically determined by the relative path | |
453 # between root_dir and the directory containing the .isolate file. | |
454 relative_dir = os.path.relpath(isolate_dir, root_dir) | |
455 logging.debug('relative_dir: %s' % relative_dir) | |
456 | |
457 logging.debug( | |
458 'variables: %s' % ', '.join( | |
459 '%s=%s' % (k, v) for k, v in variables.iteritems())) | |
460 | |
461 data = load_results(resultfile) | |
462 | |
463 command, infiles, read_only = load_isolate( | |
464 open(input_file, 'r').read(), variables, error) | |
465 | |
466 # Update data with the up-to-date information: |
467 data['command'] = command | |
468 data['read_only'] = read_only | |
469 data['relative_cwd'] = relative_dir | |
470 data['resultfile'] = resultfile | |
471 data['resultdir'] = os.path.dirname(resultfile) | |
472 | |
473 # Keep the old variables. | |
474 data.setdefault('variables', {}).update(variables) | |
475 | |
476 logging.debug('command: %s' % command) | |
477 logging.debug('infiles: %s' % infiles) | |
478 logging.debug('read_only: %s' % read_only) | |
479 infiles = [normpath(os.path.join(relative_dir, f)) for f in infiles] | |
480 logging.debug('processed infiles: %s' % infiles) | |
481 return root_dir, infiles, data | |
482 | |
483 | |
420 def main(): | 484 def main(): |
421 default_variables = ['OS=%s' % trace_inputs.get_flavor()] | 485 default_variables = ['OS=%s' % trace_inputs.get_flavor()] |
422 if sys.platform in ('win32', 'cygwin'): | 486 if sys.platform in ('win32', 'cygwin'): |
423 default_variables.append('EXECUTABLE_SUFFIX=.exe') | 487 default_variables.append('EXECUTABLE_SUFFIX=.exe') |
424 else: | 488 else: |
425 default_variables.append('EXECUTABLE_SUFFIX=') | 489 default_variables.append('EXECUTABLE_SUFFIX=') |
426 valid_modes = get_valid_modes() | 490 valid_modes = get_valid_modes() |
427 parser = optparse.OptionParser( | 491 parser = optparse.OptionParser( |
428 usage='%prog [options] [.isolate file]', | 492 usage='%prog [options] [.isolate file]', |
429 description=sys.modules[__name__].__doc__) | 493 description=sys.modules[__name__].__doc__) |
(...skipping 29 matching lines...) | |
459 level = [logging.ERROR, logging.INFO, logging.DEBUG][min(2, options.verbose)] | 523 level = [logging.ERROR, logging.INFO, logging.DEBUG][min(2, options.verbose)] |
460 logging.basicConfig( | 524 logging.basicConfig( |
461 level=level, | 525 level=level, |
462 format='%(levelname)5s %(module)15s(%(lineno)3d): %(message)s') | 526 format='%(levelname)5s %(module)15s(%(lineno)3d): %(message)s') |
463 | 527 |
464 if not options.mode: | 528 if not options.mode: |
465 parser.error('--mode is required') | 529 parser.error('--mode is required') |
466 if len(args) != 1: | 530 if len(args) != 1: |
467 parser.error('Use only one argument which should be a .isolate file') | 531 parser.error('Use only one argument which should be a .isolate file') |
468 | 532 |
469 input_file = os.path.abspath(args[0]) | 533 root_dir, infiles, data = process_options( |
470 isolate_dir = os.path.dirname(input_file) | 534 options.variables, options.result, args[0], parser.error) |
471 | |
472 # Extract the variables. | |
473 variables = dict(i.split('=', 1) for i in options.variables) | |
474 # Process path variables as a special case. First normalize it, verify it |
475 # exists, convert it to an absolute path, then set it relative to |
476 # isolate_dir. | |
477 for i in ('PRODUCT_DIR',): | |
478 if i not in variables: | |
479 continue | |
480 variable = os.path.normpath(variables[i]) | |
481 if not os.path.isdir(variable): | |
482 parser.error('%s=%s is not a directory' % (i, variable)) | |
483 variable = os.path.abspath(variable) | |
484 # All variables are relative to the input file. | |
485 variables[i] = os.path.relpath(isolate_dir, variable) | |
486 | |
487 command, infiles, read_only = load_isolate( | |
488 open(input_file, 'r').read(), variables, parser.error) | |
489 | |
490 # The trick used to determine the root directory is to look at how far up |
491 # the '../' prefixes in the input files reach. |
492 root_dir = isolate_dir | |
493 for i in infiles: | |
494 x = isolate_dir | |
495 while i.startswith('../'): | |
496 i = i[3:] | |
497 assert not i.startswith('/') | |
498 x = posixpath.dirname(x) | |
499 if root_dir.startswith(x): | |
500 root_dir = x | |
501 # The relative directory is automatically determined by the relative path | |
502 # between root_dir and the directory containing the .isolate file. | |
503 relative_dir = os.path.relpath(isolate_dir, root_dir) | |
504 logging.debug('relative_dir: %s' % relative_dir) | |
505 | |
506 logging.debug( | |
507 'variables: %s' % ', '.join( | |
508 '%s=%s' % (k, v) for k, v in variables.iteritems())) | |
509 logging.debug('command: %s' % command) | |
510 logging.debug('infiles: %s' % infiles) | |
511 logging.debug('read_only: %s' % read_only) | |
512 infiles = [normpath(os.path.join(relative_dir, f)) for f in infiles] | |
513 logging.debug('processed infiles: %s' % infiles) | |
514 | 535 |
515 try: | 536 try: |
516 return isolate( | 537 resultcode, data = isolate( |
517 options.outdir, | 538 options.outdir, |
539 options.mode, | |
518 root_dir, | 540 root_dir, |
519 infiles, | 541 infiles, |
520 options.mode, | 542 data) |
521 read_only, | |
522 command, | |
523 relative_dir, | |
524 options.result) | |
525 except run_test_from_archive.MappingError, e: | 543 except run_test_from_archive.MappingError, e: |
526 print >> sys.stderr, str(e) | 544 print >> sys.stderr, str(e) |
527 return 1 | 545 return 1 |
546 save_results(options.result, data) | |
547 return resultcode | |
528 | 548 |
529 | 549 |
530 if __name__ == '__main__': | 550 if __name__ == '__main__': |
531 sys.exit(main()) | 551 sys.exit(main()) |
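
For reference, the new load_results()/save_results() pair keeps the 'files' keys portable by storing them with '/' separators and converting them back to os.path.sep when loading. Below is a minimal round-trip sketch of that idea; the helper names to_portable/to_native and the sample path are illustrative only, not part of the patch:

  import json
  import os

  def to_portable(files):
    # Store keys with '/' regardless of the native separator (as save_results does).
    return dict((k.replace(os.path.sep, '/'), v) for k, v in files.items())

  def to_native(files):
    # Convert stored '/' keys back to the native separator (as load_results does).
    return dict((k.replace('/', os.path.sep), v) for k, v in files.items())

  # Example round trip: on Windows 'dir\\file.txt' is saved as 'dir/file.txt'
  # and restored to 'dir\\file.txt' when the results file is reloaded.
  data = {'files': to_portable({os.path.join('dir', 'file.txt'): {'size': 1}})}
  blob = json.dumps(data, indent=2, sort_keys=True)
  restored = to_native(json.loads(blob)['files'])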
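
Likewise, the root-directory detection in process_options() works by counting how many '../' prefixes each input file needs and keeping the shallowest ancestor reached. A standalone sketch of that loop follows; find_root_dir and the example paths are hypothetical, for illustration only:

  import posixpath

  def find_root_dir(isolate_dir, infiles):
    # Start from the directory containing the .isolate file.
    root_dir = isolate_dir
    for relfile in infiles:
      x = isolate_dir
      # Each leading '../' means the dependency lives one level above
      # isolate_dir, so walk x up one directory per prefix.
      while relfile.startswith('../'):
        relfile = relfile[3:]
        x = posixpath.dirname(x)
      # Keep the shallowest ancestor seen so far.
      if root_dir.startswith(x):
        root_dir = x
    return root_dir

  # Example: a dependency one level up pushes the root up one directory.
  # find_root_dir('/src/chrome/test', ['../base/base.gyp', 'data/a.txt'])
  # returns '/src/chrome'.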