OLD | NEW |
---|---|
1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. | 2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
5 | 5 |
6 """Does one of the following depending on the --mode argument: | 6 """Does one of the following depending on the --mode argument: |
7 check Verifies all the inputs exist, touches the file specified with | 7 check Verifies all the inputs exist, touches the file specified with |
8 --result and exits. | 8 --result and exits. |
9 hashtable Puts a manifest file and hard links each of the inputs into the | 9 hashtable Puts a manifest file and hard links each of the inputs into the |
10 output directory. | 10 output directory. |
(...skipping 211 matching lines...) | |
222 | 222 |
223 for relfile in infiles: | 223 for relfile in infiles: |
224 infile = os.path.join(indir, relfile) | 224 infile = os.path.join(indir, relfile) |
225 outfile = os.path.join(outdir, relfile) | 225 outfile = os.path.join(outdir, relfile) |
226 outsubdir = os.path.dirname(outfile) | 226 outsubdir = os.path.dirname(outfile) |
227 if not os.path.isdir(outsubdir): | 227 if not os.path.isdir(outsubdir): |
228 os.makedirs(outsubdir) | 228 os.makedirs(outsubdir) |
229 run_test_from_archive.link_file(outfile, infile, action) | 229 run_test_from_archive.link_file(outfile, infile, action) |
230 | 230 |
231 | 231 |
232 def isolate( | 232 def load_results(resultfile): |
233 outdir, indir, infiles, mode, read_only, cmd, relative_cwd, resultfile): | 233 """Loads the previous results as an optimization.""" |
234 """Main function to isolate a target with its dependencies. | 234 data = {} |
235 | |
236 Arguments: | |
237 - outdir: Output directory where the result is stored. Depends on |mode|. | |
238 - indir: Root directory to be used as the base directory for infiles. | |
239 - infiles: List of files, with relative path, to process. | |
240 - mode: Action to do. See file level docstring. | |
241 - read_only: Makes the temporary directory read only. | |
242 - cmd: Command to execute. | |
243 - relative_cwd: Directory relative to the base directory where to start the | |
244 command from. In general, this path will be the path | |
245 containing the gyp file where the target was defined. This | |
246 relative directory may be created implicitly if a file from | |
247 this directory is needed to run the test. Otherwise it won't | |
248 be created and the process creation will fail. It's up to the | |
249 caller to create this directory manually before starting the | |
250 test. | |
251 - resultfile: Path where to read and write the metadata. | |
252 | |
253 Some arguments are optional, depending on |mode|. See the corresponding | |
254 MODE<mode> function for the exact behavior. | |
255 """ | |
256 mode_fn = getattr(sys.modules[__name__], 'MODE' + mode) | |
257 assert mode_fn | |
258 | |
259 # Load the previous results as an optimization. | |
260 prevdict = {} | |
261 if resultfile and os.path.isfile(resultfile): | 235 if resultfile and os.path.isfile(resultfile): |
262 resultfile = os.path.abspath(resultfile) | 236 resultfile = os.path.abspath(resultfile) |
263 with open(resultfile, 'rb') as f: | 237 with open(resultfile, 'r') as f: |
264 prevdict = json.load(f) | 238 data = json.load(f) |
265 else: | 239 else: |
266 resultfile = os.path.abspath(resultfile) | 240 resultfile = os.path.abspath(resultfile) |
241 | |
267 # Works with native os.path.sep but stores as '/'. | 242 # Works with native os.path.sep but stores as '/'. |
268 if 'files' in prevdict and os.path.sep != '/': | 243 if 'files' in data and os.path.sep != '/': |
269 prevdict['files'] = dict( | 244 data['files'] = dict( |
270 (k.replace('/', os.path.sep), v) | 245 (k.replace('/', os.path.sep), v) |
271 for k, v in prevdict['files'].iteritems()) | 246 for k, v in data['files'].iteritems()) |
247 return data | |
272 | 248 |
273 | 249 |
274 infiles = expand_directories( | 250 def save_results(resultfile, data): |
275 indir, infiles, lambda x: re.match(r'.*\.(svn|pyc)$', x)) | 251 data = data.copy() |
276 | |
277 # Only hashtable mode really needs the sha-1. | |
278 level = { | |
279 'check': NO_INFO, | |
280 'hashtable': WITH_HASH, | |
281 'remap': STATS_ONLY, | |
282 'run': STATS_ONLY, | |
283 'trace': STATS_ONLY, | |
284 } | |
285 dictfiles = process_inputs( | |
286 prevdict.get('files', {}), indir, infiles, level[mode], read_only) | |
287 | |
288 result = mode_fn( | |
289 outdir, indir, dictfiles, read_only, cmd, relative_cwd, resultfile) | |
290 out = { | |
291 'command': cmd, | |
292 'relative_cwd': relative_cwd, | |
293 'files': dictfiles, | |
294 # Makes the directories read-only in addition to the files. | |
295 'read_only': read_only, | |
296 } | |
297 | 252 |
298 # Works with native os.path.sep but stores as '/'. | 253 # Works with native os.path.sep but stores as '/'. |
299 if os.path.sep != '/': | 254 if os.path.sep != '/': |
300 out['files'] = dict( | 255 data['files'] = dict( |
301 (k.replace(os.path.sep, '/'), v) for k, v in out['files'].iteritems()) | 256 (k.replace(os.path.sep, '/'), v) for k, v in data['files'].iteritems()) |
302 | 257 |
303 f = None | 258 f = None |
304 try: | 259 try: |
305 if resultfile: | 260 if resultfile: |
306 f = open(resultfile, 'wb') | 261 f = open(resultfile, 'wb') |
307 else: | 262 else: |
308 f = sys.stdout | 263 f = sys.stdout |
309 json.dump(out, f, indent=2, sort_keys=True) | 264 json.dump(data, f, indent=2, sort_keys=True) |
310 f.write('\n') | 265 f.write('\n') |
311 finally: | 266 finally: |
312 if resultfile and f: | 267 if resultfile and f: |
313 f.close() | 268 f.close() |
314 | 269 |
315 total_bytes = sum(i.get('size', 0) for i in out['files'].itervalues()) | 270 total_bytes = sum(i.get('size', 0) for i in data['files'].itervalues()) |
316 if total_bytes: | 271 if total_bytes: |
317 logging.debug('Total size: %d bytes' % total_bytes) | 272 logging.debug('Total size: %d bytes' % total_bytes) |
318 return result | |
319 | 273 |
320 | 274 |
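The '/' vs. os.path.sep translation in load_results() and save_results() above keeps the stored JSON portable across platforms while the rest of the script works with native separators. A minimal self-contained sketch of that round trip, using made-up data (on POSIX both translations are no-ops):

import os

def to_native(files):
  # Mirrors what load_results() does to the 'files' keys after json.load().
  if os.path.sep == '/':
    return dict(files)
  return dict((k.replace('/', os.path.sep), v) for k, v in files.items())

def to_portable(files):
  # Mirrors what save_results() does to the keys before json.dump().
  if os.path.sep == '/':
    return dict(files)
  return dict((k.replace(os.path.sep, '/'), v) for k, v in files.items())

stored = {'base/foo.txt': {'size': 3}}  # hypothetical entry
assert to_portable(to_native(stored)) == stored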
321 def MODEcheck( | 275 def isolate(outdir, mode, indir, infiles, data): |
322 _outdir, _indir, _dictfiles, _read_only, _cmd, _relative_cwd, _resultfile): | 276 """Main function to isolate a target with its dependencies. |
277 | |
278 Arguments: | |
279 - outdir: Output directory where the result is stored. Depends on |mode|. | |
280 - indir: Root directory to be used as the base directory for infiles. | |
281 - infiles: List of files, with relative path, to process. | |
282 - mode: Action to do. See file level docstring. | |
283 - data: Contains all the command specific meta-data. | |
284 | |
285 Some arguments are optional, depending on |mode|. See the corresponding | |
286 MODE<mode> function for the exact behavior. | |
287 """ | |
288 mode_fn = getattr(sys.modules[__name__], 'MODE' + mode) | |
nsylvain
2012/04/14 21:49:02
This is weird. I wish there was a better way...
Marc-Antoine Ruel (Google)
2012/04/14 23:51:24
Used an explicit map.
| |
289 assert mode_fn | |
290 | |
291 infiles = expand_directories( | |
292 indir, infiles, lambda x: re.match(r'.*\.(git|svn|pyc)$', x)) | |
293 | |
294 # Only hashtable mode really needs the sha-1. | |
295 level = { | |
296 'check': NO_INFO, | |
297 'hashtable': WITH_HASH, | |
298 'remap': STATS_ONLY, | |
299 'run': STATS_ONLY, | |
300 'trace': STATS_ONLY, | |
301 } | |
302 # Regenerate data['files'] from infiles. | |
303 data['files'] = process_inputs( | |
304 data.get('files', {}), indir, infiles, level[mode], data.get('read_only')) | |
305 | |
306 result = mode_fn(outdir, indir, data) | |
307 return result, data | |
308 | |
309 | |
310 def MODEcheck(_outdir, _indir, _data): | |
nsylvain
2012/04/14 21:49:02
The getattr above makes it hard to understand who
| |
323 """No-op.""" | 311 """No-op.""" |
324 return 0 | 312 return 0 |
325 | 313 |
326 | 314 |
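The replies above mention switching from the getattr() lookup to an explicit map; that follow-up code is not part of this diff, so the sketch below is only an assumed shape for it, reusing the MODE* handlers defined in this file:

# Assumed sketch, not the actual follow-up patch: an explicit mode -> handler
# map instead of getattr(sys.modules[__name__], 'MODE' + mode). The keys also
# double as the list of valid --mode values.
VALID_MODES = {
    'check': MODEcheck,
    'hashtable': MODEhashtable,
    'remap': MODEremap,
    'run': MODErun,
    'trace': MODEtrace,
}

def get_mode_fn(mode):
  """Returns the handler for |mode| or fails with a readable error."""
  if mode not in VALID_MODES:
    raise KeyError('Unknown mode %s; expected one of %s' %
                   (mode, ', '.join(sorted(VALID_MODES))))
  return VALID_MODES[mode]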
327 def MODEhashtable( | 315 def MODEhashtable(outdir, indir, data): |
328 outdir, indir, dictfiles, _read_only, _cmd, _relative_cwd, resultfile): | 316 outdir = ( |
329 outdir = outdir or os.path.join(os.path.dirname(resultfile), 'hashtable') | 317 outdir or os.path.join(os.path.dirname(data['resultdir']), 'hashtable')) |
330 if not os.path.isdir(outdir): | 318 if not os.path.isdir(outdir): |
331 os.makedirs(outdir) | 319 os.makedirs(outdir) |
332 for relfile, properties in dictfiles.iteritems(): | 320 for relfile, properties in data['files'].iteritems(): |
333 infile = os.path.join(indir, relfile) | 321 infile = os.path.join(indir, relfile) |
334 outfile = os.path.join(outdir, properties['sha-1']) | 322 outfile = os.path.join(outdir, properties['sha-1']) |
335 if os.path.isfile(outfile): | 323 if os.path.isfile(outfile): |
336 # Just do a quick check that the file size matches. No need to stat() | 324 # Just do a quick check that the file size matches. No need to stat() |
337 # again the input file, grab the value from the dict. | 325 # again the input file, grab the value from the dict. |
338 out_size = os.stat(outfile).st_size | 326 out_size = os.stat(outfile).st_size |
339 in_size = dictfiles.get(infile, {}).get('size') or os.stat(infile).st_size | 327 in_size = ( |
328 data.get('files', {}).get(infile, {}).get('size') or | |
329 os.stat(infile).st_size) | |
340 if in_size == out_size: | 330 if in_size == out_size: |
341 continue | 331 continue |
342 # Otherwise, an exception will be raised. | 332 # Otherwise, an exception will be raised. |
343 run_test_from_archive.link_file( | 333 run_test_from_archive.link_file( |
344 outfile, infile, run_test_from_archive.HARDLINK) | 334 outfile, infile, run_test_from_archive.HARDLINK) |
345 return 0 | 335 return 0 |
346 | 336 |
347 | 337 |
348 def MODEremap( | 338 def MODEremap(outdir, indir, data): |
349 outdir, indir, dictfiles, read_only, _cmd, _relative_cwd, _resultfile): | |
350 if not outdir: | 339 if not outdir: |
351 outdir = tempfile.mkdtemp(prefix='isolate') | 340 outdir = tempfile.mkdtemp(prefix='isolate') |
352 else: | 341 else: |
353 if not os.path.isdir(outdir): | 342 if not os.path.isdir(outdir): |
354 os.makedirs(outdir) | 343 os.makedirs(outdir) |
355 print 'Remapping into %s' % outdir | 344 print 'Remapping into %s' % outdir |
356 if len(os.listdir(outdir)): | 345 if len(os.listdir(outdir)): |
357 print 'Can\'t remap in a non-empty directory' | 346 print 'Can\'t remap in a non-empty directory' |
358 return 1 | 347 return 1 |
359 recreate_tree(outdir, indir, dictfiles.keys(), run_test_from_archive.HARDLINK) | 348 recreate_tree( |
360 if read_only: | 349 outdir, indir, data['files'].keys(), run_test_from_archive.HARDLINK) |
350 if data['read_only']: | |
361 run_test_from_archive.make_writable(outdir, True) | 351 run_test_from_archive.make_writable(outdir, True) |
362 return 0 | 352 return 0 |
363 | 353 |
364 | 354 |
365 def MODErun( | 355 def MODErun(_outdir, indir, data): |
366 _outdir, indir, dictfiles, read_only, cmd, relative_cwd, _resultfile): | |
367 """Always uses a temporary directory.""" | 356 """Always uses a temporary directory.""" |
368 try: | 357 try: |
369 outdir = tempfile.mkdtemp(prefix='isolate') | 358 outdir = tempfile.mkdtemp(prefix='isolate') |
370 recreate_tree( | 359 recreate_tree( |
371 outdir, indir, dictfiles.keys(), run_test_from_archive.HARDLINK) | 360 outdir, indir, data['files'].keys(), run_test_from_archive.HARDLINK) |
372 cwd = os.path.join(outdir, relative_cwd) | 361 cwd = os.path.join(outdir, data['relative_cwd']) |
373 if not os.path.isdir(cwd): | 362 if not os.path.isdir(cwd): |
374 os.makedirs(cwd) | 363 os.makedirs(cwd) |
375 if read_only: | 364 if data['read_only']: |
376 run_test_from_archive.make_writable(outdir, True) | 365 run_test_from_archive.make_writable(outdir, True) |
377 if not cmd: | 366 if not data['command']: |
378 print 'No command to run' | 367 print 'No command to run' |
379 return 1 | 368 return 1 |
380 cmd = trace_inputs.fix_python_path(cmd) | 369 cmd = trace_inputs.fix_python_path(data['command']) |
381 logging.info('Running %s, cwd=%s' % (cmd, cwd)) | 370 logging.info('Running %s, cwd=%s' % (cmd, cwd)) |
382 return subprocess.call(cmd, cwd=cwd) | 371 return subprocess.call(cmd, cwd=cwd) |
383 finally: | 372 finally: |
384 run_test_from_archive.rmtree(outdir) | 373 run_test_from_archive.rmtree(outdir) |
385 | 374 |
386 | 375 |
387 def MODEtrace( | 376 def MODEtrace(_outdir, indir, data): |
388 _outdir, indir, _dictfiles, _read_only, cmd, relative_cwd, resultfile): | |
389 """Shortcut to use trace_inputs.py properly. | 377 """Shortcut to use trace_inputs.py properly. |
390 | 378 |
391 It constructs the equivalent of dictfiles. It is hardcoded to base the | 379 It constructs the equivalent of dictfiles. It is hardcoded to base the |
392 checkout at src/. | 380 checkout at src/. |
393 """ | 381 """ |
394 logging.info('Running %s, cwd=%s' % (cmd, os.path.join(indir, relative_cwd))) | 382 logging.info( |
395 if resultfile: | 383 'Running %s, cwd=%s' % ( |
396 # Guesswork here. | 384 data['command'], os.path.join(indir, data['relative_cwd']))) |
397 product_dir = os.path.dirname(resultfile) | 385 product_dir = None |
398 if product_dir and indir: | 386 if data['resultdir'] and indir: |
399 product_dir = os.path.relpath(product_dir, indir) | 387 # Defaults to none if both are the same directory. |
400 else: | 388 product_dir = os.path.relpath(data['resultdir'], indir) or None |
401 product_dir = None | 389 if not data['command']: |
402 if not cmd: | |
403 print 'No command to run' | 390 print 'No command to run' |
404 return 1 | 391 return 1 |
405 return trace_inputs.trace_inputs( | 392 return trace_inputs.trace_inputs( |
406 '%s.log' % resultfile, | 393 data['resultfile'], |
407 cmd, | 394 data['command'], |
408 indir, | 395 indir, |
409 relative_cwd, | 396 data['relative_cwd'], |
410 product_dir, | 397 product_dir, |
411 False) | 398 False) |
412 | 399 |
413 | 400 |
414 def get_valid_modes(): | 401 def get_valid_modes(): |
415 """Returns the modes that can be used.""" | 402 """Returns the modes that can be used.""" |
416 return sorted( | 403 return sorted( |
417 i[4:] for i in dir(sys.modules[__name__]) if i.startswith('MODE')) | 404 i[4:] for i in dir(sys.modules[__name__]) if i.startswith('MODE')) |
418 | 405 |
419 | 406 |
407 def process_options(variables_as_list, resultfile, input_file, error): | |
408 """Processes the options and loads the input file. Returns the processed | |
409 values. | |
410 """ | |
411 input_file = os.path.abspath(input_file) | |
412 isolate_dir = os.path.dirname(input_file) | |
413 resultfile = os.path.abspath(resultfile) | |
414 | |
415 # Extract the variables. | |
416 variables = dict(i.split('=', 1) for i in variables_as_list) | |
417 # Process path variables as a special case. First normalize it, verify it | |
418 # exists, convert it to an absolute path, then set it as relative to | |
419 # isolate_dir. | |
420 for i in ('PRODUCT_DIR',): | |
nsylvain
2012/04/14 21:49:02
?
Marc-Antoine Ruel (Google)
2012/04/14 23:51:24
Added back DEPTH, it should make more sense now.
| |
421 if i not in variables: | |
422 continue | |
423 variable = os.path.normpath(variables[i]) | |
424 if not os.path.isdir(variable): | |
425 error('%s=%s is not a directory' % (i, variable)) | |
426 variable = os.path.abspath(variable) | |
427 # All variables are relative to the input file. | |
428 variables[i] = os.path.relpath(isolate_dir, variable) | |
429 | |
430 command, infiles, read_only = load_isolate( | |
431 open(input_file, 'r').read(), variables, error) | |
432 | |
433 # The trick used to determine the root directory is to look at "how far" back | |
434 # up it is looking up. | |
435 root_dir = isolate_dir | |
436 for i in infiles: | |
437 x = isolate_dir | |
438 while i.startswith('../'): | |
439 i = i[3:] | |
440 assert not i.startswith('/') | |
441 x = posixpath.dirname(x) | |
442 if root_dir.startswith(x): | |
443 root_dir = x | |
444 # The relative directory is automatically determined by the relative path | |
445 # between root_dir and the directory containing the .isolate file. | |
446 relative_dir = os.path.relpath(isolate_dir, root_dir) | |
447 logging.debug('relative_dir: %s' % relative_dir) | |
448 | |
449 logging.debug( | |
450 'variables: %s' % ', '.join( | |
451 '%s=%s' % (k, v) for k, v in variables.iteritems())) | |
452 | |
453 data = load_results(resultfile) | |
454 | |
455 command, infiles, read_only = load_isolate( | |
456 open(input_file, 'r').read(), variables, error) | |
457 | |
458 # Update data with the up to date information: | |
459 data['command'] = command | |
460 data['read_only'] = read_only | |
461 data['relative_cwd'] = relative_dir | |
462 data['resultfile'] = resultfile | |
463 data['resultdir'] = os.path.dirname(resultfile) | |
464 | |
465 # Keep the old variables. | |
466 data.setdefault('variables', {}).update(variables) | |
467 | |
468 logging.debug('command: %s' % command) | |
469 logging.debug('infiles: %s' % infiles) | |
470 logging.debug('read_only: %s' % read_only) | |
471 infiles = [normpath(os.path.join(relative_dir, f)) for f in infiles] | |
472 logging.debug('processed infiles: %s' % infiles) | |
473 return root_dir, infiles, data | |
474 | |
475 | |
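The root-directory trick in process_options() above is easier to follow with a worked example. The helper and paths below are hypothetical (POSIX-style); only the loop body mirrors the reviewed code:

import os
import posixpath

def find_root_dir(isolate_dir, infiles):
  # For every leading '../' in an input path, step one directory up from the
  # directory holding the .isolate file; keep the highest directory reached.
  root_dir = isolate_dir
  for i in infiles:
    x = isolate_dir
    while i.startswith('../'):
      i = i[3:]
      assert not i.startswith('/')
      x = posixpath.dirname(x)
    if root_dir.startswith(x):
      root_dir = x
  return root_dir

# A .isolate file in /b/src/chrome referencing '../tools/run.py' pushes the
# root up to /b/src, and relative_cwd becomes 'chrome'.
root = find_root_dir('/b/src/chrome', ['data/file.txt', '../tools/run.py'])
assert root == '/b/src'
assert os.path.relpath('/b/src/chrome', root) == 'chrome'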
420 def main(): | 476 def main(): |
421 default_variables = ['OS=%s' % trace_inputs.get_flavor()] | 477 default_variables = ['OS=%s' % trace_inputs.get_flavor()] |
422 if sys.platform in ('win32', 'cygwin'): | 478 if sys.platform in ('win32', 'cygwin'): |
423 default_variables.append('EXECUTABLE_SUFFIX=.exe') | 479 default_variables.append('EXECUTABLE_SUFFIX=.exe') |
424 else: | 480 else: |
425 default_variables.append('EXECUTABLE_SUFFIX=') | 481 default_variables.append('EXECUTABLE_SUFFIX=') |
426 valid_modes = get_valid_modes() | 482 valid_modes = get_valid_modes() |
427 parser = optparse.OptionParser( | 483 parser = optparse.OptionParser( |
428 usage='%prog [options] [.isolate file]', | 484 usage='%prog [options] [.isolate file]', |
429 description=sys.modules[__name__].__doc__) | 485 description=sys.modules[__name__].__doc__) |
(...skipping 29 matching lines...) | |
459 level = [logging.ERROR, logging.INFO, logging.DEBUG][min(2, options.verbose)] | 515 level = [logging.ERROR, logging.INFO, logging.DEBUG][min(2, options.verbose)] |
460 logging.basicConfig( | 516 logging.basicConfig( |
461 level=level, | 517 level=level, |
462 format='%(levelname)5s %(module)15s(%(lineno)3d): %(message)s') | 518 format='%(levelname)5s %(module)15s(%(lineno)3d): %(message)s') |
463 | 519 |
464 if not options.mode: | 520 if not options.mode: |
465 parser.error('--mode is required') | 521 parser.error('--mode is required') |
466 if len(args) != 1: | 522 if len(args) != 1: |
467 parser.error('Use only one argument which should be a .isolate file') | 523 parser.error('Use only one argument which should be a .isolate file') |
468 | 524 |
469 input_file = os.path.abspath(args[0]) | 525 root_dir, infiles, data = process_options( |
470 isolate_dir = os.path.dirname(input_file) | 526 options.variables, options.result, args[0], parser.error) |
471 | |
472 # Extract the variables. | |
473 variables = dict(i.split('=', 1) for i in options.variables) | |
474 # Process path variables as a special case. First normalize it, verify it | |
475 # exists, convert it to an absolute path, then set it as relative to | |
476 # isolate_dir. | |
477 for i in ('PRODUCT_DIR',): | |
478 if i not in variables: | |
479 continue | |
480 variable = os.path.normpath(variables[i]) | |
481 if not os.path.isdir(variable): | |
482 parser.error('%s=%s is not a directory' % (i, variable)) | |
483 variable = os.path.abspath(variable) | |
484 # All variables are relative to the input file. | |
485 variables[i] = os.path.relpath(isolate_dir, variable) | |
486 | |
487 command, infiles, read_only = load_isolate( | |
488 open(input_file, 'r').read(), variables, parser.error) | |
489 | |
490 # The trick used to determine the root directory is to look at "how far" back | |
491 # up it is looking up. | |
492 root_dir = isolate_dir | |
493 for i in infiles: | |
494 x = isolate_dir | |
495 while i.startswith('../'): | |
496 i = i[3:] | |
497 assert not i.startswith('/') | |
498 x = posixpath.dirname(x) | |
499 if root_dir.startswith(x): | |
500 root_dir = x | |
501 # The relative directory is automatically determined by the relative path | |
502 # between root_dir and the directory containing the .isolate file. | |
503 relative_dir = os.path.relpath(isolate_dir, root_dir) | |
504 logging.debug('relative_dir: %s' % relative_dir) | |
505 | |
506 logging.debug( | |
507 'variables: %s' % ', '.join( | |
508 '%s=%s' % (k, v) for k, v in variables.iteritems())) | |
509 logging.debug('command: %s' % command) | |
510 logging.debug('infiles: %s' % infiles) | |
511 logging.debug('read_only: %s' % read_only) | |
512 infiles = [normpath(os.path.join(relative_dir, f)) for f in infiles] | |
513 logging.debug('processed infiles: %s' % infiles) | |
514 | 527 |
515 try: | 528 try: |
516 return isolate( | 529 resultcode, data = isolate( |
517 options.outdir, | 530 options.outdir, |
531 options.mode, | |
518 root_dir, | 532 root_dir, |
519 infiles, | 533 infiles, |
520 options.mode, | 534 data) |
521 read_only, | |
522 command, | |
523 relative_dir, | |
524 options.result) | |
525 except run_test_from_archive.MappingError, e: | 535 except run_test_from_archive.MappingError, e: |
526 print >> sys.stderr, str(e) | 536 print >> sys.stderr, str(e) |
527 return 1 | 537 return 1 |
538 save_results(options.result, data) | |
539 return resultcode | |
528 | 540 |
529 | 541 |
530 if __name__ == '__main__': | 542 if __name__ == '__main__': |
531 sys.exit(main()) | 543 sys.exit(main()) |