OLD | NEW |
1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. | 2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
5 | 5 |
6 """Reads a manifest, creates a tree of hardlinks and runs the test. | 6 """Reads a manifest, creates a tree of hardlinks and runs the test. |
7 | 7 |
8 Keeps a local cache. | 8 Keeps a local cache. |
9 """ | 9 """ |
10 | 10 |
11 import ctypes | 11 import ctypes |
| 12 import hashlib |
12 import json | 13 import json |
13 import logging | 14 import logging |
14 import optparse | 15 import optparse |
15 import os | 16 import os |
16 import Queue | 17 import Queue |
17 import re | 18 import re |
18 import shutil | 19 import shutil |
19 import stat | 20 import stat |
20 import subprocess | 21 import subprocess |
21 import sys | 22 import sys |
(...skipping 115 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
137 # If the drive letter mismatches, assume it's a separate partition. | 138 # If the drive letter mismatches, assume it's a separate partition. |
138 # TODO(maruel): It should look at the underlying drive, a drive letter could | 139 # TODO(maruel): It should look at the underlying drive, a drive letter could |
139 # be a mount point to a directory on another drive. | 140 # be a mount point to a directory on another drive. |
140 assert re.match(r'^[a-zA-Z]\:\\.*', path1), path1 | 141 assert re.match(r'^[a-zA-Z]\:\\.*', path1), path1 |
141 assert re.match(r'^[a-zA-Z]\:\\.*', path2), path2 | 142 assert re.match(r'^[a-zA-Z]\:\\.*', path2), path2 |
142 if path1[0].lower() != path2[0].lower(): | 143 if path1[0].lower() != path2[0].lower(): |
143 return False | 144 return False |
144 return os.stat(path1).st_dev == os.stat(path2).st_dev | 145 return os.stat(path1).st_dev == os.stat(path2).st_dev |
145 | 146 |
146 | 147 |
147 def open_remote(file_or_url): | |
148 """Reads a file or url.""" | |
149 if re.match(r'^https?://.+$', file_or_url): | |
150 return urllib.urlopen(file_or_url) | |
151 return open(file_or_url, 'rb') | |
152 | |
153 | |
154 def get_free_space(path): | 148 def get_free_space(path): |
155 """Returns the number of free bytes.""" | 149 """Returns the number of free bytes.""" |
156 if sys.platform == 'win32': | 150 if sys.platform == 'win32': |
157 free_bytes = ctypes.c_ulonglong(0) | 151 free_bytes = ctypes.c_ulonglong(0) |
158 ctypes.windll.kernel32.GetDiskFreeSpaceExW( | 152 ctypes.windll.kernel32.GetDiskFreeSpaceExW( |
159 ctypes.c_wchar_p(path), None, None, ctypes.pointer(free_bytes)) | 153 ctypes.c_wchar_p(path), None, None, ctypes.pointer(free_bytes)) |
160 return free_bytes.value | 154 return free_bytes.value |
161 f = os.statvfs(path) | 155 f = os.statvfs(path) |
162 return f.f_bfree * f.f_frsize | 156 return f.f_bfree * f.f_frsize |
163 | 157 |
(...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
209 raise ConfigError('Expected sha-1, got %r' % subsubvalue) | 203 raise ConfigError('Expected sha-1, got %r' % subsubvalue) |
210 elif subsubkey == 'timestamp': | 204 elif subsubkey == 'timestamp': |
211 if not isinstance(subsubvalue, int): | 205 if not isinstance(subsubvalue, int): |
212 raise ConfigError('Expected int, got %r' % subsubvalue) | 206 raise ConfigError('Expected int, got %r' % subsubvalue) |
213 else: | 207 else: |
214 raise ConfigError('Unknown key %s' % subsubkey) | 208 raise ConfigError('Unknown key %s' % subsubkey) |
215 if bool('sha-1' in subvalue) and bool('link' in subvalue): | 209 if bool('sha-1' in subvalue) and bool('link' in subvalue): |
216 raise ConfigError( | 210 raise ConfigError( |
217 'Did not expect both \'sha-1\' and \'link\', got: %r' % subvalue) | 211 'Did not expect both \'sha-1\' and \'link\', got: %r' % subvalue) |
218 | 212 |
| 213 elif key == 'includes': |
| 214 if not isinstance(value, list): |
| 215 raise ConfigError('Expected list, got %r' % value) |
| 216 for subvalue in value: |
| 217 if not RE_IS_SHA1.match(subvalue): |
| 218 raise ConfigError('Expected sha-1, got %r' % subvalue) |
| 219 |
219 elif key == 'read_only': | 220 elif key == 'read_only': |
220 if not isinstance(value, bool): | 221 if not isinstance(value, bool): |
221 raise ConfigError('Expected bool, got %r' % value) | 222 raise ConfigError('Expected bool, got %r' % value) |
222 | 223 |
223 elif key == 'relative_cwd': | 224 elif key == 'relative_cwd': |
224 if not isinstance(value, basestring): | 225 if not isinstance(value, basestring): |
225 raise ConfigError('Expected string, got %r' % value) | 226 raise ConfigError('Expected string, got %r' % value) |
226 | 227 |
227 else: | 228 else: |
228 raise ConfigError('Unknown key %s' % subkey) | 229 raise ConfigError('Unknown key %s' % subkey) |
(...skipping 177 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
406 - policies: cache retention policies. | 407 - policies: cache retention policies. |
407 """ | 408 """ |
408 self.cache_dir = cache_dir | 409 self.cache_dir = cache_dir |
409 self.remote = remote | 410 self.remote = remote |
410 self.policies = policies | 411 self.policies = policies |
411 self.state_file = os.path.join(cache_dir, self.STATE_FILE) | 412 self.state_file = os.path.join(cache_dir, self.STATE_FILE) |
412 # The files are kept as an array in a LRU style. E.g. self.state[0] is the | 413 # The files are kept as an array in a LRU style. E.g. self.state[0] is the |
413 # oldest item. | 414 # oldest item. |
414 self.state = [] | 415 self.state = [] |
415 | 416 |
| 417 # Items currently being fetched. Keep it local to reduce lock contention. |
| 418 self._pending_queue = set() |
| 419 |
416 # Profiling values. | 420 # Profiling values. |
417 # The files added and removed are stored as tuples of the filename and | 421 # The files added and removed are stored as tuples of the filename and |
418 # the file size. | 422 # the file size. |
419 self.files_added = [] | 423 self.files_added = [] |
420 self.files_removed = [] | 424 self.files_removed = [] |
421 self.time_retrieving_files = 0 | |
422 | 425 |
423 if not os.path.isdir(self.cache_dir): | 426 if not os.path.isdir(self.cache_dir): |
424 os.makedirs(self.cache_dir) | 427 os.makedirs(self.cache_dir) |
425 if os.path.isfile(self.state_file): | 428 if os.path.isfile(self.state_file): |
426 try: | 429 try: |
427 self.state = json.load(open(self.state_file, 'r')) | 430 self.state = json.load(open(self.state_file, 'r')) |
428 except (IOError, ValueError), e: | 431 except (IOError, ValueError), e: |
429 # Too bad. The file will be overwritten and the cache cleared. | 432 # Too bad. The file will be overwritten and the cache cleared. |
430 logging.error( | 433 logging.error( |
431 'Broken state file %s, ignoring.\n%s' % (self.STATE_FILE, e)) | 434 'Broken state file %s, ignoring.\n%s' % (self.STATE_FILE, e)) |
432 with Profiler('SetupTrimming'): | 435 with Profiler('SetupTrimming'): |
433 self.trim() | 436 self.trim() |
434 | 437 |
435 def __enter__(self): | 438 def __enter__(self): |
436 return self | 439 return self |
437 | 440 |
438 def __exit__(self, _exc_type, _exec_value, _traceback): | 441 def __exit__(self, _exc_type, _exec_value, _traceback): |
439 with Profiler('CleanupTrimming'): | 442 with Profiler('CleanupTrimming'): |
440 self.trim() | 443 self.trim() |
441 | 444 |
442 logging.info('Number of files added to cache: %i', | 445 logging.info('Number of files added to cache: %i', |
443 len(self.files_added)) | 446 len(self.files_added)) |
444 logging.info('Size of files added to cache: %i', | 447 logging.info('Size of files added to cache: %i', |
445 sum(item[1] for item in self.files_added)) | 448 sum(item[1] for item in self.files_added)) |
446 logging.info('Time taken (in seconds) to add files to cache: %s', | |
447 self.time_retrieving_files) | |
448 logging.debug('All files added:') | 449 logging.debug('All files added:') |
449 logging.debug(self.files_added) | 450 logging.debug(self.files_added) |
450 | 451 |
451 logging.info('Number of files removed from cache: %i', | 452 logging.info('Number of files removed from cache: %i', |
452 len(self.files_removed)) | 453 len(self.files_removed)) |
453 logging.info('Size of files removed from cache: %i', | 454 logging.info('Size of files removed from cache: %i', |
454 sum(item[1] for item in self.files_removed)) | 455 sum(item[1] for item in self.files_removed)) |
455 logging.debug('All files remove:') | 456 logging.debug('All files remove:') |
456 logging.debug(self.files_added) | 457 logging.debug(self.files_added) |
457 | 458 |
(...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
503 self.remove_lru_file() | 504 self.remove_lru_file() |
504 sizes.pop(0) | 505 sizes.pop(0) |
505 | 506 |
506 # Ensure maximum number of items in the cache. | 507 # Ensure maximum number of items in the cache. |
507 if self.policies.max_items and self.state: | 508 if self.policies.max_items and self.state: |
508 while len(self.state) > self.policies.max_items: | 509 while len(self.state) > self.policies.max_items: |
509 self.remove_lru_file() | 510 self.remove_lru_file() |
510 | 511 |
511 self.save() | 512 self.save() |
512 | 513 |
513 def retrieve(self, item): | 514 def retrieve(self, priority, item): |
514 """Retrieves a file from the remote and add it to the cache.""" | 515 """Retrieves a file from the remote, if not already cached, and adds it to |
| 516 the cache. |
| 517 """ |
515 assert not '/' in item | 518 assert not '/' in item |
| 519 path = self.path(item) |
516 try: | 520 try: |
517 index = self.state.index(item) | 521 index = self.state.index(item) |
518 # Was already in cache. Update its LRU value. | 522 # Was already in cache. Update its LRU value. |
519 self.state.pop(index) | 523 self.state.pop(index) |
520 self.state.append(item) | 524 self.state.append(item) |
521 return False | 525 os.utime(path, None) |
522 except ValueError: | 526 except ValueError: |
523 out = self.path(item) | 527 if item in self._pending_queue: |
524 start_retrieve = time.time() | 528 # Already pending. The same object could be referenced multiple times. |
525 self.remote.fetch_item(Remote.MED, item, out) | 529 return |
526 # TODO(maruel): Temporarily fetch the files serially. | 530 self.remote.fetch_item(priority, item, path) |
527 self.remote.get_result() | 531 self._pending_queue.add(item) |
528 if os.path.exists(out): | 532 |
529 self.state.append(item) | 533 def add(self, filepath, obj): |
530 self.files_added.append((out, os.stat(out).st_size)) | 534 """Forcibly adds a file to the cache.""" |
531 else: | 535 if not obj in self.state: |
532 logging.error('File, %s, not placed in cache' % item) | 536 link_file(self.path(obj), filepath, HARDLINK) |
533 self.time_retrieving_files += time.time() - start_retrieve | 537 self.state.append(obj) |
534 return True | |
535 finally: | |
536 self.save() | |
537 | 538 |
538 def path(self, item): | 539 def path(self, item): |
539 """Returns the path to one item.""" | 540 """Returns the path to one item.""" |
540 return os.path.join(self.cache_dir, item) | 541 return os.path.join(self.cache_dir, item) |
541 | 542 |
542 def save(self): | 543 def save(self): |
543 """Saves the LRU ordering.""" | 544 """Saves the LRU ordering.""" |
544 json.dump(self.state, open(self.state_file, 'wb'), separators=(',',':')) | 545 json.dump(self.state, open(self.state_file, 'wb'), separators=(',',':')) |
545 | 546 |
546 | 547 def wait_for(self, items): |
547 def run_tha_test(manifest, cache_dir, remote, policies): | 548 """Starts a loop that waits for at least one of |items| to be retrieved. |
| 549 |
| 550 Returns the first item retrieved. |
| 551 """ |
| 552 # Flush items already present. |
| 553 for item in items: |
| 554 if item in self.state: |
| 555 return item |
| 556 |
| 557 assert all(i in self._pending_queue for i in items), ( |
| 558 items, self._pending_queue) |
| 559 # Note that: |
| 560 # len(self._pending_queue) == |
| 561 # ( len(self.remote._workers) - self.remote._ready + |
| 562 # len(self._remote._queue) + len(self._remote.done)) |
| 563 # There is no lock-free way to verify that. |
| 564 while self._pending_queue: |
| 565 item = self.remote.get_result() |
| 566 self._pending_queue.remove(item) |
| 567 self.state.append(item) |
| 568 if item in items: |
| 569 return item |
| 570 |
| 571 |
| 572 class Manifest(object): |
| 573 """Represents a single parsed manifest, e.g. a .results file.""" |
| 574 def __init__(self, obj_hash): |
| 575 """|obj_hash| is really the sha-1 of the file.""" |
| 576 logging.debug('Manifest(%s)' % obj_hash) |
| 577 self.obj_hash = obj_hash |
| 578 # Set once all the left-side of the tree is parsed. 'Tree' here means the |
| 579 # manifest and all the manifest recursively included by it with 'includes' |
| 580 # key. The order of each manifest sha-1 in 'includes' is important, as the |
| 581 # later ones are not processed until the firsts are retrieved and read. |
| 582 self.can_fetch = False |
| 583 |
| 584 # Raw data. |
| 585 self.data = {} |
| 586 # A Manifest instance, one per object in self.includes. |
| 587 self.children = [] |
| 588 |
| 589 # Set once the manifest is loaded. |
| 590 self._manifest_parsed = False |
| 591 # Set once the files are fetched. |
| 592 self.files_fetched = False |
| 593 |
| 594 def load(self, content): |
| 595 """Verifies the manifest is valid and loads this object with the json data. |
| 596 """ |
| 597 logging.debug('Manifest.load(%s)' % self.obj_hash) |
| 598 assert not self._manifest_parsed |
| 599 self.data = load_manifest(content) |
| 600 self.children = [Manifest(i) for i in self.data.get('includes', [])] |
| 601 self._manifest_parsed = True |
| 602 |
| 603 def fetch_files(self, cache, files): |
| 604 """Adds files in this manifest not present in files dictionary. |
| 605 |
| 606 Preemptively request files. |
| 607 |
| 608 Note that |files| is modified by this function. |
| 609 """ |
| 610 assert self.can_fetch |
| 611 if not self._manifest_parsed or self.files_fetched: |
| 612 return |
| 613 logging.info('fetch_files(%s)' % self.obj_hash) |
| 614 for filepath, properties in self.data.get('files', {}).iteritems(): |
| 615 # Root manifest has priority on the files being mapped. In particular, |
| 616 # overridden files must not be fetched. |
| 617 if filepath not in files: |
| 618 files[filepath] = properties |
| 619 if 'sha-1' in properties: |
| 620 # Preemptively request files. |
| 621 logging.info('fetching %s' % filepath) |
| 622 cache.retrieve(Remote.MED, properties['sha-1']) |
| 623 self.files_fetched = True |
| 624 |
| 625 |
| 626 class Settings(object): |
| 627 """Results of a completely parsed manifest.""" |
| 628 def __init__(self): |
| 629 self.command = [] |
| 630 self.files = {} |
| 631 self.read_only = None |
| 632 self.relative_cwd = None |
| 633 # The main manifest. |
| 634 self.root = None |
| 635 logging.debug('Settings') |
| 636 |
| 637 def load(self, cache, root_manifest_hash): |
| 638 """Loads the manifest and all the included manifests asynchronously. |
| 639 |
| 640 It enables support for included manifests. They are processed in strict order |
| 641 but fetched asynchronously from the cache. This is important so that a file |
| 642 in an included manifest that is overridden by an embedding manifest is not |
| 643 fetched needlessly. The includes are fetched in one pass and the files are |
| 644 fetched as soon as all the manifests on the left-side of the tree were |
| 645 fetched. |
| 646 |
| 647 The prioritization is very important here for nested manifests. 'includes' |
| 648 have the highest priority and the algorithm is optimized for both deep and |
| 649 wide manifests. A deep one is a long link of manifest referenced one at a |
| 650 time by one item in 'includes'. A wide one has a large number of 'includes' |
| 651 in a single manifest. 'left' is defined as an included manifest earlier in |
| 652 the 'includes' list. So the order of the elements in 'includes' is |
| 653 important. |
| 654 """ |
| 655 self.root = Manifest(root_manifest_hash) |
| 656 cache.retrieve(Remote.HIGH, root_manifest_hash) |
| 657 pending = {root_manifest_hash: self.root} |
| 658 # Keeps the list of retrieved items to refuse recursive includes. |
| 659 retrieved = [root_manifest_hash] |
| 660 |
| 661 def update_self(node): |
| 662 node.fetch_files(cache, self.files) |
| 663 # Grabs properties. |
| 664 if not self.command and node.data.get('command'): |
| 665 self.command = node.data['command'] |
| 666 if self.read_only is None and node.data.get('read_only') is not None: |
| 667 self.read_only = node.data['read_only'] |
| 668 if (self.relative_cwd is None and |
| 669 node.data.get('relative_cwd') is not None): |
| 670 self.relative_cwd = node.data['relative_cwd'] |
| 671 |
| 672 def traverse_tree(node): |
| 673 if node.can_fetch: |
| 674 if not node.files_fetched: |
| 675 update_self(node) |
| 676 will_break = False |
| 677 for i in node.children: |
| 678 if not i.can_fetch: |
| 679 if will_break: |
| 680 break |
| 681 # Automatically mark the first one as fetchable. |
| 682 i.can_fetch = True |
| 683 will_break = True |
| 684 traverse_tree(i) |
| 685 |
| 686 while pending: |
| 687 item_hash = cache.wait_for(pending) |
| 688 item = pending.pop(item_hash) |
| 689 item.load(open(cache.path(item_hash), 'r').read()) |
| 690 if item_hash == root_manifest_hash: |
| 691 # It's the root item. |
| 692 item.can_fetch = True |
| 693 |
| 694 for new_child in item.children: |
| 695 h = new_child.obj_hash |
| 696 if h in retrieved: |
| 697 raise ConfigError('Manifest %s is retrieved recursively' % h) |
| 698 pending[h] = new_child |
| 699 cache.retrieve(Remote.HIGH, h) |
| 700 |
| 701 # Traverse the whole tree to see if files can now be fetched. |
| 702 traverse_tree(self.root) |
| 703 def check(n): |
| 704 return all(check(x) for x in n.children) and n.files_fetched |
| 705 assert check(self.root) |
| 706 self.relative_cwd = self.relative_cwd or '' |
| 707 self.read_only = self.read_only or False |
| 708 |
| 709 |
| 710 def run_tha_test(manifest_hash, cache_dir, remote, policies): |
548 """Downloads the dependencies in the cache, hardlinks them into a temporary | 711 """Downloads the dependencies in the cache, hardlinks them into a temporary |
549 directory and runs the executable. | 712 directory and runs the executable. |
550 """ | 713 """ |
| 714 settings = Settings() |
551 with Cache(cache_dir, Remote(remote), policies) as cache: | 715 with Cache(cache_dir, Remote(remote), policies) as cache: |
552 outdir = make_temp_dir('run_tha_test', cache_dir) | 716 outdir = make_temp_dir('run_tha_test', cache_dir) |
553 | |
554 if not 'files' in manifest: | |
555 print >> sys.stderr, 'No file to map' | |
556 return 1 | |
557 if not 'command' in manifest: | |
558 print >> sys.stderr, 'No command to map run' | |
559 return 1 | |
560 | |
561 try: | 717 try: |
562 with Profiler('GetFiles') as _prof: | 718 # Initiate all the files download. |
563 for filepath, properties in manifest['files'].iteritems(): | 719 with Profiler('GetManifests') as _prof: |
| 720 # Optionally support local files. |
| 721 if not RE_IS_SHA1.match(manifest_hash): |
| 722 # Adds it in the cache. While not strictly necessary, this simplifies |
| 723 # the rest. |
| 724 h = hashlib.sha1(open(manifest_hash, 'r').read()).hexdigest() |
| 725 cache.add(manifest_hash, h) |
| 726 manifest_hash = h |
| 727 settings.load(cache, manifest_hash) |
| 728 |
| 729 if not settings.command: |
| 730 print >> sys.stderr, 'No command to run' |
| 731 return 1 |
| 732 |
| 733 with Profiler('GetRest') as _prof: |
| 734 logging.debug('Creating directories') |
| 735 # Creates the tree of directories to create. |
| 736 directories = set(os.path.dirname(f) for f in settings.files) |
| 737 for item in directories: |
| 738 directories.add(os.path.dirname(item)) |
| 739 for d in sorted(directories): |
| 740 if d: |
| 741 os.mkdir(os.path.join(outdir, d)) |
| 742 |
| 743 # Creates the links if necessary. |
| 744 for filepath, properties in settings.files.iteritems(): |
| 745 if 'link' not in properties: |
| 746 continue |
564 outfile = os.path.join(outdir, filepath) | 747 outfile = os.path.join(outdir, filepath) |
565 outfiledir = os.path.dirname(outfile) | 748 os.symlink(properties['link'], outfile) |
566 if not os.path.isdir(outfiledir): | |
567 os.makedirs(outfiledir) | |
568 if 'sha-1' in properties: | |
569 # A normal file. | |
570 infile = properties['sha-1'] | |
571 cache.retrieve(infile) | |
572 link_file(outfile, cache.path(infile), HARDLINK) | |
573 elif 'link' in properties: | |
574 # A symlink. | |
575 os.symlink(properties['link'], outfile) | |
576 else: | |
577 raise ConfigError('Unexpected entry: %s' % properties) | |
578 if 'mode' in properties: | 749 if 'mode' in properties: |
579 # It's not set on Windows. | 750 # It's not set on Windows. |
580 os.chmod(outfile, properties['mode']) | 751 os.chmod(outfile, properties['mode']) |
581 | 752 |
582 cwd = os.path.join(outdir, manifest.get('relative_cwd', '')) | 753 # Remaining files to be processed. |
583 if not os.path.isdir(cwd): | 754 # Note that files could still not be downloaded yet here. |
584 os.makedirs(cwd) | 755 remaining = dict( |
585 if manifest.get('read_only'): | 756 (props['sha-1'], (filepath, props)) |
| 757 for filepath, props in settings.files.iteritems() |
| 758 if 'sha-1' in props) |
| 759 |
| 760 # Do bookkeeping while files are being downloaded in the background. |
| 761 cwd = os.path.join(outdir, settings.relative_cwd) |
| 762 if not os.path.isdir(cwd): |
| 763 os.makedirs(cwd) |
| 764 cmd = settings.command[:] |
| 765 # Ensure paths are correctly separated on windows. |
| 766 cmd[0] = cmd[0].replace('/', os.path.sep) |
| 767 cmd = fix_python_path(cmd) |
| 768 |
| 769 # Now block on the remaining files to be downloaded and mapped. |
| 770 while remaining: |
| 771 obj = cache.wait_for(remaining) |
| 772 filepath, properties = remaining.pop(obj) |
| 773 outfile = os.path.join(outdir, filepath) |
| 774 link_file(outfile, cache.path(obj), HARDLINK) |
| 775 if 'mode' in properties: |
| 776 # It's not set on Windows. |
| 777 os.chmod(outfile, properties['mode']) |
| 778 |
| 779 if settings.read_only: |
586 make_writable(outdir, True) | 780 make_writable(outdir, True) |
587 cmd = manifest['command'] | |
588 # Ensure paths are correctly separated on windows. | |
589 cmd[0] = cmd[0].replace('/', os.path.sep) | |
590 cmd = fix_python_path(cmd) | |
591 logging.info('Running %s, cwd=%s' % (cmd, cwd)) | 781 logging.info('Running %s, cwd=%s' % (cmd, cwd)) |
592 try: | 782 try: |
593 with Profiler('RunTest') as _prof: | 783 with Profiler('RunTest') as _prof: |
594 return subprocess.call(cmd, cwd=cwd) | 784 return subprocess.call(cmd, cwd=cwd) |
595 except OSError: | 785 except OSError: |
596 print >> sys.stderr, 'Failed to run %s; cwd=%s' % (cmd, cwd) | 786 print >> sys.stderr, 'Failed to run %s; cwd=%s' % (cmd, cwd) |
597 raise | 787 raise |
598 finally: | 788 finally: |
599 rmtree(outdir) | 789 rmtree(outdir) |
600 | 790 |
(...skipping 51 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
652 level=level, | 842 level=level, |
653 format='%(levelname)5s %(module)15s(%(lineno)3d): %(message)s') | 843 format='%(levelname)5s %(module)15s(%(lineno)3d): %(message)s') |
654 | 844 |
655 if bool(options.manifest) == bool(options.hash): | 845 if bool(options.manifest) == bool(options.hash): |
656 parser.error('One and only one of --manifest or --hash is required.') | 846 parser.error('One and only one of --manifest or --hash is required.') |
657 if not options.remote: | 847 if not options.remote: |
658 parser.error('--remote is required.') | 848 parser.error('--remote is required.') |
659 if args: | 849 if args: |
660 parser.error('Unsupported args %s' % ' '.join(args)) | 850 parser.error('Unsupported args %s' % ' '.join(args)) |
661 | 851 |
662 if options.hash: | |
663 # First calculate the reference to it. | |
664 options.manifest = '%s/%s' % (options.remote.rstrip('/'), options.hash) | |
665 try: | |
666 manifest = load_manifest(open_remote(options.manifest).read()) | |
667 except IOError as e: | |
668 parser.error( | |
669 'Failed to read manifest %s; remote:%s; hash:%s; %s' % | |
670 (options.manifest, options.remote, options.hash, str(e))) | |
671 | |
672 policies = CachePolicies( | 852 policies = CachePolicies( |
673 options.max_cache_size, options.min_free_space, options.max_items) | 853 options.max_cache_size, options.min_free_space, options.max_items) |
674 try: | 854 try: |
675 return run_tha_test( | 855 return run_tha_test( |
676 manifest, | 856 options.manifest or options.hash, |
677 os.path.abspath(options.cache), | 857 os.path.abspath(options.cache), |
678 options.remote, | 858 options.remote, |
679 policies) | 859 policies) |
680 except (ConfigError, MappingError), e: | 860 except (ConfigError, MappingError), e: |
681 print >> sys.stderr, str(e) | 861 print >> sys.stderr, str(e) |
682 return 1 | 862 return 1 |
683 | 863 |
684 | 864 |
685 if __name__ == '__main__': | 865 if __name__ == '__main__': |
686 sys.exit(main()) | 866 sys.exit(main()) |
OLD | NEW |