OLD | NEW |
---|---|
1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. | 2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
5 | 5 |
6 """Reads a manifest, creates a tree of hardlinks and runs the test. | 6 """Reads a manifest, creates a tree of hardlinks and runs the test. |
7 | 7 |
8 Keeps a local cache. | 8 Keeps a local cache. |
9 """ | 9 """ |
10 | 10 |
11 import ctypes | 11 import ctypes |
12 import hashlib | |
12 import json | 13 import json |
13 import logging | 14 import logging |
14 import optparse | 15 import optparse |
15 import os | 16 import os |
16 import Queue | 17 import Queue |
17 import re | 18 import re |
18 import shutil | 19 import shutil |
19 import stat | 20 import stat |
20 import subprocess | 21 import subprocess |
21 import sys | 22 import sys |
(...skipping 115 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
137 # If the drive letter mismatches, assume it's a separate partition. | 138 # If the drive letter mismatches, assume it's a separate partition. |
138 # TODO(maruel): It should look at the underlying drive, a drive letter could | 139 # TODO(maruel): It should look at the underlying drive, a drive letter could |
139 # be a mount point to a directory on another drive. | 140 # be a mount point to a directory on another drive. |
140 assert re.match(r'^[a-zA-Z]\:\\.*', path1), path1 | 141 assert re.match(r'^[a-zA-Z]\:\\.*', path1), path1 |
141 assert re.match(r'^[a-zA-Z]\:\\.*', path2), path2 | 142 assert re.match(r'^[a-zA-Z]\:\\.*', path2), path2 |
142 if path1[0].lower() != path2[0].lower(): | 143 if path1[0].lower() != path2[0].lower(): |
143 return False | 144 return False |
144 return os.stat(path1).st_dev == os.stat(path2).st_dev | 145 return os.stat(path1).st_dev == os.stat(path2).st_dev |
145 | 146 |
146 | 147 |
147 def open_remote(file_or_url): | |
148 """Reads a file or url.""" | |
149 if re.match(r'^https?://.+$', file_or_url): | |
150 return urllib.urlopen(file_or_url) | |
151 return open(file_or_url, 'rb') | |
152 | |
153 | |
154 def get_free_space(path): | 148 def get_free_space(path): |
155 """Returns the number of free bytes.""" | 149 """Returns the number of free bytes.""" |
156 if sys.platform == 'win32': | 150 if sys.platform == 'win32': |
157 free_bytes = ctypes.c_ulonglong(0) | 151 free_bytes = ctypes.c_ulonglong(0) |
158 ctypes.windll.kernel32.GetDiskFreeSpaceExW( | 152 ctypes.windll.kernel32.GetDiskFreeSpaceExW( |
159 ctypes.c_wchar_p(path), None, None, ctypes.pointer(free_bytes)) | 153 ctypes.c_wchar_p(path), None, None, ctypes.pointer(free_bytes)) |
160 return free_bytes.value | 154 return free_bytes.value |
161 f = os.statvfs(path) | 155 f = os.statvfs(path) |
162 return f.f_bfree * f.f_frsize | 156 return f.f_bfree * f.f_frsize |
163 | 157 |
(...skipping 84 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
248 | 242 |
249 files_key = { | 243 files_key = { |
250 # Any file has to fit files_keys specification. | 244 # Any file has to fit files_keys specification. |
251 None: get_assert_is_dict(assert_is_str, file_keys), | 245 None: get_assert_is_dict(assert_is_str, file_keys), |
252 } | 246 } |
253 | 247 |
254 keys = { | 248 keys = { |
255 'command': get_assert_is_list(assert_is_str), | 249 'command': get_assert_is_list(assert_is_str), |
256 # Could use assert_is_valid_filename instead of assert_is_str. | 250 # Could use assert_is_valid_filename instead of assert_is_str. |
257 'files': get_assert_is_dict(assert_is_str, files_key), | 251 'files': get_assert_is_dict(assert_is_str, files_key), |
252 'includes': get_assert_is_list(assert_is_sha1), | |
258 'read_only': assert_is_bool, | 253 'read_only': assert_is_bool, |
259 'relative_cwd': assert_is_str, | 254 'relative_cwd': assert_is_str, |
260 } | 255 } |
261 | 256 |
262 get_assert_is_dict(assert_is_str, keys)(data) | 257 get_assert_is_dict(assert_is_str, keys)(data) |
263 # Add a special verification. | 258 # Add a special verification. |
264 for value in data.get('files', {}).itervalues(): | 259 for value in data.get('files', {}).itervalues(): |
265 if bool('sha-1' in value) and bool('link' in value): | 260 if bool('sha-1' in value) and bool('link' in value): |
266 raise ConfigError( | 261 raise ConfigError( |
267 'Did not expect both \'sha-1\' and \'link\', got: %r' % value) | 262 'Did not expect both \'sha-1\' and \'link\', got: %r' % value) |
(...skipping 175 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
443 - policies: cache retention policies. | 438 - policies: cache retention policies. |
444 """ | 439 """ |
445 self.cache_dir = cache_dir | 440 self.cache_dir = cache_dir |
446 self.remote = remote | 441 self.remote = remote |
447 self.policies = policies | 442 self.policies = policies |
448 self.state_file = os.path.join(cache_dir, self.STATE_FILE) | 443 self.state_file = os.path.join(cache_dir, self.STATE_FILE) |
449 # The files are kept as an array in a LRU style. E.g. self.state[0] is the | 444 # The files are kept as an array in a LRU style. E.g. self.state[0] is the |
450 # oldest item. | 445 # oldest item. |
451 self.state = [] | 446 self.state = [] |
452 | 447 |
448 # Items currently being fetched. Keep it local to reduce lock contention. | |
449 self._pending_queue = set() | |
 450 # Items fetched but that the caller has not consumed yet. | |
csharp
2012/08/28 19:03:31
Nit: Not sure what you mean here.
users didn't ca
M-A Ruel
2012/08/28 21:59:36
Me neither. Bad sign.
| |
451 self._done = [] | |
452 | |
453 # Profiling values. | 453 # Profiling values. |
454 # The files added and removed are stored as tuples of the filename and | 454 # The files added and removed are stored as tuples of the filename and |
455 # the file size. | 455 # the file size. |
456 self.files_added = [] | 456 self.files_added = [] |
457 self.files_removed = [] | 457 self.files_removed = [] |
458 self.time_retrieving_files = 0 | |
459 | 458 |
460 if not os.path.isdir(self.cache_dir): | 459 if not os.path.isdir(self.cache_dir): |
461 os.makedirs(self.cache_dir) | 460 os.makedirs(self.cache_dir) |
462 if os.path.isfile(self.state_file): | 461 if os.path.isfile(self.state_file): |
463 try: | 462 try: |
464 self.state = json.load(open(self.state_file, 'r')) | 463 self.state = json.load(open(self.state_file, 'r')) |
465 except (IOError, ValueError), e: | 464 except (IOError, ValueError), e: |
466 # Too bad. The file will be overwritten and the cache cleared. | 465 # Too bad. The file will be overwritten and the cache cleared. |
467 logging.error( | 466 logging.error( |
468 'Broken state file %s, ignoring.\n%s' % (self.STATE_FILE, e)) | 467 'Broken state file %s, ignoring.\n%s' % (self.STATE_FILE, e)) |
469 with Profiler('SetupTrimming'): | 468 with Profiler('SetupTrimming'): |
470 self.trim() | 469 self.trim() |
471 | 470 |
472 def __enter__(self): | 471 def __enter__(self): |
473 return self | 472 return self |
474 | 473 |
475 def __exit__(self, _exc_type, _exec_value, _traceback): | 474 def __exit__(self, _exc_type, _exec_value, _traceback): |
476 with Profiler('CleanupTrimming'): | 475 with Profiler('CleanupTrimming'): |
477 self.trim() | 476 self.trim() |
478 | 477 |
479 logging.info('Number of files added to cache: %i', | 478 logging.info('Number of files added to cache: %i', |
480 len(self.files_added)) | 479 len(self.files_added)) |
481 logging.info('Size of files added to cache: %i', | 480 logging.info('Size of files added to cache: %i', |
482 sum(item[1] for item in self.files_added)) | 481 sum(item[1] for item in self.files_added)) |
483 logging.info('Time taken (in seconds) to add files to cache: %s', | |
484 self.time_retrieving_files) | |
485 logging.debug('All files added:') | 482 logging.debug('All files added:') |
486 logging.debug(self.files_added) | 483 logging.debug(self.files_added) |
487 | 484 |
488 logging.info('Number of files removed from cache: %i', | 485 logging.info('Number of files removed from cache: %i', |
489 len(self.files_removed)) | 486 len(self.files_removed)) |
490 logging.info('Size of files removed from cache: %i', | 487 logging.info('Size of files removed from cache: %i', |
491 sum(item[1] for item in self.files_removed)) | 488 sum(item[1] for item in self.files_removed)) |
492 logging.debug('All files remove:') | 489 logging.debug('All files remove:') |
493 logging.debug(self.files_added) | 490 logging.debug(self.files_added) |
494 | 491 |
(...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
540 self.remove_lru_file() | 537 self.remove_lru_file() |
541 sizes.pop(0) | 538 sizes.pop(0) |
542 | 539 |
543 # Ensure maximum number of items in the cache. | 540 # Ensure maximum number of items in the cache. |
544 if self.policies.max_items and self.state: | 541 if self.policies.max_items and self.state: |
545 while len(self.state) > self.policies.max_items: | 542 while len(self.state) > self.policies.max_items: |
546 self.remove_lru_file() | 543 self.remove_lru_file() |
547 | 544 |
548 self.save() | 545 self.save() |
549 | 546 |
550 def retrieve(self, item): | 547 def retrieve(self, priority, item): |
551 """Retrieves a file from the remote and add it to the cache.""" | 548 """Retrieves a file from the remote, if not already cached, and adds it to |
549 the cache. | |
550 """ | |
552 assert not '/' in item | 551 assert not '/' in item |
552 path = self.path(item) | |
553 try: | 553 try: |
554 index = self.state.index(item) | 554 index = self.state.index(item) |
 555 # Was already in cache. Update its LRU value. | 555 # Was already in cache. Update its LRU value. |
556 self.state.pop(index) | 556 self.state.pop(index) |
557 self.state.append(item) | 557 self.state.append(item) |
558 return False | 558 os.utime(path, None) |
559 except ValueError: | 559 except ValueError: |
560 out = self.path(item) | 560 if item in self._pending_queue: |
561 start_retrieve = time.time() | 561 # Already pending. The same object could be referenced multiple times. |
562 self.remote.fetch_item(Remote.MED, item, out) | 562 return |
563 # TODO(maruel): Temporarily fetch the files serially. | 563 self.remote.fetch_item(priority, item, path) |
564 self.remote.get_result() | 564 self._pending_queue.add(item) |
565 if os.path.exists(out): | 565 |
566 self.state.append(item) | 566 def add(self, filepath, obj): |
567 self.files_added.append((out, os.stat(out).st_size)) | 567 """Forcibly adds a file to the cache.""" |
568 else: | 568 if not obj in self.state: |
569 logging.error('File, %s, not placed in cache' % item) | 569 link_file(self.path(obj), filepath, HARDLINK) |
570 self.time_retrieving_files += time.time() - start_retrieve | 570 self.state.append(obj) |
571 return True | |
572 finally: | |
573 self.save() | |
574 | 571 |
575 def path(self, item): | 572 def path(self, item): |
576 """Returns the path to one item.""" | 573 """Returns the path to one item.""" |
577 return os.path.join(self.cache_dir, item) | 574 return os.path.join(self.cache_dir, item) |
578 | 575 |
579 def save(self): | 576 def save(self): |
580 """Saves the LRU ordering.""" | 577 """Saves the LRU ordering.""" |
581 json.dump(self.state, open(self.state_file, 'wb'), separators=(',',':')) | 578 json.dump(self.state, open(self.state_file, 'wb'), separators=(',',':')) |
582 | 579 |
583 | 580 def wait_for(self, items): |
584 def run_tha_test(manifest, cache_dir, remote, policies): | 581 """Starts a loop that waits for at least one of |items| to be retrieved. |
582 | |
583 Returns the first item retrieved. | |
584 """ | |
585 # Flush items already present. | |
586 for item in items: | |
587 if item in self.state: | |
588 return item | |
589 | |
590 assert all(i in self._pending_queue for i in items), ( | |
591 items, self._pending_queue) | |
592 # Note that: | |
593 # len(self._pending_queue) == | |
594 # ( len(self.remote._workers) - self.remote._ready + | |
595 # len(self._remote._queue) + len(self._remote.done)) | |
596 # There is no lock-free way to verify that. | |
597 while self._pending_queue: | |
598 item = self.remote.get_result() | |
599 self._pending_queue.remove(item) | |
600 self.state.append(item) | |
601 if item in items: | |
602 return item | |
603 | |
604 | |
605 class Manifest(object): | |
606 """Represents a single parsed manifest, e.g. a .results file.""" | |
607 def __init__(self, obj_hash): | |
 608 """|obj_hash| is really the sha-1 of the file.""" | |
csharp
2012/08/28 19:03:31
nit: obj->obj_hash
M-A Ruel
2012/08/28 21:59:36
done
| |
609 logging.debug('Manifest(%s)' % obj_hash) | |
610 self.obj_hash = obj_hash | |
 611 # Set once all the manifests on the left side of the 'includes' tree are parsed. | |
csharp
2012/08/28 19:03:31
What tree?
M-A Ruel
2012/08/28 21:59:36
Added more info.
| |
612 self.can_fetch = False | |
613 | |
614 # Raw data. | |
615 self.data = {} | |
616 # A Manifest instance, one per object in self.includes. | |
617 self.children = [] | |
618 | |
619 # Set once the manifest is loaded. | |
620 self._manifest_parsed = False | |
621 # Set once the files are fetched. | |
622 self.files_fetched = False | |
623 | |
624 def load(self, content): | |
625 """Verifies the manifest is valid and loads this object with the json data. | |
626 """ | |
627 logging.debug('Manifest.load(%s)' % self.obj_hash) | |
628 assert not self._manifest_parsed | |
629 self.data = load_manifest(content) | |
630 self.children = [Manifest(i) for i in self.data.get('includes', [])] | |
631 self._manifest_parsed = True | |
632 | |
633 def fetch_files(self, cache, files): | |
634 """Adds files in this manifest not present in files dictionary. | |
635 | |
636 Preemptively request files. | |
637 | |
638 Note that |files| is modified by this function. | |
639 """ | |
640 assert self.can_fetch | |
641 if not self._manifest_parsed or self.files_fetched: | |
642 return | |
643 logging.info('fetch_files(%s)' % self.obj_hash) | |
644 for filepath, properties in self.data.get('files', {}).iteritems(): | |
645 # Root manifest has priority on the files being mapped. In particular, | |
 646 # overridden files must not be fetched. | |
647 if filepath not in files: | |
648 files[filepath] = properties | |
649 if 'sha-1' in properties: | |
650 # Preemptively request files. | |
651 logging.info('fetching %s' % filepath) | |
652 cache.retrieve(Remote.MED, properties['sha-1']) | |
653 self.files_fetched = True | |
654 | |
655 | |
656 class Settings(object): | |
657 """Results of a completely parsed manifest.""" | |
658 def __init__(self): | |
659 self.command = [] | |
660 self.files = {} | |
661 self.read_only = None | |
662 self.relative_cwd = None | |
663 # The main manifest. | |
664 self.root = None | |
665 logging.debug('Settings') | |
666 | |
667 def load(self, cache, root_manifest_hash): | |
 668 """Loads the manifest and all the included manifests asynchronously. | |
csharp
2012/08/28 19:03:31
Nit: manifests
M-A Ruel
2012/08/28 21:59:36
done
| |
669 | |
 670 It enables support for included manifests. They are processed in strict order | |
671 but fetched asynchronously from the cache. This is important so that a file | |
672 in an included manifest that is overridden by an embedding manifest is not | |
 673 fetched needlessly. The includes are fetched in one pass and the files are | |
674 fetched as soon as all the manifests on the left-side of the tree were | |
675 fetched. | |
676 | |
677 The prioritization is very important here for nested manifests. 'includes' | |
678 have the highest priority and the algorithm is optimized for both deep and | |
679 wide manifests. A deep one is a long link of manifest referenced one at a | |
680 time by one item in 'includes'. A wide one has a large number of 'includes' | |
681 in a single manifest. 'left' is defined as an included manifest earlier in | |
682 the 'includes' list. So the order of the elements in 'includes' is | |
683 important. | |
684 """ | |
685 self.root = Manifest(root_manifest_hash) | |
686 cache.retrieve(Remote.HIGH, root_manifest_hash) | |
687 pending = {root_manifest_hash: self.root} | |
688 # Keeps the list of retrieved items to refuse recursive includes. | |
689 retrieved = [root_manifest_hash] | |
690 | |
691 def update_self(node): | |
692 node.fetch_files(cache, self.files) | |
693 # Grabs properties. | |
694 if not self.command and node.data.get('command'): | |
695 self.command = node.data['command'] | |
696 if self.read_only is None and node.data.get('read_only') is not None: | |
697 self.read_only = node.data['read_only'] | |
698 if (self.relative_cwd is None and | |
699 node.data.get('relative_cwd') is not None): | |
700 self.relative_cwd = node.data['relative_cwd'] | |
701 | |
702 def traverse_tree(node): | |
703 if node.can_fetch: | |
704 if not node.files_fetched: | |
705 update_self(node) | |
706 will_break = False | |
707 for i in node.children: | |
708 if not i.can_fetch: | |
709 if will_break: | |
710 break | |
711 # Automatically mark the first one as fetcheable. | |
712 i.can_fetch = True | |
713 will_break = True | |
714 traverse_tree(i) | |
715 | |
716 while pending: | |
717 item_hash = cache.wait_for(pending) | |
718 item = pending.pop(item_hash) | |
719 item.load(open(cache.path(item_hash), 'r').read()) | |
720 if item_hash == root_manifest_hash: | |
721 # It's the root item. | |
722 item.can_fetch = True | |
723 | |
724 for new_child in item.children: | |
725 h = new_child.obj_hash | |
726 if h in retrieved: | |
727 raise ConfigError('Manifest %s is retrieved recursively' % h) | |
728 pending[h] = new_child | |
729 cache.retrieve(Remote.HIGH, h) | |
730 | |
731 # Traverse the whole tree to see if files can now be fetched. | |
732 traverse_tree(self.root) | |
733 def check(n): | |
734 return all(check(x) for x in n.children) and n.files_fetched | |
735 assert check(self.root) | |
736 self.relative_cwd = self.relative_cwd or '' | |
737 self.read_only = self.read_only or False | |
738 | |
739 | |
740 def run_tha_test(manifest_hash, cache_dir, remote, policies): | |
585 """Downloads the dependencies in the cache, hardlinks them into a temporary | 741 """Downloads the dependencies in the cache, hardlinks them into a temporary |
586 directory and runs the executable. | 742 directory and runs the executable. |
587 """ | 743 """ |
744 settings = Settings() | |
588 with Cache(cache_dir, Remote(remote), policies) as cache: | 745 with Cache(cache_dir, Remote(remote), policies) as cache: |
589 outdir = make_temp_dir('run_tha_test', cache_dir) | 746 outdir = make_temp_dir('run_tha_test', cache_dir) |
590 | |
591 if not 'files' in manifest: | |
592 print >> sys.stderr, 'No file to map' | |
593 return 1 | |
594 if not 'command' in manifest: | |
595 print >> sys.stderr, 'No command to map run' | |
596 return 1 | |
597 | |
598 try: | 747 try: |
599 with Profiler('GetFiles') as _prof: | 748 # Initiate all the files download. |
600 for filepath, properties in manifest['files'].iteritems(): | 749 with Profiler('GetManifests') as _prof: |
750 # Optionally support local files. | |
751 if not RE_IS_SHA1.match(manifest_hash): | |
752 # Adds it in the cache. While not strictly necessary, this simplifies | |
753 # the rest. | |
754 h = hashlib.sha1(open(manifest_hash, 'r').read()).hexdigest() | |
755 cache.add(manifest_hash, h) | |
756 manifest_hash = h | |
757 | |
cmp
2012/08/28 17:49:30
let's drop this empty line
M-A Ruel
2012/08/28 21:59:36
done
| |
758 settings.load(cache, manifest_hash) | |
759 | |
760 if not settings.command: | |
761 print >> sys.stderr, 'No command to run' | |
762 return 1 | |
763 | |
764 with Profiler('GetRest') as _prof: | |
765 logging.debug('Creating directories') | |
766 # Creates the tree of directories to create. | |
767 directories = set(os.path.dirname(f) for f in settings.files) | |
768 for item in directories: | |
769 directories.add(os.path.dirname(item)) | |
770 for d in sorted(directories): | |
771 if d: | |
772 os.mkdir(os.path.join(outdir, d)) | |
773 | |
 774 # Creates the links if necessary. | |
cmp
2012/08/28 17:49:30
append a period
M-A Ruel
2012/08/28 21:59:36
done
| |
775 for filepath, properties in settings.files.iteritems(): | |
776 if 'link' not in properties: | |
777 continue | |
601 outfile = os.path.join(outdir, filepath) | 778 outfile = os.path.join(outdir, filepath) |
602 outfiledir = os.path.dirname(outfile) | 779 os.symlink(properties['link'], outfile) |
603 if not os.path.isdir(outfiledir): | |
604 os.makedirs(outfiledir) | |
605 if 'sha-1' in properties: | |
606 # A normal file. | |
607 infile = properties['sha-1'] | |
608 cache.retrieve(infile) | |
609 link_file(outfile, cache.path(infile), HARDLINK) | |
610 elif 'link' in properties: | |
611 # A symlink. | |
612 os.symlink(properties['link'], outfile) | |
613 else: | |
614 raise ConfigError('Unexpected entry: %s' % properties) | |
615 if 'mode' in properties: | 780 if 'mode' in properties: |
616 # It's not set on Windows. | 781 # It's not set on Windows. |
617 os.chmod(outfile, properties['mode']) | 782 os.chmod(outfile, properties['mode']) |
618 | 783 |
619 cwd = os.path.join(outdir, manifest.get('relative_cwd', '')) | 784 # Remaining files to be processed. |
 620 if not os.path.isdir(cwd): | 785 # Note that files could still not be downloaded yet here. |
621 os.makedirs(cwd) | 786 remaining = dict( |
622 if manifest.get('read_only'): | 787 (props['sha-1'], (filepath, props)) |
788 for filepath, props in settings.files.iteritems() | |
789 if 'sha-1' in props) | |
790 | |
 791 # Do bookkeeping while files are being downloaded in the background. | |
cmp
2012/08/28 17:49:30
bookeeping -> bookkeeping
M-A Ruel
2012/08/28 21:59:36
done
| |
792 cwd = os.path.join(outdir, settings.relative_cwd) | |
793 if not os.path.isdir(cwd): | |
794 os.makedirs(cwd) | |
795 cmd = settings.command[:] | |
796 # Ensure paths are correctly separated on windows. | |
797 cmd[0] = cmd[0].replace('/', os.path.sep) | |
798 cmd = fix_python_path(cmd) | |
799 | |
800 # Now block on the remaining files to be downloaded and mapped. | |
801 while remaining: | |
802 obj = cache.wait_for(remaining) | |
803 filepath, properties = remaining.pop(obj) | |
804 outfile = os.path.join(outdir, filepath) | |
805 link_file(outfile, cache.path(obj), HARDLINK) | |
806 if 'mode' in properties: | |
807 # It's not set on Windows. | |
808 os.chmod(outfile, properties['mode']) | |
809 | |
810 if settings.read_only: | |
623 make_writable(outdir, True) | 811 make_writable(outdir, True) |
624 cmd = manifest['command'] | |
625 # Ensure paths are correctly separated on windows. | |
626 cmd[0] = cmd[0].replace('/', os.path.sep) | |
627 cmd = fix_python_path(cmd) | |
628 logging.info('Running %s, cwd=%s' % (cmd, cwd)) | 812 logging.info('Running %s, cwd=%s' % (cmd, cwd)) |
629 try: | 813 try: |
630 with Profiler('RunTest') as _prof: | 814 with Profiler('RunTest') as _prof: |
631 return subprocess.call(cmd, cwd=cwd) | 815 return subprocess.call(cmd, cwd=cwd) |
632 except OSError: | 816 except OSError: |
633 print >> sys.stderr, 'Failed to run %s; cwd=%s' % (cmd, cwd) | 817 print >> sys.stderr, 'Failed to run %s; cwd=%s' % (cmd, cwd) |
634 raise | 818 raise |
635 finally: | 819 finally: |
636 rmtree(outdir) | 820 rmtree(outdir) |
637 | 821 |
(...skipping 51 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
689 level=level, | 873 level=level, |
690 format='%(levelname)5s %(module)15s(%(lineno)3d): %(message)s') | 874 format='%(levelname)5s %(module)15s(%(lineno)3d): %(message)s') |
691 | 875 |
692 if bool(options.manifest) == bool(options.hash): | 876 if bool(options.manifest) == bool(options.hash): |
693 parser.error('One and only one of --manifest or --hash is required.') | 877 parser.error('One and only one of --manifest or --hash is required.') |
694 if not options.remote: | 878 if not options.remote: |
695 parser.error('--remote is required.') | 879 parser.error('--remote is required.') |
696 if args: | 880 if args: |
697 parser.error('Unsupported args %s' % ' '.join(args)) | 881 parser.error('Unsupported args %s' % ' '.join(args)) |
698 | 882 |
699 if options.hash: | |
700 # First calculate the reference to it. | |
701 options.manifest = '%s/%s' % (options.remote.rstrip('/'), options.hash) | |
702 try: | |
703 manifest = load_manifest(open_remote(options.manifest).read()) | |
704 except IOError as e: | |
705 parser.error( | |
706 'Failed to read manifest %s; remote:%s; hash:%s; %s' % | |
707 (options.manifest, options.remote, options.hash, str(e))) | |
708 | |
709 policies = CachePolicies( | 883 policies = CachePolicies( |
710 options.max_cache_size, options.min_free_space, options.max_items) | 884 options.max_cache_size, options.min_free_space, options.max_items) |
711 try: | 885 try: |
712 return run_tha_test( | 886 return run_tha_test( |
713 manifest, | 887 options.manifest or options.hash, |
714 os.path.abspath(options.cache), | 888 os.path.abspath(options.cache), |
715 options.remote, | 889 options.remote, |
716 policies) | 890 policies) |
717 except (ConfigError, MappingError), e: | 891 except (ConfigError, MappingError), e: |
718 print >> sys.stderr, str(e) | 892 print >> sys.stderr, str(e) |
719 return 1 | 893 return 1 |
720 | 894 |
721 | 895 |
722 if __name__ == '__main__': | 896 if __name__ == '__main__': |
723 sys.exit(main()) | 897 sys.exit(main()) |
OLD | NEW |