Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
| 2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. | 2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
| 4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
| 5 # run_slavelastic.py: Runs a test based off of a slavelastic manifest file. | 5 # run_slavelastic.py: Runs a test based off of a slavelastic manifest file. |
| 6 | 6 |
| 7 from __future__ import with_statement | 7 from __future__ import with_statement |
| 8 import glob | |
| 9 import json | 8 import json |
| 10 import optparse | 9 import optparse |
| 11 import os | 10 import os |
| 12 import platform | 11 import platform |
| 13 import socket | 12 import socket |
| 13 import shutil | |
| 14 import sys | 14 import sys |
| 15 import time | 15 import time |
| 16 import urllib | |
| 17 import urllib2 | 16 import urllib2 |
| 17 import urlparse | |
| 18 import zipfile | 18 import zipfile |
| 19 | 19 |
| 20 | 20 |
| 21 DESCRIPTION = """This script takes a slavelastic manifest file, packages it, | 21 DESCRIPTION = """This script takes a slavelastic manifest file, packages it, |
| 22 and sends a swarm manifest file to the swarm server. This is expected to be | 22 and sends a swarm manifest file to the swarm server. This is expected to be |
| 23 called as a build step with the cwd as the parent of the src/ directory. | 23 called as a build step with the cwd as the parent of the src/ directory. |
| 24 """ | 24 """ |
| 25 | 25 |
| 26 class Manifest(object): | 26 class Manifest(object): |
| 27 run_test_path = os.path.join( | 27 run_test_path = os.path.join( |
| (...skipping 15 matching lines...) Expand all Loading... | |
| 43 | 43 |
| 44 self.manifest_name = filename | 44 self.manifest_name = filename |
| 45 | 45 |
| 46 self.g_shards = switches.num_shards | 46 self.g_shards = switches.num_shards |
| 47 # Random name for the output zip file | 47 # Random name for the output zip file |
| 48 self.zipfile_name = test_name + '.zip' | 48 self.zipfile_name = test_name + '.zip' |
| 49 self.tasks = [] | 49 self.tasks = [] |
| 50 self.target_platform = platform_mapping[switches.os_image] | 50 self.target_platform = platform_mapping[switches.os_image] |
| 51 self.working_dir = switches.working_dir | 51 self.working_dir = switches.working_dir |
| 52 self.test_name = test_name | 52 self.test_name = test_name |
| 53 self.data_url = switches.data_url | |
| 54 self.data_dest_dir = switches.data_dest_dir | |
| 53 | 55 |
| 54 def add_task(self, task_name, actions): | 56 def add_task(self, task_name, actions): |
| 55 """Appends a new task to the swarm manifest file.""" | 57 """Appends a new task to the swarm manifest file.""" |
| 56 self.tasks.append({ | 58 self.tasks.append({ |
| 57 'test_name': task_name, | 59 'test_name': task_name, |
| 58 'action': actions, | 60 'action': actions, |
| 59 }) | 61 }) |
| 60 | 62 |
| 61 def zip(self): | 63 def zip(self): |
| 62 """Zip up all the files in self.files""" | 64 """Zip up all the files in self.files""" |
| 63 start_time = time.time() | 65 start_time = time.time() |
| 64 | 66 |
| 65 zip_file = zipfile.ZipFile(self.zipfile_name, 'w') | 67 zip_file = zipfile.ZipFile( |
| 68 os.path.join(self.data_dest_dir, self.zipfile_name), | |
| 69 'w') | |
| 66 zip_file.write(self.manifest_name) | 70 zip_file.write(self.manifest_name) |
| 67 zip_file.write(self.run_test_path) | 71 zip_file.write(self.run_test_path) |
| 68 zip_file.close() | 72 zip_file.close() |
| 69 | 73 |
| 70 print 'Zipping completed, time elapsed: %f' % (time.time() - start_time) | 74 print 'Zipping completed, time elapsed: %f' % (time.time() - start_time) |
| 71 | 75 |
| 72 def to_json(self): | 76 def to_json(self): |
| 73 """Export the current configuration into a swarm-readable manifest file""" | 77 """Export the current configuration into a swarm-readable manifest file""" |
| 74 hostname = socket.gethostbyname(socket.gethostname()) | 78 hashtable_url = urlparse.urljoin(self.data_url, 'hashtable') |
| 75 | |
| 76 # Adjust the port used to access the data via the python simpleserver. | |
| 77 # TODO(csharp): Remove this once file accesses between build and swarm bots | |
| 78 # has been fixed. | |
| 79 hostname += ':8080' | |
| 80 | |
| 81 filepath = os.path.relpath(self.zipfile_name, '../..') | |
| 82 filepath_url = urllib.pathname2url(filepath) | |
| 83 | |
| 84 hashtable_url = 'http://%s/hashtable/' % hostname | |
| 85 self.add_task( | 79 self.add_task( |
| 86 'Run Test', | 80 'Run Test', |
| 87 ['python', self.run_test_path, '-m', self.manifest_name, | 81 ['python', self.run_test_path, '-m', self.manifest_name, |
| 88 '-r', hashtable_url]) | 82 '-r', hashtable_url]) |
| 89 | 83 |
| 90 # Clean up | 84 # Clean up |
| 91 # TODO(csharp) This can be removed once the swarm cleanup parameter is | 85 # TODO(csharp) This can be removed once the swarm cleanup parameter is |
| 92 # properly handled. | 86 # properly handled. |
| 93 if self.target_platform == 'Linux' or self.target_platform == 'Mac': | 87 if self.target_platform == 'Linux' or self.target_platform == 'Mac': |
| 94 cleanup_commands = ['rm', '-rf'] | 88 cleanup_commands = ['rm', '-rf'] |
| 95 elif self.target_platform == 'Windows': | 89 elif self.target_platform == 'Windows': |
| 96 cleanup_commands = ['del'] | 90 cleanup_commands = ['del'] |
| 97 self.add_task('Clean Up', cleanup_commands + [self.zipfile_name]) | 91 self.add_task('Clean Up', cleanup_commands + [self.zipfile_name]) |
| 98 | 92 |
| 99 # Call kill_processes.py if on windows | 93 # Call kill_processes.py if on windows |
| 100 if self.target_platform == 'Windows': | 94 if self.target_platform == 'Windows': |
| 101 self.add_task('Kill Processes', | 95 self.add_task('Kill Processes', |
| 102 [sys.executable, '..\\b\\build\\scripts\\slave\\kill_processes.py']) | 96 [sys.executable, '..\\b\\build\\scripts\\slave\\kill_processes.py']) |
| 103 | 97 |
| 104 # Construct test case | 98 # Construct test case |
| 105 test_case = { | 99 test_case = { |
| 106 'test_case_name': self.test_name, | 100 'test_case_name': self.test_name, |
| 107 'data': [ | 101 'data': [ |
| 108 'http://%s/%s' % (hostname, filepath_url), | 102 urlparse.urljoin(self.data_url, self.zipfile_name), |
| 109 ], | 103 ], |
| 110 'tests': self.tasks, | 104 'tests': self.tasks, |
| 111 'env_vars': { | 105 'env_vars': { |
| 112 'GTEST_TOTAL_SHARDS': '%(num_instances)s', | 106 'GTEST_TOTAL_SHARDS': '%(num_instances)s', |
| 113 'GTEST_SHARD_INDEX': '%(instance_index)s', | 107 'GTEST_SHARD_INDEX': '%(instance_index)s', |
| 114 }, | 108 }, |
| 115 'configurations': [ | 109 'configurations': [ |
| 116 { | 110 { |
| 117 'min_instances': self.g_shards, | 111 'min_instances': self.g_shards, |
| 118 'max_instances': self.g_shards, | 112 'max_instances': self.g_shards, |
| 119 'config_name': self.target_platform, | 113 'config_name': self.target_platform, |
| 120 'dimensions': { | 114 'dimensions': { |
| 121 'os': self.target_platform, | 115 'os': self.target_platform, |
| 122 }, | 116 }, |
| 123 }, | 117 }, |
| 124 ], | 118 ], |
| 125 'working_dir': self.working_dir, | 119 'working_dir': self.working_dir, |
| 126 'cleanup': 'data', | 120 'cleanup': 'data', |
| 127 } | 121 } |
| 128 | 122 |
| 129 return json.dumps(test_case) | 123 return json.dumps(test_case) |
| 130 | 124 |
| 131 | 125 |
| 132 def RemoveOldFiles(): | |
| 133 """Removes older swarm zip files as they are no longer needed.""" | |
| 134 for filename in glob.glob('swarm_tempfile_*.zip'): | |
| 135 os.remove(filename) | |
| 136 | |
| 137 | |
| 138 def ProcessManifest(filename, options): | 126 def ProcessManifest(filename, options): |
| 139 """Process the manifest file and send off the swarm test request.""" | 127 """Process the manifest file and send off the swarm test request.""" |
| 140 # Parses manifest file | 128 # Parses manifest file |
| 141 print "Parsing file %s..." % filename | 129 print "Parsing file %s..." % filename |
| 142 | 130 |
| 143 file_name_tail = os.path.split(filename)[1] | 131 file_name_tail = os.path.split(filename)[1] |
| 144 test_name = os.path.splitext(file_name_tail)[0] | 132 test_name = os.path.splitext(file_name_tail)[0] |
| 145 test_full_name = options.test_name_prefix + test_name | 133 test_full_name = options.test_name_prefix + test_name |
| 146 | 134 |
| 147 manifest = Manifest(filename, test_full_name, options) | 135 manifest = Manifest(filename, test_full_name, options) |
| 148 | 136 |
| 149 # Zip up relevant files | 137 # Zip up relevant files |
| 150 print "Zipping up files..." | 138 print "Zipping up files..." |
| 151 manifest.zip() | 139 manifest.zip() |
| 152 | 140 |
| 153 # Send test requests off to swarm. | 141 # Send test requests off to swarm. |
| 154 print 'Sending test requests to swarm' | 142 print 'Sending test requests to swarm' |
| 155 test_url = options.url.rstrip('/') + '/test' | 143 test_url = urlparse.urljoin(options.swarm_url, 'test') |
| 156 manifest_text = manifest.to_json() | 144 manifest_text = manifest.to_json() |
| 157 result = urllib2.urlopen(test_url, manifest_text).read() | 145 result = urllib2.urlopen(test_url, manifest_text).read() |
| 158 | 146 |
| 159 # Check that we can read the output as a JSON string | 147 # Check that we can read the output as a JSON string |
| 160 try: | 148 try: |
| 161 json.loads(result) | 149 json.loads(result) |
| 162 except (ValueError, TypeError), e: | 150 except (ValueError, TypeError), e: |
| 163 print 'Failed to send test for ' + test_name | 151 print 'Failed to send test for ' + test_name |
| 164 print e | 152 print e |
| 165 return 1 | 153 return 1 |
| (...skipping 19 matching lines...) Expand all Loading... | |
| 185 'Defaults to %default.') | 173 'Defaults to %default.') |
| 186 parser.add_option('-m', '--min_shards', type='int', default=1, | 174 parser.add_option('-m', '--min_shards', type='int', default=1, |
| 187 help='Minimum number of shards to request. CURRENTLY NOT ' | 175 help='Minimum number of shards to request. CURRENTLY NOT ' |
| 188 'SUPPORTED.') | 176 'SUPPORTED.') |
| 189 parser.add_option('-s', '--num_shards', type='int', default=1, | 177 parser.add_option('-s', '--num_shards', type='int', default=1, |
| 190 help='Desired number of shards to request. Must be ' | 178 help='Desired number of shards to request. Must be ' |
| 191 'greater than or equal to min_shards.') | 179 'greater than or equal to min_shards.') |
| 192 parser.add_option('-o', '--os_image', | 180 parser.add_option('-o', '--os_image', |
| 193 help='Swarm OS image to request. Defaults to the ' | 181 help='Swarm OS image to request. Defaults to the ' |
| 194 'current platform.') | 182 'current platform.') |
| 195 parser.add_option('-u', '--url', default='http://localhost:8080', | 183 parser.add_option('-u', '--swarm-url', default='http://localhost:8080', |
| 196 help='Specify the url of the Swarm server. ' | 184 help='Specify the url of the Swarm server. ' |
| 197 'Defaults to %default') | 185 'Defaults to %default') |
| 198 parser.add_option('-t', '--test_name_prefix', default='', | 186 parser.add_option('-d', '--data-url', |
| 187 help='The url where the test data can be retrieved from. ' | |
| 188 'Defaults to the current machine\'s hostname') | |
| 189 parser.add_option('--hashtable-dir', | |
| 190 help='The path to the hashtable directory storing the test ' | |
| 191 'data') | |
| 192 parser.add_option('--data-dest-dir', | |
| 193 help='The directory where all the test data needs to be ' | |
| 194 'placed to get served to the swarm bots') | |
| 195 parser.add_option('-t', '--test-name-prefix', default='', | |
| 199 help='Specify the prefix to give the swarm test request. ' | 196 help='Specify the prefix to give the swarm test request. ' |
| 200 'Defaults to %default') | 197 'Defaults to %default') |
| 201 parser.add_option('-v', '--verbose', action='store_true', | 198 parser.add_option('-v', '--verbose', action='store_true', |
| 202 help='Print verbose logging') | 199 help='Print verbose logging') |
| 203 (options, args) = parser.parse_args() | 200 (options, args) = parser.parse_args() |
| 204 | 201 |
| 205 if not args: | 202 if not args: |
| 206 parser.error('Must specify at least one filename') | 203 parser.error('Must specify at least one filename') |
| 207 | 204 |
| 208 if not options.os_image: | 205 if not options.os_image: |
| 209 options.os_image = '%s %d' % (platform.uname()[0], 32) | 206 options.os_image = '%s %d' % (platform.uname()[0], 32) |
| 207 if not options.data_url: | |
| 208 options.data_url = 'http://%s/' % socket.gethostbyname(socket.gethostname()) | |
|
M-A Ruel
2012/05/11 15:35:32
you could use default= instead.
csharp
2012/05/11 19:16:41
Done.
| |
| 209 if not options.hashtable_dir: | |
| 210 parser.error('Must specify the hashtable directory') | |
| 211 if not options.data_dest_dir: | |
| 212 parser.error('Must specify the server directory') | |
| 210 | 213 |
| 211 # Clean up old files. | 214 # Remove the old data |
| 212 print 'Removing old swarm zip files...' | 215 print 'Removing old swarm files...' |
| 213 RemoveOldFiles() | 216 shutil.rmtree(options.data_dest_dir, ignore_errors=True) |
| 214 | 217 |
| 218 # Copy over the new data | |
| 219 print 'Moving hashtable files to server...' | |
| 220 shutil.copytree(options.hashtable_dir, options.data_dest_dir) | |
| 221 | |
| 222 # Send off the swarm test requests. | |
| 215 highest_exit_code = 0 | 223 highest_exit_code = 0 |
| 216 for filename in args: | 224 for filename in args: |
| 217 highest_exit_code = max(highest_exit_code, | 225 highest_exit_code = max(highest_exit_code, |
| 218 ProcessManifest(filename, options)) | 226 ProcessManifest(filename, options)) |
| 219 | 227 |
| 220 return highest_exit_code | 228 return highest_exit_code |
| 221 | 229 |
| 222 | 230 |
| 223 if __name__ == '__main__': | 231 if __name__ == '__main__': |
| 224 sys.exit(main()) | 232 sys.exit(main()) |
| OLD | NEW |