Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(791)

Side by Side Diff: scripts/slave/run_slavelastic.py

Issue 10386096: Get Swarm Bots to Use Network Storage (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/tools/build
Patch Set: Created 8 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
OLDNEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. 2 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be 3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file. 4 # found in the LICENSE file.
5 # run_slavelastic.py: Runs a test based off of a slavelastic manifest file. 5 # run_slavelastic.py: Runs a test based off of a slavelastic manifest file.
6 6
7 from __future__ import with_statement 7 from __future__ import with_statement
8 import glob
9 import json 8 import json
10 import optparse 9 import optparse
11 import os 10 import os
12 import platform
13 import socket 11 import socket
12 import shutil
14 import sys 13 import sys
15 import time 14 import time
16 import urllib
17 import urllib2 15 import urllib2
16 import urlparse
18 import zipfile 17 import zipfile
19 18
20 19
21 DESCRIPTION = """This script takes a slavelastic manifest file, packages it, 20 DESCRIPTION = """This script takes a slavelastic manifest file, packages it,
22 and sends a swarm manifest file to the swarm server. This is expected to be 21 and sends a swarm manifest file to the swarm server. This is expected to be
23 called as a build step with the cwd as the parent of the src/ directory. 22 called as a build step with the cwd as the parent of the src/ directory.
24 """ 23 """
25 24
26 class Manifest(object): 25 class Manifest(object):
27 run_test_path = os.path.join( 26 run_test_path = os.path.join(
28 'src', 'tools', 'isolate', 'run_test_from_archive.py') 27 'src', 'tools', 'isolate', 'run_test_from_archive.py')
29 28
30 def __init__(self, filename, test_name, switches): 29 def __init__(self, filename, test_name, switches):
31 """Populates a manifest object. 30 """Populates a manifest object.
32 Args: 31 Args:
33 filename - The manifest with the test details. 32 filename - The manifest with the test details.
34 test_name - The name to give the test request. 33 test_name - The name to give the test request.
35 switches - An object with properties to apply to the test request. 34 switches - An object with properties to apply to the test request.
36 """ 35 """
37 platform_mapping = { 36 platform_mapping = {
38 'win32': 'Windows', 37 'darwin': 'Mac',
39 'cygwin': 'Windows', 38 'cygwin': 'Windows',
40 'linux2': 'Linux', 39 'linux2': 'Linux',
41 'darwin': 'Mac' 40 'win32': 'Windows'
42 } 41 }
43 42
44 self.manifest_name = filename 43 self.manifest_name = filename
45 44
46 self.g_shards = switches.num_shards 45 self.g_shards = switches.num_shards
47 # Random name for the output zip file 46 # Random name for the output zip file
48 self.zipfile_name = test_name + '.zip' 47 self.zipfile_name = test_name + '.zip'
49 self.tasks = [] 48 self.tasks = []
50 self.target_platform = platform_mapping[switches.os_image] 49 self.target_platform = platform_mapping[switches.os_image]
51 self.working_dir = switches.working_dir 50 self.working_dir = switches.working_dir
52 self.test_name = test_name 51 self.test_name = test_name
52 self.data_url = switches.data_url
53 self.data_dest_dir = switches.data_dest_dir
53 54
54 def add_task(self, task_name, actions): 55 def add_task(self, task_name, actions):
55 """Appends a new task to the swarm manifest file.""" 56 """Appends a new task to the swarm manifest file."""
56 self.tasks.append({ 57 self.tasks.append({
57 'test_name': task_name, 58 'test_name': task_name,
58 'action': actions, 59 'action': actions,
59 }) 60 })
60 61
61 def zip(self): 62 def zip(self):
62 """Zip up all the files in self.files""" 63 """Zip up all the files in self.files"""
63 start_time = time.time() 64 start_time = time.time()
64 65
65 zip_file = zipfile.ZipFile(self.zipfile_name, 'w') 66 zip_file = zipfile.ZipFile(
67 os.path.join(self.data_dest_dir, self.zipfile_name),
68 'w')
66 zip_file.write(self.manifest_name) 69 zip_file.write(self.manifest_name)
67 zip_file.write(self.run_test_path) 70 zip_file.write(self.run_test_path)
68 zip_file.close() 71 zip_file.close()
69 72
70 print 'Zipping completed, time elapsed: %f' % (time.time() - start_time) 73 print 'Zipping completed, time elapsed: %f' % (time.time() - start_time)
71 74
72 def to_json(self): 75 def to_json(self):
73 """Export the current configuration into a swarm-readable manifest file""" 76 """Export the current configuration into a swarm-readable manifest file"""
74 hostname = socket.gethostbyname(socket.gethostname()) 77 hashtable_url = urlparse.urljoin(self.data_url, 'hashtable')
75
76 # Adjust the port used to access the data via the python simpleserver.
77 # TODO(csharp): Remove this once file accesses between build and swarm bots
78 # has been fixed.
79 hostname += ':8080'
80
81 filepath = os.path.relpath(self.zipfile_name, '../..')
82 filepath_url = urllib.pathname2url(filepath)
83
84 hashtable_url = 'http://%s/hashtable/' % hostname
85 self.add_task( 78 self.add_task(
86 'Run Test', 79 'Run Test',
87 ['python', self.run_test_path, '-m', self.manifest_name, 80 ['python', self.run_test_path, '-m', self.manifest_name,
88 '-r', hashtable_url]) 81 '-r', hashtable_url])
89 82
90 # Clean up 83 # Clean up
91 # TODO(csharp) This can be removed once the swarm cleanup parameter is 84 # TODO(csharp) This can be removed once the swarm cleanup parameter is
92 # properly handled. 85 # properly handled.
93 if self.target_platform == 'Linux' or self.target_platform == 'Mac': 86 if self.target_platform == 'Linux' or self.target_platform == 'Mac':
94 cleanup_commands = ['rm', '-rf'] 87 cleanup_commands = ['rm', '-rf']
95 elif self.target_platform == 'Windows': 88 elif self.target_platform == 'Windows':
96 cleanup_commands = ['del'] 89 cleanup_commands = ['del']
97 self.add_task('Clean Up', cleanup_commands + [self.zipfile_name]) 90 self.add_task('Clean Up', cleanup_commands + [self.zipfile_name])
98 91
99 # Call kill_processes.py if on windows 92 # Call kill_processes.py if on windows
100 if self.target_platform == 'Windows': 93 if self.target_platform == 'Windows':
101 self.add_task('Kill Processes', 94 self.add_task('Kill Processes',
102 [sys.executable, '..\\b\\build\\scripts\\slave\\kill_processes.py']) 95 [sys.executable, '..\\b\\build\\scripts\\slave\\kill_processes.py'])
103 96
104 # Construct test case 97 # Construct test case
105 test_case = { 98 test_case = {
106 'test_case_name': self.test_name, 99 'test_case_name': self.test_name,
107 'data': [ 100 'data': [
108 'http://%s/%s' % (hostname, filepath_url), 101 urlparse.urljoin(self.data_url, self.zipfile_name),
109 ], 102 ],
110 'tests': self.tasks, 103 'tests': self.tasks,
111 'env_vars': { 104 'env_vars': {
112 'GTEST_TOTAL_SHARDS': '%(num_instances)s', 105 'GTEST_TOTAL_SHARDS': '%(num_instances)s',
113 'GTEST_SHARD_INDEX': '%(instance_index)s', 106 'GTEST_SHARD_INDEX': '%(instance_index)s',
114 }, 107 },
115 'configurations': [ 108 'configurations': [
116 { 109 {
117 'min_instances': self.g_shards, 110 'min_instances': self.g_shards,
118 'max_instances': self.g_shards, 111 'max_instances': self.g_shards,
119 'config_name': self.target_platform, 112 'config_name': self.target_platform,
120 'dimensions': { 113 'dimensions': {
121 'os': self.target_platform, 114 'os': self.target_platform,
122 }, 115 },
123 }, 116 },
124 ], 117 ],
125 'working_dir': self.working_dir, 118 'working_dir': self.working_dir,
126 'cleanup': 'data', 119 'cleanup': 'data',
127 } 120 }
128 121
129 return json.dumps(test_case) 122 return json.dumps(test_case)
130 123
131 124
132 def RemoveOldFiles():
133 """Removes older swarm zip files as they are no longer needed."""
134 for filename in glob.glob('swarm_tempfile_*.zip'):
135 os.remove(filename)
136
137
138 def ProcessManifest(filename, options): 125 def ProcessManifest(filename, options):
139 """Process the manifest file and send off the swarm test request.""" 126 """Process the manifest file and send off the swarm test request."""
140 # Parses manifest file 127 # Parses manifest file
141 print "Parsing file %s..." % filename 128 print "Parsing file %s..." % filename
142 129
143 file_name_tail = os.path.split(filename)[1] 130 file_name_tail = os.path.split(filename)[1]
144 test_name = os.path.splitext(file_name_tail)[0] 131 test_name = os.path.splitext(file_name_tail)[0]
145 test_full_name = options.test_name_prefix + test_name 132 test_full_name = options.test_name_prefix + test_name
146 133
147 manifest = Manifest(filename, test_full_name, options) 134 manifest = Manifest(filename, test_full_name, options)
148 135
 149 # Zip up relevant files 136 # Zip up relevant files
150 print "Zipping up files..." 137 print "Zipping up files..."
151 manifest.zip() 138 manifest.zip()
152 139
153 # Send test requests off to swarm. 140 # Send test requests off to swarm.
154 print 'Sending test requests to swarm' 141 print 'Sending test requests to swarm'
155 test_url = options.url.rstrip('/') + '/test' 142 test_url = urlparse.urljoin(options.swarm_url, 'test')
156 manifest_text = manifest.to_json() 143 manifest_text = manifest.to_json()
157 result = urllib2.urlopen(test_url, manifest_text).read() 144 result = urllib2.urlopen(test_url, manifest_text).read()
158 145
159 # Check that we can read the output as a JSON string 146 # Check that we can read the output as a JSON string
160 try: 147 try:
161 json.loads(result) 148 json.loads(result)
162 except (ValueError, TypeError), e: 149 except (ValueError, TypeError), e:
163 print 'Failed to send test for ' + test_name 150 print 'Failed to send test for ' + test_name
164 print e 151 print e
165 return 1 152 return 1
(...skipping 17 matching lines...) Expand all
183 parser.add_option('-w', '--working_dir', default='swarm_tests', 170 parser.add_option('-w', '--working_dir', default='swarm_tests',
 184 help='Desired working directory on the swarm slave side. ' 171 help='Desired working directory on the swarm slave side. '
185 'Defaults to %default.') 172 'Defaults to %default.')
186 parser.add_option('-m', '--min_shards', type='int', default=1, 173 parser.add_option('-m', '--min_shards', type='int', default=1,
187 help='Minimum number of shards to request. CURRENTLY NOT ' 174 help='Minimum number of shards to request. CURRENTLY NOT '
188 'SUPPORTED.') 175 'SUPPORTED.')
189 parser.add_option('-s', '--num_shards', type='int', default=1, 176 parser.add_option('-s', '--num_shards', type='int', default=1,
190 help='Desired number of shards to request. Must be ' 177 help='Desired number of shards to request. Must be '
191 'greater than or equal to min_shards.') 178 'greater than or equal to min_shards.')
192 parser.add_option('-o', '--os_image', 179 parser.add_option('-o', '--os_image',
193 help='Swarm OS image to request. Defaults to the ' 180 help='Swarm OS image to request.')
194 'current platform.') 181 parser.add_option('-u', '--swarm-url', default='http://localhost:8080',
195 parser.add_option('-u', '--url', default='http://localhost:8080',
196 help='Specify the url of the Swarm server. ' 182 help='Specify the url of the Swarm server. '
197 'Defaults to %default') 183 'Defaults to %default')
198 parser.add_option('-t', '--test_name_prefix', default='', 184 parser.add_option('-d', '--data-url', default=('http://%s/' %
185 socket.gethostbyname(socket.gethostname())),
186 help='The url where the test data can be retrieved from. '
187 'Defaults to %default')
188 parser.add_option('--hashtable-dir',
189 help='The path to the hashtable directory storing the test '
190 'data')
191 parser.add_option('--data-dest-dir',
 192 help='The directory where all the test data needs to be '
193 'placed to get served to the swarm bots')
194 parser.add_option('-t', '--test-name-prefix', default='',
199 help='Specify the prefix to give the swarm test request. ' 195 help='Specify the prefix to give the swarm test request. '
200 'Defaults to %default') 196 'Defaults to %default')
201 parser.add_option('-v', '--verbose', action='store_true', 197 parser.add_option('-v', '--verbose', action='store_true',
202 help='Print verbose logging') 198 help='Print verbose logging')
203 (options, args) = parser.parse_args() 199 (options, args) = parser.parse_args()
204 200
205 if not args: 201 if not args:
206 parser.error('Must specify at least one filename') 202 parser.error('Must specify at least one filename')
207 203
208 if not options.os_image: 204 if not options.os_image:
209 options.os_image = '%s %d' % (platform.uname()[0], 32) 205 parser.error('Must specify an os image')
 206 if not options.hashtable_dir:
207 parser.error('Must specify the hashtable directory')
208 if not options.data_dest_dir:
 209 parser.error('Must specify the data destination directory')
210 210
211 # Clean up old files. 211 # Remove the old data
212 print 'Removing old swarm zip files...' 212 print 'Removing old swarm files...'
213 RemoveOldFiles() 213 shutil.rmtree(options.data_dest_dir)
214 214
215 # Copy over the new data
216 print 'Moving hashtable files to server...'
 217 shutil.copytree(options.hashtable_dir, options.data_dest_dir)
218
219 # Send off the swarm test requests.
215 highest_exit_code = 0 220 highest_exit_code = 0
216 for filename in args: 221 for filename in args:
217 highest_exit_code = max(highest_exit_code, 222 highest_exit_code = max(highest_exit_code,
218 ProcessManifest(filename, options)) 223 ProcessManifest(filename, options))
219 224
220 return highest_exit_code 225 return highest_exit_code
221 226
222 227
223 if __name__ == '__main__': 228 if __name__ == '__main__':
224 sys.exit(main()) 229 sys.exit(main())
OLDNEW
« no previous file with comments | « scripts/master/factory/swarm_factory.py ('k') | scripts/slave/unittests/run_slavelastic_test.py » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698