Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(887)

Side by Side Diff: tools/isolate/isolate.py

Issue 10387037: Complete rewrite of isolate.py to be more modular. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: Created 8 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « no previous file | tools/isolate/isolate_smoke_test.py » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. 2 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be 3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file. 4 # found in the LICENSE file.
5 5
6 """Does one of the following depending on the --mode argument: 6 """Does one of the following depending on the --mode argument:
7 check Verifies all the inputs exist, touches the file specified with 7 check Verifies all the inputs exist, touches the file specified with
8 --result and exits. 8 --result and exits.
9 hashtable Puts a manifest file and hard links each of the inputs into the 9 hashtable Puts a manifest file and hard links each of the inputs into the
10 output directory. 10 output directory.
11 noop Do nothing, used for transition purposes. 11 noop Do nothing, used for transition purposes.
12 remap Stores all the inputs files in a directory without running the 12 remap Stores all the inputs files in a directory without running the
13 executable. 13 executable.
14 run Recreates a tree with all the inputs files and run the executable 14 run Recreates a tree with all the inputs files and run the executable
15 in it. 15 in it.
16 trace Runs the executable without remapping it but traces all the files 16 trace Runs the executable without remapping it but traces all the files
17 it and its child processes access. 17 it and its child processes access.
18 18
19 See more information at 19 See more information at
20 http://dev.chromium.org/developers/testing/isolated-testing 20 http://dev.chromium.org/developers/testing/isolated-testing
21 """ 21 """
22 22
23 import hashlib 23 import hashlib
24 import json 24 import json
25 import logging 25 import logging
26 import optparse 26 import optparse
27 import os 27 import os
28 import posixpath
29 import re 28 import re
30 import stat 29 import stat
31 import subprocess 30 import subprocess
32 import sys 31 import sys
33 import tempfile 32 import tempfile
34 33
35 import merge_isolate 34 import merge_isolate
36 import trace_inputs 35 import trace_inputs
37 import run_test_from_archive 36 import run_test_from_archive
38 37
# Levels of information retrieved by process_input(). Arbitrary non-overlapping
# sentinel values; only their ordering (NO_INFO < STATS_ONLY < WITH_HASH)
# matters.
NO_INFO, STATS_ONLY, WITH_HASH = range(56, 59)
42 41
def relpath(path, root):
  """os.path.relpath() that keeps trailing os.path.sep.

  os.path.relpath() strips a trailing separator; callers rely on the trailing
  separator to distinguish directories from files, so it is re-appended here.
  """
  out = os.path.relpath(path, root)
  if path.endswith(os.path.sep):
    out += os.path.sep
  return out
52 48
53 49
def normpath(path):
  """os.path.normpath() that keeps trailing os.path.sep.

  os.path.normpath() strips a trailing separator; preserve it so directory
  entries stay recognizable downstream.
  """
  out = os.path.normpath(path)
  if path.endswith(os.path.sep):
    out += os.path.sep
  return out
60 56
61 57
def to_relative(path, root, relative):
  """Converts any absolute path to a relative path, only if under root.

  On win32 the comparison is done case-insensitively by lowercasing all three
  arguments first. NOTE(review): when |path| is not under |root| on win32, the
  lowercased path is returned as-is; confirm callers tolerate the case change.
  """
  if sys.platform == 'win32':
    path = path.lower()
    root = root.lower()
    relative = relative.lower()
  if path.startswith(root):
    logging.info('%s starts with %s' % (path, root))
    # Note: the path is made relative to |relative|, not |root|.
    path = os.path.relpath(path, relative)
  else:
    logging.info('%s not under %s' % (path, root))
  return path
74
75
76 def expand_directories(indir, infiles, blacklist): 58 def expand_directories(indir, infiles, blacklist):
77 """Expands the directories, applies the blacklist and verifies files exist.""" 59 """Expands the directories, applies the blacklist and verifies files exist."""
78 logging.debug('expand_directories(%s, %s, %s)' % (indir, infiles, blacklist)) 60 logging.debug('expand_directories(%s, %s, %s)' % (indir, infiles, blacklist))
79 outfiles = [] 61 outfiles = []
80 for relfile in infiles: 62 for relfile in infiles:
81 if os.path.isabs(relfile): 63 if os.path.isabs(relfile):
82 raise run_test_from_archive.MappingError( 64 raise run_test_from_archive.MappingError(
83 'Can\'t map absolute path %s' % relfile) 65 'Can\'t map absolute path %s' % relfile)
84 infile = normpath(os.path.join(indir, relfile)) 66 infile = normpath(os.path.join(indir, relfile))
85 if not infile.startswith(indir): 67 if not infile.startswith(indir):
(...skipping 33 matching lines...) Expand 10 before | Expand all | Expand 10 after
119 return variables[m.group(1)] 101 return variables[m.group(1)]
120 return part 102 return part
121 103
122 104
def eval_variables(item, variables):
  """Replaces the gyp variables in a string item.

  Splits on '<(VARNAME)' markers and substitutes each via replace_variable();
  literal parts pass through unchanged.
  """
  return ''.join(
      replace_variable(p, variables) for p in re.split(r'(<\([A-Z_]+\))', item))
127 109
128 110
def indent(data, indent_length):
  """Indents text.

  |data| is str()-ified first so arbitrary objects can be pretty-printed.
  splitlines(True) keeps the line endings so they are preserved in the output.
  """
  spacing = ' ' * indent_length
  return ''.join(spacing + l for l in str(data).splitlines(True))
115
116
def load_isolate(content, error):
  """Loads the .isolate file and returns the information unprocessed.

  Returns the command, dependencies and read_only flag. The dependencies are
  fixed to use os.path.sep.

  Arguments:
  - content: text of the .isolate file.
  - error: callback invoked with a message when no configuration matches the
           current OS flavor.
  """
  # Load the .isolate file, process its conditions, retrieve the command and
  # dependencies.
  configs = merge_isolate.load_gyp(merge_isolate.eval_content(content))
  flavor = trace_inputs.get_flavor()
  config = configs.per_os.get(flavor) or configs.per_os.get(None)
  if not config:
    error('Failed to load configuration for \'%s\'' % flavor)
  # Merge tracked and untracked dependencies, isolate.py doesn't care about the
  # trackability of the dependencies, only the build tool does.
  dependencies = [
    f.replace('/', os.path.sep) for f in config.tracked + config.untracked
  ]
  return config.command, dependencies, config.read_only
143 136
144 137
def process_input(filepath, prevdict, level, read_only):
  """Processes an input file, a dependency, and return meta data about it.

  Arguments:
  - filepath: File to act on.
  - prevdict: the previous dictionary. It is used to retrieve the cached sha-1
              to skip recalculating the hash.
  - level: determines the amount of information retrieved; one of NO_INFO,
           STATS_ONLY or WITH_HASH.
  - read_only: If True, the file mode is manipulated. In practice, only save
               one of 4 modes: 0755 (rwx), 0644 (rw), 0555 (rx), 0444 (r). On
               windows, mode is not set since all files are 'executable' by
               default.
  """
  assert level in (NO_INFO, STATS_ONLY, WITH_HASH)
  out = {}
  if level >= STATS_ONLY:
    filestats = os.stat(filepath)
    if trace_inputs.get_flavor() != 'win':
      filemode = stat.S_IMODE(filestats.st_mode)
      # Remove write access for non-owner.
      filemode &= ~(stat.S_IWGRP | stat.S_IWOTH)
      if read_only:
        filemode &= ~stat.S_IWUSR
      if filemode & stat.S_IXUSR:
        filemode |= (stat.S_IXGRP | stat.S_IXOTH)
      else:
        filemode &= ~(stat.S_IXGRP | stat.S_IXOTH)
      out['mode'] = filemode
    # Size is recorded on all platforms, including windows.
    out['size'] = filestats.st_size
    # Used to skip recalculating the hash. Use the most recent update time.
    out['timestamp'] = int(round(filestats.st_mtime))
    # If the timestamp wasn't updated, carry on the sha-1.
    if (prevdict.get('timestamp') == out['timestamp'] and
        'sha-1' in prevdict):
      # Reuse the previous hash.
      out['sha-1'] = prevdict['sha-1']

  if level >= WITH_HASH and not out.get('sha-1'):
    h = hashlib.sha1()
    with open(filepath, 'rb') as f:
      h.update(f.read())
    out['sha-1'] = h.hexdigest()
  return out
194 181
195 182
196 def recreate_tree(outdir, indir, infiles, action): 183 def recreate_tree(outdir, indir, infiles, action):
197 """Creates a new tree with only the input files in it. 184 """Creates a new tree with only the input files in it.
198 185
199 Arguments: 186 Arguments:
200 outdir: Output directory to create the files in. 187 outdir: Output directory to create the files in.
201 indir: Root directory the infiles are based in. 188 indir: Root directory the infiles are based in.
202 infiles: List of files to map from |indir| to |outdir|. 189 infiles: List of files to map from |indir| to |outdir|.
203 action: See assert below. 190 action: See assert below.
(...skipping 15 matching lines...) Expand all
219 206
220 for relfile in infiles: 207 for relfile in infiles:
221 infile = os.path.join(indir, relfile) 208 infile = os.path.join(indir, relfile)
222 outfile = os.path.join(outdir, relfile) 209 outfile = os.path.join(outdir, relfile)
223 outsubdir = os.path.dirname(outfile) 210 outsubdir = os.path.dirname(outfile)
224 if not os.path.isdir(outsubdir): 211 if not os.path.isdir(outsubdir):
225 os.makedirs(outsubdir) 212 os.makedirs(outsubdir)
226 run_test_from_archive.link_file(outfile, infile, action) 213 run_test_from_archive.link_file(outfile, infile, action)
227 214
228 215
def result_to_state(filename):
  """Replaces the file's extension, mapping 'foo.result' to 'foo.state'."""
  return filename.rsplit('.', 1)[0] + '.state'
def write_json(stream, data):
  """Writes data to a stream as json, sorted and indented for stable diffs."""
  json.dump(data, stream, indent=2, sort_keys=True)
  stream.write('\n')
def determine_root_dir(relative_root, infiles):
  """For a list of infiles, determines the deepest root directory that is
  referenced indirectly.

  All arguments must be using os.path.sep.
  """
  # The trick used to determine the root directory is to look at "how far" back
  # up it is looking up.
  deepest_root = relative_root
  for i in infiles:
    x = relative_root
    # Each leading '../' walks one directory up from |relative_root|.
    while i.startswith('..' + os.path.sep):
      i = i[3:]
      assert not i.startswith(os.path.sep)
      x = os.path.dirname(x)
    if deepest_root.startswith(x):
      deepest_root = x
  logging.debug(
      'determine_root_dir(%s, %s) -> %s' % (
          relative_root, infiles, deepest_root))
  return deepest_root
def process_variables(variables, relative_base_dir, error):
  """Processes path variables as a special case and returns a copy of the dict.

  For each 'path' variable: first normalizes it, verifies it exists, converts
  it to an absolute path, then sets it as relative to relative_base_dir.
  """
  variables = variables.copy()
  for i in ('DEPTH', 'PRODUCT_DIR'):
    if i not in variables:
      continue
    variable = os.path.normpath(variables[i])
    if not os.path.isdir(variable):
      error('%s=%s is not a directory' % (i, variable))
    # Variables could contain / or \ on windows. Always normalize to
    # os.path.sep.
    variable = os.path.abspath(variable.replace('/', os.path.sep))
    # All variables are relative to the .isolate file.
    variables[i] = os.path.relpath(variable, relative_base_dir)
  return variables
269
270
class Flattenable(object):
  """Represents data that can be represented as a json file."""
  # Tuple of attribute names that are (de)serialized; subclasses override it.
  MEMBERS = ()

  def flatten(self):
    """Returns a json-serializable version of itself."""
    return dict((member, getattr(self, member)) for member in self.MEMBERS)

  @classmethod
  def load(cls, data):
    """Loads a flattened version.

    Asserts that |data| contains no keys outside of MEMBERS.
    """
    data = data.copy()
    out = cls()
    for member in out.MEMBERS:
      if member in data:
        value = data.pop(member)
        setattr(out, member, value)
    assert not data, data
    return out

  @classmethod
  def load_file(cls, filename):
    """Loads the data from a file or return an empty instance.

    A missing or unreadable file is not an error; an empty instance is
    returned instead.
    """
    out = cls()
    try:
      with open(filename, 'r') as f:
        out = cls.load(json.load(f))
      logging.debug('Loaded %s(%s)' % (cls.__name__, filename))
    except IOError:
      pass
    return out
303
class Result(Flattenable):
  """Describes the content of a .result file.

  This file is used by run_test_from_archive.py so its content is strictly only
  what is necessary to run the test outside of a checkout.
  """
  MEMBERS = (
    'command',
    'files',
    'read_only',
    'relative_cwd',
  )

  def __init__(self):
    super(Result, self).__init__()
    self.command = []
    self.files = {}
    self.read_only = None
    self.relative_cwd = None

  def update(self, command, infiles, read_only, relative_cwd):
    """Updates the result state with new information."""
    self.command = command
    # Add new files.
    for f in infiles:
      self.files.setdefault(f, {})
    # Prune extraneous files that are not a dependency anymore.
    # BUGFIX: the original computed set(infiles) - self.files, which is always
    # empty right after the setdefault loop above, so stale entries were never
    # removed. The correct direction is existing files minus current infiles.
    for f in set(self.files).difference(infiles):
      del self.files[f]
    # None means "keep the previous value"; only an explicit bool overrides.
    if read_only is not None:
      self.read_only = read_only
    self.relative_cwd = relative_cwd

  def __str__(self):
    out = '%s(\n' % self.__class__.__name__
    out += '  command: %s\n' % self.command
    out += '  files: %s\n' % ', '.join(sorted(self.files))
    out += '  read_only: %s\n' % self.read_only
    out += '  relative_cwd: %s)' % self.relative_cwd
    return out
344
345
class SavedState(Flattenable):
  """Describes the content of a .state file.

  The items in this file are simply to improve the developer's life and aren't
  used by run_test_from_archive.py. This file can always be safely removed.

  isolate_file permits to find back root_dir, variables are used for stateful
  rerun.
  """
  MEMBERS = (
    'isolate_file',
    'variables',
  )

  def __init__(self):
    super(SavedState, self).__init__()
    self.isolate_file = None
    self.variables = {}

  def update(self, isolate_file, variables):
    """Updates the saved state with new information."""
    self.isolate_file = isolate_file
    self.variables.update(variables)

  def __str__(self):
    out = '%s(\n' % self.__class__.__name__
    out += '  isolate_file: %s\n' % self.isolate_file
    out += '  variables: %s' % ''.join(
        '\n    %s=%s' % (k, self.variables[k]) for k in sorted(self.variables))
    out += ')'
    return out
377
378
class CompleteState(object):
  """Contains all the state to run the task at hand."""
  def __init__(self, result_file, result, saved_state, out_dir):
    super(CompleteState, self).__init__()
    self.result_file = result_file
    # Contains the data that will be used by run_test_from_archive.py
    self.result = result
    # Contains the data to ease developer's use-case but that is not strictly
    # necessary.
    self.saved_state = saved_state
    self.out_dir = out_dir

  @classmethod
  def load_files(cls, result_file, out_dir):
    """Loads state from disk."""
    assert os.path.isabs(result_file), result_file
    assert result_file.rsplit('.', 1)[1] == 'result', result_file
    return cls(
        result_file,
        Result.load_file(result_file),
        SavedState.load_file(result_to_state(result_file)),
        out_dir)

  def load_isolate(self, isolate_file, variables, error):
    """Updates self.result and self.saved_state with information loaded from a
    .isolate file.

    Processes the loaded data, deduce root_dir, relative_cwd.
    """
    # Make sure to not depend on os.getcwd().
    assert os.path.isabs(isolate_file), isolate_file
    logging.info(
        'CompleteState.load_isolate(%s, %s)' % (isolate_file, variables))
    relative_base_dir = os.path.dirname(isolate_file)

    # Processes the variables and update the saved state.
    variables = process_variables(variables, relative_base_dir, error)
    self.saved_state.update(isolate_file, variables)

    with open(isolate_file, 'r') as f:
      # At that point, variables are not replaced yet in command and infiles.
      # infiles may contain directory entries and is in posix style.
      command, infiles, read_only = load_isolate(f.read(), error)
    command = [eval_variables(i, variables) for i in command]
    infiles = [eval_variables(f, variables) for f in infiles]
    # root_dir is automatically determined by the deepest root accessed with
    # the form '../../foo/bar'.
    root_dir = determine_root_dir(relative_base_dir, infiles)
    # The relative directory is automatically determined by the relative path
    # between root_dir and the directory containing the .isolate file,
    # isolate_base_dir.
    relative_cwd = os.path.relpath(relative_base_dir, root_dir)
    # Normalize the files based to root_dir. It is important to keep the
    # trailing os.path.sep at that step.
    infiles = [
      relpath(normpath(os.path.join(relative_base_dir, f)), root_dir)
      for f in infiles
    ]
    # Expand the directories by listing each file inside. Up to now, trailing
    # os.path.sep must be kept.
    infiles = expand_directories(
        root_dir,
        infiles,
        lambda x: re.match(r'.*\.(git|svn|pyc)$', x))

    # Finally, update the new stuff in the foo.result file, the file that is
    # used by run_test_from_archive.py.
    self.result.update(command, infiles, read_only, relative_cwd)
    logging.debug(self)

  def process_inputs(self, level):
    """Updates self.result.files with the files' mode and hash.

    See process_input() for more information.
    """
    for infile in sorted(self.result.files):
      filepath = os.path.join(self.root_dir, infile)
      self.result.files[infile] = process_input(
          filepath, self.result.files[infile], level, self.result.read_only)

  def save_files(self):
    """Saves both self.result and self.saved_state."""
    with open(self.result_file, 'wb') as f:
      write_json(f, self.result.flatten())
    total_bytes = sum(i.get('size', 0) for i in self.result.files.itervalues())
    if total_bytes:
      logging.debug('Total size: %d bytes' % total_bytes)
    with open(result_to_state(self.result_file), 'wb') as f:
      write_json(f, self.saved_state.flatten())

  @property
  def root_dir(self):
    """isolate_file is always inside relative_cwd relative to root_dir."""
    isolate_dir = os.path.dirname(self.saved_state.isolate_file)
    # Special case '.'.
    if self.result.relative_cwd == '.':
      return isolate_dir
    assert isolate_dir.endswith(self.result.relative_cwd), (
        isolate_dir, self.result.relative_cwd)
    return isolate_dir[:-len(self.result.relative_cwd)]

  @property
  def resultdir(self):
    """Directory containing the results, usually equivalent to the variable
    PRODUCT_DIR.
    """
    return os.path.dirname(self.result_file)

  def __str__(self):
    out = '%s(\n' % self.__class__.__name__
    out += '  root_dir: %s\n' % self.root_dir
    out += '  result: %s\n' % indent(self.result, 2)
    out += '  saved_state: %s)' % indent(self.saved_state, 2)
    return out
493
494
def MODEcheck(_outdir, _state):
  """No-op; the work was already done when the dependencies were listed."""
  return 0
318 498
319 499
def MODEhashtable(outdir, state):
  """Hard links each dependency into |outdir|, named by its sha-1 hash."""
  outdir = (
      outdir or os.path.join(os.path.dirname(state.resultdir), 'hashtable'))
  if not os.path.isdir(outdir):
    os.makedirs(outdir)
  for relfile, properties in state.result.files.iteritems():
    infile = os.path.join(state.root_dir, relfile)
    outfile = os.path.join(outdir, properties['sha-1'])
    if os.path.isfile(outfile):
      # Just do a quick check that the file size matches. No need to stat()
      # again the input file, grab the value from the dict.
      out_size = os.stat(outfile).st_size
      # BUGFIX: state.result.files is keyed by the relative path, not the
      # joined absolute path; using |infile| here raised KeyError.
      in_size = (
          state.result.files[relfile].get('size') or
          os.stat(infile).st_size)
      if in_size == out_size:
        continue
      # Otherwise, an exception will be raised.
    run_test_from_archive.link_file(
        outfile, infile, run_test_from_archive.HARDLINK)
  return 0
341 521
342 522
343 def MODEremap(outdir, indir, data): 523 def MODEremap(outdir, state):
344 if not outdir: 524 if not outdir:
345 outdir = tempfile.mkdtemp(prefix='isolate') 525 outdir = tempfile.mkdtemp(prefix='isolate')
346 else: 526 else:
347 if not os.path.isdir(outdir): 527 if not os.path.isdir(outdir):
348 os.makedirs(outdir) 528 os.makedirs(outdir)
349 print 'Remapping into %s' % outdir 529 print 'Remapping into %s' % outdir
350 if len(os.listdir(outdir)): 530 if len(os.listdir(outdir)):
351 print 'Can\'t remap in a non-empty directory' 531 print 'Can\'t remap in a non-empty directory'
352 return 1 532 return 1
353 recreate_tree( 533 recreate_tree(
354 outdir, indir, data['files'].keys(), run_test_from_archive.HARDLINK) 534 outdir,
355 if data['read_only']: 535 state.root_dir,
536 state.result.files.keys(),
537 run_test_from_archive.HARDLINK)
538 if state.result.read_only:
356 run_test_from_archive.make_writable(outdir, True) 539 run_test_from_archive.make_writable(outdir, True)
357 return 0 540 return 0
358 541
359 542
360 def MODErun(_outdir, indir, data): 543 def MODErun(_outdir, state):
361 """Always uses a temporary directory.""" 544 """Always uses a temporary directory."""
362 try: 545 try:
363 outdir = tempfile.mkdtemp(prefix='isolate') 546 outdir = tempfile.mkdtemp(prefix='isolate')
364 recreate_tree( 547 recreate_tree(
365 outdir, indir, data['files'].keys(), run_test_from_archive.HARDLINK) 548 outdir,
366 cwd = os.path.join(outdir, data['relative_cwd']) 549 state.root_dir,
550 state.result.files.keys(),
551 run_test_from_archive.HARDLINK)
552 cwd = os.path.join(outdir, state.result.relative_cwd)
367 if not os.path.isdir(cwd): 553 if not os.path.isdir(cwd):
368 os.makedirs(cwd) 554 os.makedirs(cwd)
369 if data['read_only']: 555 if state.result.read_only:
370 run_test_from_archive.make_writable(outdir, True) 556 run_test_from_archive.make_writable(outdir, True)
371 if not data['command']: 557 if not state.result.command:
372 print 'No command to run' 558 print 'No command to run'
373 return 1 559 return 1
374 cmd = trace_inputs.fix_python_path(data['command']) 560 cmd = trace_inputs.fix_python_path(state.result.command)
375 logging.info('Running %s, cwd=%s' % (cmd, cwd)) 561 logging.info('Running %s, cwd=%s' % (cmd, cwd))
376 return subprocess.call(cmd, cwd=cwd) 562 return subprocess.call(cmd, cwd=cwd)
377 finally: 563 finally:
378 run_test_from_archive.rmtree(outdir) 564 run_test_from_archive.rmtree(outdir)
379 565
380 566
381 def MODEtrace(_outdir, indir, data): 567 def MODEtrace(_outdir, state):
382 """Shortcut to use trace_inputs.py properly. 568 """Shortcut to use trace_inputs.py properly.
383 569
384 It constructs the equivalent of dictfiles. It is hardcoded to base the 570 It constructs the equivalent of dictfiles. It is hardcoded to base the
385 checkout at src/. 571 checkout at src/.
386 """ 572 """
387 logging.info( 573 logging.info(
388 'Running %s, cwd=%s' % ( 574 'Running %s, cwd=%s' % (
389 data['command'], os.path.join(indir, data['relative_cwd']))) 575 state.result.command,
576 os.path.join(state.root_dir, state.result.relative_cwd)))
390 product_dir = None 577 product_dir = None
391 if data['resultdir'] and indir: 578 if state.resultdir and state.root_dir:
392 # Defaults to none if both are the same directory. 579 # Defaults to none if both are the same directory.
393 try: 580 try:
394 product_dir = os.path.relpath(data['resultdir'], indir) or None 581 product_dir = os.path.relpath(state.resultdir, state.root_dir) or None
395 except ValueError: 582 except ValueError:
396 # This happens on Windows if data['resultdir'] is one drive, let's say 583 # This happens on Windows if state.resultdir is one drive, let's say
397 # 'C:\' and indir on another one like 'D:\'. 584 # 'C:\' and state.root_dir on another one like 'D:\'.
398 product_dir = None 585 product_dir = None
399 if not data['command']: 586 if not state.result.command:
400 print 'No command to run' 587 print 'No command to run'
401 return 1 588 return 1
402 return trace_inputs.trace_inputs( 589 return trace_inputs.trace_inputs(
403 data['resultfile'] + '.log', 590 state.result_file + '.log',
404 data['command'], 591 state.result.command,
405 indir, 592 state.root_dir,
406 data['relative_cwd'], 593 state.result.relative_cwd,
407 product_dir, 594 product_dir,
408 False) 595 False)
409 596
410 597
411 def get_valid_modes(): 598 # Must be declared after all the functions.
412 """Returns the modes that can be used.""" 599 VALID_MODES = {
413 return sorted( 600 'check': MODEcheck,
414 i[4:] for i in dir(sys.modules[__name__]) if i.startswith('MODE')) 601 'hashtable': MODEhashtable,
602 'remap': MODEremap,
603 'run': MODErun,
604 'trace': MODEtrace,
605 }
415 606
416 607
def determine_root_dir(relative_root, infiles):
  """For a list of infiles, determines the deepest root directory that is
  referenced indirectly.

  All the paths are processed in posix-style internally; the result is
  returned using os.path.sep.
  """
  # The trick used to determine the root directory is to look at "how far" back
  # up each dependency reaches: every leading '../' in an infile pushes the
  # required root one directory up from the .isolate file's directory.
  relative_root = relative_root.replace(os.path.sep, '/')
  deepest_root = relative_root
  for i in infiles:
    x = relative_root
    i = i.replace(os.path.sep, '/')
    while i.startswith('../'):
      i = i[3:]
      assert not i.startswith('/')
      x = posixpath.dirname(x)
    if deepest_root.startswith(x):
      deepest_root = x
  # Convert back to the native separator exactly once; the original code
  # redundantly applied the conversion a second time on return.
  deepest_root = deepest_root.replace('/', os.path.sep)
  logging.debug(
      'determine_root_dir(%s, %s) -> %s' % (
          relative_root, infiles, deepest_root))
  return deepest_root
442 616
443 617
# Sanity check: every MODE* function declared in this module must be exposed
# through VALID_MODES, and vice versa.
assert (
    sorted(i[4:] for i in dir(sys.modules[__name__]) if i.startswith('MODE')) ==
    sorted(VALID_MODES))


def isolate(result_file, isolate_file, mode, variables, out_dir, error):
  """Main function to isolate a target with its dependencies.

  Arguments:
  - result_file: File to load or save state from.
  - isolate_file: File to load data from. Can be None if result_file contains
    the necessary information.
  - mode: Action to do. See file level docstring.
  - variables: Variables to process, if necessary.
  - out_dir: Output directory where the result is stored. Its use depends on
    |mode|.
  - error: Callback used to signal fatal errors; main() passes parser.error,
    which does not return.

  Some arguments are optional, depending on |mode|. See the corresponding
  MODE<mode> function for the exact behavior.
  """
  # First, load the previous stuff if it was present. Namely, "foo.result" and
  # "foo.state".
  complete_state = CompleteState.load_files(result_file, out_dir)
  # The .isolate file path may come from the command line or from the saved
  # state; when both are present they must agree.
  isolate_file = isolate_file or complete_state.saved_state.isolate_file
  if not isolate_file:
    error('A .isolate file is required.')
  if (complete_state.saved_state.isolate_file and
      isolate_file != complete_state.saved_state.isolate_file):
    error(
        '%s and %s do not match.' % (
          isolate_file, complete_state.saved_state.isolate_file))

  try:
    # Then process options and expands directories.
    complete_state.load_isolate(isolate_file, variables, error)

    # Regenerate complete_state.result.files. Only hashtable mode needs the
    # sha-1 level of detail; see LEVELS.
    complete_state.process_inputs(LEVELS[mode])

    # Finally run the mode-specific code.
    result = VALID_MODES[mode](out_dir, complete_state)
  except run_test_from_archive.MappingError, e:
    error(str(e))

  # Then store the result and state.
  complete_state.save_files()
  return result
515 666
516 def main(): 667 def main():
668 """Handles CLI and normalizes the input arguments to pass them to isolate().
669 """
517 default_variables = [('OS', trace_inputs.get_flavor())] 670 default_variables = [('OS', trace_inputs.get_flavor())]
518 if sys.platform in ('win32', 'cygwin'): 671 if sys.platform in ('win32', 'cygwin'):
519 default_variables.append(('EXECUTABLE_SUFFIX', '.exe')) 672 default_variables.append(('EXECUTABLE_SUFFIX', '.exe'))
520 else: 673 else:
521 default_variables.append(('EXECUTABLE_SUFFIX', '')) 674 default_variables.append(('EXECUTABLE_SUFFIX', ''))
522 valid_modes = get_valid_modes() + ['noop'] 675 valid_modes = sorted(VALID_MODES.keys() + ['noop'])
523 parser = optparse.OptionParser( 676 parser = optparse.OptionParser(
524 usage='%prog [options] [.isolate file]', 677 usage='%prog [options] [.isolate file]',
525 description=sys.modules[__name__].__doc__) 678 description=sys.modules[__name__].__doc__)
526 parser.format_description = lambda *_: parser.description 679 parser.format_description = lambda *_: parser.description
527 parser.add_option( 680 parser.add_option(
528 '-v', '--verbose', 681 '-v', '--verbose',
529 action='count', 682 action='count',
530 default=int(os.environ.get('ISOLATE_DEBUG', 0)), 683 default=int(os.environ.get('ISOLATE_DEBUG', 0)),
531 help='Use multiple times') 684 help='Use multiple times')
532 parser.add_option( 685 parser.add_option(
(...skipping 20 matching lines...) Expand all
553 'For the other modes, defaults to the directory containing --result') 706 'For the other modes, defaults to the directory containing --result')
554 707
555 options, args = parser.parse_args() 708 options, args = parser.parse_args()
556 level = [logging.ERROR, logging.INFO, logging.DEBUG][min(2, options.verbose)] 709 level = [logging.ERROR, logging.INFO, logging.DEBUG][min(2, options.verbose)]
557 logging.basicConfig( 710 logging.basicConfig(
558 level=level, 711 level=level,
559 format='%(levelname)5s %(module)15s(%(lineno)3d): %(message)s') 712 format='%(levelname)5s %(module)15s(%(lineno)3d): %(message)s')
560 713
561 if not options.mode: 714 if not options.mode:
562 parser.error('--mode is required') 715 parser.error('--mode is required')
563 if len(args) != 1: 716 if not options.result:
717 parser.error('--result is required.')
718
719 if len(args) > 1:
564 logging.debug('%s' % sys.argv) 720 logging.debug('%s' % sys.argv)
565 parser.error('Use only one argument which should be a .isolate file') 721 parser.error('Use only one argument which should be a .isolate file')
566 722
567 if options.mode == 'noop': 723 if options.mode == 'noop':
568 # This undocumented mode is to help transition since some builders do not 724 # This undocumented mode is to help transition since some builders do not
569 # have all the test data files checked out. Exit silently. 725 # have all the test data files checked out. Exit silently.
570 return 0 726 return 0
571 727
572 root_dir, infiles, data = process_options( 728 # Make sure the paths make sense. On Windows, / and \ are often mixed together
573 dict(options.variables), options.result, args[0], parser.error) 729 # in a path.
730 result_file = os.path.abspath(options.result.replace('/', os.path.sep))
731 # input_file may be None.
732 input_file = (
733 os.path.abspath(args[0].replace('/', os.path.sep)) if args else None)
734 # out_dir may be None.
735 out_dir = (
736 os.path.abspath(options.outdir.replace('/', os.path.sep))
737 if options.outdir else None)
738 # Fix variables.
739 variables = dict(options.variables)
574 740
575 try: 741 # After basic validation, pass this to isolate().
576 resultcode, data = isolate( 742 return isolate(
577 options.outdir, 743 result_file,
578 options.mode, 744 input_file,
579 root_dir, 745 options.mode,
580 infiles, 746 variables,
581 data) 747 out_dir,
582 except run_test_from_archive.MappingError, e: 748 parser.error)
583 print >> sys.stderr, str(e)
584 return 1
585 save_results(options.result, data)
586 return resultcode
587 749
588 750
if __name__ == '__main__':
  exit_code = main()
  sys.exit(exit_code)
OLDNEW
« no previous file with comments | « no previous file | tools/isolate/isolate_smoke_test.py » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698