Chromium Code Reviews

Diff: scripts/slave/annotated_run.py

Issue 1501663002: annotated_run.py: Add LogDog bootstrapping. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/tools/build
Patch Set: Comments. Created 4 years, 11 months ago
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import argparse
import collections
import contextlib
import json
import logging
import os
import platform
import shutil
import socket
import subprocess
import sys
import tempfile


# Install Infra build environment.
BUILD_ROOT = os.path.dirname(os.path.dirname(os.path.dirname(
    os.path.abspath(__file__))))
sys.path.insert(0, os.path.join(BUILD_ROOT, 'scripts'))

from common import annotator
from common import chromium_utils
from common import env
from common import master_cfg_utils
+from slave import gce

# Logging instance.
LOGGER = logging.getLogger('annotated_run')

+# Return codes used by Butler/Annotee to indicate their failure (as opposed to
+# a forwarded return code from the underlying process).
+LOGDOG_ERROR_RETURNCODES = (
+    # Butler runtime error.
+    250,
+    # Annotee runtime error.
+    251,
+)
+
+# Sentinel value that, if present in master config, matches all builders
+# underneath that master.
+WHITELIST_ALL = '*'
+
+# Whitelist of {master}=>[{builder}|WHITELIST_ALL] whitelisting specific masters
+# and builders for experimental LogDog/Annotee export.
+LOGDOG_WHITELIST_MASTER_BUILDERS = {
+}
+
+# Configuration for a Pub/Sub topic.
+PubSubConfig = collections.namedtuple('PubSubConfig', ('project', 'topic'))
+
+# LogDogPlatform is the set of platform-specific LogDog bootstrapping
+# configuration parameters.
+#
+# See _logdog_get_streamserver_uri for "streamserver" parameter details.
+LogDogPlatform = collections.namedtuple('LogDogPlatform', (
+    'butler', 'annotee', 'credential_path', 'streamserver'))
+
+# A CIPD binary description, including the package name, version, and relative
+# path of the binary within the package.
+CipdBinary = collections.namedtuple('CipdBinary',
+    ('package', 'version', 'relpath'))

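The whitelist ships empty in this patch, so no builders opt in yet. Purely for illustration, an opted-in entry following the {master} => [{builder}|WHITELIST_ALL] shape described above could look like the sketch below; the master and builder names are hypothetical placeholders, not part of this change.

# Illustration only; these master/builder names are placeholders.
LOGDOG_WHITELIST_MASTER_BUILDERS = {
    # Every builder on this master is exported.
    'master.chromium.fyi.example': [WHITELIST_ALL],
    # Only one specific builder on this master is exported.
    'master.chromium.linux.example': ['Linux Builder Example'],
}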
# RecipeRuntime will probe this for values.
# - First, (system, platform)
# - Then, (system,)
# - Finally, (),
PLATFORM_CONFIG = {
  # All systems.
-  (): {},
+  (): {
+    'logdog_pubsub': PubSubConfig(
+        project='luci-logdog',
+        topic='logs',
+    ),
+  },

  # Linux
  ('Linux',): {
    'run_cmd': ['/opt/infra-python/run.py'],
+    'logdog_platform': LogDogPlatform(
+        butler=CipdBinary('infra/tools/luci/logdog/butler/linux-amd64',
+                          'latest', 'logdog_butler'),
+        annotee=CipdBinary('infra/tools/luci/logdog/annotee/linux-amd64',
+                           'latest', 'logdog_annotee'),
+        credential_path=(
+            '/creds/service_accounts/service-account-luci-logdog-pubsub.json'),
+        streamserver='unix',
+    ),
  },

  # Mac OSX
  ('Darwin',): {
    'run_cmd': ['/opt/infra-python/run.py'],
  },

  # Windows
  ('Windows',): {
    'run_cmd': ['C:\\infra-python\\ENV\\Scripts\\python.exe',
                'C:\\infra-python\\run.py'],
  },
}


# Config is the runtime configuration used by `annotated_run.py` to bootstrap
# the recipe engine.
Config = collections.namedtuple('Config', (
    'run_cmd',
+    'logdog_pubsub',
+    'logdog_platform',
))


+class Runtime(object):
+  """Runtime is the runtime context of the recipe execution.
+
+  It is a ContextManager that tracks generated files and cleans them up at
+  exit.
+  """
+
+  def __init__(self, leak=False):
+    self._tempdirs = []
+    self._leak = leak
+
+  def cleanup(self, path):
+    self._tempdirs.append(path)
+
+  def tempdir(self, base=None):
+    """Creates a temporary recipe-local working directory and returns it.
+
+    This creates a temporary directory for this annotation run and registers
+    it with this Runtime for cleanup.
+
+    This creates two levels of directory:
+      <base>/.recipe_runtime
+      <base>/.recipe_runtime/tmpFOO
+
+    On termination, the entire "<base>/.recipe_runtime" directory is deleted,
+    removing the subdirectory created by this instance as well as cleaning up
+    any other temporary subdirectories leaked by previous executions.
+
+    Args:
+      base (str/None): The directory under which the tempdir should be created.
+          If None, the default temporary directory root will be used.
+    """
+    base = base or tempfile.gettempdir()
+    basedir = ensure_directory(base, '.recipe_runtime')
+    self.cleanup(basedir)
+    tdir = tempfile.mkdtemp(dir=basedir)
+    return tdir
+
+  def __enter__(self):
+    return self
+
+  def __exit__(self, _et, _ev, _tb):
+    self.close()
+
+  def close(self):
+    if self._leak:
+      LOGGER.warning('(--leak) Leaking temporary paths: %s', self._tempdirs)
+    else:
+      for path in reversed(self._tempdirs):
+        try:
+          if os.path.isdir(path):
+            LOGGER.debug('Cleaning up temporary directory [%s].', path)
+            chromium_utils.RemoveDirectory(path)
+        except BaseException:
+          LOGGER.exception('Failed to clean up temporary directory [%s].',
+                           path)
+      del self._tempdirs[:]
+
+
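A minimal usage sketch of the Runtime class above, mirroring how main() below drives it; the scratch file name is a placeholder, not something this patch creates.

with Runtime(leak=False) as rt:
  # tempdir() creates <cwd>/.recipe_runtime/tmpXXXX and registers
  # <cwd>/.recipe_runtime for removal when the 'with' block exits.
  workdir = rt.tempdir(os.getcwd())
  with open(os.path.join(workdir, 'scratch.json'), 'w') as fh:
    fh.write('{}')
# close() has now removed .recipe_runtime (unless leak=True was passed).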
def get_config():
  """Returns (Config): The constructed Config object.

  The Config object is constructed from:
  - Cascading the PLATFORM_CONFIG fields together based on current
    OS/Architecture.

  Raises:
    KeyError: if a required configuration key/parameter is not available.
  """
  # Cascade the platform configuration.
  p = (platform.system(), platform.processor())
  platform_config = {}
  for i in xrange(len(p)+1):
    platform_config.update(PLATFORM_CONFIG.get(p[:i], {}))

  # Construct runtime configuration.
  return Config(
      run_cmd=platform_config.get('run_cmd'),
+      logdog_pubsub=platform_config.get('logdog_pubsub'),
+      logdog_platform=platform_config.get('logdog_platform'),
  )


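To make the cascade in get_config() concrete, here is a rough trace for a Linux bot; the processor string is illustrative (platform.processor() may also return an empty string on some hosts).

# Illustrative trace of the platform cascade on a Linux bot.
p = ('Linux', 'x86_64')          # (platform.system(), platform.processor())
platform_config = {}
platform_config.update(PLATFORM_CONFIG.get((), {}))                   # logdog_pubsub
platform_config.update(PLATFORM_CONFIG.get(('Linux',), {}))           # run_cmd, logdog_platform
platform_config.update(PLATFORM_CONFIG.get(('Linux', 'x86_64'), {}))  # no such key: no-op
# Linux bots therefore end up with all three keys; Darwin and Windows bots
# currently get only 'logdog_pubsub' and 'run_cmd', so LogDog stays disabled there.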
def ensure_directory(*path):
  path = os.path.join(*path)
  if not os.path.isdir(path):
    os.makedirs(path)
  return path


+def _logdog_get_streamserver_uri(rt, typ):
+  """Returns (str): The Butler StreamServer URI.
+
+  Args:
+    rt (Runtime): Process-wide runtime.
+    typ (str): The type of URI to generate. One of: ['unix'].
+  Raises:
+    LogDogBootstrapError: if |typ| is not a known type.
+  """
+  if typ == 'unix':
+    # We have to use a custom temporary directory here. This is due to the path
+    # length limitation on UNIX domain sockets, which is generally 104-108
+    # characters. We can't make that assumption about our standard recipe
+    # temporary directory.
+    sockdir = rt.tempdir()
+    uri = 'unix:%s' % (os.path.join(sockdir, 'butler.sock'),)
+    if len(uri) > 104:
+      raise LogDogBootstrapError('Generated URI exceeds UNIX domain socket '
+                                 'name size: %s' % (uri,))
+    return uri
+  raise LogDogBootstrapError('No streamserver URI generator.')
+
+
def _run_command(cmd, **kwargs):
  if kwargs.pop('dry_run', False):
    LOGGER.info('(Dry Run) Would have executed command: %s', cmd)
    return 0, ''

  LOGGER.debug('Executing command: %s', cmd)
  kwargs.setdefault('stderr', subprocess.STDOUT)
  proc = subprocess.Popen(cmd, **kwargs)
  stdout, _ = proc.communicate()

  LOGGER.debug('Process [%s] returned [%d] with output:\n%s',
               cmd, proc.returncode, stdout)
  return proc.returncode, stdout


def _check_command(cmd, **kwargs):
  rv, stdout = _run_command(cmd, **kwargs)
  if rv != 0:
    raise subprocess.CalledProcessError(rv, cmd, output=stdout)
  return stdout


-@contextlib.contextmanager
-def recipe_tempdir(root=None, leak=False):
-  """Creates a temporary recipe-local working directory and yields it.
-
-  This creates a temporary directory for this annotation run that is
-  automatically cleaned up. It returns the directory.
-
-  Args:
-    root (str/None): If not None, the root directory. Otherwise, |os.cwd| will
-        be used.
-    leak (bool): If true, don't clean up the temporary directory on exit.
-  """
-  basedir = ensure_directory((root or os.getcwd()), '.recipe_runtime')
-  try:
-    tdir = tempfile.mkdtemp(dir=basedir)
-    yield tdir
-  finally:
-    if basedir and os.path.isdir(basedir):
-      if not leak:
-        LOGGER.debug('Cleaning up temporary directory [%s].', basedir)
-        try:
-          chromium_utils.RemoveDirectory(basedir)
-        except Exception:
-          LOGGER.exception('Failed to clean up temporary directory [%s].',
-                           basedir)
-      else:
-        LOGGER.warning('(--leak) Leaking temporary directory [%s].', basedir)
+class LogDogNotBootstrapped(Exception):
+  pass
+
+
+class LogDogBootstrapError(Exception):
+  pass
+
+
+def ensure_directory(*path):
+  path = os.path.join(*path)
+  if not os.path.isdir(path):
+    os.makedirs(path)
+  return path
+
+
+def _get_service_account_json(opts, credential_path):
+  """Returns (str/None): If specified, the path to the service account JSON.
+
+  This method probes the local environment and returns the path to the service
+  account credentials that the Butler should use for authentication, if any.
+
+  If we're running on a GCE instance, None is returned, since the GCE service
+  account is implicitly authenticated. If we're running on bare metal, a path
+  to those credentials is returned.
+
+  Args:
+    opts (argparse.Namespace): Command-line options.
+    credential_path (str): The platform's default credential path.
+  Raises:
+    LogDogBootstrapError: if no credentials could be found.
+  """
+  path = opts.logdog_service_account_json
+  if path:
+    return path
+
+  if gce.Authenticator.is_gce():
+    LOGGER.info('Running on GCE. No credentials necessary.')
+    return None
+
+  if os.path.isfile(credential_path):
+    return credential_path
+
+  raise LogDogBootstrapError('Could not find service account credentials. '
+                             'Tried: %s' % (credential_path,))
+
+
+def _logdog_install_cipd(path, *packages):
+  """Returns (tuple): The paths to the binaries in each of the packages.
+
+  This method bootstraps CIPD in "path", installing the packages specified
+  by "packages" and returning the paths to their binaries.
+
+  Args:
+    path (str): The CIPD installation root.
+    packages (CipdBinary): The set of CIPD binary packages to install.
+  """
+  verbosity = 0
+  level = logging.getLogger().level
+  if level <= logging.INFO:
+    verbosity += 1
+  if level <= logging.DEBUG:
+    verbosity += 1
+
+  packages_path = os.path.join(path, 'packages.json')
+  pmap = {}
+  cmd = [
+      sys.executable,
+      os.path.join(env.Build, 'scripts', 'slave', 'cipd.py'),
+      '--dest-directory', path,
+      '--json-output', packages_path,
+  ] + (['--verbose'] * verbosity)
+  for p in packages:
+    cmd += ['-P', '%s@%s' % (p.package, p.version)]
+    pmap[p.package] = os.path.join(path, p.relpath)
+
+  try:
+    _check_command(cmd)
+  except subprocess.CalledProcessError:
+    LOGGER.exception('Failed to install LogDog CIPD packages.')
+    raise LogDogBootstrapError()
+
+  # Resolve installed packages.
+  return tuple(pmap[p.package] for p in packages)
+
+
+def _build_logdog_prefix(properties):
+  """Constructs a LogDog stream prefix from the supplied properties.
+
+  The returned prefix is of the form:
+      bb/<mastername>/<buildername>/<buildnumber>
+
+  Any path-incompatible characters will be flattened to underscores.
+  """
+  def normalize(s):
+    parts = []
+    for ch in str(s):
+      if ch.isalnum() or ch in ':_-.':
+        parts.append(ch)
+      else:
+        parts.append('_')
+    if not parts[0].isalnum():
+      parts.insert(0, 's_')
+    return ''.join(parts)
+
+  components = {}
+  for f in ('mastername', 'buildername', 'buildnumber'):
+    prop = properties.get(f)
+    if not prop:
+      raise LogDogBootstrapError('Missing build property [%s].' % (f,))
+    components[f] = normalize(prop)
+  return 'bb/%(mastername)s/%(buildername)s/%(buildnumber)s' % components
+
+
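For example, with hypothetical build properties (the names below are made up for illustration), the prefix builder above produces:

props = {
    'mastername': 'chromium.linux',
    'buildername': 'Linux Builder (dbg)',
    'buildnumber': 1234,
}
# Spaces and parentheses are outside the allowed [alnum :_-.] set, so they
# collapse to underscores:
#   _build_logdog_prefix(props) == 'bb/chromium.linux/Linux_Builder__dbg_/1234'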
+def _logdog_bootstrap(rt, opts, tempdir, config, properties, cmd):
+  """Executes the recipe engine, bootstrapping it through LogDog/Annotee.
+
+  This method executes the recipe engine, bootstrapping it through
+  LogDog/Annotee so its output and annotations are streamed to LogDog. The
+  bootstrap is configured to tee the annotations through STDOUT/STDERR so they
+  will still be sent to BuildBot.
+
+  The overall setup here is:
+      [annotated_run.py] => [logdog_butler] => [logdog_annotee] => [recipes.py]
+
+  Args:
+    rt (Runtime): Process-wide runtime.
+    opts (argparse.Namespace): Command-line options.
+    tempdir (str): The path to the session temporary directory.
+    config (Config): Recipe runtime configuration.
+    properties (dict): Build properties.
+    cmd (list): The recipe runner command list to bootstrap.
+
+  Returns (int): The return code of the recipe runner process.
+
+  Raises:
+    LogDogNotBootstrapped: if the recipe engine was not executed because the
+        LogDog bootstrap requirements are not available.
+    LogDogBootstrapError: if there was an error bootstrapping the recipe runner
+        through LogDog.
+  """
+  bootstrap_dir = ensure_directory(tempdir, 'logdog_bootstrap')
+
+  plat = config.logdog_platform
+  if not plat:
+    raise LogDogNotBootstrapped('LogDog platform is not configured.')
+
+  # Determine LogDog prefix.
+  prefix = _build_logdog_prefix(properties)
+
+  # TODO(dnj): Consider moving this to a permanent directory on the bot so we
+  # don't CIPD-refresh each time.
+  cipd_path = os.path.join(bootstrap_dir, 'cipd')
+  butler, annotee = _logdog_install_cipd(cipd_path, plat.butler, plat.annotee)
+  if opts.logdog_butler_path:
+    butler = opts.logdog_butler_path
+  if opts.logdog_annotee_path:
+    annotee = opts.logdog_annotee_path
+
+  if not config.logdog_pubsub:
+    raise LogDogNotBootstrapped('No Pub/Sub configured.')
+  if not config.logdog_pubsub.project:
+    raise LogDogNotBootstrapped('No Pub/Sub project configured.')
+  if not config.logdog_pubsub.topic:
+    raise LogDogNotBootstrapped('No Pub/Sub topic configured.')
+
+  # Determine LogDog verbosity.
+  logdog_verbose = []
+  if opts.logdog_verbose == 0:
+    pass
+  elif opts.logdog_verbose == 1:
+    logdog_verbose.append('-log_level=info')
+  else:
+    logdog_verbose.append('-log_level=debug')
+
+  service_account_args = []
+  service_account_json = _get_service_account_json(opts, plat.credential_path)
+  if service_account_json:
+    service_account_args += ['-service-account-json', service_account_json]
+
+  # Generate our Butler stream server URI.
+  streamserver_uri = _logdog_get_streamserver_uri(rt, plat.streamserver)
+
+  # Dump the bootstrapped Annotee command to JSON for Annotee to load.
+  #
+  # Annotee can accept bootstrap parameters through either JSON or the command
+  # line, but using JSON effectively steps around any sort of command-line
+  # length limits such as those experienced on Windows.
+  cmd_json = os.path.join(bootstrap_dir, 'annotee_cmd.json')
+  with open(cmd_json, 'w') as fd:
+    json.dump(cmd, fd)
+
+  # Butler command.
+  cmd = [
+      butler,
+      '-prefix', prefix,
+      '-output', 'pubsub,project="%(project)s",topic="%(topic)s"' % (
+          config.logdog_pubsub._asdict()),
+  ]
+  cmd += logdog_verbose
+  cmd += service_account_args
+  cmd += [
+      'run',
+      '-stdout', 'tee=stdout',
+      '-stderr', 'tee=stderr',
+      '-streamserver-uri', streamserver_uri,
+      '--',
+  ]
+
+  # Annotee command.
+  cmd += [
+      annotee,
+      '-butler-stream-server', streamserver_uri,
+      '-annotate', 'tee',
+      '-name-base', 'recipes',
+      '-print-summary',
+      '-tee',
+      '-json-args-path', cmd_json,
+  ]
+  cmd += logdog_verbose
+
+  rv, _ = _run_command(cmd, dry_run=opts.dry_run)
+  if rv in LOGDOG_ERROR_RETURNCODES:
+    raise LogDogBootstrapError('LogDog Error (%d)' % (rv,))
+  return rv
+
+
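Assembled end to end, the bootstrap command built above comes out roughly as sketched below. Every path, the prefix, and the build values are illustrative placeholders; the flags and the Pub/Sub project/topic are taken from the code and configuration in this patch.

# Approximate contents of `cmd` just before _run_command() (default verbosity,
# bare-metal bot using the platform credential path; paths are placeholders).
cmd = [
    '/tmp/.recipe_runtime/tmpX/logdog_bootstrap/cipd/logdog_butler',
    '-prefix', 'bb/chromium.linux/Linux_Builder/1234',
    '-output', 'pubsub,project="luci-logdog",topic="logs"',
    '-service-account-json',
    '/creds/service_accounts/service-account-luci-logdog-pubsub.json',
    'run',
    '-stdout', 'tee=stdout',
    '-stderr', 'tee=stderr',
    '-streamserver-uri', 'unix:/tmp/.recipe_runtime/tmpY/butler.sock',
    '--',
    '/tmp/.recipe_runtime/tmpX/logdog_bootstrap/cipd/logdog_annotee',
    '-butler-stream-server', 'unix:/tmp/.recipe_runtime/tmpY/butler.sock',
    '-annotate', 'tee',
    '-name-base', 'recipes',
    '-print-summary',
    '-tee',
    '-json-args-path', '/tmp/.recipe_runtime/tmpX/logdog_bootstrap/annotee_cmd.json',
]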
+def _should_run_logdog(properties):
+  """Returns (bool): True if LogDog should be used for this run.
+
+  Args:
+    properties (dict): The factory properties for this recipe run.
+  """
+  mastername = properties.get('mastername')
+  buildername = properties.get('buildername')
+  if not all((mastername, buildername)):
+    LOGGER.warning('Required mastername/buildername is not set.')
+    return False
+
+  # Key on mastername.
+  bdict = LOGDOG_WHITELIST_MASTER_BUILDERS.get(mastername)
+  if bdict is not None:
+    # Key on buildername.
+    if WHITELIST_ALL in bdict or buildername in bdict:
+      LOGGER.info('Whitelisted master %s, builder %s.',
+                  mastername, buildername)
+      return True
+
+  LOGGER.info('Master %s, builder %s is not whitelisted for LogDog.',
+              mastername, buildername)
+  return False


def get_recipe_properties(workdir, build_properties,
                          use_factory_properties_from_disk):
  """Constructs the recipe's properties from buildbot's properties.

  This retrieves the current factory properties from the master_config
  in the slave's checkout (no factory properties are handed to us from the
  master), and merges in the build properties.

(...skipping 135 matching lines...)
      help='factory properties in b64 gz JSON format')
  parser.add_argument('--keep-stdin', action='store_true', default=False,
      help='don\'t close stdin when running recipe steps')
  parser.add_argument('--master-overrides-slave', action='store_true',
      help='use the property values given on the command line from the master, '
           'not the ones looked up on the slave')
  parser.add_argument('--use-factory-properties-from-disk',
      action='store_true', default=False,
      help='use factory properties loaded from disk on the slave')

+  group = parser.add_argument_group('LogDog Bootstrap')
+  group.add_argument('--logdog-verbose',
+      action='count', default=0,
+      help='Increase LogDog verbosity. This can be specified multiple times.')
+  group.add_argument('--logdog-force', action='store_true',
+      help='Force LogDog bootstrapping, even if the system is not configured.')
+  group.add_argument('--logdog-butler-path',
+      help='Path to the LogDog Butler. If empty, one will be probed/downloaded '
+           'from CIPD.')
+  group.add_argument('--logdog-annotee-path',
+      help='Path to the LogDog Annotee. If empty, one will be '
+           'probed/downloaded from CIPD.')
+  group.add_argument('--logdog-service-account-json',
+      help='Path to the service account JSON. If one is not provided, the '
+           'local system credentials will be used.')
+
  return parser.parse_args(argv)


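A hypothetical slave-side invocation exercising the new flags might look like the following; the binary and credential paths are placeholders, and the usual property-related flags are unchanged by this patch.

# Hypothetical invocation; paths are placeholders.
#   python scripts/slave/annotated_run.py \
#       --logdog-verbose \
#       --logdog-force \
#       --logdog-butler-path /opt/logdog/logdog_butler \
#       --logdog-annotee-path /opt/logdog/logdog_annotee \
#       --logdog-service-account-json /creds/service_accounts/logdog.json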
def update_scripts():
  if os.environ.get('RUN_SLAVE_UPDATED_SCRIPTS'):
    os.environ.pop('RUN_SLAVE_UPDATED_SCRIPTS')
    return False

  stream = annotator.StructuredAnnotationStream()

  with stream.step('update_scripts') as s:
    gclient_name = 'gclient'
    if sys.platform.startswith('win'):
      gclient_name += '.bat'
-    gclient_path = os.path.join(env.Build, '..', 'depot_tools',
+    gclient_path = os.path.join(env.Build, os.pardir, 'depot_tools',
                                gclient_name)
    gclient_cmd = [gclient_path, 'sync', '--force', '--verbose', '--jobs=2']
    try:
      fd, output_json = tempfile.mkstemp()
      os.close(fd)
      gclient_cmd += ['--output-json', output_json]
    except Exception:
      # Super paranoia try block.
      output_json = None
    cmd_dict = {
(...skipping 92 matching lines...)
    ]
    # Add this conditionally so that we get an error in
    # send_monitoring_event log files in case it isn't present.
    if hostname:
      cmd += ['--build-event-hostname', hostname]
    _check_command(cmd)
  except Exception:
    LOGGER.warning("Failed to send monitoring event.", exc_info=True)


+def _exec_recipe(rt, opts, tdir, config, properties):
+  # Find out if the recipe we intend to run is in build_internal's recipes. If
+  # so, use recipes.py from there, otherwise use the one from build.
+  recipe_file = properties['recipe'].replace('/', os.path.sep) + '.py'
+
+  # Use the standard recipe runner unless the recipes are explicitly in the
+  # "build_limited" repository.
+  recipe_runner = os.path.join(env.Build,
+                               'scripts', 'slave', 'recipes.py')
+  if env.BuildInternal:
+    build_limited = os.path.join(env.BuildInternal, 'scripts', 'slave')
+    if os.path.exists(os.path.join(build_limited, 'recipes', recipe_file)):
+      recipe_runner = os.path.join(build_limited, 'recipes.py')
+
+  # Dump properties to JSON and build recipe command.
+  props_file = os.path.join(tdir, 'recipe_properties.json')
+  with open(props_file, 'w') as fh:
+    json.dump(properties, fh)
+
+  cmd = [
+      sys.executable, '-u', recipe_runner,
+      'run',
+      '--workdir=%s' % os.getcwd(),
+      '--properties-file=%s' % props_file,
+      properties['recipe'],
+  ]
+
+  status = None
+  try:
+    if opts.logdog_force or _should_run_logdog(properties):
+      status = _logdog_bootstrap(rt, opts, tdir, config, properties, cmd)
+  except LogDogNotBootstrapped as e:
+    LOGGER.info('Not bootstrapped: %s', e.message)
+  except LogDogBootstrapError as e:
+    LOGGER.warning('Could not bootstrap LogDog: %s', e.message)
+  except Exception:
+    LOGGER.exception('Exception while bootstrapping LogDog.')
+  finally:
+    if status is None:
+      LOGGER.info('Not using LogDog. Invoking `recipes.py` directly.')
+      status, _ = _run_command(cmd, dry_run=opts.dry_run)
+
+  return status
+
+
def main(argv):
  opts = get_args(argv)

  if opts.verbose == 0:
    level = logging.INFO
  else:
    level = logging.DEBUG
  logging.getLogger().setLevel(level)

  clean_old_recipe_engine()

  # Enter our runtime environment.
-  with recipe_tempdir(leak=opts.leak) as tdir:
+  with Runtime(leak=opts.leak) as rt:
+    tdir = rt.tempdir(os.getcwd())
    LOGGER.debug('Using temporary directory: [%s].', tdir)

    # Load factory properties and configuration.
    # TODO(crbug.com/551165): remove flag "factory_properties".
    use_factory_properties_from_disk = (opts.use_factory_properties_from_disk or
                                        bool(opts.factory_properties))
    properties = get_recipe_properties(
        tdir, opts.build_properties, use_factory_properties_from_disk)
    LOGGER.debug('Loaded properties: %s', properties)

    config = get_config()
    LOGGER.debug('Loaded runtime configuration: %s', config)

-    # Find out if the recipe we intend to run is in build_internal's recipes. If
-    # so, use recipes.py from there, otherwise use the one from build.
-    recipe_file = properties['recipe'].replace('/', os.path.sep) + '.py'
-
-    # Use the standard recipe runner unless the recipes are explicitly in the
-    # "build_limited" repository.
-    recipe_runner = os.path.join(env.Build,
-                                 'scripts', 'slave', 'recipes.py')
-    if env.BuildInternal:
-      build_limited = os.path.join(env.BuildInternal, 'scripts', 'slave')
-      if os.path.exists(os.path.join(build_limited, 'recipes', recipe_file)):
-        recipe_runner = os.path.join(build_limited, 'recipes.py')
-
    # Setup monitoring directory and send a monitoring event.
    build_data_dir = ensure_directory(tdir, 'build_data')
    properties['build_data_dir'] = build_data_dir

    # Write our annotated_run.py monitoring event.
    write_monitoring_event(config, build_data_dir, properties)

-    # Dump properties to JSON and build recipe command.
-    props_file = os.path.join(tdir, 'recipe_properties.json')
-    with open(props_file, 'w') as fh:
-      json.dump(properties, fh)
-    cmd = [
-        sys.executable, '-u', recipe_runner,
-        'run',
-        '--workdir=%s' % os.getcwd(),
-        '--properties-file=%s' % props_file,
-        properties['recipe'],
-    ]
-
-    status, _ = _run_command(cmd, dry_run=opts.dry_run)
-
-    return status
+    # Execute our recipe.
+    return _exec_recipe(rt, opts, tdir, config, properties)


def shell_main(argv):
  if update_scripts():
    # Re-execute with the updated annotated_run.py.
    rv, _ = _run_command([sys.executable] + argv)
    return rv
  else:
    return main(argv[1:])


if __name__ == '__main__':
  logging.basicConfig(level=logging.INFO)
  sys.exit(shell_main(sys.argv))