Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
| 2 # Copyright 2014 The Chromium Authors. All rights reserved. | 2 # Copyright 2014 The Chromium Authors. All rights reserved. |
| 3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
|
iannucci
2015/05/27 02:03:27
let's do this later :)
luqui
2015/05/28 21:47:37
Done.
| |
| 4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
| 5 | 5 |
| 6 # TODO(hinoka): Use logging. | 6 import os |
| 7 import sys | |
| 7 | 8 |
| 8 import cStringIO | 9 SLAVE_DIR = os.path.dirname(os.path.abspath(__file__)) |
| 9 import codecs | 10 BOT_UPDATE = os.path.join( |
| 10 import collections | 11 SLAVE_DIR, 'recipe_modules', 'bot_update', 'resources', 'bot_update.py') |
| 11 import copy | |
| 12 import ctypes | |
| 13 import json | |
| 14 import optparse | |
| 15 import os | |
| 16 import pprint | |
| 17 import re | |
| 18 import socket | |
| 19 import subprocess | |
| 20 import sys | |
| 21 import tempfile | |
| 22 import time | |
| 23 import urllib2 | |
| 24 import urlparse | |
| 25 import uuid | |
| 26 | |
| 27 import os.path as path | |
| 28 | |
| 29 # How many bytes at a time to read from pipes. | |
| 30 BUF_SIZE = 256 | |
| 31 | |
| 32 # Define a bunch of directory paths. | |
| 33 # Relative to the current working directory. | |
| 34 CURRENT_DIR = path.abspath(os.getcwd()) | |
| 35 BUILDER_DIR = path.dirname(CURRENT_DIR) | |
| 36 SLAVE_DIR = path.dirname(BUILDER_DIR) | |
| 37 | |
| 38 # Relative to this script's filesystem path. | |
| 39 THIS_DIR = path.dirname(path.abspath(__file__)) | |
| 40 SCRIPTS_DIR = path.dirname(THIS_DIR) | |
| 41 BUILD_DIR = path.dirname(SCRIPTS_DIR) | |
| 42 ROOT_DIR = path.dirname(BUILD_DIR) | |
| 43 BUILD_INTERNAL_DIR = path.join(ROOT_DIR, 'build_internal') | |
| 44 DEPOT_TOOLS_DIR = path.join(ROOT_DIR, 'depot_tools') | |
| 45 | |
| 46 | |
| 47 CHROMIUM_GIT_HOST = 'https://chromium.googlesource.com' | |
| 48 CHROMIUM_SRC_URL = CHROMIUM_GIT_HOST + '/chromium/src.git' | |
| 49 | |
| 50 # Official builds use buildspecs, so this is a special case. | |
| 51 BUILDSPEC_TYPE = collections.namedtuple('buildspec', | |
| 52 ('container', 'version')) | |
| 53 BUILDSPEC_RE = (r'^/chrome-internal/trunk/tools/buildspec/' | |
| 54 '(build|branches|releases)/(.+)$') | |
| 55 GIT_BUILDSPEC_PATH = ('https://chrome-internal.googlesource.com/chrome/tools/' | |
| 56 'buildspec') | |
| 57 BRANCH_HEADS_REFSPEC = '+refs/branch-heads/*' | |
| 58 | |
| 59 BUILDSPEC_COMMIT_RE = ( | |
| 60 re.compile(r'Buildspec for.*version (\d+\.\d+\.\d+\.\d+)'), | |
| 61 re.compile(r'Create (\d+\.\d+\.\d+\.\d+) buildspec'), | |
| 62 re.compile(r'Auto-converted (\d+\.\d+\.\d+\.\d+) buildspec to git'), | |
| 63 ) | |
| 64 | |
| 65 # Regular expression that matches a single commit footer line. | |
| 66 COMMIT_FOOTER_ENTRY_RE = re.compile(r'([^:]+):\s+(.+)') | |
| 67 | |
| 68 # Footer metadata keys for regular and gsubtreed mirrored commit positions. | |
| 69 COMMIT_POSITION_FOOTER_KEY = 'Cr-Commit-Position' | |
| 70 COMMIT_ORIGINAL_POSITION_FOOTER_KEY = 'Cr-Original-Commit-Position' | |
| 71 # Regular expression to parse a commit position | |
| 72 COMMIT_POSITION_RE = re.compile(r'(.+)@\{#(\d+)\}') | |
| 73 | |
| 74 # Regular expression to parse gclient's revinfo entries. | |
| 75 REVINFO_RE = re.compile(r'^([^:]+):\s+([^@]+)@(.+)$') | |
| 76 | |
| 77 # Used by 'ResolveSvnRevisionFromGitiles' | |
| 78 GIT_SVN_PROJECT_MAP = { | |
| 79 'webkit': { | |
| 80 'svn_url': 'svn://svn.chromium.org/blink', | |
| 81 'branch_map': [ | |
| 82 (r'trunk', r'refs/heads/master'), | |
| 83 (r'branches/([^/]+)', r'refs/branch-heads/\1'), | |
| 84 ], | |
| 85 }, | |
| 86 'v8': { | |
| 87 'svn_url': 'https://v8.googlecode.com/svn', | |
| 88 'branch_map': [ | |
| 89 (r'trunk', r'refs/heads/candidates'), | |
| 90 (r'branches/bleeding_edge', r'refs/heads/master'), | |
| 91 (r'branches/([^/]+)', r'refs/branch-heads/\1'), | |
| 92 ], | |
| 93 }, | |
| 94 'nacl': { | |
| 95 'svn_url': 'svn://svn.chromium.org/native_client', | |
| 96 'branch_map': [ | |
| 97 (r'trunk/src/native_client', r'refs/heads/master'), | |
| 98 ], | |
| 99 }, | |
| 100 } | |
| 101 | |
| 102 # Key for the 'git-svn' ID metadata commit footer entry. | |
| 103 GIT_SVN_ID_FOOTER_KEY = 'git-svn-id' | |
| 104 # e.g., git-svn-id: https://v8.googlecode.com/svn/trunk@23117 | |
| 105 # ce2b1a6d-e550-0410-aec6-3dcde31c8c00 | |
| 106 GIT_SVN_ID_RE = re.compile(r'((?:\w+)://[^@]+)@(\d+)\s+(?:[a-zA-Z0-9\-]+)') | |
| 107 | |
| 108 | |
| 109 # This is the git mirror of the buildspecs repository. We could rely on the svn | |
| 110 # checkout, now that the git buildspecs are checked in alongside the svn | |
| 111 # buildspecs, but we're going to want to pull all the buildspecs from here | |
| 112 # eventually anyhow, and there's already some logic to pull from git (for the | |
| 113 # old git_buildspecs.git repo), so just stick with that. | |
| 114 GIT_BUILDSPEC_REPO = ( | |
| 115 'https://chrome-internal.googlesource.com/chrome/tools/buildspec') | |
| 116 | |
| 117 # Copied from scripts/recipes/chromium.py. | |
| 118 GOT_REVISION_MAPPINGS = { | |
| 119 '/chrome/trunk/src': { | |
| 120 'src/': 'got_revision', | |
| 121 'src/native_client/': 'got_nacl_revision', | |
| 122 'src/tools/swarm_client/': 'got_swarm_client_revision', | |
| 123 'src/tools/swarming_client/': 'got_swarming_client_revision', | |
| 124 'src/third_party/WebKit/': 'got_webkit_revision', | |
| 125 'src/third_party/webrtc/': 'got_webrtc_revision', | |
| 126 'src/v8/': 'got_v8_revision', | |
| 127 } | |
| 128 } | |
| 129 | |
| 130 | |
| 131 BOT_UPDATE_MESSAGE = """ | |
| 132 What is the "Bot Update" step? | |
| 133 ============================== | |
| 134 | |
| 135 This step ensures that the source checkout on the bot (e.g. Chromium's src/ and | |
| 136 its dependencies) is checked out in a consistent state. This means that all of | |
| 137 the necessary repositories are checked out, no extra repositories are checked | |
| 138 out, and no locally modified files are present. | |
| 139 | |
| 140 These actions used to be taken care of by the "gclient revert" and "update" | |
| 141 steps. However, those steps are known to be buggy and occasionally flaky. This | |
| 142 step has two main advantages over them: | |
| 143 * it only operates in Git, so the logic can be clearer and cleaner; and | |
| 144 * it is a slave-side script, so its behavior can be modified without | |
| 145 restarting the master. | |
| 146 | |
| 147 Why Git, you ask? Because that is the direction that the Chromium project is | |
| 148 heading. This step is an integral part of the transition from using the SVN repo | |
| 149 at chrome/trunk/src to using the Git repo src.git. Please pardon the dust while | |
| 150 we fully convert everything to Git. This message will get out of your way | |
| 151 eventually, and the waterfall will be a happier place because of it. | |
| 152 | |
| 153 This step can be activated or deactivated independently on every builder on | |
| 154 every master. When it is active, the "gclient revert" and "update" steps become | |
| 155 no-ops. When it is inactive, it prints this message, cleans up after itself, and | |
| 156 lets everything else continue as though nothing has changed. Eventually, when | |
| 157 everything is stable enough, this step will replace them entirely. | |
| 158 | |
| 159 Debugging information: | |
| 160 (master/builder/slave may be unspecified on recipes) | |
| 161 master: %(master)s | |
| 162 builder: %(builder)s | |
| 163 slave: %(slave)s | |
| 164 forced by recipes: %(recipe)s | |
| 165 bot_update.py is:""" | |
| 166 | |
| 167 ACTIVATED_MESSAGE = """ACTIVE. | |
| 168 The bot will perform a Git checkout in this step. | |
| 169 The "gclient revert" and "update" steps are no-ops. | |
| 170 | |
| 171 """ | |
| 172 | |
| 173 NOT_ACTIVATED_MESSAGE = """INACTIVE. | |
| 174 This step does nothing. You actually want to look at the "update" step. | |
| 175 | |
| 176 """ | |
| 177 | |
| 178 | |
| 179 GCLIENT_TEMPLATE = """solutions = %(solutions)s | |
| 180 | |
| 181 cache_dir = r%(cache_dir)s | |
| 182 %(target_os)s | |
| 183 %(target_os_only)s | |
| 184 """ | |
| 185 | |
| 186 | |
# Load internal (Google-only) configuration overrides from build_internal,
# if that checkout exists next to this one. bot_update_cfg.py populates
# names such as RECOGNIZED_PATHS / ENABLED_MASTERS / DISABLED_BUILDERS,
# which are merged into the module-level tables below.
internal_data = {}
if os.path.isdir(BUILD_INTERNAL_DIR):
  local_vars = {}
  try:
    execfile(os.path.join(
        BUILD_INTERNAL_DIR, 'scripts', 'slave', 'bot_update_cfg.py'),
        local_vars)
  except Exception:
    # Same as if BUILD_INTERNAL_DIR didn't exist in the first place.
    print 'Warning: unable to read internal configuration file.'
    print 'If this is an internal bot, this step may be erroneously inactive.'
  internal_data = local_vars
| 199 | |
| 200 RECOGNIZED_PATHS = { | |
| 201 # If SVN path matches key, the entire URL is rewritten to the Git url. | |
| 202 '/chrome/trunk/src': | |
| 203 CHROMIUM_SRC_URL, | |
| 204 '/chrome/trunk/src/tools/cros.DEPS': | |
| 205 CHROMIUM_GIT_HOST + '/chromium/src/tools/cros.DEPS.git', | |
| 206 } | |
| 207 RECOGNIZED_PATHS.update(internal_data.get('RECOGNIZED_PATHS', {})) | |
| 208 | |
| 209 ENABLED_MASTERS = [ | |
| 210 'bot_update.always_on', | |
| 211 'chromium.chrome', | |
| 212 'chromium.chromedriver', | |
| 213 'chromium.chromiumos', | |
| 214 'chromium', | |
| 215 'chromium.fyi', | |
| 216 'chromium.gpu', | |
| 217 'chromium.gpu.fyi', | |
| 218 'chromium.infra', | |
| 219 'chromium.infra.cron', | |
| 220 'chromium.linux', | |
| 221 'chromium.lkgr', | |
| 222 'chromium.mac', | |
| 223 'chromium.memory', | |
| 224 'chromium.memory.fyi', | |
| 225 'chromium.perf', | |
| 226 'chromium.perf.fyi', | |
| 227 'chromium.swarm', | |
| 228 'chromium.webkit', | |
| 229 'chromium.webrtc', | |
| 230 'chromium.webrtc.fyi', | |
| 231 'chromium.win', | |
| 232 'client.drmemory', | |
| 233 'client.mojo', | |
| 234 'client.nacl', | |
| 235 'client.nacl.ports', | |
| 236 'client.nacl.sdk', | |
| 237 'client.nacl.toolchain', | |
| 238 'client.skia', | |
| 239 'client.skia.fyi', | |
| 240 'client.v8', | |
| 241 'client.v8.branches', | |
| 242 'client.v8.fyi', | |
| 243 'client.webrtc', | |
| 244 'client.webrtc.fyi', | |
| 245 'tryserver.blink', | |
| 246 'tryserver.client.mojo', | |
| 247 'tryserver.chromium.linux', | |
| 248 'tryserver.chromium.mac', | |
| 249 'tryserver.chromium.perf', | |
| 250 'tryserver.chromium.win', | |
| 251 'tryserver.nacl', | |
| 252 'tryserver.v8', | |
| 253 'tryserver.webrtc', | |
| 254 ] | |
| 255 ENABLED_MASTERS += internal_data.get('ENABLED_MASTERS', []) | |
| 256 | |
| 257 ENABLED_BUILDERS = { | |
| 258 'client.dart.fyi': [ | |
| 259 'v8-linux-release', | |
| 260 'v8-mac-release', | |
| 261 'v8-win-release', | |
| 262 ], | |
| 263 'client.dynamorio': [ | |
| 264 'linux-v8-dr', | |
| 265 ], | |
| 266 } | |
| 267 ENABLED_BUILDERS.update(internal_data.get('ENABLED_BUILDERS', {})) | |
| 268 | |
| 269 ENABLED_SLAVES = {} | |
| 270 ENABLED_SLAVES.update(internal_data.get('ENABLED_SLAVES', {})) | |
| 271 | |
| 272 # Disabled filters get run AFTER enabled filters, so for example if a builder | |
| 273 # config is enabled, but a bot on that builder is disabled, that bot will | |
| 274 # be disabled. | |
| 275 DISABLED_BUILDERS = {} | |
| 276 DISABLED_BUILDERS.update(internal_data.get('DISABLED_BUILDERS', {})) | |
| 277 | |
| 278 DISABLED_SLAVES = {} | |
| 279 DISABLED_SLAVES.update(internal_data.get('DISABLED_SLAVES', {})) | |
| 280 | |
| 281 HEAD_BUILDERS = {} | |
| 282 HEAD_BUILDERS.update(internal_data.get('HEAD_BUILDERS', {})) | |
| 283 | |
| 284 # These masters work only in Git, meaning for got_revision, always output | |
| 285 # a git hash rather than a SVN rev. | |
| 286 GIT_MASTERS = [ | |
| 287 'client.v8', | |
| 288 'client.v8.branches', | |
| 289 'tryserver.v8', | |
| 290 ] | |
| 291 GIT_MASTERS += internal_data.get('GIT_MASTERS', []) | |
| 292 | |
| 293 | |
| 294 # How many times to retry failed subprocess calls. | |
| 295 RETRIES = 3 | |
| 296 | |
| 297 # Find deps2git | |
| 298 DEPS2GIT_DIR_PATH = path.join(SCRIPTS_DIR, 'tools', 'deps2git') | |
| 299 DEPS2GIT_PATH = path.join(DEPS2GIT_DIR_PATH, 'deps2git.py') | |
| 300 S2G_INTERNAL_PATH = path.join(SCRIPTS_DIR, 'tools', 'deps2git_internal', | |
| 301 'svn_to_git_internal.py') | |
| 302 | |
| 303 # ../../cache_dir aka /b/build/slave/cache_dir | |
| 304 GIT_CACHE_PATH = path.join(DEPOT_TOOLS_DIR, 'git_cache.py') | |
| 305 CACHE_DIR = path.join(SLAVE_DIR, 'cache_dir') | |
| 306 # Because we print CACHE_DIR out into a .gclient file, and then later run | |
| 307 # eval() on it, backslashes need to be escaped, otherwise "E:\b\build" gets | |
| 308 # parsed as "E:[\x08][\x08]uild". | |
| 309 if sys.platform.startswith('win'): | |
| 310 CACHE_DIR = CACHE_DIR.replace('\\', '\\\\') | |
| 311 | |
| 312 # Find the patch tool. | |
| 313 if sys.platform.startswith('win'): | |
| 314 PATCH_TOOL = path.join(BUILD_INTERNAL_DIR, 'tools', 'patch.EXE') | |
| 315 else: | |
| 316 PATCH_TOOL = '/usr/bin/patch' | |
| 317 | |
| 318 # If there is less than 100GB of disk space on the system, then we do | |
| 319 # a shallow checkout. | |
| 320 SHALLOW_CLONE_THRESHOLD = 100 * 1024 * 1024 * 1024 | |
| 321 | |
| 322 | |
class SubprocessFailed(Exception):
  """Raised when a subprocess exhausts its retries or its result_fn says FAIL.

  Attributes:
    code: exit code of the failing subprocess.
    output: combined stdout/stderr captured from the subprocess.
  """

  def __init__(self, message, code, output):
    super(SubprocessFailed, self).__init__(message)
    self.code = code
    self.output = output
| 328 | |
| 329 | |
class PatchFailed(SubprocessFailed):
  """Raised when applying a patch subprocess step fails."""
  pass
| 332 | |
| 333 | |
class GclientSyncFailed(SubprocessFailed):
  """Raised when 'gclient sync' fails, so callers can catch it separately."""
  pass
| 336 | |
| 337 | |
class SVNRevisionNotFound(Exception):
  """Raised when an SVN revision cannot be resolved to a git hash."""
  pass
| 340 | |
| 341 | |
class InvalidDiff(Exception):
  """Raised when a diff cannot be processed."""
  pass
| 344 | |
| 345 | |
class Inactive(Exception):
  """Not really an exception, just used to exit early cleanly.

  Flow-control marker: raised to unwind out of the update flow without
  reporting a failure.
  """
  pass
| 349 | |
| 350 | |
# Tri-state sentinels returned by a call()'s result_fn to drive retry logic:
#   OK    - the attempt succeeded; call() returns its output.
#   RETRY - the attempt failed; call() retries if attempts remain.
#   FAIL  - the attempt failed fatally; call() stops retrying immediately.
RETRY = object()
OK = object()
FAIL = object()
| 354 | |
def call(*args, **kwargs):
  """Interactive subprocess call.

  Runs *args as a subprocess, teeing its output to stdout while also
  capturing it, retrying failed attempts up to 'tries' times.

  Special keyword arguments (consumed here, not passed to Popen):
    result_fn: fn(exit_code, output) -> OK/RETRY/FAIL deciding how to treat
        each attempt. Defaults to RETRY on nonzero exit, OK otherwise.
    stdin_data: string piped to the subprocess's stdin, then stdin is closed.
    tries: maximum number of attempts (defaults to RETRIES).

  Returns:
    The captured output (str) of the first successful attempt.

  Raises:
    SubprocessFailed: when all attempts fail, or result_fn returns FAIL.
  """
  kwargs['stdout'] = subprocess.PIPE
  kwargs['stderr'] = subprocess.STDOUT
  kwargs.setdefault('bufsize', BUF_SIZE)
  cwd = kwargs.get('cwd', os.getcwd())
  result_fn = kwargs.pop('result_fn', lambda code, out: RETRY if code else OK)
  stdin_data = kwargs.pop('stdin_data', None)
  tries = kwargs.pop('tries', RETRIES)
  if stdin_data:
    kwargs['stdin'] = subprocess.PIPE
  # NOTE(review): 'out' is created once outside the retry loop and never
  # reset, so on retries 'outval' contains the output of ALL attempts so far
  # -- confirm whether that is intended.
  out = cStringIO.StringIO()
  # The caller-supplied 'env' augments (rather than replaces) os.environ.
  new_env = kwargs.get('env', {})
  env = copy.copy(os.environ)
  env.update(new_env)
  kwargs['env'] = env
  attempt = 0
  for attempt in range(1, tries + 1):
    # NOTE(review): attempt starts at 1, so this labels even the first
    # attempt as "(retry #1)" -- the 'if attempt' guard never suppresses it.
    attempt_msg = ' (retry #%d)' % attempt if attempt else ''
    if new_env:
      print '===Injecting Environment Variables==='
      for k, v in sorted(new_env.items()):
        print '%s: %s' % (k, v)
    print '===Running %s%s===' % (' '.join(args), attempt_msg)
    start_time = time.time()
    proc = subprocess.Popen(args, **kwargs)
    if stdin_data:
      proc.stdin.write(stdin_data)
      proc.stdin.close()
    # This is here because passing 'sys.stdout' into stdout for proc will
    # produce out of order output.
    hanging_cr = False
    while True:
      buf = proc.stdout.read(BUF_SIZE)
      if not buf:
        break
      # A chunk may end mid-CRLF; hold the '\r' back until the next chunk so
      # the CR/LF normalization below sees the full sequence.
      if hanging_cr:
        buf = '\r' + buf
      hanging_cr = buf.endswith('\r')
      if hanging_cr:
        buf = buf[:-1]
      # Normalize CRLF / lone CR to LF so captured logs are consistent.
      buf = buf.replace('\r\n', '\n').replace('\r', '\n')
      sys.stdout.write(buf)
      out.write(buf)
    if hanging_cr:
      sys.stdout.write('\n')
      out.write('\n')

    code = proc.wait()
    elapsed_time = ((time.time() - start_time) / 60.0)  # minutes
    outval = out.getvalue()
    result = result_fn(code, outval)
    if result in (FAIL, RETRY):
      print '===Failed in %.1f mins===' % elapsed_time
      print
    else:
      print '===Succeeded in %.1f mins===' % elapsed_time
      print
      return outval
    if result is FAIL:
      break

  raise SubprocessFailed('%s failed with code %d in %s after %d attempts.' %
                         (' '.join(args), code, cwd, attempt),
                         code, outval)
| 420 | |
| 421 | |
def git(*args, **kwargs):
  """Wrapper around call() specifically for Git commands."""
  if args and args[0] == 'cache':
    # "git cache" is not a real git subcommand; reroute it to depot_tools'
    # git_cache.py instead.
    command = (sys.executable, '-u', GIT_CACHE_PATH) + args[1:]
  else:
    # On windows, subprocess doesn't fuzzy-match 'git' to 'git.bat', so we
    # have to do it explicitly. This is better than passing shell=True.
    executable = 'git.bat' if sys.platform.startswith('win') else 'git'
    command = (executable,) + args
  return call(*command, **kwargs)
| 435 | |
| 436 | |
def get_gclient_spec(solutions, target_os, target_os_only):
  """Render the text of a .gclient file for the given solutions/OS settings."""
  spec_fields = {
      'solutions': pprint.pformat(solutions, indent=4),
      # CACHE_DIR is interpolated into a raw-string literal in the template.
      'cache_dir': '"%s"' % CACHE_DIR,
      'target_os': ('\ntarget_os=%s' % target_os) if target_os else '',
      'target_os_only': '\ntarget_os_only=%s' % target_os_only,
  }
  return GCLIENT_TEMPLATE % spec_fields
| 444 | |
| 445 | |
def check_enabled(master, builder, slave):
  """Returns True if bot_update is enabled for this master/builder/slave."""
  if master in ENABLED_MASTERS:
    return True
  if builder in (ENABLED_BUILDERS.get(master) or ()):
    return True
  return slave in (ENABLED_SLAVES.get(master) or ())
| 456 | |
| 457 | |
def check_disabled(master, builder, slave):
  """Returns True if disabled, False if not disabled."""
  if builder in (DISABLED_BUILDERS.get(master) or ()):
    return True
  return slave in (DISABLED_SLAVES.get(master) or ())
| 467 | |
| 468 | |
def check_valid_host(master, builder, slave):
  """A host is valid when it is enabled and not explicitly disabled."""
  if not check_enabled(master, builder, slave):
    return False
  return not check_disabled(master, builder, slave)
| 472 | |
| 473 | |
def maybe_ignore_revision(master, builder, revision):
  """Handle builders that don't care what buildbot tells them to build.

  This is especially the case with builders that build from buildspecs and/or
  trigger off multiple repositories, where the --revision passed in has nothing
  to do with the solution being built. Clearing the revision in this case
  causes bot_update to use HEAD rather that trying to checkout an inappropriate
  version of the solution.
  """
  if builder in (HEAD_BUILDERS.get(master) or ()):
    return []
  return revision
| 487 | |
| 488 | |
| 489 def solutions_printer(solutions): | |
| 490 """Prints gclient solution to stdout.""" | |
| 491 print 'Gclient Solutions' | |
| 492 print '=================' | |
| 493 for solution in solutions: | |
| 494 name = solution.get('name') | |
| 495 url = solution.get('url') | |
| 496 print '%s (%s)' % (name, url) | |
| 497 if solution.get('deps_file'): | |
| 498 print ' Dependencies file is %s' % solution['deps_file'] | |
| 499 if 'managed' in solution: | |
| 500 print ' Managed mode is %s' % ('ON' if solution['managed'] else 'OFF') | |
| 501 custom_vars = solution.get('custom_vars') | |
| 502 if custom_vars: | |
| 503 print ' Custom Variables:' | |
| 504 for var_name, var_value in sorted(custom_vars.iteritems()): | |
| 505 print ' %s = %s' % (var_name, var_value) | |
| 506 custom_deps = solution.get('custom_deps') | |
| 507 if 'custom_deps' in solution: | |
| 508 print ' Custom Dependencies:' | |
| 509 for deps_name, deps_value in sorted(custom_deps.iteritems()): | |
| 510 if deps_value: | |
| 511 print ' %s -> %s' % (deps_name, deps_value) | |
| 512 else: | |
| 513 print ' %s: Ignore' % deps_name | |
| 514 for k, v in solution.iteritems(): | |
| 515 # Print out all the keys we don't know about. | |
| 516 if k in ['name', 'url', 'deps_file', 'custom_vars', 'custom_deps', | |
| 517 'managed']: | |
| 518 continue | |
| 519 print ' %s is %s' % (k, v) | |
| 520 print | |
| 521 | |
| 522 | |
def solutions_to_git(input_solutions):
  """Modifies urls in solutions to point at Git repos.

  Returns:
    (solutions, root, buildspec) tuple, where:
      solutions: deep copy of input_solutions with urls rewritten to git.
      root: the url path of the first solution (its svn root).
      buildspec: a BUILDSPEC_TYPE namedtuple if the first solution matched
          BUILDSPEC_RE, otherwise None.
  """
  assert input_solutions
  solutions = copy.deepcopy(input_solutions)
  first_solution = True
  buildspec = None
  for solution in solutions:
    original_url = solution['url']
    parsed_url = urlparse.urlparse(original_url)
    parsed_path = parsed_url.path

    # Rewrite SVN urls into Git urls.
    buildspec_m = re.match(BUILDSPEC_RE, parsed_path)
    if first_solution and buildspec_m:
      # Official-build buildspec: point at the git buildspec repo and use the
      # per-version .DEPS.git inside it.
      solution['url'] = GIT_BUILDSPEC_PATH
      buildspec = BUILDSPEC_TYPE(
          container=buildspec_m.group(1),
          version=buildspec_m.group(2),
      )
      solution['deps_file'] = path.join(buildspec.container, buildspec.version,
                                        '.DEPS.git')
    elif parsed_path in RECOGNIZED_PATHS:
      solution['url'] = RECOGNIZED_PATHS[parsed_path]
      solution['deps_file'] = '.DEPS.git'
    elif parsed_url.scheme == 'https' and 'googlesource' in parsed_url.netloc:
      # Already a googlesource git url; leave it alone.
      pass
    else:
      print 'Warning: %s' % ('path %r not recognized' % parsed_path,)

    # Strip out deps containing $$V8_REV$$, etc.
    if 'custom_deps' in solution:
      new_custom_deps = {}
      for deps_name, deps_value in solution['custom_deps'].iteritems():
        if deps_value and '$$' in deps_value:
          print 'Dropping %s:%s from custom deps' % (deps_name, deps_value)
        else:
          new_custom_deps[deps_name] = deps_value
      solution['custom_deps'] = new_custom_deps

    if first_solution:
      root = parsed_path
      first_solution = False

    solution['managed'] = False
    # We don't want gclient to be using a safesync URL. Instead it should
    # using the lkgr/lkcr branch/tags.
    if 'safesync_url' in solution:
      print 'Removing safesync url %s from %s' % (solution['safesync_url'],
                                                  parsed_path)
      del solution['safesync_url']
  return solutions, root, buildspec
| 577 | |
| 578 | |
def remove(target):
  """Remove a target by moving it into build.dead."""
  graveyard = path.join(BUILDER_DIR, 'build.dead')
  if not path.exists(graveyard):
    os.makedirs(graveyard)
  # A random hex name avoids collisions with previously "removed" targets.
  os.rename(target, path.join(graveyard, uuid.uuid4().hex))
| 585 | |
| 586 | |
def ensure_no_checkout(dir_names, scm_dirname):
  """Ensure that there is no undesired checkout under build/.

  If there is an incorrect checkout under build/, then
  move build/ to build.dead/
  This function will check each directory in dir_names.

  scm_dirname is expected to be either ['.svn', '.git'], or '*' to wipe
  unconditionally.
  """
  assert scm_dirname in ['.svn', '.git', '*']
  has_checkout = any(path.exists(path.join(os.getcwd(), dir_name, scm_dirname))
                     for dir_name in dir_names)

  # '*' means wipe regardless of what kind of checkout (if any) is present.
  if has_checkout or scm_dirname == '*':
    build_dir = os.getcwd()
    prefix = ''
    if scm_dirname != '*':
      prefix = '%s detected in checkout, ' % scm_dirname

    # Note: everything under build_dir is moved aside, not just dir_names.
    for filename in os.listdir(build_dir):
      deletion_target = path.join(build_dir, filename)
      # Trailing comma keeps 'done' on the same output line (Python 2 print).
      print '%sdeleting %s...' % (prefix, deletion_target),
      remove(deletion_target)
      print 'done'
| 611 | |
| 612 | |
def gclient_configure(solutions, target_os, target_os_only):
  """Should do the same thing as gclient --spec='...'."""
  spec = get_gclient_spec(solutions, target_os, target_os_only)
  # Write as UTF-8 so non-ascii solution data round-trips.
  with codecs.open('.gclient', mode='w', encoding='utf-8') as f:
    f.write(spec)
| 617 | |
| 618 | |
def gclient_sync(with_branch_heads, shallow):
  """Run 'gclient sync' and return its parsed --output-json contents.

  Raises GclientSyncFailed if the sync subprocess fails.
  """
  # We just need to allocate a filename.
  fd, output_json = tempfile.mkstemp(suffix='.json')
  os.close(fd)
  is_win = sys.platform.startswith('win')
  cmd = ['gclient.bat' if is_win else 'gclient',
         'sync', '--verbose', '--reset', '--force',
         '--ignore_locks', '--output-json', output_json,
         '--nohooks', '--noprehooks', '--delete_unversioned_trees']
  if with_branch_heads:
    cmd.append('--with_branch_heads')
  if shallow:
    cmd.append('--shallow')

  try:
    call(*cmd)
  except SubprocessFailed as e:
    # Throw a GclientSyncFailed exception so we can catch this independently.
    raise GclientSyncFailed(e.message, e.code, e.output)
  else:
    with open(output_json) as f:
      return json.load(f)
  finally:
    # Always clean up the temp file, success or failure.
    os.remove(output_json)
| 642 | |
| 643 | |
def gclient_runhooks(gyp_envs):
  """Run 'gclient runhooks' with the given 'KEY=VALUE' env strings injected."""
  is_win = sys.platform.startswith('win')
  env = dict(env_var.split('=', 1) for env_var in gyp_envs)
  call('gclient.bat' if is_win else 'gclient', 'runhooks', env=env)
| 648 | |
| 649 | |
def gclient_revinfo():
  """Return the output of 'gclient revinfo -a', or '' if there is none."""
  if sys.platform.startswith('win'):
    gclient_bin = 'gclient.bat'
  else:
    gclient_bin = 'gclient'
  return call(gclient_bin, 'revinfo', '-a') or ''
| 653 | |
| 654 | |
| 655 def create_manifest(): | |
| 656 manifest = {} | |
| 657 output = gclient_revinfo() | |
| 658 for line in output.strip().splitlines(): | |
| 659 match = REVINFO_RE.match(line.strip()) | |
| 660 if match: | |
| 661 manifest[match.group(1)] = { | |
| 662 'repository': match.group(2), | |
| 663 'revision': match.group(3), | |
| 664 } | |
| 665 else: | |
| 666 print "WARNING: Couldn't match revinfo line:\n%s" % line | |
| 667 return manifest | |
| 668 | |
| 669 | |
def get_commit_message_footer_map(message):
  """Returns: (dict) A dictionary of commit message footer entries.

  The footer is the final run of non-blank lines in the message; a blank
  line resets the candidate block. If any line of that block is not a valid
  'Key: value' entry, the whole footer is treated as invalid (empty dict).
  """
  # Collect the last run of non-blank lines.
  footer_lines = []
  for raw_line in message.strip().splitlines():
    stripped = raw_line.strip()
    if not stripped:
      footer_lines = []
      continue
    footer_lines.append(stripped)

  # Parse the footer.
  footers = {}
  for footer_line in footer_lines:
    match = COMMIT_FOOTER_ENTRY_RE.match(footer_line)
    if not match:
      # If any single line isn't valid, the entire footer is invalid.
      return {}
    footers[match.group(1)] = match.group(2).strip()
  return footers
| 693 | |
| 694 | |
def get_commit_message_footer(message, key):
  """Returns: (str/None) The footer value for 'key', or None if none was found.
  """
  footers = get_commit_message_footer_map(message)
  return footers.get(key)
| 699 | |
| 700 | |
def get_svn_rev(git_hash, dir_name):
  """Extract the SVN revision number from a commit's git-svn-id footer.

  Returns the revision as an int, or None when the commit has no parseable
  git-svn-id footer.
  """
  commit_log = git('log', '-1', git_hash, cwd=dir_name)
  footer = get_commit_message_footer(commit_log, GIT_SVN_ID_FOOTER_KEY)
  if not footer:
    return None
  match = GIT_SVN_ID_RE.match(footer)
  return int(match.group(2)) if match else None
| 710 | |
| 711 | |
def get_git_hash(revision, branch, sln_dir):
  """We want to search for the SVN revision on the git-svn branch.

  Note that git will search backwards from origin/master.

  Raises SVNRevisionNotFound when no commit carries the given svn revision.
  """
  pattern = "^%s: [^ ]*@%s " % (GIT_SVN_ID_FOOTER_KEY, revision)
  if branch.startswith('refs/'):
    ref = branch
  else:
    ref = 'origin/%s' % branch
  found = git('log', '-E', '--grep', pattern, '--format=%H', '--max-count=1',
              ref, cwd=sln_dir).strip()
  if not found:
    raise SVNRevisionNotFound(
        'We can\'t resolve svn r%s into a git hash in %s' % (revision, sln_dir))
  return found
| 725 | |
| 726 | |
def _last_commit_for_file(filename, repo_base):
  """Hash of the most recent commit touching |filename| within |repo_base|."""
  return git('log', '--format=%H', '--max-count=1', '--', filename,
             cwd=repo_base).strip()
| 730 | |
| 731 | |
def need_to_run_deps2git(repo_base, deps_file, deps_git_file):
  """Checks to see if we need to run deps2git.

  Returns True if there was a DEPS change after the last .DEPS.git update
  or if DEPS has local modifications.
  """
  # A locally modified DEPS always needs conversion.
  status = git('status', '--porcelain', deps_file, cwd=repo_base).strip()
  if status and status.startswith('M '):
    return True

  deps_ref = _last_commit_for_file(deps_file, repo_base)
  deps_git_ref = _last_commit_for_file(deps_git_file, repo_base)
  merge_base_ref = git('merge-base', deps_ref, deps_git_ref,
                       cwd=repo_base).strip()

  # If the merge base of the last DEPS and last .DEPS.git commits is not the
  # last DEPS commit itself, then DEPS was committed after the last .DEPS.git
  # update and conversion is needed.
  return deps_ref != merge_base_ref
| 753 | |
| 754 | |
def get_git_buildspec(buildspec_path, buildspec_version):
  """Get the git buildspec of a version, return its contents.

  The contents are returned instead of the file so that we can check the
  repository into a temp directory and confine the cleanup logic here.

  Polls the git buildspec mirror (up to 30 tries, 10s apart) waiting for the
  auto-converted .DEPS.git to appear; re-raises SubprocessFailed if it never
  does.
  """
  git('cache', 'populate', '--ignore_locks', '-v', '--cache-dir', CACHE_DIR,
      GIT_BUILDSPEC_REPO)
  mirror_dir = git(
      'cache', 'exists', '--quiet', '--cache-dir', CACHE_DIR,
      GIT_BUILDSPEC_REPO).strip()
  TOTAL_TRIES = 30
  for tries in range(TOTAL_TRIES):
    try:
      return git(
          'show',
          'master:%s/%s/.DEPS.git' % (buildspec_path, buildspec_version),
          cwd=mirror_dir
      )
    except SubprocessFailed:
      if tries < TOTAL_TRIES - 1:
        # BUG FIX: the original print was missing its '%' substitution, so it
        # printed a literal '%s' instead of the buildspec version.
        print ('Git Buildspec for %s not committed yet, waiting 10 seconds...'
               % buildspec_version)
        time.sleep(10)
        # Refresh the mirror before the next attempt.
        git('cache', 'populate', '--ignore_locks', '-v', '--cache-dir',
            CACHE_DIR, GIT_BUILDSPEC_REPO)
      else:
        print >> sys.stderr, '%s/%s .DEPS.git not found, ' % (
            buildspec_path, buildspec_version)
        print >> sys.stderr, 'the publish_deps.py "privategit" step in the ',
        print >> sys.stderr, 'Chrome release process might have failed. ',
        print >> sys.stderr, 'Please contact chrome-re@google.com.'
        raise
| 787 | |
| 788 | |
def buildspecs2git(sln_dir, buildspec):
  """This is like deps2git, but for buildspecs.

  Because buildspecs are vastly different than normal DEPS files, we cannot
  use deps2git.py to generate git versions of the git DEPS. Fortunately
  we don't have buildspec trybots, and there is already a service that
  generates git DEPS for every buildspec commit already, so we can leverage
  that service so that we don't need to run buildspec2git.py serially.

  This checks the commit message of the current DEPS file for the release
  number, waits in a busy loop for the corresponding .DEPS.git file to be
  committed into the git_buildspecs repository, then writes its contents
  next to the DEPS file.

  Args:
    sln_dir: solution directory (relative to cwd) containing the buildspec.
    buildspec: BUILDSPEC_TYPE namedtuple (container, version).
  """
  repo_base = path.join(os.getcwd(), sln_dir)
  deps_file = path.join(repo_base, buildspec.container, buildspec.version,
                        'DEPS')
  deps_git_file = path.join(repo_base, buildspec.container, buildspec.version,
                            '.DEPS.git')
  # Full commit message (%B) of the last commit touching DEPS.
  deps_log = git('log', '-1', '--format=%B', deps_file, cwd=repo_base)

  # Identify the path from the container name
  if buildspec.container == 'branches':
    # Path to the buildspec is: .../branches/VERSION
    buildspec_path = buildspec.container
    buildspec_version = buildspec.version
  else:
    # Scan through known commit headers for the number
    for buildspec_re in BUILDSPEC_COMMIT_RE:
      m = buildspec_re.search(deps_log)
      if m:
        break
    if not m:
      raise ValueError("Unable to parse buildspec from:\n%s" % (deps_log,))
    # Release versioned buildspecs are always in the 'releases' path.
    buildspec_path = 'releases'
    buildspec_version = m.group(1)

  # Busy-waits (inside get_git_buildspec) for the converted file to appear.
  git_buildspec = get_git_buildspec(buildspec_path, buildspec_version)
  with open(deps_git_file, 'wb') as f:
    f.write(git_buildspec)
| 829 | |
| 830 | |
| 831 def ensure_deps2git(solution, shallow): | |
| 832 repo_base = path.join(os.getcwd(), solution['name']) | |
| 833 deps_file = path.join(repo_base, 'DEPS') | |
| 834 deps_git_file = path.join(repo_base, '.DEPS.git') | |
| 835 if (not git('ls-files', 'DEPS', cwd=repo_base).strip() or | |
| 836 not git('ls-files', '.DEPS.git', cwd=repo_base).strip()): | |
| 837 return | |
| 838 | |
| 839 print 'Checking if %s is newer than %s' % (deps_file, deps_git_file) | |
| 840 if not need_to_run_deps2git(repo_base, deps_file, deps_git_file): | |
| 841 return | |
| 842 | |
| 843 print '===DEPS file modified, need to run deps2git===' | |
| 844 cmd = [sys.executable, DEPS2GIT_PATH, | |
| 845 '--workspace', os.getcwd(), | |
| 846 '--cache_dir', CACHE_DIR, | |
| 847 '--deps', deps_file, | |
| 848 '--out', deps_git_file] | |
| 849 if 'chrome-internal.googlesource' in solution['url']: | |
| 850 cmd.extend(['--extra-rules', S2G_INTERNAL_PATH]) | |
| 851 if shallow: | |
| 852 cmd.append('--shallow') | |
| 853 call(*cmd) | |
| 854 | |
| 855 | |
| 856 def emit_log_lines(name, lines): | |
| 857 for line in lines.splitlines(): | |
| 858 print '@@@STEP_LOG_LINE@%s@%s@@@' % (name, line) | |
| 859 print '@@@STEP_LOG_END@%s@@@' % name | |
| 860 | |
| 861 | |
| 862 def emit_properties(properties): | |
| 863 for property_name, property_value in sorted(properties.items()): | |
| 864 print '@@@SET_BUILD_PROPERTY@%s@"%s"@@@' % (property_name, property_value) | |
| 865 | |
| 866 | |
# Derived from:
# http://code.activestate.com/recipes/577972-disk-usage/?in=user-4178764
def get_total_disk_space():
  """Returns (total, free) disk space in bytes.

  Measures the filesystem that contains the current working directory.
  """
  cwd = os.getcwd()
  # Windows is the only platform that doesn't support os.statvfs, so
  # we need to special case this.
  if sys.platform.startswith('win'):
    _, total, free = (ctypes.c_ulonglong(), ctypes.c_ulonglong(), \
                      ctypes.c_ulonglong())
    # The wide-char (W) API is needed for unicode paths.  On Python 3 all
    # strings are unicode and the `unicode` name doesn't exist -- the
    # short-circuit on version_info keeps it from being evaluated there.
    if sys.version_info >= (3,) or isinstance(cwd, unicode):
      fn = ctypes.windll.kernel32.GetDiskFreeSpaceExW
    else:
      fn = ctypes.windll.kernel32.GetDiskFreeSpaceExA
    # Out-params: free-bytes-available-to-caller (ignored), total, free.
    ret = fn(cwd, ctypes.byref(_), ctypes.byref(total), ctypes.byref(free))
    if ret == 0:
      # WinError() will fetch the last error code.
      raise ctypes.WinError()
    return (total.value, free.value)

  else:
    # POSIX: sizes are block counts times the fragment size.
    st = os.statvfs(cwd)
    free = st.f_bavail * st.f_frsize
    total = st.f_blocks * st.f_frsize
    return (total, free)
| 891 | |
| 892 | |
def get_target_revision(folder_name, git_url, revisions):
  """Look up the pinned revision for a solution.

  Tries the slash-stripped folder name first, then the raw git URL.
  Returns None when neither key is present in `revisions`.
  """
  for key in (folder_name.strip('/'), git_url):
    if key in revisions:
      return revisions[key]
  return None
| 900 | |
| 901 | |
def force_revision(folder_name, revision):
  """Force-checkout `revision` in the git checkout at `folder_name`.

  `revision` may use "branch:revision" syntax; the branch defaults to
  'master'.  An all-digit revision shorter than 40 characters is treated
  as an SVN revision number and resolved to a git hash first.  An empty
  revision or 'HEAD' checks out the branch tip instead.
  """
  branch = 'master'
  parts = revision.split(':', 1)
  if len(parts) == 2:
    # Support for "branch:revision" syntax.
    branch, revision = parts

  if not revision or revision.upper() == 'HEAD':
    # Check out the remote branch tip (or the ref directly for refs/*).
    if branch.startswith('refs/'):
      target = branch
    else:
      target = 'origin/%s' % branch
  elif revision.isdigit() and len(revision) < 40:
    # Looks like an SVN revision number; map it to a git hash.
    target = get_git_hash(int(revision), branch, folder_name)
  else:
    # Already a git hash or ref; use it directly.
    target = revision
  git('checkout', '--force', target, cwd=folder_name)
| 920 | |
def git_checkout(solutions, revisions, shallow, refs):
  """Check out every solution from its git-cache mirror.

  For each solution: populate the local git cache, clone (or re-point and
  fetch) the working checkout from the mirror, then force-checkout the
  target revision.  Failures are retried up to 60 times per solution,
  wiping the checkout between attempts.

  Returns the HEAD hash of the first solution's checkout.
  """
  build_dir = os.getcwd()
  # Before we do anything, break all git_cache locks.
  if path.isdir(CACHE_DIR):
    git('cache', 'unlock', '-vv', '--force', '--all', '--cache-dir', CACHE_DIR)
    for item in os.listdir(CACHE_DIR):
      filename = os.path.join(CACHE_DIR, item)
      if item.endswith('.lock'):
        raise Exception('%s exists after cache unlock' % filename)
  first_solution = True
  for sln in solutions:
    # This is so we can loop back and try again if we need to wait for the
    # git mirrors to update from SVN.
    done = False
    tries_left = 60
    while not done:
      name = sln['name']
      url = sln['url']
      if url == CHROMIUM_SRC_URL or url + '.git' == CHROMIUM_SRC_URL:
        # Experiments show there's little to be gained from
        # a shallow clone of src.
        shallow = False
      sln_dir = path.join(build_dir, name)
      s = ['--shallow'] if shallow else []
      populate_cmd = (['cache', 'populate', '--ignore_locks', '-v',
                       '--cache-dir', CACHE_DIR] + s + [url])
      for ref in refs:
        populate_cmd.extend(['--ref', ref])
      git(*populate_cmd)
      mirror_dir = git(
          'cache', 'exists', '--quiet', '--cache-dir', CACHE_DIR, url).strip()
      clone_cmd = (
          'clone', '--no-checkout', '--local', '--shared', mirror_dir, sln_dir)

      try:
        if not path.isdir(sln_dir):
          git(*clone_cmd)
        else:
          # Reuse the existing checkout: re-point it at the (possibly moved)
          # mirror and fetch.
          git('remote', 'set-url', 'origin', mirror_dir, cwd=sln_dir)
          git('fetch', 'origin', cwd=sln_dir)
        for ref in refs:
          refspec = '%s:%s' % (ref, ref.lstrip('+'))
          git('fetch', 'origin', refspec, cwd=sln_dir)

        revision = get_target_revision(name, url, revisions) or 'HEAD'
        force_revision(sln_dir, revision)
        done = True
      except SubprocessFailed as e:
        # Exited abnormally, theres probably something wrong.
        # Lets wipe the checkout and try again.
        tries_left -= 1
        if tries_left > 0:
          print 'Something failed: %s.' % str(e)
          print 'waiting 5 seconds and trying again...'
          time.sleep(5)
        else:
          raise
        # Note: only reached on the retry path (the else branch raises).
        remove(sln_dir)
      except SVNRevisionNotFound:
        tries_left -= 1
        if tries_left > 0:
          # If we don't have the correct revision, wait and try again.
          print 'We can\'t find revision %s.' % revision
          print 'The svn to git replicator is probably falling behind.'
          print 'waiting 5 seconds and trying again...'
          time.sleep(5)
        else:
          raise

    git('clean', '-dff', cwd=sln_dir)

    if first_solution:
      # Remember HEAD of the first (primary) solution for the return value.
      git_ref = git('log', '--format=%H', '--max-count=1',
                    cwd=sln_dir).strip()
      first_solution = False
  return git_ref
| 997 | |
| 998 | |
def _download(url):
  """Fetch `url` and return its body, retrying RETRIES times on any error."""
  last_attempt = RETRIES - 1
  for attempt in xrange(RETRIES):
    try:
      return urllib2.urlopen(url).read()
    except Exception:
      # Swallow the error and retry, unless this was the final attempt.
      if attempt >= last_attempt:
        raise
| 1007 | |
| 1008 | |
def parse_diff(diff):
  """Split a unified diff into per-file chunks.

  Returns a list of (filename, diff_contents) pairs; diff_contents
  includes the "diff"/"Index:" header line itself.  Both git-style and
  SVN-style patch headers are recognized.

  Raises:
    InvalidDiff: if a header line cannot be parsed.
  """
  result = []
  chunk_lines = []
  header = None
  git_header_re = re.compile(r'diff (?:--git )?(\S+) (\S+)')
  svn_header_re = re.compile(r'Index: (.*)')

  for line in diff.splitlines():
    # "diff" marks git-style patches; "Index: " marks SVN-style ones.
    if line.startswith('diff') or line.startswith('Index: '):
      if header:
        # Close out the previous file's chunk.  The extra trailing newline
        # matches the historical output format.
        result.append((header, '%s\n' % ''.join(chunk_lines)))
      git_m = git_header_re.match(line)
      svn_m = svn_header_re.match(line)
      if git_m:
        from_file, to_file = git_m.group(1), git_m.group(2)
        if from_file != to_file and from_file.startswith('a/'):
          # git may prepend 'a/' and 'b/' in front of file paths.
          from_file = from_file[2:]
        header = from_file
      elif svn_m:
        header = svn_m.group(1)
      else:
        # Neither style matched; we can't process this header.
        raise InvalidDiff('Can\'t process header: %s\nFull diff:\n%s' %
                          (line, diff))
      chunk_lines = []
    chunk_lines.append('%s\n' % line)
  if header:
    # We hit EOF; save the final chunk (no extra newline here, matching
    # historical behavior).
    result.append((header, ''.join(chunk_lines)))
  return result
| 1052 | |
| 1053 | |
def get_svn_patch(patch_url):
  """Fetch patch from patch_url, return list of (filename, diff)"""
  is_win = sys.platform.startswith('win')
  svn_exe = 'svn.bat' if is_win else 'svn'
  return parse_diff(call(svn_exe, 'cat', patch_url))
| 1059 | |
| 1060 | |
| 1061 def apply_svn_patch(patch_root, patches, whitelist=None, blacklist=None): | |
| 1062 """Expects a list of (filename, diff), applies it on top of patch_root.""" | |
| 1063 if whitelist: | |
| 1064 patches = [(name, diff) for name, diff in patches if name in whitelist] | |
| 1065 elif blacklist: | |
| 1066 patches = [(name, diff) for name, diff in patches if name not in blacklist] | |
| 1067 diffs = [diff for _, diff in patches] | |
| 1068 patch = ''.join(diffs) | |
| 1069 | |
| 1070 if patch: | |
| 1071 print '===Patching files===' | |
| 1072 for filename, _ in patches: | |
| 1073 print 'Patching %s' % filename | |
| 1074 try: | |
| 1075 call(PATCH_TOOL, '-p0', '--remove-empty-files', '--force', '--forward', | |
| 1076 stdin_data=patch, cwd=patch_root, tries=1) | |
| 1077 for filename, _ in patches: | |
| 1078 full_filename = path.abspath(path.join(patch_root, filename)) | |
| 1079 git('add', full_filename, cwd=path.dirname(full_filename)) | |
| 1080 except SubprocessFailed as e: | |
| 1081 raise PatchFailed(e.message, e.code, e.output) | |
| 1082 | |
def apply_rietveld_issue(issue, patchset, root, server, _rev_map, _revision,
                         email_file, key_file, whitelist=None, blacklist=None):
  """Apply a Rietveld issue on top of `root` via the apply_issue tool."""
  apply_issue_bin = ('apply_issue.bat' if sys.platform.startswith('win')
                     else 'apply_issue')
  cmd = [
      apply_issue_bin,
      # The patch will be applied on top of this directory.
      '--root_dir', root,
      # Tell apply_issue how to fetch the patch.
      '--issue', issue,
      '--server', server,
      # Always run apply_issue.py, otherwise it would see update.flag
      # and then bail out.
      '--force',
      # Don't run gclient sync when it sees a DEPS change.
      '--ignore_deps',
  ]
  # Use an oauth key file if specified, otherwise run unauthenticated.
  if email_file and key_file:
    cmd += ['--email-file', email_file, '--private-key-file', key_file]
  else:
    cmd += ['--no-auth']

  if patchset:
    cmd += ['--patchset', patchset]
  # The whitelist takes precedence over the blacklist.
  for item in (whitelist or []):
    cmd += ['--whitelist', item]
  if not whitelist:
    for item in (blacklist or []):
      cmd += ['--blacklist', item]

  # Only try once, since subsequent failures hide the real failure.
  try:
    call(*cmd, tries=1)
  except SubprocessFailed as e:
    raise PatchFailed(e.message, e.code, e.output)
| 1119 | |
| 1120 | |
def check_flag(flag_file):
  """Tell whether the bot_update flag file is present on disk."""
  present = os.path.isfile(flag_file)
  return present
| 1124 | |
| 1125 | |
def delete_flag(flag_file):
  """Remove the bot_update flag file if it exists (no-op otherwise)."""
  if not os.path.isfile(flag_file):
    return
  os.remove(flag_file)
| 1130 | |
| 1131 | |
| 1132 def emit_flag(flag_file): | |
| 1133 """Deposit a bot update flag on the system to tell gclient not to run.""" | |
| 1134 print 'Emitting flag file at %s' % flag_file | |
| 1135 with open(flag_file, 'wb') as f: | |
| 1136 f.write('Success!') | |
| 1137 | |
| 1138 | |
def get_commit_position_for_git_svn(url, revision):
  """Generates a commit position string for a 'git-svn' URL/revision.

  If the 'git-svn' URL maps to a known project, we will construct a commit
  position branch value by applying substitution on the SVN URL.

  Returns a string of the form '<branch>@{#<revision>}'.
  """
  # Identify the base URL so we can strip off trunk/branch name
  project_config = branch = None
  for _, project_config in GIT_SVN_PROJECT_MAP.iteritems():
    if url.startswith(project_config['svn_url']):
      # `branch` is the path under this project's SVN root.
      branch = url[len(project_config['svn_url']):]
      break

  if branch:
    # Strip any leading slashes
    branch = branch.lstrip('/')

    # Try and map the branch
    # NOTE: `project_config` is the entry that matched (and broke out of)
    # the loop above; its 'branch_map' rewrites the SVN path to a git
    # branch name.  Only the first matching pattern is applied.
    for pattern, repl in project_config.get('branch_map', ()):
      nbranch, subn = re.subn(pattern, repl, branch, count=1)
      if subn:
        print 'INFO: Mapped SVN branch to Git branch [%s] => [%s]' % (
            branch, nbranch)
        branch = nbranch
        break
  else:
    # Use generic 'svn' branch
    print 'INFO: Could not resolve project for SVN URL %r' % (url,)
    branch = 'svn'
  return '%s@{#%s}' % (branch, revision)
| 1169 | |
| 1170 | |
def get_commit_position(git_path, revision='HEAD'):
  """Return the commit position string for `revision` in `git_path`.

  Reads the commit-message footers: an explicit commit-position footer
  (or the "original position" variant) wins; otherwise a 'git-svn'
  footer is converted into a commit position using its SVN URL and
  revision.  Returns None when neither footer is present.

  Raises:
    ValueError: if the 'git-svn' footer doesn't match the expected format.
  """
  log_message = git('log', '--format=%B', '-n1', revision, cwd=git_path)
  footers = get_commit_message_footer_map(log_message)

  # Prefer explicit commit position metadata.
  position = (footers.get(COMMIT_POSITION_FOOTER_KEY) or
              footers.get(COMMIT_ORIGINAL_POSITION_FOOTER_KEY))
  if position:
    return position

  # Fall back to composing one from 'git-svn' metadata.
  git_svn = footers.get(GIT_SVN_ID_FOOTER_KEY)
  if not git_svn:
    return None
  m = GIT_SVN_ID_RE.match(git_svn)
  if not m:
    raise ValueError("Invalid 'git-svn' value: [%s]" % (git_svn,))
  return get_commit_position_for_git_svn(m.group(1), m.group(2))
| 1202 | |
| 1203 | |
def parse_got_revision(gclient_output, got_revision_mapping, use_svn_revs):
  """Translate git gclient revision mapping to build properties.

  If use_svn_revs is True, git hashes in the revision mapping are
  translated to svn revision numbers, falling back to the git hash when
  no svn revision is found.
  """
  properties = {}
  # Normalize solution paths to end with exactly one slash.  (Uses an
  # explicit loop variable so the module-level `path` alias isn't
  # shadowed.)
  solutions_output = {}
  for sln_path, solution_output in gclient_output['solutions'].iteritems():
    solutions_output['%s/' % sln_path.rstrip('/')] = solution_output

  for dir_name, property_name in got_revision_mapping.iteritems():
    # Normalize dir_name the same way so lookups match.
    dir_name = '%s/' % dir_name.rstrip('/')
    if dir_name not in solutions_output:
      continue
    solution_output = solutions_output[dir_name]
    if solution_output.get('scm') is None:
      # An ignored DEPS entry: report 'None' for got_revision.
      git_revision = revision = commit_position = None
    else:
      # Since we are using .DEPS.git, everything had better be git.
      assert solution_output.get('scm') == 'git'
      git_revision = git('rev-parse', 'HEAD', cwd=dir_name).strip()
      if use_svn_revs:
        revision = get_svn_rev(git_revision, dir_name) or git_revision
      else:
        revision = git_revision
      commit_position = get_commit_position(dir_name)

    properties[property_name] = revision
    if revision != git_revision:
      properties['%s_git' % property_name] = git_revision
    if commit_position:
      properties['%s_cp' % property_name] = commit_position

  return properties
| 1244 | |
| 1245 | |
def emit_json(out_file, did_run, gclient_output=None, **kwargs):
  """Write run information into a JSON file."""
  # Later updates win: gclient output first, then did_run, then kwargs.
  output = dict(gclient_output or {})
  output['did_run'] = did_run
  output.update(kwargs)
  with open(out_file, 'wb') as f:
    f.write(json.dumps(output, sort_keys=True))
| 1254 | |
| 1255 | |
def ensure_deps_revisions(deps_url_mapping, solutions, revisions):
  """Ensure correct DEPS revisions, ignores solutions."""
  for deps_name, deps_data in sorted(deps_url_mapping.items()):
    if deps_name.strip('/') in solutions:
      # Already forced to the correct revision by git_checkout().
      continue
    target = get_target_revision(deps_name, deps_data.get('url', None),
                                 revisions)
    if not target:
      continue
    # TODO(hinoka): Catch SVNRevisionNotFound error maybe?
    git('fetch', 'origin', cwd=deps_name)
    force_revision(deps_name, target)
| 1269 | |
| 1270 | |
def ensure_checkout(solutions, revisions, first_sln, target_os, target_os_only,
                    patch_root, issue, patchset, patch_url, rietveld_server,
                    revision_mapping, apply_issue_email_file,
                    apply_issue_key_file, buildspec, gyp_env, shallow, runhooks,
                    refs):
  """Check out all solutions, apply patches, and run gclient sync.

  Order of operations: raw git checkout of each solution; apply the DEPS
  part of any patch (SVN patch url or Rietveld issue); generate .DEPS.git
  (buildspec service or deps2git); gclient sync; revert .DEPS.git; pin
  DEPS repos to requested revisions; apply the rest of the patch.

  Returns the parsed gclient sync output dict.
  """
  # Get a checkout of each solution, without DEPS or hooks.
  # Calling git directly because there is no way to run Gclient without
  # invoking DEPS.
  print 'Fetching Git checkout'

  git_ref = git_checkout(solutions, revisions, shallow, refs)

  patches = None
  if patch_url:
    patches = get_svn_patch(patch_url)

  # Apply ONLY the DEPS file of the patched solution first, so the sync
  # below picks up patched dependencies; remember what was applied.
  already_patched = []
  patch_root = patch_root or ''
  for solution in solutions:
    if (patch_root == solution['name'] or
        solution['name'].startswith(patch_root + '/')):
      relative_root = solution['name'][len(patch_root) + 1:]
      target = '/'.join([relative_root, 'DEPS']).lstrip('/')
      if patches:
        apply_svn_patch(patch_root, patches, whitelist=[target])
        already_patched.append(target)
      elif issue:
        apply_rietveld_issue(issue, patchset, patch_root, rietveld_server,
                             revision_mapping, git_ref, apply_issue_email_file,
                             apply_issue_key_file, whitelist=[target])
        already_patched.append(target)

  if buildspec:
    buildspecs2git(first_sln, buildspec)
  else:
    # Run deps2git if there is a DEPS change after the last .DEPS.git commit.
    for solution in solutions:
      ensure_deps2git(solution, shallow)

  # Ensure our build/ directory is set up with the correct .gclient file.
  gclient_configure(solutions, target_os, target_os_only)

  # Let gclient do the DEPS syncing.
  # The branch-head refspec is a special case because its possible Chrome
  # src, which contains the branch-head refspecs, is DEPSed in.
  gclient_output = gclient_sync(buildspec or BRANCH_HEADS_REFSPEC in refs,
                                shallow)

  # Now that gclient_sync has finished, we should revert any .DEPS.git so that
  # presubmit doesn't complain about it being modified.
  if (not buildspec and
      git('ls-files', '.DEPS.git', cwd=first_sln).strip()):
    git('checkout', 'HEAD', '--', '.DEPS.git', cwd=first_sln)

  if buildspec and runhooks:
    # Run gclient runhooks if we're on an official builder.
    # TODO(hinoka): Remove this when the official builders run their own
    # runhooks step.
    gclient_runhooks(gyp_env)

  # Finally, ensure that all DEPS are pinned to the correct revision.
  dir_names = [sln['name'] for sln in solutions]
  ensure_deps_revisions(gclient_output.get('solutions', {}),
                        dir_names, revisions)
  # Apply the rest of the patch here (sans DEPS)
  if patches:
    apply_svn_patch(patch_root, patches, blacklist=already_patched)
  elif issue:
    apply_rietveld_issue(issue, patchset, patch_root, rietveld_server,
                         revision_mapping, git_ref, apply_issue_email_file,
                         apply_issue_key_file, blacklist=already_patched)

  # Reset the deps_file point in the solutions so that hooks get run properly.
  for sln in solutions:
    sln['deps_file'] = sln.get('deps_file', 'DEPS').replace('.DEPS.git', 'DEPS')
  gclient_configure(solutions, target_os, target_os_only)

  return gclient_output
| 1349 | |
| 1350 | |
| 1351 def parse_revisions(revisions, root): | |
| 1352 """Turn a list of revision specs into a nice dictionary. | |
| 1353 | |
| 1354 We will always return a dict with {root: something}. By default if root | |
| 1355 is unspecified, or if revisions is [], then revision will be assigned 'HEAD' | |
| 1356 """ | |
| 1357 results = {root.strip('/'): 'HEAD'} | |
| 1358 expanded_revisions = [] | |
| 1359 for revision in revisions: | |
| 1360 # Allow rev1,rev2,rev3 format. | |
| 1361 # TODO(hinoka): Delete this when webkit switches to recipes. | |
| 1362 expanded_revisions.extend(revision.split(',')) | |
| 1363 for revision in expanded_revisions: | |
| 1364 split_revision = revision.split('@') | |
| 1365 if len(split_revision) == 1: | |
| 1366 # This is just a plain revision, set it as the revision for root. | |
| 1367 results[root] = split_revision[0] | |
| 1368 elif len(split_revision) == 2: | |
| 1369 # This is an alt_root@revision argument. | |
| 1370 current_root, current_rev = split_revision | |
| 1371 | |
| 1372 # We want to normalize svn/git urls into .git urls. | |
| 1373 parsed_root = urlparse.urlparse(current_root) | |
| 1374 if parsed_root.scheme == 'svn': | |
| 1375 if parsed_root.path in RECOGNIZED_PATHS: | |
| 1376 normalized_root = RECOGNIZED_PATHS[parsed_root.path] | |
| 1377 else: | |
| 1378 print 'WARNING: SVN path %s not recognized, ignoring' % current_root | |
| 1379 continue | |
| 1380 elif parsed_root.scheme in ['http', 'https']: | |
| 1381 normalized_root = 'https://%s/%s' % (parsed_root.netloc, | |
| 1382 parsed_root.path) | |
| 1383 if not normalized_root.endswith('.git'): | |
| 1384 normalized_root = '%s.git' % normalized_root | |
| 1385 elif parsed_root.scheme: | |
| 1386 print 'WARNING: Unrecognized scheme %s, ignoring' % parsed_root.scheme | |
| 1387 continue | |
| 1388 else: | |
| 1389 # This is probably a local path. | |
| 1390 normalized_root = current_root.strip('/') | |
| 1391 | |
| 1392 results[normalized_root] = current_rev | |
| 1393 else: | |
| 1394 print ('WARNING: %r is not recognized as a valid revision specification,' | |
| 1395 'skipping' % revision) | |
| 1396 return results | |
| 1397 | |
| 1398 | |
def parse_args():
  """Parse command-line options.

  Returns:
    (options, args) from optparse, with options.revision_mapping parsed
    from JSON and --with_branch_heads folded into options.refs.
  """
  parse = optparse.OptionParser()

  parse.add_option('--issue', help='Issue number to patch from.')
  parse.add_option('--patchset',
                   help='Patchset from issue to patch from, if applicable.')
  parse.add_option('--apply_issue_email_file',
                   help='--email-file option passthrough for apply_patch.py.')
  parse.add_option('--apply_issue_key_file',
                   help='--private-key-file option passthrough for '
                        'apply_patch.py.')
  parse.add_option('--patch_url', help='Optional URL to SVN patch.')
  parse.add_option('--root', dest='patch_root',
                   help='DEPRECATED: Use --patch_root.')
  parse.add_option('--patch_root', help='Directory to patch on top of.')
  parse.add_option('--rietveld_server',
                   default='codereview.chromium.org',
                   help='Rietveld server.')
  # Typo fix: was 'Gcilent spec.'
  parse.add_option('--specs', help='Gclient spec.')
  parse.add_option('--master', help='Master name.')
  parse.add_option('-f', '--force', action='store_true',
                   help='Bypass check to see if we want to be run. '
                        'Should ONLY be used locally or by smart recipes.')
  parse.add_option('--revision_mapping',
                   help='{"path/to/repo/": "property_name"}')
  parse.add_option('--revision_mapping_file',
                   help=('Same as revision_mapping, except its a path to a json'
                         ' file containing that format.'))
  parse.add_option('--revision', action='append', default=[],
                   help='Revision to check out. Can be an SVN revision number, '
                        'git hash, or any form of git ref. Can prepend '
                        'root@<rev> to specify which repository, where root '
                        'is either a filesystem path, git https url, or '
                        # Typo fix: added the missing space after 'HEAD.'
                        'svn url. To specify Tip of Tree, set rev to HEAD. '
                        'To specify a git branch and an SVN rev, <rev> can be '
                        'set to <branch>:<revision>.')
  parse.add_option('--output_manifest', action='store_true',
                   help=('Add manifest json to the json output.'))
  parse.add_option('--slave_name', default=socket.getfqdn().split('.')[0],
                   help='Hostname of the current machine, '
                        'used for determining whether or not to activate.')
  parse.add_option('--builder_name', help='Name of the builder, '
                   'used for determining whether or not to activate.')
  parse.add_option('--build_dir', default=os.getcwd())
  parse.add_option('--flag_file', default=path.join(os.getcwd(),
                                                    'update.flag'))
  parse.add_option('--shallow', action='store_true',
                   help='Use shallow clones for cache repositories.')
  parse.add_option('--gyp_env', action='append', default=[],
                   help='Environment variables to pass into gclient runhooks.')
  parse.add_option('--clobber', action='store_true',
                   help='Delete checkout first, always')
  parse.add_option('--bot_update_clobber', action='store_true', dest='clobber',
                   help='(synonym for --clobber)')
  parse.add_option('-o', '--output_json',
                   help='Output JSON information into a specified file')
  parse.add_option('--no_shallow', action='store_true',
                   help='Bypass disk detection and never shallow clone. '
                        'Does not override the --shallow flag')
  parse.add_option('--no_runhooks', action='store_true',
                   help='Do not run hooks on official builder.')
  parse.add_option('--refs', action='append',
                   help='Also fetch this refspec for the main solution(s). '
                        'Eg. +refs/branch-heads/*')
  parse.add_option('--with_branch_heads', action='store_true',
                   help='Always pass --with_branch_heads to gclient. This '
                        'does the same thing as --refs +refs/branch-heads/*')

  options, args = parse.parse_args()

  if not options.refs:
    options.refs = []

  # Fold --with_branch_heads into the generic refs list.
  if options.with_branch_heads:
    options.refs.append(BRANCH_HEADS_REFSPEC)
    del options.with_branch_heads

  try:
    if options.revision_mapping_file:
      if options.revision_mapping:
        print ('WARNING: Ignoring --revision_mapping: --revision_mapping_file '
               'was set at the same time as --revision_mapping?')
      with open(options.revision_mapping_file, 'r') as f:
        options.revision_mapping = json.load(f)
    elif options.revision_mapping:
      options.revision_mapping = json.loads(options.revision_mapping)
  except Exception as e:
    # Typo fix: was 'execption'.
    print (
        'WARNING: Caught exception while parsing revision_mapping*: %s'
        % (str(e),)
    )

  return options, args
| 1493 | |
| 1494 | |
def prepare(options, git_slns, active):
  """Prepares the target folder before we checkout.

  Wipes checkouts when the active/inactive state flipped since last run
  (tracked via the flag file) or on --clobber, decides whether to use
  shallow clones based on disk size, and parses the revision specs.

  Returns (revisions_dict, step_text).

  Raises:
    Inactive: when this slave/builder is not activated for bot_update.
  """
  dir_names = [sln.get('name') for sln in git_slns if 'name' in sln]
  # If we're active now, but the flag file doesn't exist (we weren't active
  # last run) or vice versa, blow away all checkouts.
  if bool(active) != bool(check_flag(options.flag_file)):
    ensure_no_checkout(dir_names, '*')
  if options.output_json:
    # Make sure we tell recipes that we didn't run if the script exits here.
    emit_json(options.output_json, did_run=active)
  if active:
    if options.clobber:
      ensure_no_checkout(dir_names, '*')
    else:
      # Only scrub stale .svn metadata when not clobbering.
      ensure_no_checkout(dir_names, '.svn')
    emit_flag(options.flag_file)
  else:
    delete_flag(options.flag_file)
    raise Inactive  # This is caught in main() and we exit cleanly.

  # Do a shallow checkout if the disk is less than 100GB.
  total_disk_space, free_disk_space = get_total_disk_space()
  total_disk_space_gb = int(total_disk_space / (1024 * 1024 * 1024))
  used_disk_space_gb = int((total_disk_space - free_disk_space)
                           / (1024 * 1024 * 1024))
  percent_used = int(used_disk_space_gb * 100 / total_disk_space_gb)
  step_text = '[%dGB/%dGB used (%d%%)]' % (used_disk_space_gb,
                                           total_disk_space_gb,
                                           percent_used)
  # NOTE(review): the STEP_TEXT annotation is only printed when NOT
  # emitting JSON output -- presumably annotations are unwanted in
  # recipe (JSON) mode; confirm intent.
  if not options.output_json:
    print '@@@STEP_TEXT@%s@@@' % step_text
  if not options.shallow:
    options.shallow = (total_disk_space < SHALLOW_CLONE_THRESHOLD
                       and not options.no_shallow)

  # The first solution is where the primary DEPS file resides.
  first_sln = dir_names[0]

  # Split all the revision specifications into a nice dict.
  print 'Revisions: %s' % options.revision
  revisions = parse_revisions(options.revision, first_sln)
  print 'Fetching Git checkout at %s@%s' % (first_sln, revisions[first_sln])
  return revisions, step_text
| 1538 | |
| 1539 | |
def checkout(options, git_slns, specs, buildspec, master,
             svn_root, revisions, step_text):
  """Run the gclient checkout, apply any patch, and report got_revisions.

  Retries the sync once from a clean slate if gclient fails.  On patch
  failure, reports the failure (to --output-json or via annotator lines)
  and re-raises PatchFailed so main() can exit with the dedicated
  patch-failure code.  On success, emits the got_revision properties
  either to the JSON output file or as annotator property lines.
  """
  first_sln = git_slns[0]['name']
  dir_names = [sln.get('name') for sln in git_slns if 'name' in sln]
  try:
    # Outer try is for catching patch failures and exiting gracefully.
    # Inner try is for catching gclient failures and retrying gracefully.
    try:
      checkout_parameters = dict(
          # First, pass in the base of what we want to check out.
          solutions=git_slns,
          revisions=revisions,
          first_sln=first_sln,

          # Also, target os variables for gclient.
          target_os=specs.get('target_os', []),
          target_os_only=specs.get('target_os_only', False),

          # Then, pass in information about how to patch.
          patch_root=options.patch_root,
          issue=options.issue,
          patchset=options.patchset,
          patch_url=options.patch_url,
          rietveld_server=options.rietveld_server,
          revision_mapping=options.revision_mapping,
          apply_issue_email_file=options.apply_issue_email_file,
          apply_issue_key_file=options.apply_issue_key_file,

          # For official builders.
          buildspec=buildspec,
          gyp_env=options.gyp_env,
          runhooks=not options.no_runhooks,

          # Finally, extra configurations such as shallowness of the clone.
          shallow=options.shallow,
          refs=options.refs)
      gclient_output = ensure_checkout(**checkout_parameters)
    except GclientSyncFailed:
      # Retry once with the same parameters after nuking every checkout;
      # a second GclientSyncFailed propagates to the caller.
      print 'We failed gclient sync, lets delete the checkout and retry.'
      ensure_no_checkout(dir_names, '*')
      gclient_output = ensure_checkout(**checkout_parameters)
  except PatchFailed as e:
    if options.output_json:
      # Tell recipes information such as root, got_revision, etc.
      emit_json(options.output_json,
                did_run=True,
                root=first_sln,
                log_lines=[('patch error', e.output),],
                patch_root=options.patch_root,
                patch_failure=True,
                step_text='%s PATCH FAILED' % step_text)
    else:
      # If we're not on recipes, tell annotator about our got_revisions.
      emit_log_lines('patch error', e.output)
      print '@@@STEP_TEXT@%s PATCH FAILED@@@' % step_text
    raise

  # Revision is an svn revision, unless it's a git master.
  use_svn_rev = master not in GIT_MASTERS

  # Take care of got_revisions outputs.
  revision_mapping = dict(GOT_REVISION_MAPPINGS.get(svn_root, {}))
  if options.revision_mapping:
    revision_mapping.update(options.revision_mapping)

  # If the repo is not in the default GOT_REVISION_MAPPINGS and no
  # revision_mapping were specified on the command line then
  # default to setting 'got_revision' based on the first solution.
  if not revision_mapping:
    revision_mapping[first_sln] = 'got_revision'

  got_revisions = parse_got_revision(gclient_output, revision_mapping,
                                     use_svn_rev)

  if not got_revisions:
    # TODO(hinoka): We should probably bail out here, but in the interest
    # of giving mis-configured bots some time to get fixed use a dummy
    # revision here.
    got_revisions = { 'got_revision': 'BOT_UPDATE_NO_REV_FOUND' }
    #raise Exception('No got_revision(s) found in gclient output')

  if options.output_json:
    manifest = create_manifest() if options.output_manifest else None
    # Tell recipes information such as root, got_revision, etc.
    emit_json(options.output_json,
              did_run=True,
              root=first_sln,
              patch_root=options.patch_root,
              step_text=step_text,
              properties=got_revisions,
              manifest=manifest)
  else:
    # If we're not on recipes, tell annotator about our got_revisions.
    emit_properties(got_revisions)
| 1634 | |
| 1635 | |
| 1636 def print_help_text(force, output_json, active, master, builder, slave): | |
| 1637 """Print helpful messages to tell devs whats going on.""" | |
| 1638 if force and output_json: | |
| 1639 recipe_force = 'Forced on by recipes' | |
| 1640 elif active and output_json: | |
| 1641 recipe_force = 'Off by recipes, but forced on by bot update' | |
| 1642 elif not active and output_json: | |
| 1643 recipe_force = 'Forced off by recipes' | |
| 1644 else: | |
| 1645 recipe_force = 'N/A. Was not called by recipes' | |
| 1646 | |
| 1647 print BOT_UPDATE_MESSAGE % { | |
| 1648 'master': master or 'Not specified', | |
| 1649 'builder': builder or 'Not specified', | |
| 1650 'slave': slave or 'Not specified', | |
| 1651 'recipe': recipe_force, | |
| 1652 }, | |
| 1653 print ACTIVATED_MESSAGE if active else NOT_ACTIVATED_MESSAGE | |
| 1654 | |
| 1655 | |
def main():
  # Parse command-line options; positional args are unused here.
  options, _ = parse_args()
  builder = options.builder_name
  slave = options.slave_name
  master = options.master

  # Check if this script should activate or not: either the
  # master/builder/slave triple passes check_valid_host, or --force is set.
  active = check_valid_host(master, builder, slave) or options.force or False

  options.revision = maybe_ignore_revision(master, builder, options.revision)

  # Print a helpful message to tell developers what's going on with this step.
  print_help_text(
      options.force, options.output_json, active, master, builder, slave)

  # Parse, manipulate, and print the gclient solutions.
  # NOTE(review): exec of --specs runs arbitrary Python; presumably the specs
  # come from trusted master configuration -- never feed untrusted input here.
  specs = {}
  exec(options.specs, specs)
  svn_solutions = specs.get('solutions', [])
  git_slns, svn_root, buildspec = solutions_to_git(svn_solutions)
  solutions_printer(git_slns)

  try:
    # Dun dun dun, the main part of bot_update.
    revisions, step_text = prepare(options, git_slns, active)
    checkout(options, git_slns, specs, buildspec, master, svn_root, revisions,
             step_text)

  except Inactive:
    # Not active, should count as passing.
    pass
  except PatchFailed:
    emit_flag(options.flag_file)
    # Return a specific non-zero exit code for patch failure (because it is
    # a failure), but make it different than other failures to distinguish
    # between infra failures (independent from patch author), and patch
    # failures (that patch author can fix).
    return 88
  except Exception:
    # Unexpected failure: still record that we ran, then re-raise so the
    # step goes purple (infra failure) rather than reporting a fake success.
    emit_flag(options.flag_file)
    raise
  else:
    emit_flag(options.flag_file)
| 1701 | |
| 1702 | 12 |
if __name__ == '__main__':
  # Exit with main()'s return code (88 on patch failure, None/0 otherwise).
  sys.exit(main())
| OLD | NEW |