# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
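"""Tool for updating ChromeOS Chromite pins.

Provides an 'update' subcommand (bump a named pin) and an 'add-release'
subcommand (pin a new release branch and retire the oldest stable pins).
"""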

import contextlib
import logging
import os
import re

from infra.tools.cros_pin import checkout, execute, pinfile
from infra.tools.cros_pin.logger import LOGGER

# The path of the Chromite repository.
CHROMITE_REPOSITORY = 'https://chromium.googlesource.com/chromiumos/chromite'

# The number of stable release branches to build in addition to the beta
# branch.
DEFAULT_STABLE_COUNT = 2

# Regular expression to match release branch names.
RELEASE_RE = re.compile(r'release-R(\d+)-.*')


def add_argparse_options(parser):
  parser.add_argument('-d', '--dry-run',
      action='store_true',
      help="Stop short of submitting the CLs.")
  parser.add_argument('-n', '--no-verify',
      action='store_true',
      help="Don't check that the specified pin exists.")
  parser.add_argument('-C', '--checkout-path', metavar='PATH',
      help="If specified, the checkout at PATH will be used instead of a "
           "temporary one. If PATH does not exist, it will be created, and "
           "the checkout will not be cleaned up. This is intended for "
           "debugging.")
  parser.add_argument('--chromite-repository', default=CHROMITE_REPOSITORY,
      help="The Chromite repository to query (default is %(default)s).")
  parser.add_argument('-b', '--bug',
      help="Cite this BUG when creating CLs.")
  parser.add_argument('-r', '--reviewer',
      action='append', default=[],
      help="Add this reviewer to the uploaded CL. If no reviewers are "
           "specified, someone from the OWNERS file will be chosen.")
  parser.add_argument('-m', '--commit-message',
      help="Use this commit message instead of an auto-generated one.")
  parser.add_argument('-c', '--commit',
      action='store_true',
      help="Automatically mark generated CLs for commit queue.")

  subparsers = parser.add_subparsers(help='CrOS Pin Subcommands')

  # Subcommand: update
  subp = subparsers.add_parser('update',
      help=subcommand_update.__doc__)
  subp.add_argument('-t', '--target',
      choices=["existing", "external", "internal", "both"], default='existing',
      help="Specifies which pin repositories to update. 'existing' (default) "
           "updates all existing named pins. 'external', 'internal', and "
           "'both' indicate that the pin should be updated in the external "
           "and/or internal pin repositories, and should be added if not "
           "currently present. Use these with caution!")
  subp.add_argument('name',
      help="The name of the pin to update.")
  subp.add_argument('version', nargs='?',
      help="The new commit hash for the pin. If empty, probe for tip-of-tree "
           "of the branch sharing the pin's name.")
  subp.set_defaults(func=subcommand_update)

  # Subcommand: add-release
  subp = subparsers.add_parser('add-release',
      help=subcommand_add_release.__doc__)
  subp.add_argument('--stable-count', metavar='COUNT',
      type=int, default=DEFAULT_STABLE_COUNT,
      help="Specifies the number of stable branches to preserve (default is "
           "%(default)s). The youngest COUNT release branch pins beyond the "
           "newest will be preserved as stable branches, and any additional "
           "release branches will be removed from the pins.")
  subp.add_argument('branch',
      help='The name of the release branch. Must begin with "release-R#".')
  subp.add_argument('version', nargs='?',
      help="The commit hash for the branch. If empty, use the branch's "
           "tip-of-tree commit.")
  subp.set_defaults(func=subcommand_add_release)

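# Example invocations (illustrative only: the pin/branch names below are
# placeholders, and the exact entry-point name depends on how this tool is
# wired into the infra launcher):
#
#   cros_pin update <pin-name>
#   cros_pin -c update -t both <pin-name> <commit-hash>
#   cros_pin add-release release-R<NN>-<suffix>
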
# Review note (stip, 2015/10/15): why not just make Checkout() a context
# manager itself?
# Reply (dnj, 2015/10/15): I want to have code initialize it, and I don't
# like [...]
@contextlib.contextmanager
def checkout_for_args(args):
  """A contextmanager that supplies the Checkout configured in args.

  The Checkout's teardown() method will be invoked on cleanup.

  Args:
    args (argparse.Namespace): Parsed command-line options.
  """
  c = None
  try:
    c = checkout.Checkout.create(
        path=args.checkout_path)
    LOGGER.debug('Using checkout at: %s', c.path)
    yield c
  finally:
    if c:
      c.teardown()


def pinfile_editor_from_args(args, c):
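  """Returns a pinfile.Editor bound to checkout `c`, honoring --no-verify."""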
  return pinfile.Editor(
      c,
      chromite_repo=args.chromite_repository,
      validate=not args.no_verify)


def logging_verbosity():
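  """Returns a list of '-v' flags mirroring LOGGER's current verbosity.

  One '-v' is emitted when the logger is at INFO or below, and a second when
  it is at DEBUG, so helper scripts run at a comparable verbosity.
  """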
  count = 0
  if LOGGER.level <= logging.INFO:
    count += 1
  if LOGGER.level <= logging.DEBUG:
    count += 1
  return ['-v'] * count


def get_release_version(v):
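  """Returns the milestone number parsed from a release branch name, or None.

  For example, a branch named 'release-R46-<suffix>' (suffix illustrative)
  yields 46; names that do not match RELEASE_RE yield None.
  """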
  m = RELEASE_RE.match(v)
  if not m:
    return None
  return int(m.group(1))


def subcommand_update(args):
  """Update a single Chromite pin."""
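  # Pins are only created (rather than just updated) when the user explicitly
  # targets a pin repository; the default 'existing' target never adds pins.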
  create = (args.target != 'existing')
  target_pins = []
  if args.target in ('external', 'both', 'existing'):
    target_pins.append(pinfile.EXTERNAL)
  if args.target in ('internal', 'both', 'existing'):
    target_pins.append(pinfile.INTERNAL)

  with checkout_for_args(args) as c:
    pfe = pinfile_editor_from_args(args, c)
    tracker = UpdateTracker.from_args(args, c)

    for pin in target_pins:
      LOGGER.debug('Updating target pin [%s]', pin)

      # Update this pin in the loaded pin file.
      pf = pfe.load(pin)
      update = pf.update(args.name, version=args.version, create=create)
      if not update:
        LOGGER.debug('Did not update pins for [%s]', pin)
        continue
      tracker.add(pin, update)
      LOGGER.debug('Updated pin set: %s', update)

    if not tracker:
      LOGGER.error('No pins were updated.')
      return 1

    # Regenerate slave pools for affected masters.
    tracker.update()
    for i in tracker.issues:
      LOGGER.warning('Created Issue: %s', i)
    return 0


def subcommand_add_release(args):
  """Add a new release branch to the list of pins."""
  with checkout_for_args(args) as c:
    pfe = pinfile_editor_from_args(args, c)
    tracker = UpdateTracker.from_args(args, c)

    add_release = (get_release_version(args.branch), args.branch)
    if add_release[0] is None:
      raise ValueError("Invalid release branch: [%s]" % (args.branch,))

    # Build a list of releases and their versions.
    pf = pfe.load(pinfile.INTERNAL)
    releases = [add_release]
    for name, _ in pf.iterpins():
      v = get_release_version(name)
      if v == add_release[0]:
        LOGGER.error('Release [%s] (%d) is already pinned.',
                     add_release[1], add_release[0])
        return 1

      if v is not None:
        releases.append((v, name))
    releases.sort(reverse=True)

    # Keep only the newest (stable_count + 1) release pins: the branch being
    # added plus stable_count stable branches. Anything older is dropped.
    count = args.stable_count + 1
    releases, deleted = releases[:count], releases[count:]
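    # Illustrative example (branch numbers are hypothetical): with
    # --stable-count=2, adding release-R48-* while R47, R46, and R45 are
    # pinned keeps R48 plus R47/R46 and unpins R45.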
    if add_release not in releases:
      raise ValueError("Updated releases do not include added (%s):\n%s" % (
          add_release[1], '\n'.join(r[1] for r in releases)))

    # Set the new releases.
    tracker.add(pinfile.INTERNAL, pf.update(add_release[1], create=True))
    for _, r in deleted:
      tracker.add(pinfile.INTERNAL, pf.remove(r))

    if not tracker:
      LOGGER.error('No pins were updated.')
      return 1

    # Regenerate slave pools for affected masters.
    tracker.update()
    LOGGER.warning('Created issues:\n%s', '\n'.join(tracker.issues))
    return 0


class SlavePoolUpdateError(Exception):
  pass


class UpdateTracker(object):
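  """Collects pin updates and turns them into slave pool edits and CLs.

  Updates are registered via add(); update() then regenerates the slave pool
  for every affected master and uploads one CL per modified pin repository,
  recording any created issue URLs in `issues`.
  """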

  RUNIT_PY = ('build', 'scripts', 'tools', 'runit.py')
  SLAVE_ALLOC_UPDATE = ('build', 'scripts', 'tools', 'slave_alloc_update.py')

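  # Matches the issue URL line printed by `git cl upload`.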
  RE_ISSUE_CREATED = re.compile(r'^Issue created. URL: (.+)$')

  def __init__(self, c, cq=False, bug=None, reviewers=None, dry_run=True):
    self._c = c
    self._cq = cq
    self._bug = bug
    self._reviewers = reviewers
    self._dry_run = dry_run

    self._updated = {}
    self._issues = set()

  @classmethod
  def from_args(cls, args, c):
    return cls(
        c,
        cq=args.commit,
        bug=args.bug,
        reviewers=args.reviewer,
        dry_run=args.dry_run)

  def __nonzero__(self):
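    """True if any pin updates have been recorded (Python 2 bool hook)."""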
    return bool(self._updated)

  @property
  def issues(self):
    return sorted(self._issues)

  def add(self, pin, update):
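    """Records the (from, to) transition of pin `update.name` for `pin`."""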
    self._updated.setdefault(pin, {})[update.name] = (update.fr, update.to)

  def update(self):
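    """Applies recorded updates: refreshes slave pools, then uploads CLs.

    Raises:
      SlavePoolUpdateError: if any affected master's slave pool could not be
          regenerated.
    """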
    LOGGER.info('Updating repositories: %s', self._updated)
    affected_masters = set()
    for pin in self._updated.iterkeys():
      affected_masters.update(pin.masters)

    failed_slave_pool_masters = []
    for m in sorted(affected_masters):
      try:
        self._regenerate_slave_pool(m)
      except SlavePoolUpdateError:
        failed_slave_pool_masters.append(m)
    if failed_slave_pool_masters:
      LOGGER.error('Failed to update slave pools for %s. You may need to '
                   'add additional slaves to the pool(s).',
                   failed_slave_pool_masters)
      raise SlavePoolUpdateError("Failed to update slave pools.")

    # Upload CLs for the affected repositories.
    for pin, updates in self._updated.iteritems():
      self._upload_patch(
          self._c.subpath(*pin.base),
          self._generate_commit_message(updates))

  def _regenerate_slave_pool(self, master):
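    """Runs slave_alloc_update.py (through runit.py) for `master`.

    Raises:
      SlavePoolUpdateError: if the script exits with a non-zero return code.
    """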
    LOGGER.debug('Regenerating slave pool for: %s', master)
    cmd = [
        os.path.join(*self.RUNIT_PY),
        os.path.join(*self.SLAVE_ALLOC_UPDATE),
    ]
    cmd += logging_verbosity()
    cmd.append(master)

    rv, stdout = execute.call(cmd, cwd=self._c.path)
    if rv != 0:
      LOGGER.error('Failed to update slaves for master [%s] (%d):\n%s',
                   master, rv, stdout)
      raise SlavePoolUpdateError()

  def _upload_patch(self, repo_path, commit_msg):
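    """Commits outstanding changes in `repo_path` and uploads them as a CL.

    No-op if the repository has no local modifications. The issue URL parsed
    from the upload output, if any, is added to `self._issues`.
    """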
    # Check if the Git repository actually has changes.
    diff_args = ['git', 'diff', '--no-ext-diff', '--exit-code']
    if not LOGGER.isEnabledFor(logging.DEBUG):
      diff_args.append('--quiet')
    rv, diff = execute.call(diff_args, cwd=repo_path)
    LOGGER.debug('Diff for [%s]:\n%s', repo_path, diff)
    if rv == 0:
      LOGGER.warning('No changes in repository; refusing to commit.')
      return

    LOGGER.debug('Creating commit in [%s] with message:\n%s',
                 repo_path, commit_msg)
    execute.check_call(
        ['git', 'checkout', '-b', '_cros_pin', '--track'],
        cwd=repo_path)
    execute.check_call(
        ['git', 'commit', '--all', '--message', commit_msg],
        cwd=repo_path)

    LOGGER.debug('Uploading CL!')
    args = [
        'git', 'cl', 'upload',
        '--bypass-hooks',  # The CQ will take care of them!
        '-t', commit_msg,
        '-m', 'Auto-generated by `%s`' % (__name__,),
        '-f',
    ]
    if self._cq:
      args.append('--use-commit-queue')
    if not self._reviewers:
      args.append('--tbr-owners')

    output = execute.check_call(args, cwd=repo_path, dry_run=self._dry_run)
    issue = None
    for line in output.splitlines():
      match = self.RE_ISSUE_CREATED.match(line)
      if match:
        issue = match.group(1)
        LOGGER.debug('Extracted issue from output: %s', issue)
        self._issues.add(issue)
        break
    else:
      LOGGER.warning("Unable to extract issue from patch submission.")

  def _generate_commit_message(self, updates):
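    """Renders a commit message describing each pin addition, removal, or
    version change, plus optional BUG= and TBR= lines."""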
    lines = [
        'CrOS: Update Chromite pin.',
        '',
        'Update ChromeOS Chromite pins.',
    ]
    for name, update in updates.iteritems():
      if not update:
        continue

      fr, to = update
      lines.append('- [%s]' % (name,))
      if fr:
        if to:
          # Update from one commit to another.
          lines.extend([
              '  %s =>' % (fr,),
              '  %s' % (to,),
          ])
        else:
          # Pin was deleted.
          lines.append('  - Deleted (was %s)' % (fr,))
      elif to:
        # Pin was added.
        lines.append('  - Added => %s' % (to,))
      lines.append('')

    if self._bug:
      lines.append('BUG=%s' % (self._bug,))
    if self._reviewers:
      lines.append('TBR=%s' % (', '.join(self._reviewers)))
    return '\n'.join(lines)