# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import logging
import os
import re
import tempfile

from infra.tools.cros_pin import checkout, execute, pinfile
from infra.tools.cros_pin.logger import LOGGER

# The path of the Chromite repository.
CHROMITE_REPOSITORY = 'https://chromium.googlesource.com/chromiumos/chromite'

# The number of stable release branches to build in addition to the beta
# branch.
DEFAULT_STABLE_COUNT = 2

# Regular expression to match release branch names.
RELEASE_RE = re.compile(r'release-R(\d+)-.*')
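# For example, a branch named 'release-R46-7390.B' (an illustrative name)
# matches and yields release number 46 via get_release_version() below.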


def add_argparse_options(parser):
  parser.add_argument('-d', '--dry-run',
      action='store_true',
      help="Stop short of submitting the CLs.")
  parser.add_argument('-n', '--no-verify',
      action='store_true',
      help="Don't check that the specified pin exists.")
  parser.add_argument('-C', '--checkout-path', metavar='PATH',
      help="If specified, the checkout at PATH will be used instead of a "
           "temporary one. If PATH does not exist, it will be created, and "
           "the checkout will not be cleaned up. This is intended for "
           "debugging.")
  parser.add_argument('--chromite-repository', default=CHROMITE_REPOSITORY,
      # Review note (hinoka, 2015/10/21 19:49:40): What other ones can we
      # query?
      # Reply (dnj, 2015/10/22 21:48:41): Some other CrOS-derivative projects
      # are using CrOS
      help="The Chromite repository to query (default is %(default)s).")
  parser.add_argument('-b', '--bug',
      help="Cite this BUG when creating CLs.")
  parser.add_argument('-r', '--reviewer',
      action='append', default=[],
      help="Add this reviewer to the uploaded CL. If no reviewers are "
           "specified, someone from the OWNERS file will be chosen.")
  parser.add_argument('-m', '--commit-message',
      help="Use this commit message instead of an auto-generated one.")
  parser.add_argument('-c', '--commit',
      # Review note (hinoka, 2015/10/21 19:49:40): What about doing this the
      # other way? have a --no-c
      # Reply (dnj, 2015/10/22 21:48:40): Done.
      action='store_true',
      help="Automatically mark generated CLs for commit queue.")

  subparsers = parser.add_subparsers(help='CrOS Pin Subcommands')

  # Subcommand: update
  subp = subparsers.add_parser('update',
      help=subcommand_update.__doc__)
  subp.add_argument('-t', '--target',
      choices=["existing", "external", "internal", "both"], default='existing',
      help="Specifies which pin repositories to update. 'existing' (default) "
           "updates all existing named pins. 'external', 'internal', and "
           "'both' indicate that the pin should be updated in the external "
           "and/or internal pin repositories, and should be added if not "
           "currently present. Use these with caution!")
  subp.add_argument('name',
      help="The name of the pin to update.")
  subp.add_argument('version', nargs='?',
      help="The new commit hash for the pin. If empty, probe for tip-of-tree "
           "of the branch sharing the pin's name.")
  subp.set_defaults(func=subcommand_update)

  # Subcommand: add-release
  subp = subparsers.add_parser('add-release',
      # Review note (hinoka, 2015/10/21 19:49:40): When does one use this
      # tool?
      # Reply (dnj, 2015/10/22 21:48:41): Adding a new CrOS release branch to
      # the CrOS relea
      help=subcommand_add_release.__doc__)
  subp.add_argument('--stable-count', metavar='COUNT',
      # Review note (hinoka, 2015/10/21 19:49:40): Under what circumstances
      # does this need to change?
      # Reply (dnj, 2015/10/22 21:48:40): It's up to the TPMs, but generally
      # it's only chang
      type=int, default=DEFAULT_STABLE_COUNT,
      help="Specifies the number of stable branches to preserve (default is "
           "%(default)s). The youngest COUNT release branch pins beyond the "
           "newest will be preserved as stable branches, and any additional "
           "release branches will be removed from the pins.")
  subp.add_argument('branch',
      help='The name of the release branch. Must begin with "release-R#".')
  subp.add_argument('version', nargs='?',
      help="The commit hash for the branch. If empty, use the branch's "
           # Review note (hinoka, 2015/10/21 19:49:40): Should this ever not
           # be ToT?
           # Reply (dnj, 2015/10/22 21:48:41): Up to the TPMs, but probably
           # will be ToT.
           "tip-of-tree commit.")
  subp.set_defaults(func=subcommand_add_release)

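# Typical wiring for the options above (hypothetical sketch; the actual entry
# point lives elsewhere in infra.tools.cros_pin):
#
#   parser = argparse.ArgumentParser()
#   add_argparse_options(parser)
#   args = parser.parse_args()
#   sys.exit(args.func(args))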

def checkout_for_args(args):
  """A context manager that supplies the Checkout configured in args.

  The Checkout's teardown() method will be invoked on cleanup.

  Args:
    args (argparse.Namespace): Parsed option list.
  """
  return checkout.Checkout.use(
      path=args.checkout_path)

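# Builds a pinfile.Editor bound to the checkout; with --no-verify, the editor
# skips validating that the pinned commits actually exist in Chromite.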
def pinfile_editor_from_args(args, c):
  return pinfile.Editor(
      c,
      chromite_repo=args.chromite_repository,
      validate=not args.no_verify)


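# Mirrors this tool's own log level as repeated '-v' flags for the child
# scripts invoked below (e.g. slave_alloc_update.py).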
def logging_verbosity():
  count = 0
  if LOGGER.level <= logging.INFO:
    count += 1
  if LOGGER.level <= logging.DEBUG:
    count += 1
  return ['-v'] * count


def get_release_version(v):
  m = RELEASE_RE.match(v)
  if not m:
    return None
  return int(m.group(1))


def subcommand_update(args):
  """Update a single Chromite pin."""
  create = (args.target != 'existing')
  target_pins = []
  if args.target in ('external', 'both', 'existing'):
    target_pins.append(pinfile.EXTERNAL)
  if args.target in ('internal', 'both', 'existing'):
    target_pins.append(pinfile.INTERNAL)
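  # For example, '--target=both' selects both pin files and creates missing
  # pins, while the default 'existing' also selects both files but only
  # touches pins that already exist (create=False).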

  with checkout_for_args(args) as c:
    pfe = pinfile_editor_from_args(args, c)
    tracker = UpdateTracker.from_args(args, c)

    for pin in target_pins:
      LOGGER.debug('Updating target pin [%s]', pin)

      # Update the pin file.
      pf = pfe.load(pin)
      update = pf.update(args.name, version=args.version, create=create)
      if not update:
        LOGGER.debug('Did not update pins for [%s]', pin)
        continue
      tracker.add(pin, update)

      LOGGER.debug('Updated pin set: %s', update)
    if not tracker:
      LOGGER.error('No pins were updated.')
      return 1

    # Regenerate slave pools for affected masters.
    tracker.update()
    for i in tracker.issues:
      LOGGER.warning('Created Issue: %s', i)
    return 0


def subcommand_add_release(args):
  """Add a new release branch to the list of pins."""
  with checkout_for_args(args) as c:
    pfe = pinfile_editor_from_args(args, c)
    tracker = UpdateTracker.from_args(args, c)

    add_release = (get_release_version(args.branch), args.branch)
    if add_release[0] is None:
      raise ValueError("Invalid release branch: [%s]" % (args.branch,))

    # Build a list of releases and their versions.
    pf = pfe.load(pinfile.INTERNAL)
    releases = [add_release]
    for name, _ in pf.iterpins():
      v = get_release_version(name)
      if v == add_release[0]:
        LOGGER.error('Release [%s] (%d) is already pinned.',
                     add_release[1], add_release[0])
        return 1

      if v is not None:
        releases.append((v, name))
    releases.sort(reverse=True)

    # Shave off the top [stable_count+1] releases.
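    # For example, with --stable-count=2 and existing pins for R44, R45, and
    # R46, adding R47 keeps [R47, R46, R45] and drops R44.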
    count = args.stable_count + 1
    releases, deleted = releases[:count], releases[count:]
    if add_release not in releases:
      raise ValueError("Updated releases do not include added (%s):\n%s" % (
          add_release[1], '\n'.join(r[1] for r in releases)))

    # Set the new releases.
    tracker.add(pinfile.INTERNAL,
                pf.update(add_release[1], version=args.version, create=True))
    for _, r in deleted:
      tracker.add(pinfile.INTERNAL, pf.remove(r))

    if not tracker:
      LOGGER.error('No pins were updated.')
      return 1

    # Regenerate slave pools for affected masters.
    tracker.update()
    LOGGER.warning('Created issues:\n%s', '\n'.join(tracker.issues))
    return 0


class SlavePoolUpdateError(Exception):
  pass


class UpdateTracker(object):
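  """Collects pin updates and turns them into review CLs.

  Each recorded update maps a pin file to {pin name: (from, to)} revisions.
  update() regenerates slave pools for the affected masters, uploads one CL
  per affected repository, and records the URLs of any created issues.
  """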

  RUNIT_PY = ('build', 'scripts', 'tools', 'runit.py')
  SLAVE_ALLOC_UPDATE = ('build', 'scripts', 'tools', 'slave_alloc_update.py')

  RE_ISSUE_CREATED = re.compile(r'^Issue created. URL: (.+)$')

  def __init__(self, c, cq=False, bug=None, reviewers=None, dry_run=True):
    self._c = c
    self._cq = cq
    self._bug = bug
    self._reviewers = reviewers
    self._dry_run = dry_run

    self._updated = {}
    self._issues = set()

  @classmethod
  def from_args(cls, args, c):
    return cls(
        c,
        cq=args.commit,
        bug=args.bug,
        reviewers=args.reviewer,
        dry_run=args.dry_run)

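  # Python 2 truth protocol: the tracker is truthy once at least one pin
  # update has been recorded via add().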
  def __nonzero__(self):
    return bool(self._updated)

  @property
  def issues(self):
    return sorted(self._issues)

  def add(self, pin, update):
    self._updated.setdefault(pin, {})[update.name] = (update.fr, update.to)

  def update(self):
    LOGGER.info('Updating repositories: %s', self._updated)
    affected_masters = set()
    for pin in self._updated.iterkeys():
      affected_masters.update(pin.masters)

    failed_slave_pool_masters = []
    for m in sorted(affected_masters):
      try:
        self._regenerate_slave_pool(m)
      except SlavePoolUpdateError:
        failed_slave_pool_masters.append(m)
    if failed_slave_pool_masters:
      LOGGER.error('Failed to update slave pools for %s. You may need to '
                   'add additional slaves to the pool(s).',
                   failed_slave_pool_masters)
      raise SlavePoolUpdateError("Failed to update slave pools.")

    # Upload CLs for the affected repositories.
    for pin, updates in self._updated.iteritems():
      self._upload_patch(
          self._c.subpath(*pin.base),
          self._generate_commit_message(updates))

  def _regenerate_slave_pool(self, master):
    LOGGER.debug('Regenerating slave pool for: %s', master)
    cmd = [
        os.path.join(*self.RUNIT_PY),
        os.path.join(*self.SLAVE_ALLOC_UPDATE),
    ]
    cmd += logging_verbosity()
    cmd.append(master)

    rv, stdout = execute.call(cmd, cwd=self._c.path)
    if rv != 0:
      LOGGER.error('Failed to update slaves for master [%s] (%d):\n%s',
                   master, rv, stdout)
      raise SlavePoolUpdateError()


  def _upload_patch(self, repo_path, commit_msg):
    # Check if the Git repository actually has changes.
    diff_args = ['git', 'diff', '--no-ext-diff', '--exit-code']
    if not LOGGER.isEnabledFor(logging.DEBUG):
      diff_args.append('--quiet')
    rv, diff = execute.call(diff_args, cwd=repo_path)
    LOGGER.debug('Diff for [%s]:\n%s', repo_path, diff)
    if rv == 0:
      LOGGER.warning('No changes in repository; refusing to commit.')
      return

    LOGGER.debug('Creating commit in [%s] with message:\n%s',
                 repo_path, commit_msg)
    execute.check_call(
        ['git', 'checkout', '-b', '_cros_pin', '--track'],
        cwd=repo_path)
    execute.check_call(
        ['git', 'commit', '--all', '--message', commit_msg],
        cwd=repo_path)

    LOGGER.debug('Uploading CL!')
    args = [
        'git', 'cl', 'upload',
        '--bypass-hooks',  # The CQ will take care of them!
        '-t', commit_msg,
        '-m', 'Auto-generated by `%s`' % (__name__,),
        '-f',
    ]
    if self._cq:
      args.append('--use-commit-queue')
    if not self._reviewers:
      args.append('--tbr-owners')

    output = execute.check_call(args, cwd=repo_path, dry_run=self._dry_run)
    issue = None
    for line in output.splitlines():
      match = self.RE_ISSUE_CREATED.match(line)
      if match:
        issue = match.group(1)
        LOGGER.debug('Extracted issue from output: %s', issue)
        self._issues.add(issue)
        break
    else:
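      # This 'else' belongs to the 'for' loop: it runs only when no
      # 'Issue created' line was found in the upload output.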
      LOGGER.warning("Unable to extract issue from patch submission.")

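  # Produces a summary like 'CrOS: Update Chromite pin.' followed by one
  # '- [<pin name>]' entry per change, plus optional BUG=/TBR= lines.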
  def _generate_commit_message(self, updates):
    lines = [
        'CrOS: Update Chromite pin.',
        '',
        'Update ChromeOS Chromite pins.'
    ]
    for name, update in updates.iteritems():
      if not update:
        continue

      fr, to = update
      lines.append('- [%s]' % (name,))
      if fr:
        if to:
          # Update from one commit to another.
          lines.extend([
              ' %s =>' % (fr,),
              ' %s' % (to,),
          ])
        else:
          # Deleted a pin.
          lines.append(' - Deleted (was %s)' % (fr,))
      elif to:
        # Added a new pin.
        lines.append(' - Added => %s' % (to,))
      lines.append('')

    if self._bug:
      lines.append('BUG=%s' % (self._bug,))
    if self._reviewers:
      lines.append('TBR=%s' % (', '.join(self._reviewers)))
    return '\n'.join(lines)