OLD | NEW |
1 # coding=utf8 | 1 # coding=utf8 |
2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. | 2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
5 """Commit queue manager class. | 5 """Commit queue manager class. |
6 | 6 |
7 Security implications: | 7 Security implications: |
8 | 8 |
9 The following hypotheses are made: | 9 The following hypotheses are made: |
10 - Commit queue: | 10 - Commit queue: |
(...skipping 28 matching lines...) Expand all Loading... |
39 description = unicode | 39 description = unicode |
40 files = list | 40 files = list |
41 # Only a cache, these values can be regenerated. | 41 # Only a cache, these values can be regenerated. |
42 owner = unicode | 42 owner = unicode |
43 reviewers = list | 43 reviewers = list |
44 base_url = unicode | 44 base_url = unicode |
45 messages = list | 45 messages = list |
46 relpath = unicode | 46 relpath = unicode |
47 # Only used after a patch was committed. Keeping here for try job retries. | 47 # Only used after a patch was committed. Keeping here for try job retries. |
48 revision = (None, int, unicode) | 48 revision = (None, int, unicode) |
| 49 revert = bool |
| 50 reverted_by = unicode |
49 | 51 |
50 def __init__(self, **kwargs): | 52 def __init__(self, **kwargs): |
51 super(PendingCommit, self).__init__(**kwargs) | 53 super(PendingCommit, self).__init__(**kwargs) |
52 for message in self.messages: | 54 for message in self.messages: |
53 # Save storage, no verifier really needs 'text', just 'approval'. | 55 # Save storage, no verifier really needs 'text', just 'approval'. |
54 if 'text' in message: | 56 if 'text' in message: |
55 del message['text'] | 57 del message['text'] |
56 | 58 |
57 def pending_name(self): | 59 def pending_name(self): |
58 """The name that should be used for try jobs. | 60 """The name that should be used for try jobs. |
(...skipping 16 matching lines...) Expand all Loading... |
75 patches = context_obj.rietveld.get_patch(self.issue, self.patchset) | 77 patches = context_obj.rietveld.get_patch(self.issue, self.patchset) |
76 if not patches: | 78 if not patches: |
77 raise base.DiscardPending( | 79 raise base.DiscardPending( |
78 self, 'No diff was found for this patchset.') | 80 self, 'No diff was found for this patchset.') |
79 if self.relpath: | 81 if self.relpath: |
80 patches.set_relpath(self.relpath) | 82 patches.set_relpath(self.relpath) |
81 self.files = [p.filename for p in patches] | 83 self.files = [p.filename for p in patches] |
82 if not self.files: | 84 if not self.files: |
83 raise base.DiscardPending( | 85 raise base.DiscardPending( |
84 self, 'No file was found in this patchset.') | 86 self, 'No file was found in this patchset.') |
85 context_obj.checkout.apply_patch(patches) | 87 context_obj.checkout.apply_patch(patches, revert=self.revert) |
86 except (checkout.PatchApplicationFailed, patch.UnsupportedPatchFormat) as e: | 88 except (checkout.PatchApplicationFailed, patch.UnsupportedPatchFormat) as e: |
87 raise base.DiscardPending(self, str(e)) | 89 raise base.DiscardPending(self, str(e)) |
88 except subprocess2.CalledProcessError as e: | 90 except subprocess2.CalledProcessError as e: |
89 out = 'Failed to apply the patch.' | 91 out = 'Failed to apply the patch.' |
90 if e.stdout: | 92 if e.stdout: |
91 out += '\n%s' % e.stdout | 93 out += '\n%s' % e.stdout |
92 raise base.DiscardPending(self, out) | 94 raise base.DiscardPending(self, out) |
93 except urllib2.HTTPError as e: | 95 except urllib2.HTTPError as e: |
94 raise base.DiscardPending( | 96 raise base.DiscardPending( |
95 self, | 97 self, |
(...skipping 42 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
138 DESCRIPTION_UPDATED = ( | 140 DESCRIPTION_UPDATED = ( |
139 'Commit queue rejected this change because the description was changed\n' | 141 'Commit queue rejected this change because the description was changed\n' |
140 'between the time the change entered the commit queue and the time it\n' | 142 'between the time the change entered the commit queue and the time it\n' |
141 'was ready to commit. You can safely check the commit box again.') | 143 'was ready to commit. You can safely check the commit box again.') |
142 TRYING_PATCH = 'CQ is trying da patch. Follow status at\n' | 144 TRYING_PATCH = 'CQ is trying da patch. Follow status at\n' |
143 # Maximum number of commits done in a burst. | 145 # Maximum number of commits done in a burst. |
144 MAX_COMMIT_BURST = 4 | 146 MAX_COMMIT_BURST = 4 |
145 # Delay (secs) between commit bursts. | 147 # Delay (secs) between commit bursts. |
146 COMMIT_BURST_DELAY = 10*60 | 148 COMMIT_BURST_DELAY = 10*60 |
147 | 149 |
148 def __init__(self, context_obj, pre_patch_verifiers, verifiers): | 150 def __init__(self, context_obj, pre_patch_verifiers, verifiers, |
| 151 pre_patch_revert_verifiers=None, revert_verifiers=None): |
149 """ | 152 """ |
150 Args: | 153 Args: |
151 pre_patch_verifiers: Verifiers objects that are run before applying the | 154 pre_patch_verifiers: Verifiers objects that are run before applying the |
152 patch. | 155 patch. |
153 verifiers: Verifiers object run after applying the patch. | 156 verifiers: Verifiers object run after applying the patch. |
| 157 pre_patch_revert_verifiers: Verifiers objects that are run before applying |
| 158 the patch for reverts. |
| 159 revert_verifiers: Verifiers object run after applying the patch for |
| 160 reverts. |
154 """ | 161 """ |
| 162 # At least one commit verifier must be provided. Revert verifiers are not |
| 163 # compulsory. |
155 assert len(pre_patch_verifiers) or len(verifiers) | 164 assert len(pre_patch_verifiers) or len(verifiers) |
156 self.context = context_obj | 165 self.context = context_obj |
| 166 |
157 self.pre_patch_verifiers = pre_patch_verifiers or [] | 167 self.pre_patch_verifiers = pre_patch_verifiers or [] |
158 self.verifiers = verifiers or [] | 168 self.verifiers = verifiers or [] |
159 self.all_verifiers = pre_patch_verifiers + verifiers | 169 self.all_verifiers = pre_patch_verifiers + verifiers |
| 170 |
| 171 self.pre_patch_revert_verifiers = pre_patch_revert_verifiers or [] |
| 172 self.revert_verifiers = revert_verifiers or [] |
| 173 self.all_revert_verifiers = ( |
| 174 self.pre_patch_revert_verifiers + self.revert_verifiers) |
| 175 |
160 self.queue = PendingQueue() | 176 self.queue = PendingQueue() |
161 # Keep the timestamps of the last few commits so that we can control the | 177 # Keep the timestamps of the last few commits so that we can control the |
162 # pace (burstiness) of commits. | 178 # pace (burstiness) of commits. |
163 self.recent_commit_timestamps = [] | 179 self.recent_commit_timestamps = [] |
164 # Assert names are unique. | 180 # Assert names are unique. |
165 names = [x.name for x in pre_patch_verifiers + verifiers] | 181 names = [x.name for x in pre_patch_verifiers + verifiers] |
| 182 revert_names = [x.name for x in self.pre_patch_revert_verifiers + |
| 183 self.revert_verifiers] |
166 assert len(names) == len(set(names)) | 184 assert len(names) == len(set(names)) |
167 for verifier in self.pre_patch_verifiers: | 185 assert len(revert_names) == len(set(revert_names)) |
| 186 for verifier in self.pre_patch_verifiers + self.pre_patch_revert_verifiers: |
168 assert not isinstance(verifier, base.VerifierCheckout) | 187 assert not isinstance(verifier, base.VerifierCheckout) |
169 | 188 |
170 def look_for_new_pending_commit(self): | 189 def look_for_new_pending_commit(self): |
171 """Looks for new reviews on self.context.rietveld with c+ set. | 190 """Looks for new reviews on self.context.rietveld with c+ set. |
172 | 191 |
173 Calls _new_pending_commit() on all new review found. | 192 Calls _new_pending_commit() on all new review found. |
174 """ | 193 """ |
175 try: | 194 try: |
176 new_issues = self._fetch_pending_issues() | 195 new_issues = self._fetch_pending_issues() |
| 196 if self.context.check_for_reverts: |
| 197 # New issues consist of both regular commits and reverts if the check |
| 198 # for reverts flag is enabled. |
| 199 new_issues.extend(self._fetch_pending_reverts()) |
177 | 200 |
178 # If there is an issue in processed_issues that is not in new_issues, | 201 # If there is an issue in processed_issues that is not in new_issues, |
179 # discard it. | 202 # discard it. |
180 for pending in self.queue.iterate(): | 203 for pending in self.queue.iterate(): |
181 # Note that pending.issue is an int but self.queue.pending_commits keys | 204 # Note that pending.issue is an int but self.queue.pending_commits keys |
182 # are str due to json support. | 205 # are str due to json support. |
183 if pending.issue not in new_issues: | 206 if pending.issue not in new_issues: |
184 logging.info('Flushing issue %d' % pending.issue) | 207 logging.info('Flushing issue %d' % pending.issue) |
185 self.context.status.send( | 208 self.context.status.send( |
186 pending, | 209 pending, |
187 { 'verification': 'abort', | 210 { 'verification': 'abort', |
188 'payload': { | 211 'payload': { |
189 'output': 'CQ bit was unchecked on CL. Ignoring.' }}) | 212 'output': 'CQ or Revert bit was unchecked on CL. Ignoring.' } |
| 213 } |
| 214 ) |
190 pending.get_state = lambda: base.IGNORED | 215 pending.get_state = lambda: base.IGNORED |
191 self._discard_pending(pending, None) | 216 self._discard_pending(pending, None) |
192 | 217 |
193 # Find new issues. | 218 # Find new issues. |
194 for issue_id in new_issues: | 219 for issue_id in new_issues: |
195 if str(issue_id) not in self.queue.pending_commits: | 220 if str(issue_id) not in self.queue.pending_commits: |
196 issue_data = self.context.rietveld.get_issue_properties( | 221 issue_data = self.context.rietveld.get_issue_properties( |
197 issue_id, True) | 222 issue_id, True) |
198 # This assumption needs to hold. | 223 # This assumption needs to hold. |
199 assert issue_id == issue_data['issue'] | 224 assert issue_id == issue_data['issue'] |
200 if issue_data['patchsets'] and issue_data['commit']: | 225 if issue_data['patchsets'] and ( |
| 226 issue_data['commit'] or issue_data['revert']): |
201 logging.info('Found new issue %d' % issue_id) | 227 logging.info('Found new issue %d' % issue_id) |
| 228 |
| 229 reverted_by = issue_data.get('reverted_by') |
| 230 if not reverted_by: |
| 231 reverted_by = '' |
| 232 |
202 self.queue.add( | 233 self.queue.add( |
203 PendingCommit( | 234 PendingCommit( |
204 issue=issue_id, | 235 issue=issue_id, |
205 owner=issue_data['owner_email'], | 236 owner=issue_data['owner_email'], |
206 reviewers=issue_data['reviewers'], | 237 reviewers=issue_data['reviewers'], |
207 patchset=issue_data['patchsets'][-1], | 238 patchset=issue_data['patchsets'][-1], |
208 base_url=issue_data['base_url'], | 239 base_url=issue_data['base_url'], |
209 description=issue_data['description'].replace('\r', ''), | 240 description=issue_data['description'].replace('\r', ''), |
210 messages=issue_data['messages'])) | 241 messages=issue_data['messages'], |
| 242 revert=issue_data.get('revert', False), |
| 243 reverted_by=reverted_by)) |
211 except Exception as e: | 244 except Exception as e: |
212 traceback.print_exc() | 245 traceback.print_exc() |
213 # Swallow every exception in that code and move on. Make sure to send a | 246 # Swallow every exception in that code and move on. Make sure to send a |
214 # stack trace though. | 247 # stack trace though. |
215 errors.send_stack(e) | 248 errors.send_stack(e) |
216 | 249 |
217 def _fetch_pending_issues(self): | 250 def _fetch_pending_issues(self): |
218 """Returns the list of issue number for reviews on Rietveld with their last | 251 """Returns the list of issue number for reviews on Rietveld with their last |
219 patchset with commit+ flag set. | 252 patchset with commit+ flag set. |
220 """ | 253 """ |
221 return self.context.rietveld.get_pending_issues() | 254 return self.context.rietveld.get_pending_issues() |
222 | 255 |
| 256 def _fetch_pending_reverts(self): |
| 257 """Returns the list of issue number for reviews on Rietveld with their last |
| 258 patchset with revert+ flag set.""" |
| 259 return self.context.rietveld.get_revert_issues() |
| 260 |
223 def process_new_pending_commit(self): | 261 def process_new_pending_commit(self): |
224 """Starts verification on newly found pending commits.""" | 262 """Starts verification on newly found pending commits.""" |
225 expected = set(i.name for i in self.all_verifiers) | 263 expected = set(i.name for i in self.all_verifiers) |
226 for pending in self.queue.iterate(): | 264 for pending in self.queue.iterate(): |
227 try: | 265 try: |
228 # Take into account the case where a verifier was removed. | 266 # Take into account the case where a verifier was removed. |
229 done = set(pending.verifications.keys()) | 267 done = set(pending.verifications.keys()) |
230 missing = expected - done | 268 missing = expected - done |
231 if (not missing or pending.get_state() != base.PROCESSING): | 269 if (not missing or pending.get_state() != base.PROCESSING): |
232 continue | 270 continue |
(...skipping 66 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
299 # When state is IGNORED, we need to keep this issue so it's not fetched | 337 # When state is IGNORED, we need to keep this issue so it's not fetched |
300 # another time but we can't discard it since we don't want to remove the | 338 # another time but we can't discard it since we don't want to remove the |
301 # commit bit for another project hosted on the same code review | 339 # commit bit for another project hosted on the same code review |
302 # instance. | 340 # instance. |
303 assert state in (base.PROCESSING, base.IGNORED) | 341 assert state in (base.PROCESSING, base.IGNORED) |
304 | 342 |
305 def _verify_pending(self, pending): | 343 def _verify_pending(self, pending): |
306 """Initiates all the verifiers on a pending change.""" | 344 """Initiates all the verifiers on a pending change.""" |
307 # Do not apply the patch if not necessary. It will be applied at commit | 345 # Do not apply the patch if not necessary. It will be applied at commit |
308 # time anyway so if the patch doesn't apply, it'll be caught later. | 346 # time anyway so if the patch doesn't apply, it'll be caught later. |
309 if not self._pending_run_verifiers(pending, self.pre_patch_verifiers): | 347 if not self._pending_run_verifiers( |
| 348 pending, |
| 349 self.pre_patch_revert_verifiers if pending.revert else |
| 350 self.pre_patch_verifiers): |
310 return | 351 return |
311 | 352 |
312 if self.verifiers: | 353 if self.verifiers or self.revert_verifiers: |
313 pending.prepare_for_patch(self.context) | 354 pending.prepare_for_patch(self.context) |
314 | 355 |
315 # This CL is real business, alert the user that we're going to try his | 356 # This CL is real business, alert the user that we're going to try his |
316 # patch. Note that this is done *after* syncing but *before* applying the | 357 # patch. Note that this is done *after* syncing but *before* applying the |
317 # patch. | 358 # patch. |
318 self.context.status.send( | 359 self.context.status.send( |
319 pending, | 360 pending, |
320 { 'verification': 'initial', | 361 { 'verification': 'initial', |
321 'payload': {'revision': pending.revision}}) | 362 'payload': {'revision': pending.revision}}) |
322 self.context.rietveld.add_comment( | 363 self.context.rietveld.add_comment( |
323 pending.issue, | 364 pending.issue, |
324 self.TRYING_PATCH + '%s/%s/%d/%d\n' % ( | 365 self.TRYING_PATCH + '%s/%s/%d/%d\n' % ( |
325 self.context.status.url, pending.owner, | 366 self.context.status.url, pending.owner, |
326 pending.issue, pending.patchset)) | 367 pending.issue, pending.patchset)) |
327 | 368 |
328 if self.verifiers: | 369 if self.verifiers or self.revert_verifiers: |
329 pending.apply_patch(self.context, False) | 370 pending.apply_patch(self.context, False) |
330 previous_cwd = os.getcwd() | 371 previous_cwd = os.getcwd() |
331 try: | 372 try: |
332 os.chdir(self.context.checkout.project_path) | 373 os.chdir(self.context.checkout.project_path) |
333 self._pending_run_verifiers(pending, self.verifiers) | 374 self._pending_run_verifiers( |
| 375 pending, |
| 376 self.revert_verifiers if pending.revert else self.verifiers) |
334 finally: | 377 finally: |
335 os.chdir(previous_cwd) | 378 os.chdir(previous_cwd) |
336 | 379 |
337 # Send the initial 'why not' message. | 380 # Send the initial 'why not' message. |
338 if pending.why_not(): | 381 if pending.why_not(): |
339 self.context.status.send( | 382 self.context.status.send( |
340 pending, | 383 pending, |
341 {'verification': 'why not', | 384 {'verification': 'why not', |
342 'payload': {'message': pending.why_not()}}) | 385 'payload': {'message': pending.why_not()}}) |
343 | 386 |
(...skipping 21 matching lines...) Expand all Loading... |
365 if pending.get_state() == base.FAILED: | 408 if pending.get_state() == base.FAILED: |
366 # Throw if it didn't pass, so the error message is not lost. | 409 # Throw if it didn't pass, so the error message is not lost. |
367 raise base.DiscardPending( | 410 raise base.DiscardPending( |
368 pending, pending.error_message() or cls.FAILED_NO_MESSAGE) | 411 pending, pending.error_message() or cls.FAILED_NO_MESSAGE) |
369 return True | 412 return True |
370 | 413 |
371 def _last_minute_checks(self, pending): | 414 def _last_minute_checks(self, pending): |
372 """Does last minute checks on Rietveld before committing a pending patch.""" | 415 """Does last minute checks on Rietveld before committing a pending patch.""" |
373 pending_data = self.context.rietveld.get_issue_properties( | 416 pending_data = self.context.rietveld.get_issue_properties( |
374 pending.issue, True) | 417 pending.issue, True) |
375 if pending_data['commit'] != True: | 418 if pending.revert: |
376 raise base.DiscardPending(pending, None) | 419 # For revert requests check that revert bit is checked and that the issue |
377 if pending_data['closed'] != False: | 420 # is closed. |
378 raise base.DiscardPending(pending, None) | 421 if pending_data['revert'] != True: |
| 422 raise base.DiscardPending(pending, None) |
| 423 if pending_data['closed'] != True: |
| 424 raise base.DiscardPending(pending, None) |
| 425 else: |
| 426 # For regular commit requests check that the commit bit is checked and |
| 427 # that the issue is not closed. |
| 428 if pending_data['commit'] != True: |
| 429 raise base.DiscardPending(pending, None) |
| 430 if pending_data['closed'] != False: |
| 431 raise base.DiscardPending(pending, None) |
379 if pending.description != pending_data['description'].replace('\r', ''): | 432 if pending.description != pending_data['description'].replace('\r', ''): |
380 raise base.DiscardPending(pending, self.DESCRIPTION_UPDATED) | 433 raise base.DiscardPending(pending, self.DESCRIPTION_UPDATED) |
381 commit_user = set([self.context.rietveld.email]) | 434 commit_user = set([self.context.rietveld.email]) |
382 expected = set(pending.reviewers) - commit_user | 435 expected = set(pending.reviewers) - commit_user |
383 actual = set(pending_data['reviewers']) - commit_user | 436 actual = set(pending_data['reviewers']) - commit_user |
384 # Try to be nice, if there was a drive-by review and the new reviewer left | 437 # Try to be nice, if there was a drive-by review and the new reviewer left |
385 # a lgtm, don't abort. | 438 # a lgtm, don't abort. |
386 def is_approver(r): | 439 def is_approver(r): |
387 return any( | 440 return any( |
388 m.get('approval') for m in pending_data['messages'] | 441 m.get('approval') for m in pending_data['messages'] |
389 if m['sender'] == r) | 442 if m['sender'] == r) |
390 drivers_by = [r for r in (actual - expected) if not is_approver(r)] | 443 drivers_by = [r for r in (actual - expected) if not is_approver(r)] |
391 if drivers_by: | 444 if drivers_by: |
392 # That annoying drive-by. | 445 # That annoying drive-by. |
393 raise base.DiscardPending( | 446 raise base.DiscardPending( |
394 pending, | 447 pending, |
395 'List of reviewers changed. %s did a drive-by without LGTM\'ing!' % | 448 'List of reviewers changed. %s did a drive-by without LGTM\'ing!' % |
396 ','.join(drivers_by)) | 449 ','.join(drivers_by)) |
397 if pending.patchset != pending_data['patchsets'][-1]: | 450 if pending.patchset != pending_data['patchsets'][-1]: |
398 raise base.DiscardPending(pending, | 451 raise base.DiscardPending(pending, |
399 'Commit queue failed due to new patchset.') | 452 'Commit queue failed due to new patchset.') |
400 | 453 |
401 def _discard_pending(self, pending, message): | 454 def _discard_pending(self, pending, message): |
402 """Discards a pending commit. Attach an optional message to the review.""" | 455 """Discards a pending commit. Attach an optional message to the review.""" |
403 logging.debug('_discard_pending(%s, %s)', pending.issue, message) | 456 logging.debug('_discard_pending(%s, %s)', pending.issue, message) |
404 try: | 457 try: |
405 try: | 458 try: |
406 if pending.get_state() != base.IGNORED: | 459 if pending.get_state() != base.IGNORED: |
407 self.context.rietveld.set_flag( | 460 # Flip revert bit to False if it was a revert request else flip the |
408 pending.issue, pending.patchset, 'commit', 'False') | 461 # commit bit to False. |
| 462 if pending.revert: |
| 463 self.context.rietveld.set_flag( |
| 464 pending.issue, pending.patchset, 'revert', 'False') |
| 465 else: |
| 466 self.context.rietveld.set_flag( |
| 467 pending.issue, pending.patchset, 'commit', 'False') |
409 except urllib2.HTTPError as e: | 468 except urllib2.HTTPError as e: |
410 logging.error( | 469 logging.error( |
411 'Failed to set the flag to False for %s with message %s' % ( | 470 'Failed to set the flag to False for %s with message %s' % ( |
412 pending.pending_name(), message)) | 471 pending.pending_name(), message)) |
413 traceback.print_stack() | 472 traceback.print_stack() |
414 errors.send_stack(e) | 473 errors.send_stack(e) |
415 if message: | 474 if message: |
416 try: | 475 try: |
417 self.context.rietveld.add_comment(pending.issue, message) | 476 self.context.rietveld.add_comment(pending.issue, message) |
418 except urllib2.HTTPError as e: | 477 except urllib2.HTTPError as e: |
(...skipping 23 matching lines...) Expand all Loading... |
442 pending.apply_patch(self.context, True) | 501 pending.apply_patch(self.context, True) |
443 # Commit it. | 502 # Commit it. |
444 commit_desc = git_cl.ChangeDescription(pending.description) | 503 commit_desc = git_cl.ChangeDescription(pending.description) |
445 if (self.context.server_hooks_missing and | 504 if (self.context.server_hooks_missing and |
446 self.context.rietveld.email != pending.owner): | 505 self.context.rietveld.email != pending.owner): |
447 commit_desc.update_reviewers(pending.reviewers) | 506 commit_desc.update_reviewers(pending.reviewers) |
448 commit_desc.append_footer('Author: ' + pending.owner) | 507 commit_desc.append_footer('Author: ' + pending.owner) |
449 commit_desc.append_footer('Review URL: %s/%s' % ( | 508 commit_desc.append_footer('Review URL: %s/%s' % ( |
450 self.context.rietveld.url, | 509 self.context.rietveld.url, |
451 pending.issue)) | 510 pending.issue)) |
| 511 if pending.revert: |
| 512 commit_desc.update_as_reverted() |
452 pending.revision = self.context.checkout.commit( | 513 pending.revision = self.context.checkout.commit( |
453 commit_desc.description, pending.owner) | 514 commit_desc.description, pending.owner) |
454 if not pending.revision: | 515 if not pending.revision: |
455 raise base.DiscardPending(pending, 'Failed to commit patch.') | 516 raise base.DiscardPending(pending, 'Failed to commit patch.') |
456 | 517 |
457 # Note that the commit succeeded for commit throttling. | 518 # Note that the commit succeeded for commit throttling. |
458 self.recent_commit_timestamps.append(time.time()) | 519 self.recent_commit_timestamps.append(time.time()) |
459 self.recent_commit_timestamps = ( | 520 self.recent_commit_timestamps = ( |
460 self.recent_commit_timestamps[-(self.MAX_COMMIT_BURST + 1):]) | 521 self.recent_commit_timestamps[-(self.MAX_COMMIT_BURST + 1):]) |
461 | 522 |
462 viewvc_url = self.context.checkout.get_settings('VIEW_VC') | 523 viewvc_url = self.context.checkout.get_settings('VIEW_VC') |
463 issue_desc = git_cl.ChangeDescription(pending.description) | 524 issue_desc = git_cl.ChangeDescription(pending.description) |
464 msg = 'Committed: %s' % pending.revision | 525 msg = 'Committed: %s' % pending.revision |
465 if viewvc_url: | 526 if viewvc_url: |
466 viewvc_url = '%s%s' % (viewvc_url.rstrip('/'), pending.revision) | 527 if 'googlesource' not in viewvc_url: |
467 msg = 'Committed: %s' % viewvc_url | 528 viewvc_url = viewvc_url.rstrip('/') |
| 529 viewvc_url = '%s%s' % (viewvc_url, pending.revision) |
| 530 if pending.revert: |
| 531 msg = 'Reverted: %s' % viewvc_url |
| 532 else: |
| 533 msg = 'Committed: %s' % viewvc_url |
468 issue_desc.append_footer(msg) | 534 issue_desc.append_footer(msg) |
469 | 535 |
470 # Update the CQ dashboard. | 536 # Update the CQ dashboard. |
471 self.context.status.send( | 537 self.context.status.send( |
472 pending, | 538 pending, |
473 { 'verification': 'commit', | 539 { 'verification': 'commit', |
474 'payload': { | 540 'payload': { |
475 'revision': pending.revision, | 541 'revision': pending.revision, |
476 'output': msg, | 542 'output': msg, |
477 'url': viewvc_url}}) | 543 'url': viewvc_url}}) |
478 | 544 |
479 # Closes the issue on Rietveld. | 545 # Closes the issue on Rietveld. |
480 # TODO(csharp): Retry if exceptions are encountered. | 546 # TODO(csharp): Retry if exceptions are encountered. |
481 try: | 547 try: |
482 self.context.rietveld.close_issue(pending.issue) | 548 if pending.revert: |
| 549 self.context.rietveld.open_issue(pending.issue) |
| 550 else: |
| 551 self.context.rietveld.close_issue(pending.issue) |
483 self.context.rietveld.update_description( | 552 self.context.rietveld.update_description( |
484 pending.issue, issue_desc.description) | 553 pending.issue, issue_desc.description) |
485 self.context.rietveld.add_comment( | 554 if pending.revert: |
486 pending.issue, 'Change committed as %s' % pending.revision) | 555 self.context.rietveld.add_comment( |
| 556 pending.issue, 'Change reverted as %s' % pending.revision) |
| 557 else: |
| 558 self.context.rietveld.add_comment( |
| 559 pending.issue, 'Change committed as %s' % pending.revision) |
487 except (urllib2.HTTPError, urllib2.URLError) as e: | 560 except (urllib2.HTTPError, urllib2.URLError) as e: |
488 # Ignore AppEngine flakiness. | 561 # Ignore AppEngine flakiness. |
489 logging.warning('Unable to fully close the issue') | 562 logging.warning('Unable to fully close the issue') |
490 # And finally remove the issue. If the close_issue() call above failed, | 563 # And finally remove the issue. If the close_issue() call above failed, |
491 # it is possible the dashboard will be confused but it is harmless. | 564 # it is possible the dashboard will be confused but it is harmless. |
492 try: | 565 try: |
493 self.queue.get(pending.issue) | 566 self.queue.get(pending.issue) |
494 except KeyError: | 567 except KeyError: |
495 logging.error('Internal inconsistency for %d', pending.issue) | 568 logging.error('Internal inconsistency for %d', pending.issue) |
496 self.queue.remove(pending.issue) | 569 self.queue.remove(pending.issue) |
(...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
542 """Loads the commit queue state from a JSON file.""" | 615 """Loads the commit queue state from a JSON file.""" |
543 self.queue = model.load_from_json_file(filename) | 616 self.queue = model.load_from_json_file(filename) |
544 | 617 |
545 def save(self, filename): | 618 def save(self, filename): |
546 """Save the commit queue state in a simple JSON file.""" | 619 """Save the commit queue state in a simple JSON file.""" |
547 model.save_to_json_file(filename, self.queue) | 620 model.save_to_json_file(filename, self.queue) |
548 | 621 |
549 def close(self): | 622 def close(self): |
550 """Close all the active pending manager items.""" | 623 """Close all the active pending manager items.""" |
551 self.context.status.close() | 624 self.context.status.close() |
OLD | NEW |