OLD | NEW |
(Empty) | |
| 1 # buildlogparse.py: Proxy and rendering layer for build.chromium.org. |
| 2 # Copyright (c) 2013 The Chromium Authors. All rights reserved. |
| 3 # Use of this source code is governed by a BSD-style license that can be |
| 4 # found in the LICENSE file. |
| 5 |
| 6 import jinja2 |
| 7 import json |
| 8 import logging |
| 9 import os |
| 10 import re |
| 11 import time |
| 12 import urlparse |
| 13 import webapp2 |
| 14 import zlib |
| 15 |
| 16 from google.appengine.api import urlfetch |
| 17 from google.appengine.ext import db |
| 18 |
| 19 import utils |
| 20 |
| 21 |
# The deployed App Engine version id.  Used to key cached parse results so
# that a new deployment invalidates stale BuildLogResultModel entries.
VERSION_ID = os.environ['CURRENT_VERSION_ID']

# Template environment rooted at ./templates, with autoescaping enabled.
jinja_environment = jinja2.Environment(
    loader=jinja2.FileSystemLoader(os.path.join(os.path.dirname(__file__),
                                                'templates')),
    autoescape=True,
    extensions=['jinja2.ext.autoescape'])
# Custom template filters implemented in utils.
jinja_environment.filters['delta_time'] = utils.delta_time
jinja_environment.filters['nl2br'] = utils.nl2br
jinja_environment.filters['time_since'] = utils.time_since
jinja_environment.filters['rot13_email'] = utils.rot13_email
jinja_environment.filters['cl_comment'] = utils.cl_comment

# Externally visible URL of this app: the Host header when present,
# otherwise the server name.
if os.environ.get('HTTP_HOST'):
  APP_URL = os.environ['HTTP_HOST']
else:
  APP_URL = os.environ['SERVER_NAME']

# (pattern, replacement) pairs applied in order to every log line by emit().
# Note: All of these replacements occur AFTER jinja autoescape.
# This way we can add <html> tags in the replacements, but do note that spaces
# are also rewritten by emit() first (this sentence is truncated in the
# original source -- presumably "spaces are &nbsp;"; confirm against emit()).
REPLACEMENTS = [
    # Find ../../scripts/.../*.py scripts and add links to them.
    (r'\.\./\.\./\.\./scripts/(.*)\.py',
     r'<a href="https://code.google.com/p/chromium/codesearch#chromium/tools/'
     r'build/scripts/\1.py">../../scripts/\1.py</a>'),

    # Find ../../chrome/.../*.cc files and add links to them.
    (r'\.\./\.\./chrome/(.*)\.cc:(\d+)',
     r'<a href="https://code.google.com/p/chromium/codesearch#chromium/src/'
     r'chrome/\1.cc&l=\2">../../chrome/\1.cc:\2</a>'),

    # Searches for codereview issue numbers, and add codereview links.
    (r'apply_issue(.*)-i (\d{8})(.*)-s (.*)',
     r'apply_issue\1-i <a href="\4/\2">\2</a>\3-s \4'),

    # Add green labels to PASSED or OK items.
    (r'\[(( PASSED )|'
     r'( OK ))\]',
     r'<span class="label label-success">[\1]</span>'),

    # Add red labels to FAILED items.
    (r'\[( FAILED )\]',
     r'<span class="label label-important">[\1]</span>'),

    # Add black labels to RUN items.
    (r'\[( RUN )\]',
     r'<span class="label label-inverse">[\1]</span>'),

    # Add badges to running tests.
    (r'\[(( )*\d+/\d+)\](( )+)(\d+\.\d+s) '
     r'([\w/]+\.[\w/]+) \(([\d.s]+)\)',
     r'<span class="badge badge-success">\1</span>\3<span class="badge">'
     r'\5</span> \6 <span class="badge">\7</span>'),

    # Add gray labels to [==========] blocks.
    (r'\[([-=]{10})\]',
     r'<span class="label">[\1]</span>'),

    # Find .cc and .h files and add codesite links to them.
    (r'\.\./\.\./([\w/-]+)\.(cc|h): ',
     r'<a href="https://code.google.com/p/chromium/codesearch#chromium/src/'
     r'\1.\2">../../\1.\2</a>: '),

    # Find source files with line numbers and add links to them.
    (r'\.\./\.\./([\w/-]+)\.(cc|h):(\d+): ',
     r'<a href="https://code.google.com/p/chromium/codesearch#chromium/src/'
     r'\1.\2&l=\3">../../\1.\2:\3</a>: '),

    # Add badges to compiling items.
    (r'\[(\d+/\d+)\] (CXX|AR|STAMP|CC|ACTION|RULE|COPY)',
     r'<span class="badge badge-info">\1</span> '
     r'<span class="badge">\2</span>'),

    # Bold the LHS of A=B text.
    # NOTE(review): the character class [\w:/-_.] contains the range '/-_'
    # (covering digits, uppercase and several punctuation chars); confirm
    # a literal '-' was not intended (i.e. [\w:/_.-]).
    (r'^(( )*)(\w+)=([\w:/-_.]+)',
     r'\1<strong>\3</strong>=\4'),
]
| 100 |
| 101 |
| 102 ######## |
| 103 # Models |
| 104 ######## |
class BuildLogModel(db.Model):
  """Datastore cache of a finished build log page.

  Written by BuildLog.fetch_buildlog; data holds the zlib-compressed raw
  page and url is the source buildbot log URL used as the lookup key.
  """
  # Used for caching finished build logs.
  url = db.StringProperty()
  data = db.BlobProperty()
| 109 |
class BuildbotCacheModel(db.Model):
  """Datastore cache of a completed build's JSON blob.

  Written by BuildStep.get_build_step; data holds the uncompressed JSON
  string fetched from the buildbot master, keyed by url.
  """
  # Used for caching finished build data.
  url = db.StringProperty()
  data = db.BlobProperty()
| 114 |
class BuildLogResultModel(db.Model):
  """Datastore cache of a fully parsed build log.

  Written by BuildLog.get; data is the zlib-compressed JSON of the parsed
  result, and version records the app VERSION_ID that produced it so a
  redeploy invalidates old entries.
  """
  # Used for caching finished and parsed build logs.
  url = db.StringProperty()
  version = db.StringProperty()
  data = db.BlobProperty()
| 120 |
| 121 |
def emit(source, out):
  """Renders one log chunk into a list of annotated, linkified lines.

  Args:
    source: The chunk's span class from the buildbot log page
        ('header' or 'stdout').
    out: The raw text of the chunk.

  Returns:
    A (source, lines) tuple where lines is a list of (html_line, attrs)
    pairs and attrs is a list of bootstrap CSS classes for that line.
    Blank lines are dropped.
  """
  # TODO(hinoka): This currently employs a "lookback" strategy
  # (Find [PASS/FAIL], then goes back and marks all of the lines.)
  # This should be switched to a "scan twice" strategy. 1st pass creates a
  # Test Name -> PASS/FAIL/INCOMPLETE dictionary, and 2nd pass marks the lines.
  attr = []
  if source == 'header':
    attr.append('text-info')
  lines = []
  current_test = None    # Name of the test currently running, if any.
  current_test_line = 0  # Index into lines where current_test's output began.
  for line in out.split('\n'):
    if line:
      test_match = re.search(r'\[ RUN \]\s*([^() ]*)\s*', line)
      line_attr = attr[:]
      if test_match:
        # This line is a "We're running a test" line.
        current_test = test_match.group(1).strip()
        current_test_line = len(lines)
      elif '[ OK ]' in line or '[ PASSED ]' in line:
        line_attr.append('text-success')
        test_match = re.search(r'\[ OK \]\s*([^(), ]*)\s*', line)
        if test_match:
          finished_test = test_match.group(1).strip()
          # Back-fill every line since [ RUN ]: success if this OK closes
          # the test we saw start, error otherwise.  The comparison is
          # loop-invariant, so it is hoisted out of the loop.
          verdict = ('text-success' if finished_test == current_test
                     else 'text-error')
          for line_item in lines[current_test_line:]:
            # BUG FIX: stored items are (line, attrs) 2-tuples, so the
            # attribute list is at index 1.  The original indexed [2],
            # which raised IndexError whenever this path was taken.
            line_item[1].append(verdict)
          current_test = None
      elif '[ FAILED ]' in line:
        line_attr.append('text-error')
        test_match = re.search(r'\[ FAILED \]\s*([^(), ]*)\s*', line)
        if test_match:
          finished_test = test_match.group(1).strip()
          if finished_test == current_test:
            for line_item in lines[current_test_line:]:
              # BUG FIX: index 1, not 2 (see the OK branch above).
              line_item[1].append('text-error')
          current_test = None
      elif re.search(r'\[.{10}\]', line):
        # A [==========]-style separator ends any in-progress test.
        current_test = None
      elif re.search(r'\[\s*\d+/\d+\]\s*\d+\.\d+s\s+[\w/]+\.'
                     r'[\w/]+\s+\([\d.s]+\)', line):
        # runtest.py output: [20/200] 23.3s [TestSuite.TestName] (5.3s)
        current_test = None
        line_attr.append('text-success')
      elif 'aborting test' in line:
        current_test = None
      elif current_test:
        # Output inside a running test with no verdict yet.
        line_attr.append('text-warning')

      # NOTE(review): a no-op as written; presumably this replaced ' ' with
      # '&nbsp;' originally and the entity was lost in rendering -- confirm
      # before changing.
      line = line.replace(' ', ' ')
      for rep_from, rep_to in REPLACEMENTS:
        line = re.sub(rep_from, rep_to, line)
      lines.append((line, line_attr))
  return (source, lines)
| 178 |
| 179 |
class BuildbotPassthrough(webapp2.RequestHandler):
  """Proxies an arbitrary build.chromium.org page, swapping in our CSS."""

  def get(self, path):
    # TODO(hinoka): Page caching.
    target = 'http://build.chromium.org/p/%s' % path
    response = urlfetch.fetch(target.replace(' ', '%20'),
                              method=urlfetch.GET, deadline=60)
    page = response.content.replace('default.css',
                                    '../../static/default-old.css')
    self.response.out.write(page)
| 188 |
| 189 |
class BuildStep(webapp2.RequestHandler):
  """Renders a single build page (master/builder/build number)."""

  @staticmethod
  def get_build_step(url):
    """Returns the build's JSON data, from the datastore cache if possible.

    Completed builds (no currentStep) are cached in BuildbotCacheModel;
    in-progress builds are always fetched fresh from the buildbot master.
    """
    build_step = BuildbotCacheModel.all().filter('url =', url).get()
    if build_step:
      return json.loads(build_step.data)
    else:
      s = urlfetch.fetch(url.replace(' ', '%20'),
                         method=urlfetch.GET, deadline=60).content
      logging.info(s)
      build_step_data = json.loads(s)
      # Cache if completed.
      if not build_step_data['currentStep']:
        build_step = BuildbotCacheModel(url=url, data=s)
        build_step.put()
      return build_step_data

  @utils.render_iff_new_flag_set('step.html', jinja_environment)
  def get(self, master, builder, step, new=None):
    """Parses a build step page.

    With ?new=true, returns a dict for the step.html template; otherwise
    returns the original buildbot HTML with a link to the new layout.
    """
    # Fetch the page.
    if new:
      json_url = ('http://build.chromium.org/p/%s/'
                  'json/builders/%s/builds/%s' % (master, builder, step))
      result = BuildStep.get_build_step(json_url)

      # Add on some extraneous info.
      build_properties = dict((name, value) for name, value, _
                              in result['properties'])
      # A step result code of 2 means FAILURE.
      failed_steps = ['<strong>%s</strong>' % s['name'] for s in result['steps']
                      if s['results'][0] == 2]
      if len(failed_steps) == 1:
        result['failed_steps'] = failed_steps[0]
      elif len(failed_steps) == 2:
        logging.info(failed_steps)
        result['failed_steps'] = '%s and %s' % tuple(failed_steps)
      elif failed_steps:
        # Oxford comma.
        # BUG FIX: str.join takes a single iterable; the last step belongs
        # to the outer %-format tuple.  The original passed it as a second
        # argument to join(), which raises TypeError.
        result['failed_steps'] = '%s, and %s' % (
            ', '.join(failed_steps[:-1]), failed_steps[-1])
      else:
        result['failed_steps'] = None

      if 'rietveld' in build_properties:
        result['rietveld'] = build_properties['rietveld']
      result['breadcrumbs'] = [
          ('Master %s' % master, '/buildbot/%s' % master),
          ('Builder %s' % builder, '/buildbot/%s/builders/%s' %
           (master, builder)),
          ('Slave %s' % result['slave'],
           '/buildbot/%s/buildslaves/%s' % (master, result['slave'])),
          ('Build Number %s' % step,
           '/buildbot/%s/builders/%s/builds/%s' %
           (master, builder, step)),
      ]
      result['url'] = self.request.url.split('?')[0]
      return result
    else:
      url = ('http://build.chromium.org/p/%s/'
             'builders/%s/builds/%s' % (master, builder, step))
      s = urlfetch.fetch(url.replace(' ', '%20'),
                         method=urlfetch.GET, deadline=60).content
      s = s.replace('../../../default.css', '/static/default-old.css')
      s = s.replace('<a href="../../../about">About</a>',
                    '<a href="../../../about">About</a>'
                    ' - <a href="%s?new=true">New Layout</a>' %
                    self.request.url.split('?')[0])
      return s
| 258 |
| 259 |
class BuildSlave(webapp2.RequestHandler):
  """Parses a build slave page."""

  @utils.render_iff_new_flag_set('slave.html', jinja_environment)
  def get(self, master, slave, new=None):
    """Returns template data (?new=true) or the restyled buildbot HTML."""
    if not new:
      # Old layout: pass the buildbot page through, swapping the stylesheet
      # and advertising the new layout next to the About link.
      url = ('http://build.chromium.org/p/%s/buildslaves/%s' %
             (master, slave))
      page = urlfetch.fetch(url.replace(' ', '%20'),
                            method=urlfetch.GET, deadline=60).content
      page = page.replace('../default.css', '/static/default-old.css')
      return page.replace('<a href="../about">About</a>',
                          '<a href="../about">About</a>'
                          ' - <a href="%s?new=true">New Layout</a>' %
                          self.request.url.split('?')[0])

    # New layout: fetch the slave's JSON and decorate it for slave.html.
    json_url = ('http://build.chromium.org/p/%s/'
                'json/slaves/%s' % (master, slave))
    logging.info(json_url)
    raw = urlfetch.fetch(json_url.replace(' ', '%20'),
                         method=urlfetch.GET, deadline=60).content

    result = json.loads(raw)
    result['master'] = master
    result['slave'] = slave
    result['url'] = self.request.url.split('?')[0]
    result['breadcrumbs'] = [
        ('Master %s' % master,
         '/buildbot/%s?new=true' % master),
        ('All Slaves',
         '/buildbot/%s/buildslaves?new=true' % master),
        ('Slave %s' % slave,
         '/buildbot/%s/buildslaves/%s?new=true' % (master, slave)),
    ]
    return result
| 296 |
| 297 |
class MainPage(webapp2.RequestHandler):
  """Parses a buildlog page."""

  @utils.render('buildbot.html', jinja_environment)
  @utils.expect_request_param('url')
  def get(self, url):
    """Redirects a raw buildbot URL to the matching proxied page."""
    if not url:
      return {}

    # Only the path portion of the URL matters for dispatch.
    path = urlparse.urlparse(url).path
    logging.info(path)

    # A build page: /<master>/builders/<builder>/builds/<number>.
    build_match = re.match(r'^/((p/)?)(.*)/builders/(.*)/builds/(\d+)$', path)
    if build_match:
      self.redirect(
          '/buildbot/%s/builders/%s/builds/%s' % build_match.groups()[2:])
      return {}

    # A log page: .../steps/<step>/logs/<log>.
    log_match = re.match(
        r'^/((p/)?)(.*)/builders/(.*)/builds/(\d+)/steps/(.*)/logs/(.*)', path)
    if log_match:
      self.redirect('/buildbot/%s/builders/%s/builds/%s/steps/%s'
                    '/logs/%s?new=true' % log_match.groups()[2:])
      return {}

    self.error(404)
    return {'error': 'Url not found: %s' % url}
| 323 |
class BuildLog(webapp2.RequestHandler):
  """Renders a single step log, parsed and colorized for the new layout."""

  @staticmethod
  def fetch_buildlog(url):
    """Fetch buildlog from either the datastore cache or the remote url.
    Caches the log once fetched.

    Returns the uncompressed log page as a string.
    """
    buildlog = BuildLogModel.all().filter('url =', url).get()
    if buildlog:
      # Cached entries are stored zlib-compressed.
      return zlib.decompress(buildlog.data)
    else:
      log_fetch_start = time.time()
      s = urlfetch.fetch(url.replace(' ', '%20'),
                         method=urlfetch.GET, deadline=60).content
      # NOTE(review): '%2f' is width-2, default precision; '%.2f' was
      # probably intended here and below -- confirm.
      logging.info('Log fetching time: %2f' % (time.time() - log_fetch_start))
      # Cache this build log.
      # TODO(hinoka): This should be in Google Storage.
      compressed_data = zlib.compress(s)
      # Datastore blobs are capped at ~1MB, so only cache small-enough logs.
      if len(compressed_data) < 10**6:
        buildlog = BuildLogModel(url=url, data=compressed_data)
        buildlog.put()
      return s

  @utils.render_iff_new_flag_set('logs.html', jinja_environment)
  def get(self, master, builder, build, step, logname, new):
    """Returns parsed log data (?new=true) or the restyled buildbot page.

    Running steps are redirected to the real buildbot, since their logs
    are still growing and must not be cached.
    """
    # Lets fetch the build data first to determine if this is a running step.
    json_url = ('http://build.chromium.org/p/%s/'
                'json/builders/%s/builds/%s' % (master, builder, build))
    build_data = BuildStep.get_build_step(json_url)
    steps = dict([(_step['name'], _step) for _step in build_data['steps']])
    # Construct the url to the log file.
    url = ('http://build.chromium.org/'
           'p/%s/builders/%s/builds/%s/steps/%s/logs/%s' %
           (master, builder, build, step, logname))
    current_step = steps[step]
    if not current_step['isFinished']:
      # We're not finished with this step, redirect over to the real buildbot.
      self.redirect(url)
      return {}  # Empty dict to keep the decorator happy.

    if new:
      logging.info('New layout')
      # New layout: We want to fetch the processed json blob.
      # Check for cached results or fetch the page if none exists.
      cached_result = BuildLogResultModel.all().filter(
          'url =', url).filter('version =', VERSION_ID).get()
      if cached_result:
        logging.info('Returning cached data')
        return json.loads(zlib.decompress(cached_result.data))
      else:
        # Fetch the log from the buildbot master.
        s = BuildLog.fetch_buildlog(url)

      # Parse the log output to add colors.  The log page is a sequence of
      # <span class="header">/<span class="stdout"> chunks; adjacent chunks
      # with the same class are coalesced before being handed to emit().
      parse_time_start = time.time()
      all_output = re.findall(r'<span class="(header|stdout)">(.*?)</span>',
                              s, re.S)
      result_output = []
      current_source = None
      current_string = ''
      for source, output in all_output:
        if source == current_source:
          current_string += output
          continue
        else:
          # We hit a new source, we want to emit whatever we had left and
          # start anew.
          if current_string:
            result_output.append(emit(current_source, current_string))
          current_string = output
          current_source = source
      # Flush the trailing chunk.
      if current_string:
        result_output.append(emit(current_source, current_string))
      # NOTE(review): '%2f' -- see fetch_buildlog; '%.2f' likely intended.
      logging.info('Parse time: %2f' % (time.time() - parse_time_start))

      # Add build PASS/FAIL banner.
      ret_code_m = re.search('program finished with exit code (-?\d+)', s)
      if ret_code_m:
        ret_code = int(ret_code_m.group(1))
        if ret_code == 0:
          status = 'OK'
        else:
          status = 'ERROR'
      else:
        ret_code = None
        status = None

      # Everything logs.html needs to render the page.
      final_result = {
          'output': result_output,
          'org_url': url,
          'url': self.request.url.split('?')[0],
          'name': step,
          'breadcrumbs': [
              ('Master %s' % master,
               '/buildbot/%s/waterfall' % master),
              ('Builder %s' % builder,
               '/buildbot/%s/builders/%s' %
               (master, builder)),
              ('Slave %s' % build_data['slave'],
               '/buildbot/%s/buildslaves/%s' %
               (master, build_data['slave'])),
              ('Build Number %s ' % build,
               '/buildbot/%s/builders/%s/builds/%s' %
               (master, builder, build)),
              ('Step %s' % step, '/buildbot/%s/builders/%s/builds/%s'
               '/steps/%s/logs/%s' %
               (master, builder, build, step, logname))
          ],
          'status': status,
          'ret_code': ret_code,
          'debug': self.request.get('debug'),
          'size': len(s),
          'slave': build_data['slave']
      }
      # Cache parsed logs.
      # TODO(hinoka): This should be in Google storage, where the grass is
      # green and size limits don't exist.
      compressed_result = zlib.compress(json.dumps(final_result))
      # Same ~1MB datastore cap as fetch_buildlog.
      if len(compressed_result) < 10**6:
        cached_result = BuildLogResultModel(
            url=url, version=VERSION_ID, data=compressed_result)
        cached_result.put()

      return final_result
    else:
      # Fetch the log from the buildbot master.
      logging.info('Old layout')
      s = BuildLog.fetch_buildlog(url)
      s = s.replace('default.css', '../../static/default-old.css')
      s = s.replace('<a href="stdio/text">(view as text)</a>',
                    '<a href="stdio/text">(view as text)</a><br/><br/>'
                    '<a href="%s?new=true">(New layout)</a>' %
                    self.request.url.split('?')[0])
      return s
| 456 |
| 457 |
# URL routing.  Most-specific routes first; the final catch-all proxies any
# other buildbot page through unmodified.
# FIX: route patterns containing '\d' are now raw strings -- byte-identical
# patterns, but no longer invalid escape sequences in the string literal.
app = webapp2.WSGIApplication([
    ('/buildbot/', MainPage),
    (r'/buildbot/(.*)/builders/(.*)/builds/(\d+)/steps/(.*)/logs/(.*)/?',
     BuildLog),
    (r'/buildbot/(.*)/builders/(.*)/builds/(\d+)/?', BuildStep),
    ('/buildbot/(.*)/buildslaves/(.*)/?', BuildSlave),
    ('/buildbot/(.*)', BuildbotPassthrough),
    ], debug=True)
OLD | NEW |