Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(9)

Side by Side Diff: appengine/findit/findit_api.py

Issue 2435983003: [Findit] Asynchronously process flake reports from chromium-try-flakes. (Closed)
Patch Set: Rebase to resolve conflict Created 4 years, 2 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 # Copyright 2015 The Chromium Authors. All rights reserved. 1 # Copyright 2015 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be 2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file. 3 # found in the LICENSE file.
4 4
5 """This module is to provide Findit service APIs through Cloud Endpoints: 5 """This module is to provide Findit service APIs through Cloud Endpoints:
6 6
7 Current APIs include: 7 Current APIs include:
8 1. Analysis of compile/test failures in Chromium waterfalls. 8 1. Analysis of compile/test failures in Chromium waterfalls.
9 Analyzes failures and detects suspected CLs. 9 Analyzes failures and detects suspected CLs.
10 2. Analysis of flakes on Commit Queue. 10 2. Analysis of flakes on Commit Queue.
11 """ 11 """
12 12
13 import json 13 import json
14 import logging 14 import logging
15 import pickle
15 16
16 import endpoints 17 import endpoints
17 from google.appengine.api import taskqueue 18 from google.appengine.api import taskqueue
18 from protorpc import messages 19 from protorpc import messages
19 from protorpc import remote 20 from protorpc import remote
20 21
21 from common import appengine_util 22 from common import appengine_util
22 from common import auth_util 23 from common import auth_util
23 from common import constants 24 from common import constants
24 from common import time_util 25 from common import time_util
25 from common.waterfall import failure_type 26 from common.waterfall import failure_type
26 from model.flake.flake_analysis_request import FlakeAnalysisRequest 27 from model.flake.flake_analysis_request import FlakeAnalysisRequest
27 from model.wf_analysis import WfAnalysis 28 from model.wf_analysis import WfAnalysis
28 from model.wf_swarming_task import WfSwarmingTask 29 from model.wf_swarming_task import WfSwarmingTask
29 from model.wf_try_job import WfTryJob 30 from model.wf_try_job import WfTryJob
30 from waterfall import buildbot 31 from waterfall import buildbot
31 from waterfall import waterfall_config 32 from waterfall import waterfall_config
32 from waterfall.flake import flake_analysis_service 33 from waterfall.flake import flake_analysis_service
33 from waterfall.flake import triggering_sources
34 34
35 35
36 # This is used by the underlying ProtoRpc when creating names for the ProtoRPC 36 # This is used by the underlying ProtoRpc when creating names for the ProtoRPC
37 # messages below. This package name will show up as a prefix to the message 37 # messages below. This package name will show up as a prefix to the message
38 # class names in the discovery doc and client libraries. 38 # class names in the discovery doc and client libraries.
39 package = 'FindIt' 39 package = 'FindIt'
40 40
41 41
42 # These subclasses of Message are basically definitions of Protocol RPC 42 # These subclasses of Message are basically definitions of Protocol RPC
43 # messages. https://cloud.google.com/appengine/docs/python/tools/protorpc/ 43 # messages. https://cloud.google.com/appengine/docs/python/tools/protorpc/
(...skipping 59 matching lines...) Expand 10 before | Expand all | Expand 10 after
103 103
104 104
class _Build(messages.Message):
  """Identifies one build on a waterfall: master, builder, build number."""
  master_name = messages.StringField(1, required=True)
  builder_name = messages.StringField(2, required=True)
  # INT32 variant keeps the wire encoding compatible with existing clients.
  build_number = messages.IntegerField(
      3, variant=messages.Variant.INT32, required=True)
111 111
class _FlakeAnalysis(messages.Message):
  """Response message: whether the flake report was queued for processing."""
  queued = messages.BooleanField(1, required=True)
114 114
115 115
def _AsyncProcessFailureAnalysisRequests(builds):
  """Queues a backend task to process the given failure-analysis requests.

  Args:
    builds: The builds whose failures should be analyzed; JSON-serialized
        into the task payload for the backend handler.
  """
  serialized = json.dumps({'builds': builds})
  backend_target = appengine_util.GetTargetNameForModule(
      constants.WATERFALL_BACKEND)
  taskqueue.add(
      url=constants.WATERFALL_PROCESS_FAILURE_ANALYSIS_REQUESTS_URL,
      payload=serialized,
      target=backend_target,
      queue_name=constants.WATERFALL_FAILURE_ANALYSIS_REQUEST_QUEUE)
124 124
125 125
def _AsyncProcessFlakeReport(flake_analysis_request, user_email, is_admin):
  """Queues a backend task that carries a flake report for processing.

  Args:
    flake_analysis_request: The FlakeAnalysisRequest describing the report.
    user_email: Email of the reporting user.
    is_admin: Whether the reporting user is an admin.
  """
  backend_target = appengine_util.GetTargetNameForModule(
      constants.WATERFALL_BACKEND)
  # Pickle the request together with the reporter's identity so the backend
  # handler can recover all three values from a single task payload.
  # NOTE(review): payload is produced and consumed inside this app; pickle
  # must never be used on externally-supplied task bodies.
  serialized = pickle.dumps((flake_analysis_request, user_email, is_admin))
  taskqueue.add(
      url=constants.WATERFALL_PROCESS_FLAKE_ANALYSIS_REQUEST_URL,
      payload=serialized,
      target=backend_target,
      queue_name=constants.WATERFALL_FLAKE_ANALYSIS_REQUEST_QUEUE)
134
135
126 # Create a Cloud Endpoints API. 136 # Create a Cloud Endpoints API.
127 # https://cloud.google.com/appengine/docs/python/endpoints/create_api 137 # https://cloud.google.com/appengine/docs/python/endpoints/create_api
128 @endpoints.api(name='findit', version='v1', description='FindIt API') 138 @endpoints.api(name='findit', version='v1', description='FindIt API')
129 class FindItApi(remote.Service): 139 class FindItApi(remote.Service):
130 """FindIt API v1.""" 140 """FindIt API v1."""
131 141
132 def _GenerateBuildFailureAnalysisResult( 142 def _GenerateBuildFailureAnalysisResult(
133 self, build, suspected_cls_in_result, step_name, 143 self, build, suspected_cls_in_result, step_name,
134 first_failure, test_name=None, 144 first_failure, test_name=None,
135 analysis_approach=_AnalysisApproach.HEURISTIC): 145 analysis_approach=_AnalysisApproach.HEURISTIC):
(...skipping 136 matching lines...) Expand 10 before | Expand all | Expand 10 after
272 282
273 if heuristic_analysis.failed or not heuristic_analysis.result: 283 if heuristic_analysis.failed or not heuristic_analysis.result:
274 # Bail out if the analysis failed or there is no result yet. 284 # Bail out if the analysis failed or there is no result yet.
275 continue 285 continue
276 286
277 self._GenerateResultsForBuild(build, heuristic_analysis, results) 287 self._GenerateResultsForBuild(build, heuristic_analysis, results)
278 288
279 logging.info('%d build failure(s), while %d are supported', 289 logging.info('%d build failure(s), while %d are supported',
280 len(request.builds), len(supported_builds)) 290 len(request.builds), len(supported_builds))
281 try: 291 try:
282 _TriggerNewAnalysesOnDemand(supported_builds) 292 _AsyncProcessFailureAnalysisRequests(supported_builds)
283 except Exception: # pragma: no cover. 293 except Exception: # pragma: no cover.
284 # If we fail to post a task to the task queue, we ignore and wait for next 294 # If we fail to post a task to the task queue, we ignore and wait for next
285 # request. 295 # request.
286 logging.exception('Failed to add analysis request to task queue: %s', 296 logging.exception('Failed to add analysis request to task queue: %s',
287 repr(supported_builds)) 297 repr(supported_builds))
288 298
289 return _BuildFailureAnalysisResultCollection(results=results) 299 return _BuildFailureAnalysisResultCollection(results=results)
290 300
291 @endpoints.method(_Flake, _FlakeAnalysis, path='flake', name='flake') 301 @endpoints.method(_Flake, _FlakeAnalysis, path='flake', name='flake')
292 def AnalyzeFlake(self, request): 302 def AnalyzeFlake(self, request):
293 """Analyze a flake on Commit Queue. Currently only supports flaky tests.""" 303 """Analyze a flake on Commit Queue. Currently only supports flaky tests."""
294 user_email = auth_util.GetUserEmail() 304 user_email = auth_util.GetUserEmail()
295 is_admin = auth_util.IsCurrentUserAdmin() 305 is_admin = auth_util.IsCurrentUserAdmin()
296 306
307 if not flake_analysis_service.IsAuthorizedUser(user_email, is_admin):
308 raise endpoints.UnauthorizedException(
309 'No permission to run a new analysis! User is %s' % user_email)
310
297 def CreateFlakeAnalysisRequest(flake): 311 def CreateFlakeAnalysisRequest(flake):
298 analysis_request = FlakeAnalysisRequest.Create( 312 analysis_request = FlakeAnalysisRequest.Create(
299 flake.name, flake.is_step, flake.bug_id) 313 flake.name, flake.is_step, flake.bug_id)
300 for step in flake.build_steps: 314 for step in flake.build_steps:
301 analysis_request.AddBuildStep(step.master_name, step.builder_name, 315 analysis_request.AddBuildStep(step.master_name, step.builder_name,
302 step.build_number, step.step_name, 316 step.build_number, step.step_name,
303 time_util.GetUTCNow()) 317 time_util.GetUTCNow())
304 return analysis_request 318 return analysis_request
305 319
306 logging.info('Flake: %s', CreateFlakeAnalysisRequest(request)) 320 flake_analysis_request = CreateFlakeAnalysisRequest(request)
307 analysis_triggered = flake_analysis_service.ScheduleAnalysisForFlake( 321 logging.info('Flake report: %s', flake_analysis_request)
308 CreateFlakeAnalysisRequest(request), user_email, is_admin,
309 triggering_sources.FINDIT_API)
310 322
311 if analysis_triggered is None: 323 try:
312 raise endpoints.UnauthorizedException( 324 _AsyncProcessFlakeReport(flake_analysis_request, user_email, is_admin)
313 'No permission for a new analysis! User is %s' % user_email) 325 queued = True
326 except Exception:
327 # Ignore the report when fail to queue it for async processing.
328 queued = False
329 logging.exception('Failed to queue flake report for async processing')
314 330
315 return _FlakeAnalysis(analysis_triggered=analysis_triggered) 331 return _FlakeAnalysis(queued=queued)
OLDNEW
« no previous file with comments | « appengine/findit/common/constants.py ('k') | appengine/findit/handlers/process_flake_analysis_request.py » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698