Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(1242)

Side by Side Diff: scripts/slave/annotated_run.py

Issue 1492613002: annotated_run: Cleanup/refactor. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/tools/build
Patch Set: Use standard paths, fix BuildInternal error. Created 5 years ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « no previous file | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show/Hide Comments ('s')
OLDNEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # Copyright (c) 2013 The Chromium Authors. All rights reserved. 2 # Copyright (c) 2013 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be 3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file. 4 # found in the LICENSE file.
5 5
6 import argparse 6 import argparse
7 import collections
7 import contextlib 8 import contextlib
8 import json 9 import json
10 import logging
9 import os 11 import os
12 import platform
10 import shutil 13 import shutil
11 import socket 14 import socket
12 import subprocess 15 import subprocess
13 import sys 16 import sys
14 import tempfile 17 import tempfile
15 import traceback
16 18
19
20 # Install Infra build environment.
17 BUILD_ROOT = os.path.dirname(os.path.dirname(os.path.dirname( 21 BUILD_ROOT = os.path.dirname(os.path.dirname(os.path.dirname(
18 os.path.abspath(__file__)))) 22 os.path.abspath(__file__))))
19 sys.path.append(os.path.join(BUILD_ROOT, 'scripts')) 23 sys.path.insert(0, os.path.join(BUILD_ROOT, 'scripts'))
20 sys.path.append(os.path.join(BUILD_ROOT, 'third_party')) 24 import common.env
25 common.env.Install()
21 26
22 from common import annotator 27 from common import annotator
23 from common import chromium_utils 28 from common import chromium_utils
24 from common import master_cfg_utils 29 from common import master_cfg_utils
25 30
26 SCRIPT_PATH = os.path.dirname(os.path.abspath(__file__)) 31 # Logging instance.
27 BUILD_LIMITED_ROOT = os.path.join( 32 LOGGER = logging.getLogger('annotated_run')
28 os.path.dirname(BUILD_ROOT), 'build_internal', 'scripts', 'slave')
29 33
30 PACKAGE_CFG = os.path.join(
31 os.path.dirname(os.path.dirname(SCRIPT_PATH)),
32 'infra', 'config', 'recipes.cfg')
33 34
34 if sys.platform.startswith('win'): 35 # RecipeRuntime will probe this for values.
35 # TODO(pgervais): add windows support 36 # - First, (system, platform)
36 # QQ: Where is infra/run.py on windows machines? 37 # - Then, (system,)
37 RUN_CMD = None 38 # - Finally, (),
38 else: 39 PLATFORM_CONFIG = {
39 RUN_CMD = os.path.join('/', 'opt', 'infra-python', 'run.py') 40 # All systems.
41 (): {},
42
43 # Linux
44 ('Linux',): {
45 'run_cmd': ['/opt/infra-python/run.py'],
46 },
47
48 # Mac OSX
49 ('Darwin',): {
50 'run_cmd': ['/opt/infra-python/run.py'],
51 },
52
53 # Windows
54 ('Windows',): {
55 'run_cmd': ['C:\\infra-python\\ENV\\Scripts\\python.exe',
56 'C:\\infra-python\\run.py'],
57 },
58 }
59
60
61 # Config is the runtime configuration used by `annotated_run.py` to bootstrap
62 # the recipe engine.
63 Config = collections.namedtuple('Config', (
64 'run_cmd',
65 ))
66
67
68 def get_config():
69 """Returns (Config): The constructed Config object.
70
71 The Config object is constructed from:
72 - Cascading the PLATFORM_CONFIG fields together based on current
73 OS/Architecture.
74
75 Raises:
76 KeyError: if a required configuration key/parameter is not available.
77 """
78 # Cascade the platform configuration.
79 p = (platform.system(), platform.processor())
80 platform_config = {}
81 for i in xrange(len(p)+1):
82 platform_config.update(PLATFORM_CONFIG.get(p[:i], {}))
83
84 # Construct runtime configuration.
85 return Config(
86 run_cmd=platform_config.get('run_cmd'),
87 )
88
89
90 def ensure_directory(*path):
91 path = os.path.join(*path)
92 if not os.path.isdir(path):
93 os.makedirs(path)
94 return path
95
96
97 def _run_command(cmd, **kwargs):
98 if kwargs.pop('dry_run', False):
99 LOGGER.info('(Dry Run) Would have executed command: %s', cmd)
100 return 0, ''
101
102 LOGGER.debug('Executing command: %s', cmd)
103 proc = subprocess.Popen(cmd, stderr=subprocess.STDOUT)
104 stdout, _ = proc.communicate()
105
106 LOGGER.debug('Process [%s] returned [%d] with output:\n%s',
107 cmd, proc.returncode, stdout)
108 return proc.returncode, stdout
109
110
111 def _check_command(*args, **kwargs):
112 rv, stdout = _run_command(args, **kwargs)
113 if rv != 0:
114 raise subprocess.CalledProcessError(rv, args, output=stdout)
115 return stdout
116
40 117
41 @contextlib.contextmanager 118 @contextlib.contextmanager
42 def namedTempFile(): 119 def recipe_tempdir(root=None, leak=False):
43 fd, name = tempfile.mkstemp() 120 """Creates a temporary recipe-local working directory and yields it.
44 os.close(fd) # let the exceptions fly 121
122 This creates a temporary directory for this annotation run that is
123 automatically cleaned up. It returns the directory.
124
125 Args:
126     root (str/None): If not None, the root directory. Otherwise, |os.getcwd()| will
127 be used.
128 leak (bool): If true, don't clean up the temporary directory on exit.
129 """
130 basedir = ensure_directory((root or os.getcwd()), '.recipe_runtime')
45 try: 131 try:
46 yield name 132 tdir = tempfile.mkdtemp(dir=basedir)
133 yield tdir
47 finally: 134 finally:
48 try: 135 if basedir and os.path.isdir(basedir):
49 os.remove(name) 136 if not leak:
50 except OSError as e: 137 LOGGER.debug('Cleaning up temporary directory [%s].', basedir)
51 print >> sys.stderr, "LEAK: %s: %s" % (name, e) 138 try:
139 chromium_utils.RemoveDirectory(basedir)
140 except Exception:
141 LOGGER.exception('Failed to clean up temporary directory [%s].',
142 basedir)
143 else:
144 LOGGER.warning('(--leak) Leaking temporary directory [%s].', basedir)
52 145
53 146
54 def get_recipe_properties(build_properties, use_factory_properties_from_disk): 147 def get_recipe_properties(workdir, build_properties,
148 use_factory_properties_from_disk):
55 """Constructs the recipe's properties from buildbot's properties. 149 """Constructs the recipe's properties from buildbot's properties.
56 150
57 This retrieves the current factory properties from the master_config 151 This retrieves the current factory properties from the master_config
58 in the slave's checkout (no factory properties are handed to us from the 152 in the slave's checkout (no factory properties are handed to us from the
59 master), and merges in the build properties. 153 master), and merges in the build properties.
60 154
61 Using the values from the checkout allows us to do things like change 155 Using the values from the checkout allows us to do things like change
62 the recipe and other factory properties for a builder without needing 156 the recipe and other factory properties for a builder without needing
63 a master restart. 157 a master restart.
64 158
65 As the build properties doesn't include the factory properties, we would: 159 As the build properties doesn't include the factory properties, we would:
66 1. Load factory properties from checkout on the slave. 160 1. Load factory properties from checkout on the slave.
67 2. Override the factory properties with the build properties. 161 2. Override the factory properties with the build properties.
68 3. Set the factory-only properties as build properties using annotation so 162 3. Set the factory-only properties as build properties using annotation so
69 that they will show up on the build page. 163 that they will show up on the build page.
70 """ 164 """
71 if not use_factory_properties_from_disk: 165 if not use_factory_properties_from_disk:
72 return build_properties 166 return build_properties
73 167
74 stream = annotator.StructuredAnnotationStream() 168 stream = annotator.StructuredAnnotationStream()
75 with stream.step('setup_properties') as s: 169 with stream.step('setup_properties') as s:
76 factory_properties = {} 170 factory_properties = {}
77 171
78 mastername = build_properties.get('mastername') 172 mastername = build_properties.get('mastername')
79 buildername = build_properties.get('buildername') 173 buildername = build_properties.get('buildername')
80 if mastername and buildername: 174 if mastername and buildername:
81 # Load factory properties from tip-of-tree checkout on the slave builder. 175 # Load factory properties from tip-of-tree checkout on the slave builder.
82 factory_properties = get_factory_properties_from_disk( 176 factory_properties = get_factory_properties_from_disk(
83 mastername, buildername) 177 workdir, mastername, buildername)
84 178
85 # Check conflicts between factory properties and build properties. 179 # Check conflicts between factory properties and build properties.
86 conflicting_properties = {} 180 conflicting_properties = {}
87 for name, value in factory_properties.items(): 181 for name, value in factory_properties.items():
88 if not build_properties.has_key(name) or build_properties[name] == value: 182 if not build_properties.has_key(name) or build_properties[name] == value:
89 continue 183 continue
90 conflicting_properties[name] = (value, build_properties[name]) 184 conflicting_properties[name] = (value, build_properties[name])
91 185
92 if conflicting_properties: 186 if conflicting_properties:
93 s.step_text( 187 s.step_text(
94 '<br/>detected %d conflict[s] between factory and build properties' 188 '<br/>detected %d conflict[s] between factory and build properties'
95 % len(conflicting_properties)) 189 % len(conflicting_properties))
96 print 'Conflicting factory and build properties:' 190
97 for name, (factory_value, build_value) in conflicting_properties.items(): 191 conflicts = [' "%s": factory: "%s", build: "%s"' % (
98 print (' "%s": factory: "%s", build: "%s"' % (
99 name, 192 name,
100 '<unset>' if (factory_value is None) else factory_value, 193 '<unset>' if (fv is None) else fv,
101 '<unset>' if (build_value is None) else build_value)) 194 '<unset>' if (bv is None) else bv)
102 print "Will use the values from build properties." 195 for name, (fv, bv) in conflicting_properties.items()]
196 LOGGER.warning('Conflicting factory and build properties:\n%s',
197 '\n'.join(conflicts))
198 LOGGER.warning("Will use the values from build properties.")
103 199
104 # Figure out the factory-only properties and set them as build properties so 200 # Figure out the factory-only properties and set them as build properties so
105 # that they will show up on the build page. 201 # that they will show up on the build page.
106 for name, value in factory_properties.items(): 202 for name, value in factory_properties.items():
107 if not build_properties.has_key(name): 203 if not build_properties.has_key(name):
108 s.set_build_property(name, json.dumps(value)) 204 s.set_build_property(name, json.dumps(value))
109 205
110 # Build properties override factory properties. 206 # Build properties override factory properties.
111 properties = factory_properties.copy() 207 properties = factory_properties.copy()
112 properties.update(build_properties) 208 properties.update(build_properties)
113 return properties 209 return properties
114 210
115 211
116 def get_factory_properties_from_disk(mastername, buildername): 212 def get_factory_properties_from_disk(workdir, mastername, buildername):
117 master_list = master_cfg_utils.GetMasters() 213 master_list = master_cfg_utils.GetMasters()
118 master_path = None 214 master_path = None
119 for name, path in master_list: 215 for name, path in master_list:
120 if name == mastername: 216 if name == mastername:
121 master_path = path 217 master_path = path
122 218
123 if not master_path: 219 if not master_path:
124 raise LookupError('master "%s" not found.' % mastername) 220 raise LookupError('master "%s" not found.' % mastername)
125 221
126 script_path = os.path.join(BUILD_ROOT, 'scripts', 'tools', 222 script_path = os.path.join(common.env.Build, 'scripts', 'tools',
127 'dump_master_cfg.py') 223 'dump_master_cfg.py')
128 224
129 with namedTempFile() as fname: 225 master_json = os.path.join(workdir, 'dump_master_cfg.json')
130 dump_cmd = [sys.executable, 226 dump_cmd = [sys.executable,
131 script_path, 227 script_path,
132 master_path, fname] 228 master_path, master_json]
133 proc = subprocess.Popen(dump_cmd, cwd=BUILD_ROOT, stdout=subprocess.PIPE, 229 proc = subprocess.Popen(dump_cmd, cwd=common.env.Build,
134 stderr=subprocess.PIPE) 230 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
135 out, err = proc.communicate() 231 out, err = proc.communicate()
136 exit_code = proc.returncode 232 if proc.returncode:
233     raise LookupError('Failed to get the master config; dump_master_cfg %s '
234                       'returned %d):\n%s\n%s\n' % (
235 mastername, proc.returncode, out, err))
137 236
138 if exit_code: 237 with open(master_json, 'rU') as f:
139     raise LookupError('Failed to get the master config; dump_master_cfg %s ' 238     config = json.load(f)
140                       'returned %d):\n%s\n%s\n' % (
141 mastername, exit_code, out, err))
142
143 with open(fname, 'rU') as f:
144 config = json.load(f)
145 239
146 # Now extract just the factory properties for the requested builder 240 # Now extract just the factory properties for the requested builder
147 # from the master config. 241 # from the master config.
148 props = {} 242 props = {}
149 found = False 243 found = False
150 for builder_dict in config['builders']: 244 for builder_dict in config['builders']:
151 if builder_dict['name'] == buildername: 245 if builder_dict['name'] == buildername:
152 found = True 246 found = True
153 factory_properties = builder_dict['factory']['properties'] 247 factory_properties = builder_dict['factory']['properties']
154 for name, (value, _) in factory_properties.items(): 248 for name, (value, _) in factory_properties.items():
155 props[name] = value 249 props[name] = value
156 250
157 if not found: 251 if not found:
158     raise LookupError('builder "%s" not found in master "%s"' % 252     raise LookupError('builder "%s" not found in master "%s"' %
159 (buildername, mastername)) 253 (buildername, mastername))
160 254
161 if 'recipe' not in props: 255 if 'recipe' not in props:
162 raise LookupError('Cannot find recipe for %s on %s' % 256 raise LookupError('Cannot find recipe for %s on %s' %
163 (buildername, mastername)) 257 (buildername, mastername))
164 258
165 return props 259 return props
166 260
167 261
168 def get_args(argv): 262 def get_args(argv):
169 """Process command-line arguments.""" 263 """Process command-line arguments."""
170 parser = argparse.ArgumentParser( 264 parser = argparse.ArgumentParser(
171 description='Entry point for annotated builds.') 265 description='Entry point for annotated builds.')
266 parser.add_argument('-v', '--verbose',
267 action='count', default=0,
268 help='Increase verbosity. This can be specified multiple times.')
269 parser.add_argument('-d', '--dry-run', action='store_true',
270 help='Perform the setup, but refrain from executing the recipe.')
271 parser.add_argument('-l', '--leak', action='store_true',
272 help="Refrain from cleaning up generated artifacts.")
172 parser.add_argument('--build-properties', 273 parser.add_argument('--build-properties',
173 type=json.loads, default={}, 274 type=json.loads, default={},
174 help='build properties in JSON format') 275 help='build properties in JSON format')
175 parser.add_argument('--factory-properties', 276 parser.add_argument('--factory-properties',
176 type=json.loads, default={}, 277 type=json.loads, default={},
177 help='factory properties in JSON format') 278 help='factory properties in JSON format')
178 parser.add_argument('--build-properties-gz', dest='build_properties', 279 parser.add_argument('--build-properties-gz', dest='build_properties',
179 type=chromium_utils.convert_gz_json_type, default={}, 280 type=chromium_utils.convert_gz_json_type, default={},
180 help='build properties in b64 gz JSON format') 281 help='build properties in b64 gz JSON format')
181 parser.add_argument('--factory-properties-gz', dest='factory_properties', 282 parser.add_argument('--factory-properties-gz', dest='factory_properties',
182 type=chromium_utils.convert_gz_json_type, default={}, 283 type=chromium_utils.convert_gz_json_type, default={},
183 help='factory properties in b64 gz JSON format') 284 help='factory properties in b64 gz JSON format')
184 parser.add_argument('--keep-stdin', action='store_true', default=False, 285 parser.add_argument('--keep-stdin', action='store_true', default=False,
185 help='don\'t close stdin when running recipe steps') 286 help='don\'t close stdin when running recipe steps')
186 parser.add_argument('--master-overrides-slave', action='store_true', 287 parser.add_argument('--master-overrides-slave', action='store_true',
187 help='use the property values given on the command line from the master, ' 288 help='use the property values given on the command line from the master, '
188 'not the ones looked up on the slave') 289 'not the ones looked up on the slave')
189 parser.add_argument('--use-factory-properties-from-disk', 290 parser.add_argument('--use-factory-properties-from-disk',
190 action='store_true', default=False, 291 action='store_true', default=False,
191 help='use factory properties loaded from disk on the slave') 292 help='use factory properties loaded from disk on the slave')
293
192 return parser.parse_args(argv) 294 return parser.parse_args(argv)
193 295
194 296
195 def update_scripts(): 297 def update_scripts():
196 if os.environ.get('RUN_SLAVE_UPDATED_SCRIPTS'): 298 if os.environ.get('RUN_SLAVE_UPDATED_SCRIPTS'):
197 os.environ.pop('RUN_SLAVE_UPDATED_SCRIPTS') 299 os.environ.pop('RUN_SLAVE_UPDATED_SCRIPTS')
198 return False 300 return False
199 301
200 stream = annotator.StructuredAnnotationStream() 302 stream = annotator.StructuredAnnotationStream()
201 303
202 with stream.step('update_scripts') as s: 304 with stream.step('update_scripts') as s:
203 gclient_name = 'gclient' 305 gclient_name = 'gclient'
204 if sys.platform.startswith('win'): 306 if sys.platform.startswith('win'):
205 gclient_name += '.bat' 307 gclient_name += '.bat'
206 gclient_path = os.path.join(BUILD_ROOT, '..', 'depot_tools', gclient_name) 308 gclient_path = os.path.join(common.env.Build, '..', 'depot_tools',
309 gclient_name)
207 gclient_cmd = [gclient_path, 'sync', '--force', '--verbose', '--jobs=2'] 310 gclient_cmd = [gclient_path, 'sync', '--force', '--verbose', '--jobs=2']
208 try: 311 try:
209 fd, output_json = tempfile.mkstemp() 312 fd, output_json = tempfile.mkstemp()
210 os.close(fd) 313 os.close(fd)
211 gclient_cmd += ['--output-json', output_json] 314 gclient_cmd += ['--output-json', output_json]
212 except Exception: 315 except Exception:
213 # Super paranoia try block. 316 # Super paranoia try block.
214 output_json = None 317 output_json = None
215 cmd_dict = { 318 cmd_dict = {
216 'name': 'update_scripts', 319 'name': 'update_scripts',
217 'cmd': gclient_cmd, 320 'cmd': gclient_cmd,
218 'cwd': BUILD_ROOT, 321 'cwd': common.env.Build,
219 } 322 }
220 annotator.print_step(cmd_dict, os.environ, stream) 323 annotator.print_step(cmd_dict, os.environ, stream)
221 if subprocess.call(gclient_cmd, cwd=BUILD_ROOT) != 0: 324 rv, _ = _run_command(gclient_cmd, cwd=common.env.Build)
325 if rv != 0:
222 s.step_text('gclient sync failed!') 326 s.step_text('gclient sync failed!')
223 s.step_warnings() 327 s.step_warnings()
224 elif output_json: 328 elif output_json:
225 try: 329 try:
226 with open(output_json, 'r') as f: 330 with open(output_json, 'r') as f:
227 gclient_json = json.load(f) 331 gclient_json = json.load(f)
228 for line in json.dumps( 332 for line in json.dumps(
229 gclient_json, sort_keys=True, 333 gclient_json, sort_keys=True,
230 indent=4, separators=(',', ': ')).splitlines(): 334 indent=4, separators=(',', ': ')).splitlines():
231 s.step_log_line('gclient_json', line) 335 s.step_log_line('gclient_json', line)
232 s.step_log_end('gclient_json') 336 s.step_log_end('gclient_json')
233 revision = gclient_json['solutions']['build/']['revision'] 337 revision = gclient_json['solutions']['build/']['revision']
234 scm = gclient_json['solutions']['build/']['scm'] 338 scm = gclient_json['solutions']['build/']['scm']
235 s.step_text('%s - %s' % (scm, revision)) 339 s.step_text('%s - %s' % (scm, revision))
236 s.set_build_property('build_scm', json.dumps(scm)) 340 s.set_build_property('build_scm', json.dumps(scm))
237 s.set_build_property('build_revision', json.dumps(revision)) 341 s.set_build_property('build_revision', json.dumps(revision))
238 except Exception as e: 342 except Exception as e:
239 s.step_text('Unable to process gclient JSON %s' % repr(e)) 343 s.step_text('Unable to process gclient JSON %s' % repr(e))
240 s.step_warnings() 344 s.step_warnings()
241 finally: 345 finally:
242 try: 346 try:
243 os.remove(output_json) 347 os.remove(output_json)
244 except Exception as e: 348 except Exception as e:
245 print >> sys.stderr, "LEAKED:", output_json, e 349 LOGGER.warning("LEAKED: %s", output_json, exc_info=True)
246 else: 350 else:
247 s.step_text('Unable to get SCM data') 351 s.step_text('Unable to get SCM data')
248 s.step_warnings() 352 s.step_warnings()
249 353
250 os.environ['RUN_SLAVE_UPDATED_SCRIPTS'] = '1' 354 os.environ['RUN_SLAVE_UPDATED_SCRIPTS'] = '1'
251 355
252 # After running update scripts, set PYTHONIOENCODING=UTF-8 for the real 356 # After running update scripts, set PYTHONIOENCODING=UTF-8 for the real
253 # annotated_run. 357 # annotated_run.
254 os.environ['PYTHONIOENCODING'] = 'UTF-8' 358 os.environ['PYTHONIOENCODING'] = 'UTF-8'
255 359
256 return True 360 return True
257 361
258 362
259 def clean_old_recipe_engine(): 363 def clean_old_recipe_engine():
260 """Clean stale pycs from the old location of recipe_engine. 364 """Clean stale pycs from the old location of recipe_engine.
261 365
262 This function should only be needed for a little while after the recipe 366 This function should only be needed for a little while after the recipe
263 packages rollout (2015-09-16). 367 packages rollout (2015-09-16).
264 """ 368 """
265 for (dirpath, _, filenames) in os.walk( 369 for (dirpath, _, filenames) in os.walk(
266 os.path.join(BUILD_ROOT, 'third_party', 'recipe_engine')): 370 os.path.join(common.env.Build, 'third_party', 'recipe_engine')):
267 for filename in filenames: 371 for filename in filenames:
268 if filename.endswith('.pyc'): 372 if filename.endswith('.pyc'):
269 path = os.path.join(dirpath, filename) 373 os.remove(os.path.join(dirpath, filename))
270 os.remove(path)
271 374
272 375
273 @contextlib.contextmanager 376 def write_monitoring_event(config, outdir, build_properties):
274 def build_data_directory(): 377 # Ensure that all command components of "run_cmd" are available.
275 """Context manager that creates a build-specific directory. 378 if not config.run_cmd:
379 LOGGER.warning('No run.py is defined for this platform.')
380 return
381 run_cmd_missing = [p for p in config.run_cmd if not os.path.exists(p)]
382 if run_cmd_missing:
383 LOGGER.warning('Unable to find run.py. Some components are missing: %s',
384 run_cmd_missing)
385 return
276 386
277 The directory is wiped when exiting. 387 hostname = socket.getfqdn()
388 if hostname: # just in case getfqdn() returns None.
389 hostname = hostname.split('.')[0]
390 else:
391 hostname = None
278 392
279 Yields: 393 try:
280 build_data (str or None): full path to a writeable directory. Return None if 394 cmd = config.run_cmd + ['infra.tools.send_monitoring_event',
281 no directory can be found or if it's not writeable. 395 '--event-mon-output-file',
282 """ 396 ensure_directory(outdir, 'log_request_proto'),
283 prefix = 'build_data' 397 '--event-mon-run-type', 'file',
284 398 '--event-mon-service-name',
285 # TODO(pgervais): import that from infra_libs.logs instead 399 'buildbot/master/master.%s'
286 if sys.platform.startswith('win'): # pragma: no cover 400 % build_properties.get('mastername', 'UNKNOWN'),
287 DEFAULT_LOG_DIRECTORIES = [ 401 '--build-event-build-name',
288 'E:\\chrome-infra-logs', 402 build_properties.get('buildername', 'UNKNOWN'),
289 'C:\\chrome-infra-logs', 403 '--build-event-build-number',
290 ] 404 str(build_properties.get('buildnumber', 0)),
291 else: 405 '--build-event-build-scheduling-time',
292 DEFAULT_LOG_DIRECTORIES = ['/var/log/chrome-infra'] 406 str(1000*int(build_properties.get('requestedAt', 0))),
293 407 '--build-event-type', 'BUILD',
294 build_data_dir = None 408 '--event-mon-timestamp-kind', 'POINT',
295 for candidate in DEFAULT_LOG_DIRECTORIES: 409 # And use only defaults for credentials.
296 if os.path.isdir(candidate): 410 ]
297 build_data_dir = os.path.join(candidate, prefix) 411 # Add this conditionally so that we get an error in
298 break 412 # send_monitoring_event log files in case it isn't present.
299 413 if hostname:
300 # Remove any leftovers and recreate the dir. 414 cmd += ['--build-event-hostname', hostname]
301 if build_data_dir: 415 _check_command(cmd)
302 print >> sys.stderr, "Creating directory" 416 except Exception:
303 # TODO(pgervais): use infra_libs.rmtree instead. 417 LOGGER.warning("Failed to send monitoring event.", exc_info=True)
304 if os.path.exists(build_data_dir):
305 try:
306 shutil.rmtree(build_data_dir)
307 except Exception as exc:
308 # Catching everything: we don't want to break any builds for that reason
309 print >> sys.stderr, (
310 "FAILURE: path can't be deleted: %s.\n%s" % (build_data_dir, str(exc))
311 )
312 print >> sys.stderr, "Creating directory"
313
314 if not os.path.exists(build_data_dir):
315 try:
316 os.mkdir(build_data_dir)
317 except Exception as exc:
318 print >> sys.stderr, (
319 "FAILURE: directory can't be created: %s.\n%s" %
320 (build_data_dir, str(exc))
321 )
322 build_data_dir = None
323
324 # Under this line build_data_dir should point to an existing empty dir
325 # or be None.
326 yield build_data_dir
327
328 # Clean up after ourselves
329 if build_data_dir:
330 # TODO(pgervais): use infra_libs.rmtree instead.
331 try:
332 shutil.rmtree(build_data_dir)
333 except Exception as exc:
334 # Catching everything: we don't want to break any builds for that reason.
335 print >> sys.stderr, (
336 "FAILURE: path can't be deleted: %s.\n%s" % (build_data_dir, str(exc))
337 )
338 418
339 419
340 def main(argv): 420 def main(argv):
341 opts = get_args(argv) 421 opts = get_args(argv)
342 # TODO(crbug.com/551165): remove flag "factory_properties". 422
343 use_factory_properties_from_disk = (opts.use_factory_properties_from_disk or 423 if opts.verbose == 0:
344 bool(opts.factory_properties)) 424 level = logging.INFO
345 properties = get_recipe_properties( 425 else:
346 opts.build_properties, use_factory_properties_from_disk) 426 level = logging.DEBUG
427 logging.getLogger().setLevel(level)
347 428
348 clean_old_recipe_engine() 429 clean_old_recipe_engine()
349 430
350 # Find out if the recipe we intend to run is in build_internal's recipes. If 431 # Enter our runtime environment.
351 # so, use recipes.py from there, otherwise use the one from build. 432 with recipe_tempdir(leak=opts.leak) as tdir:
352 recipe_file = properties['recipe'].replace('/', os.path.sep) + '.py' 433 LOGGER.debug('Using temporary directory: [%s].', tdir)
353 if os.path.exists(os.path.join(BUILD_LIMITED_ROOT, 'recipes', recipe_file)):
354 recipe_runner = os.path.join(BUILD_LIMITED_ROOT, 'recipes.py')
355 else:
356 recipe_runner = os.path.join(SCRIPT_PATH, 'recipes.py')
357 434
358 with build_data_directory() as build_data_dir: 435 # Load factory properties and configuration.
359 # Create a LogRequestLite proto containing this build's information. 436 # TODO(crbug.com/551165): remove flag "factory_properties".
360 if build_data_dir: 437 use_factory_properties_from_disk = (opts.use_factory_properties_from_disk or
361 properties['build_data_dir'] = build_data_dir 438 bool(opts.factory_properties))
439 properties = get_recipe_properties(
440 tdir, opts.build_properties, use_factory_properties_from_disk)
441 LOGGER.debug('Loaded properties: %s', properties)
362 442
363 hostname = socket.getfqdn() 443 config = get_config()
364 if hostname: # just in case getfqdn() returns None. 444 LOGGER.debug('Loaded runtime configuration: %s', config)
365 hostname = hostname.split('.')[0]
366 else:
367 hostname = None
368 445
369 if RUN_CMD and os.path.exists(RUN_CMD): 446 SCRIPT_PATH = os.path.join(common.env.Build, 'scripts', 'slave')
370 try:
371 cmd = [RUN_CMD, 'infra.tools.send_monitoring_event',
372 '--event-mon-output-file',
373 os.path.join(build_data_dir, 'log_request_proto'),
374 '--event-mon-run-type', 'file',
375 '--event-mon-service-name',
376 'buildbot/master/master.%s'
377 % properties.get('mastername', 'UNKNOWN'),
378 '--build-event-build-name',
379 properties.get('buildername', 'UNKNOWN'),
380 '--build-event-build-number',
381 str(properties.get('buildnumber', 0)),
382 '--build-event-build-scheduling-time',
383 str(1000*int(properties.get('requestedAt', 0))),
384 '--build-event-type', 'BUILD',
385 '--event-mon-timestamp-kind', 'POINT',
386 # And use only defaults for credentials.
387 ]
388 # Add this conditionally so that we get an error in
389 # send_monitoring_event log files in case it isn't present.
390 if hostname:
391 cmd += ['--build-event-hostname', hostname]
392 subprocess.call(cmd)
393 except Exception:
394 print >> sys.stderr, traceback.format_exc()
395 447
396 else: 448 # Find out if the recipe we intend to run is in build_internal's recipes. If
397 print >> sys.stderr, ( 449 # so, use recipes.py from there, otherwise use the one from build.
398 'WARNING: Unable to find run.py at %r, no events will be sent.' 450 recipe_file = properties['recipe'].replace('/', os.path.sep) + '.py'
399 % str(RUN_CMD) 451 if common.env.BuildInternal:
400 ) 452 build_limited = os.path.join(common.env.BuildInternal,
453 'scripts', 'slave')
454 if os.path.exists(os.path.join(build_limited, 'recipes', recipe_file)):
455 recipe_runner = os.path.join(build_limited, 'recipes.py')
456 else:
457 recipe_runner = os.path.join(common.env.Build,
iannucci 2015/12/03 00:26:39 move up to 451
458 'scripts', 'slave', 'recipes.py')
401 459
402 with namedTempFile() as props_file: 460 # Setup monitoring directory and send a monitoring event.
403 with open(props_file, 'w') as fh: 461 build_data_dir = ensure_directory(tdir, 'build_data')
404 fh.write(json.dumps(properties)) 462 properties['build_data_dir'] = build_data_dir
405 cmd = [
406 sys.executable, '-u', recipe_runner,
407 'run',
408 '--workdir=%s' % os.getcwd(),
409 '--properties-file=%s' % props_file,
410 properties['recipe'] ]
411 status = subprocess.call(cmd)
412 463
413 # TODO(pgervais): Send events from build_data_dir to the endpoint. 464 # Write our annotated_run.py monitoring event.
465 write_monitoring_event(config, tdir, properties)
466
467 # Dump properties to JSON and build recipe command.
468 props_file = os.path.join(tdir, 'recipe_properties.json')
469 with open(props_file, 'w') as fh:
470 json.dump(properties, fh)
471 cmd = [
472 sys.executable, '-u', recipe_runner,
473 'run',
474 '--workdir=%s' % os.getcwd(),
475 '--properties-file=%s' % props_file,
476 properties['recipe'],
477 ]
478
479 status, _ = _run_command(cmd, dry_run=opts.dry_run)
480
414 return status 481 return status
415 482
483
416 def shell_main(argv): 484 def shell_main(argv):
417 if update_scripts(): 485 if update_scripts():
418 return subprocess.call([sys.executable] + argv) 486 # Re-execute with the updated annotated_run.py.
487 rv, _ = _run_command([sys.executable] + argv)
488 return rv
419 else: 489 else:
420 return main(argv[1:]) 490 return main(argv[1:])
421 491
422 492
423 if __name__ == '__main__': 493 if __name__ == '__main__':
494 logging.basicConfig(level=logging.INFO)
424 sys.exit(shell_main(sys.argv)) 495 sys.exit(shell_main(sys.argv))
OLDNEW
« no previous file with comments | « no previous file | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698