OLD | NEW |
---|---|
1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
2 # Copyright (c) 2013 The Chromium Authors. All rights reserved. | 2 # Copyright (c) 2013 The Chromium Authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
5 | 5 |
6 import argparse | 6 import argparse |
7 import collections | |
7 import contextlib | 8 import contextlib |
8 import json | 9 import json |
10 import logging | |
9 import os | 11 import os |
12 import platform | |
10 import shutil | 13 import shutil |
11 import socket | 14 import socket |
12 import subprocess | 15 import subprocess |
13 import sys | 16 import sys |
14 import tempfile | 17 import tempfile |
15 import traceback | |
16 | 18 |
19 | |
20 # Install Infra build environment. | |
# Install Infra build environment: make build/'scripts' importable first on
# sys.path, then let common.env finish sys.path setup via Install().
# BUILD_ROOT is three directories above this file (…/build).
BUILD_ROOT = os.path.dirname(os.path.dirname(os.path.dirname(
    os.path.abspath(__file__))))
sys.path.insert(0, os.path.join(BUILD_ROOT, 'scripts'))
import common.env
common.env.Install()
21 | 26 |
22 from common import annotator | 27 from common import annotator |
23 from common import chromium_utils | 28 from common import chromium_utils |
24 from common import master_cfg_utils | 29 from common import master_cfg_utils |
30 from slave import gce | |
25 | 31 |
# Slave script directories, resolved through common.env.
# NOTE(review): assumes common.env exposes Build/BuildInternal checkout
# roots — confirm against common/env.py.
SCRIPT_PATH = os.path.join(common.env.Build, 'scripts', 'slave')
BUILD_LIMITED_ROOT = os.path.join(common.env.BuildInternal, 'scripts', 'slave')

# Logging instance.
LOGGER = logging.getLogger('annotated_run')
32 'infra', 'config', 'recipes.cfg') | |
33 | 37 |
34 if sys.platform.startswith('win'): | 38 |
# RecipeRuntime will probe this for values.
# - First, (system, platform)
# - Then, (system,)
# - Finally, (),
#
# Keys are tuples matched against (platform.system(), platform.processor())
# prefixes by get_config(); later (more specific) entries override earlier
# ones. 'run_cmd' is the path to the infra repo's run.py, used to emit
# monitoring events; platforms without it (Windows) skip event sending.
PLATFORM_CONFIG = {
  # All systems.
  (): {},

  # Linux
  ('Linux',): {
    'run_cmd': '/opt/infra-python/run.py',
  },

  # Mac OSX
  ('Darwin',): {
    'run_cmd': '/opt/infra-python/run.py',
  },

  # Windows
  ('Windows',): {},
}
60 | |
61 | |
# Config is the runtime configuration used by `annotated_run.py` to bootstrap
# the recipe engine.
Config = collections.namedtuple('Config', (
    'run_cmd',  # (str/None) Path to `run.py`, used to send monitoring events.
))
67 | |
68 | |
def get_config(tmpdir=None):
  """Returns (Config): The constructed Config object.

  The Config object is constructed from:
  - Cascading the PLATFORM_CONFIG fields together based on current
    OS/Architecture.

  Args:
    tmpdir (str/None): Path to this run's temporary directory. Currently
        unused, but accepted so the `get_config(tdir)` call in `main` works
        (the previous zero-argument signature raised TypeError there).

  Raises:
    KeyError: if a required configuration key/parameter is not available.
  """
  # Cascade the platform configuration: apply the () entry first, then
  # (system,), then (system, processor) so more-specific entries win.
  p = (platform.system(), platform.processor())
  platform_config = {}
  for i in xrange(len(p)+1):
    platform_config.update(PLATFORM_CONFIG.get(p[:i], {}))

  # Construct runtime configuration.
  return Config(
      run_cmd=platform_config.get('run_cmd'),
  )
89 | |
90 | |
91 def ensure_directory(*path): | |
92 path = os.path.join(*path) | |
93 if not os.path.isdir(path): | |
94 os.makedirs(path) | |
95 return path | |
96 | |
97 | |
def _run_command(cmd, **kwargs):
  """Runs `cmd` via subprocess and returns (returncode, output).

  Args:
    cmd (list): Command argv to execute.
    dry_run (bool): Popped from kwargs; if True, only log the command and
        return (0, '').
    kwargs: Forwarded to subprocess.Popen (e.g. cwd=). stderr is always
        merged into stdout.

  Returns:
    (returncode, output): `output` is '' unless the caller pipes stdout;
        by default the child inherits our stdout so its output still
        reaches the build log.
  """
  if kwargs.pop('dry_run', False):
    LOGGER.info('(Dry Run) Would have executed command: %s', cmd)
    return 0, ''

  LOGGER.debug('Executing command: %s', cmd)
  # BUG FIX: forward **kwargs to Popen so arguments such as cwd= (used by
  # update_scripts) are honored instead of being silently dropped.
  proc = subprocess.Popen(cmd, stderr=subprocess.STDOUT, **kwargs)
  stdout, _ = proc.communicate()
  # BUG FIX: communicate() yields None when stdout isn't piped; normalize
  # to '' so all callers (and the dry-run path) consistently get a string.
  stdout = stdout or ''

  LOGGER.debug('Process [%s] returned [%d] with output:\n%s',
               cmd, proc.returncode, stdout)
  return proc.returncode, stdout
110 | |
111 | |
def _check_command(*args, **kwargs):
  """Runs `args` as a command, returning its output; raises on failure.

  Raises:
    subprocess.CalledProcessError: if the command exits non-zero.
  """
  returncode, output = _run_command(args, **kwargs)
  if returncode != 0:
    raise subprocess.CalledProcessError(returncode, args, output=output)
  return output
117 | |
40 | 118 |
@contextlib.contextmanager
def recipe_tempdir(root=None, leak=False):
  """Creates a temporary recipe-local working directory and yields it.

  This creates a temporary directory for this annotation run that is
  automatically cleaned up. It returns the directory.

  Args:
    root (str/None): If not None, the root directory. Otherwise, os.getcwd()
      will be used.
    leak (bool): If true, don't clean up the temporary directory on exit.
  """
  # All runs share <root>/.recipe_runtime; each run gets a fresh mkdtemp
  # subdirectory inside it.
  basedir = ensure_directory((root or os.getcwd()), '.recipe_runtime')
  try:
    tdir = tempfile.mkdtemp(dir=basedir)
    yield tdir
  finally:
    # NOTE(review): cleanup removes the entire shared basedir, not just this
    # run's tdir — concurrent runs under the same root would collide; confirm
    # this is intended.
    if basedir and os.path.isdir(basedir):
      if not leak:
        LOGGER.debug('Cleaning up temporary directory [%s].', basedir)
        try:
          chromium_utils.RemoveDirectory(basedir)
        except Exception:
          # Best-effort cleanup: never fail the build over a leftover dir.
          LOGGER.exception('Failed to clean up temporary directory [%s].',
                           basedir)
      else:
        LOGGER.warning('(--leak) Leaking temporary directory [%s].', basedir)
52 | 146 |
53 | 147 |
def get_recipe_properties(workdir, build_properties,
                          use_factory_properties_from_disk):
  """Constructs the recipe's properties from buildbot's properties.

  This retrieves the current factory properties from the master_config
  in the slave's checkout (no factory properties are handed to us from the
  master), and merges in the build properties.

  Using the values from the checkout allows us to do things like change
  the recipe and other factory properties for a builder without needing
  a master restart.

  As the build properties doesn't include the factory properties, we would:
  1. Load factory properties from checkout on the slave.
  2. Override the factory properties with the build properties.
  3. Set the factory-only properties as build properties using annotation so
     that they will show up on the build page.

  Args:
    workdir (str): Scratch directory handed to
        get_factory_properties_from_disk for the master-config dump.
    build_properties (dict): Properties passed down from the master.
    use_factory_properties_from_disk (bool): If False, return
        build_properties unchanged.
  """
  if not use_factory_properties_from_disk:
    return build_properties

  stream = annotator.StructuredAnnotationStream()
  with stream.step('setup_properties') as s:
    factory_properties = {}

    mastername = build_properties.get('mastername')
    buildername = build_properties.get('buildername')
    if mastername and buildername:
      # Load factory properties from tip-of-tree checkout on the slave builder.
      factory_properties = get_factory_properties_from_disk(
          workdir, mastername, buildername)

    # Check conflicts between factory properties and build properties.
    conflicting_properties = {}
    for name, value in factory_properties.items():
      # Idiom fix: `in` instead of the deprecated dict.has_key().
      if name not in build_properties or build_properties[name] == value:
        continue
      conflicting_properties[name] = (value, build_properties[name])

    if conflicting_properties:
      s.step_text(
          '<br/>detected %d conflict[s] between factory and build properties'
          % len(conflicting_properties))

      conflicts = ['  "%s": factory: "%s", build: "%s"' % (
          name,
          '<unset>' if (fv is None) else fv,
          '<unset>' if (bv is None) else bv)
          for name, (fv, bv) in conflicting_properties.items()]
      LOGGER.warning('Conflicting factory and build properties:\n%s',
                     '\n'.join(conflicts))
      LOGGER.warning("Will use the values from build properties.")

    # Figure out the factory-only properties and set them as build properties
    # so that they will show up on the build page.
    for name, value in factory_properties.items():
      if name not in build_properties:
        s.set_build_property(name, json.dumps(value))

    # Build properties override factory properties.
    properties = factory_properties.copy()
    properties.update(build_properties)
    return properties
114 | 211 |
115 | 212 |
def get_factory_properties_from_disk(workdir, mastername, buildername):
  """Returns the factory properties dict for a builder.

  Runs scripts/tools/dump_master_cfg.py to dump `mastername`'s config as
  JSON into `workdir`, then extracts the factory properties for
  `buildername`.

  Args:
    workdir (str): Directory to write the master-config JSON dump into.
    mastername (str): Name of the master to look up.
    buildername (str): Builder whose factory properties are wanted.

  Raises:
    LookupError: if the master, builder, or its 'recipe' property cannot
        be found, or if the dump script fails.
  """
  master_list = master_cfg_utils.GetMasters()
  master_path = None
  for name, path in master_list:
    if name == mastername:
      master_path = path

  if not master_path:
    raise LookupError('master "%s" not found.' % mastername)

  script_path = os.path.join(BUILD_ROOT, 'scripts', 'tools',
                             'dump_master_cfg.py')

  master_json = os.path.join(workdir, 'dump_master_cfg.json')
  dump_cmd = [sys.executable,
              script_path,
              master_path, master_json]
  proc = subprocess.Popen(dump_cmd, cwd=BUILD_ROOT, stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE)
  out, err = proc.communicate()
  if proc.returncode:
    # Message fix: the old literals concatenated to
    # "dump_master_cfg %sreturned %d):" — missing space, stray ')'.
    raise LookupError('Failed to get the master config; dump_master_cfg %s '
                      'returned %d:\n%s\n%s\n' % (
                      mastername, proc.returncode, out, err))

  with open(master_json, 'rU') as f:
    config = json.load(f)

  # Now extract just the factory properties for the requested builder
  # from the master config.
  props = {}
  found = False
  for builder_dict in config['builders']:
    if builder_dict['name'] == buildername:
      found = True
      factory_properties = builder_dict['factory']['properties']
      # dump_master_cfg emits each property as a (value, metadata) pair;
      # keep only the value.
      for name, (value, _) in factory_properties.items():
        props[name] = value

  if not found:
    # Typo fix: was 'not found on in master'.
    raise LookupError('builder "%s" not found in master "%s"' %
                      (buildername, mastername))

  if 'recipe' not in props:
    raise LookupError('Cannot find recipe for %s on %s' %
                      (buildername, mastername))

  return props
166 | 261 |
167 | 262 |
def get_args(argv):
  """Process command-line arguments."""
  ap = argparse.ArgumentParser(
      description='Entry point for annotated builds.')

  # Local alias: every option below is registered through the same parser.
  arg = ap.add_argument
  arg('-v', '--verbose', action='count', default=0,
      help='Increase verbosity. This can be specified multiple times.')
  arg('-d', '--dry-run', action='store_true',
      help='Perform the setup, but refrain from executing the recipe.')
  arg('-l', '--leak', action='store_true',
      help="Refrain from cleaning up generated artifacts.")
  arg('--build-properties', type=json.loads, default={},
      help='build properties in JSON format')
  arg('--factory-properties', type=json.loads, default={},
      help='factory properties in JSON format')
  arg('--build-properties-gz', dest='build_properties',
      type=chromium_utils.convert_gz_json_type, default={},
      help='build properties in b64 gz JSON format')
  arg('--factory-properties-gz', dest='factory_properties',
      type=chromium_utils.convert_gz_json_type, default={},
      help='factory properties in b64 gz JSON format')
  arg('--keep-stdin', action='store_true', default=False,
      help='don\'t close stdin when running recipe steps')
  arg('--master-overrides-slave', action='store_true',
      help='use the property values given on the command line from the master, '
           'not the ones looked up on the slave')
  arg('--use-factory-properties-from-disk',
      action='store_true', default=False,
      help='use factory properties loaded from disk on the slave')

  return ap.parse_args(argv)
193 | 296 |
194 | 297 |
def update_scripts():
  """gclient-syncs the build checkout, once per top-level invocation.

  Returns True if a sync was performed (the caller should then re-exec
  this script so the updated code runs), False on the second pass.
  Communicates between the two passes via the RUN_SLAVE_UPDATED_SCRIPTS
  environment variable.
  """
  if os.environ.get('RUN_SLAVE_UPDATED_SCRIPTS'):
    # Second pass: the sync already happened; clear the marker and proceed.
    os.environ.pop('RUN_SLAVE_UPDATED_SCRIPTS')
    return False

  stream = annotator.StructuredAnnotationStream()

  with stream.step('update_scripts') as s:
    gclient_name = 'gclient'
    if sys.platform.startswith('win'):
      gclient_name += '.bat'
    gclient_path = os.path.join(BUILD_ROOT, '..', 'depot_tools', gclient_name)
    gclient_cmd = [gclient_path, 'sync', '--force', '--verbose']
    try:
      # Ask gclient to report what it synced, so we can annotate revisions.
      fd, output_json = tempfile.mkstemp()
      os.close(fd)
      gclient_cmd += ['--output-json', output_json]
    except Exception:
      # Super paranoia try block.
      output_json = None
    cmd_dict = {
        'name': 'update_scripts',
        'cmd': gclient_cmd,
        'cwd': BUILD_ROOT,
    }
    annotator.print_step(cmd_dict, os.environ, stream)
    # NOTE(review): _run_command must honor cwd= here; confirm it forwards
    # kwargs to subprocess.Popen.
    rv, _ = _run_command(gclient_cmd, cwd=BUILD_ROOT)
    if rv != 0:
      s.step_text('gclient sync failed!')
      s.step_warnings()
    elif output_json:
      try:
        with open(output_json, 'r') as f:
          gclient_json = json.load(f)
        # Mirror the sync result into the step log for debugging.
        for line in json.dumps(
            gclient_json, sort_keys=True,
            indent=4, separators=(',', ': ')).splitlines():
          s.step_log_line('gclient_json', line)
        s.step_log_end('gclient_json')
        revision = gclient_json['solutions']['build/']['revision']
        scm = gclient_json['solutions']['build/']['scm']
        s.step_text('%s - %s' % (scm, revision))
        s.set_build_property('build_scm', json.dumps(scm))
        s.set_build_property('build_revision', json.dumps(revision))
      except Exception as e:
        s.step_text('Unable to process gclient JSON %s' % repr(e))
        s.step_warnings()
      finally:
        try:
          os.remove(output_json)
        except Exception as e:
          LOGGER.warning("LEAKED: %s", output_json, exc_info=True)
    else:
      s.step_text('Unable to get SCM data')
      s.step_warnings()

  os.environ['RUN_SLAVE_UPDATED_SCRIPTS'] = '1'

  # After running update scripts, set PYTHONIOENCODING=UTF-8 for the real
  # annotated_run.
  os.environ['PYTHONIOENCODING'] = 'UTF-8'

  return True
257 | 361 |
258 | 362 |
def clean_old_recipe_engine():
  """Clean stale pycs from the old location of recipe_engine.

  This function should only be needed for a little while after the recipe
  packages rollout (2015-09-16).
  """
  stale_root = os.path.join(BUILD_ROOT, 'third_party', 'recipe_engine')
  for dirpath, _, filenames in os.walk(stale_root):
    stale_pycs = (f for f in filenames if f.endswith('.pyc'))
    for fname in stale_pycs:
      os.remove(os.path.join(dirpath, fname))
271 | 374 |
272 | 375 |
def write_monitoring_event(config, outdir, build_properties):
  """Best-effort: emits a BUILD monitoring event via infra's run.py.

  Invokes `config.run_cmd infra.tools.send_monitoring_event` with this
  build's identity, writing the event protobuf under
  `outdir`/log_request_proto. Silently skips (with a warning) when run.py
  is absent; never raises.

  Args:
    config (Config): Runtime configuration; only `run_cmd` is read.
    outdir (str): Directory in which the event output file is created.
    build_properties (dict): Source of mastername/buildername/buildnumber/
        requestedAt values (defaults used when absent).
  """
  if not (config.run_cmd and os.path.exists(config.run_cmd)):
    LOGGER.warning('Unable to find run.py at %s, no events will be sent.',
                   config.run_cmd)
    return

  hostname = socket.getfqdn()
  if hostname:  # just in case getfqdn() returns None.
    hostname = hostname.split('.')[0]
  else:
    hostname = None

  try:
    cmd = [config.run_cmd, 'infra.tools.send_monitoring_event',
           '--event-mon-output-file',
           ensure_directory(outdir, 'log_request_proto'),
           '--event-mon-run-type', 'file',
           '--event-mon-service-name',
           'buildbot/master/master.%s'
           % build_properties.get('mastername', 'UNKNOWN'),
           '--build-event-build-name',
           build_properties.get('buildername', 'UNKNOWN'),
           '--build-event-build-number',
           str(build_properties.get('buildnumber', 0)),
           '--build-event-build-scheduling-time',
           # requestedAt is in seconds; the tool expects milliseconds.
           str(1000*int(build_properties.get('requestedAt', 0))),
           '--build-event-type', 'BUILD',
           '--event-mon-timestamp-kind', 'POINT',
           # And use only defaults for credentials.
           ]
    # Add this conditionally so that we get an error in
    # send_monitoring_event log files in case it isn't present.
    if hostname:
      cmd += ['--build-event-hostname', hostname]
    _check_command(cmd)
  except Exception:
    # Monitoring is best-effort; never fail the build over it.
    LOGGER.warning("Failed to send monitoring event.", exc_info=True)
338 | 413 |
339 | 414 |
def main(argv):
  """Sets up the runtime environment and runs the recipe.

  Returns the recipe runner's exit status.
  """
  opts = get_args(argv)

  # Any -v raises the root logger from INFO to DEBUG.
  if opts.verbose == 0:
    level = logging.INFO
  else:
    level = logging.DEBUG
  logging.getLogger().setLevel(level)

  clean_old_recipe_engine()

  # Enter our runtime environment.
  with recipe_tempdir(leak=opts.leak) as tdir:
    LOGGER.debug('Using temporary directory: [%s].', tdir)

    # Load factory properties and configuration.
    # TODO(crbug.com/551165): remove flag "factory_properties".
    use_factory_properties_from_disk = (opts.use_factory_properties_from_disk or
                                        bool(opts.factory_properties))
    properties = get_recipe_properties(
        tdir, opts.build_properties, use_factory_properties_from_disk)
    LOGGER.debug('Loaded properties: %s', properties)

    # NOTE(review): confirm get_config's signature accepts this argument.
    config = get_config(tdir)
    LOGGER.debug('Loaded runtime configuration: %s', config)

    # Find out if the recipe we intend to run is in build_internal's recipes.
    # If so, use recipes.py from there, otherwise use the one from build.
    recipe_file = properties['recipe'].replace('/', os.path.sep) + '.py'
    if os.path.exists(os.path.join(BUILD_LIMITED_ROOT, 'recipes', recipe_file)):
      recipe_runner = os.path.join(BUILD_LIMITED_ROOT, 'recipes.py')
    else:
      recipe_runner = os.path.join(SCRIPT_PATH, 'recipes.py')

    # Setup monitoring directory and send a monitoring event.
    build_data_dir = ensure_directory(tdir, 'build_data')
    properties['build_data_dir'] = build_data_dir

    # Write our annotated_run.py monitoring event.
    write_monitoring_event(config, tdir, properties)

    # Dump properties to JSON and build recipe command.
    props_file = os.path.join(tdir, 'recipe_properties.json')
    with open(props_file, 'w') as fh:
      json.dump(properties, fh)
    cmd = [
        sys.executable, '-u', recipe_runner,
        'run',
        '--workdir=%s' % os.getcwd(),
        '--properties-file=%s' % props_file,
        properties['recipe'],
    ]

    status, _ = _run_command(cmd, dry_run=opts.dry_run)

  return status
415 | 471 |
472 | |
def shell_main(argv):
  """Entry point: sync scripts on the first pass, run the build on the second.

  update_scripts() returns True only once per invocation chain; when it
  does, re-execute this (now updated) script and propagate its status.
  """
  if not update_scripts():
    return main(argv[1:])

  # Re-execute with the updated annotated_run.py.
  status, _ = _run_command([sys.executable] + argv)
  return status
421 | 480 |
422 | 481 |
if __name__ == '__main__':
  # Default log level is INFO; main() raises it to DEBUG when -v is given.
  logging.basicConfig(level=logging.INFO)
  sys.exit(shell_main(sys.argv))
OLD | NEW |