OLD | NEW |
(Empty) | |
| 1 #!/usr/bin/python |
| 2 # Copyright (c) 2012 The Native Client Authors. All rights reserved. |
| 3 # Use of this source code is governed by a BSD-style license that can be |
| 4 # found in the LICENSE file. |
| 5 |
| 6 import codecs |
| 7 import hashlib |
| 8 import json |
| 9 import math |
| 10 import os |
| 11 import shutil |
| 12 import struct |
| 13 import subprocess |
| 14 import sys |
| 15 import threading |
| 16 import time |
| 17 import zipfile |
| 18 |
# Directory layout: this script lives two levels below the NaCl source
# root (<nacl>/tests/<this dir>/), so walk up twice to find it.
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
TESTS_DIR = os.path.dirname(SCRIPT_DIR)
NACL_DIR = os.path.dirname(TESTS_DIR)

# Imports from the build directory.
# NOTE(review): RemoveDir is not used in this chunk — presumably used
# elsewhere in the file; confirm before removing.
sys.path.insert(0, os.path.join(NACL_DIR, 'build'))
from download_utils import RemoveDir
| 26 |
| 27 |
class DownloadError(Exception):
  """Raised when fetching a file from the corpus store fails."""
| 31 |
| 32 |
class FailedTests(Exception):
  """Raised when a test run finishes with failures."""
| 36 |
| 37 |
def GsutilCopySilent(src, dst):
  """Invoke gsutil cp, swallowing the output, with retry.

  Args:
    src: src url.
    dst: dst path.
  Raises:
    DownloadError: if every attempt fails.
  """
  env = os.environ.copy()
  # The bots keep gsutil in /b/build/scripts/slave.
  env['PATH'] = '/b/build/scripts/slave' + os.pathsep + env['PATH']
  # Retry to compensate for storage flake.
  attempts = 3
  for attempt in range(attempts):
    process = subprocess.Popen(
        ['gsutil', 'cp', src, dst],
        env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    process_stdout, process_stderr = process.communicate()
    if process.returncode == 0:
      return
    # Exponential backoff (10s, 20s) between attempts only; previously
    # we also slept 40s after the final failure before raising.
    if attempt < attempts - 1:
      time.sleep(math.pow(2, attempt + 1) * 5)
  raise DownloadError(
      'Unexpected return code: %s\n'
      '>>> STDOUT\n%s\n'
      '>>> STDERR\n%s\n' % (
          process.returncode, process_stdout, process_stderr))
| 61 |
| 62 |
def DownloadFileFromCorpus(src_path, dst_filename):
  """Download a file from our snapshot.

  Args:
    src_path: datastore relative path to download from.
    dst_filename: destination filename.
  """
  source_url = 'gs://nativeclient-snaps/%s' % src_path
  GsutilCopySilent(source_url, dst_filename)
| 71 |
| 72 |
def DownloadCorpusCRXList(list_filename):
  """Download list of all crx files in test corpus.

  Args:
    list_filename: destination filename (kept around for debugging).
  Returns:
    List of CRXs.
  """
  DownloadFileFromCorpus('naclapps.all', list_filename)
  # Context manager guarantees the handle closes even if read() raises.
  with open(list_filename) as fh:
    filenames = fh.read().splitlines()
  return [f for f in filenames if f.endswith('.crx')]
| 87 |
| 88 |
def DownloadNexeList(filename):
  """Download list of NEXEs.

  Args:
    filename: destination filename.
  Returns:
    List of NEXEs.
  """
  DownloadFileFromCorpus('naclapps.list', filename)
  # Context manager guarantees the handle closes even if read() raises.
  with open(filename) as fh:
    return fh.read().splitlines()
| 102 |
| 103 |
def Sha1Digest(path):
  """Determine the sha1 hash of a file's contents given its path.

  Args:
    path: file to hash.
  Returns:
    Hex string sha1 digest of the file's contents.
  """
  m = hashlib.sha1()
  # Read in fixed-size chunks so large corpus files aren't pulled into
  # memory whole; 'with' guarantees the handle is closed.
  with open(path, 'rb') as fh:
    for chunk in iter(lambda: fh.read(65536), b''):
      m.update(chunk)
  return m.hexdigest()
| 111 |
| 112 |
def Hex2Alpha(ch):
  """Map a hexadecimal digit from 0-9 / a-f onto the letters a-p.

  Args:
    ch: a character in 0-9 / a-f.
  Returns:
    A character in a-p.
  """
  # Digits shift onto 'a'..'j'; letters a-f shift up ten places to k-p.
  offset = ord('a') - ord('0') if '0' <= ch <= '9' else 10
  return chr(ord(ch) + offset)
| 125 |
| 126 |
def ChromeAppIdFromPath(path):
  """Convert an unpacked extension path to its corresponding chrome app id.

  A stable but semi-undocumented property of unpacked chrome extensions
  is that their app-id is the first 32 characters of the sha256 digest
  of the absolute, symlink-expanded path of the extension, spelled with
  the letters a-p instead of hexadecimal digits.
  From discussion with webstore team + inspection of extensions code.
  Args:
    path: Path to an unpacked extension.
  Returns:
    A 32 character chrome extension app id.
  """
  digest = hashlib.sha256(os.path.realpath(path)).hexdigest()
  return ''.join(Hex2Alpha(ch) for ch in digest[:32])
| 144 |
| 145 |
def RunWithTimeout(cmd, timeout):
  """Run a program, capture output, allowing it to run up to a timeout.

  Args:
    cmd: List of strings containing command to run.
    timeout: Duration in seconds after which the process is killed.
  Returns:
    Tuple of stdout, stderr, returncode.
  """
  process = subprocess.Popen(cmd,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
  # Read each pipe from its own thread so the OS pipe buffer can't fill
  # up and deadlock the child.
  def GatherOutput(fh, dst):
    dst.append(fh.read())
  # Gather stdout.
  stdout_output = []
  stdout_thread = threading.Thread(
      target=GatherOutput, args=(process.stdout, stdout_output))
  stdout_thread.start()
  # Gather stderr.
  stderr_output = []
  stderr_thread = threading.Thread(
      target=GatherOutput, args=(process.stderr, stderr_output))
  stderr_thread.start()
  # Poll until the process exits on its own or the timeout expires.
  # (Previously we unconditionally slept the full timeout even when the
  # process had already finished; early exit is observably identical
  # since a finished process produces no further output.)
  deadline = time.time() + timeout
  while process.poll() is None and time.time() < deadline:
    time.sleep(0.05)
  # Kill anything still running; an already-exited child is left for
  # the wait() below to reap.
  if process.poll() is None:
    process.kill()
  # Join up.
  process.wait()
  stdout_thread.join()
  stderr_thread.join()
  # Pick out result.
  return stdout_output[0], stderr_output[0], process.returncode
| 180 |
| 181 |
def LoadManifest(app_path):
  """Load and parse an unpacked extension's manifest.json.

  Args:
    app_path: directory containing the unpacked extension.
  Returns:
    Parsed JSON manifest contents.
  """
  # 'with' guarantees the handle is closed (it previously leaked).
  with codecs.open(os.path.join(app_path, 'manifest.json'),
                   'r', encoding='utf-8') as fh:
    manifest_data = fh.read()
  # Ignore CRs as they confuse json.loads.
  manifest_data = manifest_data.replace('\r', '')
  # Ignore unicode endian markers as they confuse json.loads.
  manifest_data = manifest_data.replace(u'\ufeff', '')
  manifest_data = manifest_data.replace(u'\uffee', '')
  return json.loads(manifest_data)
| 191 |
| 192 |
def CachedPath(cache_dir, filename):
  """Map a cache-relative filename to its absolute path in the cache.

  Args:
    cache_dir: directory to keep the cache in.
    filename: filename relative to the top of the download url / cache.
  Returns:
    Absolute path of where the file goes in the cache.
  """
  cache_root = os.path.join(cache_dir, 'nacl_abi_corpus_cache')
  return os.path.join(cache_root, filename)
| 203 |
| 204 |
def Sha1FromFilename(filename):
  """Extract the expected sha1 from a file path.

  Throughout we use the convention that files are stored to a name of
  the form:
    <path_to_file>/<sha1hex>[.<some_extention>]
  This function extracts the expected sha1.

  Args:
    filename: filename to extract.
  Returns:
    Expected sha1.
  """
  basename = os.path.basename(filename)
  stem, _ = os.path.splitext(basename)
  return stem
| 218 |
| 219 |
def PrimeCache(cache_dir, filename):
  """Attempt to add a file to the cache directory if its not already there.

  Args:
    cache_dir: directory to keep the cache in.
    filename: filename relative to the top of the download url / cache.
  """
  dpath = CachedPath(cache_dir, filename)
  # Re-download when the file is missing or its content doesn't match
  # the sha1 embedded in its name.
  if (not os.path.exists(dpath) or
      Sha1Digest(dpath) != Sha1FromFilename(filename)):
    # Try to make the containing directory, failure is ok, let the
    # download fail instead.  (Was os.path.basename, which made a
    # directory named after the file in the CWD rather than the parent
    # directory of dpath.)
    try:
      os.makedirs(os.path.dirname(dpath))
    except OSError:
      pass
    DownloadFileFromCorpus(filename, dpath)
| 236 |
| 237 |
def CopyFromCache(cache_dir, filename, dest_filename):
  """Copy an item from the cache.

  Args:
    cache_dir: directory to keep the cache in.
    filename: filename relative to the top of the download url / cache.
    dest_filename: location to copy the file to.
  """
  cached = CachedPath(cache_dir, filename)
  shutil.copy(cached, dest_filename)
  # The copy must match the sha1 embedded in its cache-relative name.
  assert Sha1Digest(dest_filename) == Sha1FromFilename(filename)
| 249 |
| 250 |
def ExtractFromCache(cache_dir, source, dest):
  """Extract a crx (zip) from the cache.

  Args:
    cache_dir: directory to keep the cache in.
    source: crx file to extract (cache relative).
    dest: location to extract to.
  """
  # We don't want to accidentally extract two extensions on top of each other.
  # Assert that the destination doesn't yet exist.
  assert not os.path.exists(dest)
  dpath = CachedPath(cache_dir, source)
  # The cached location must exist.
  assert os.path.exists(dpath)
  zf = zipfile.ZipFile(dpath, 'r')
  os.makedirs(dest)
  for info in zf.infolist():
    # Skip directories.
    if info.filename.endswith('/'):
      continue
    # Reject absolute paths and paths containing '..' so a hostile
    # archive cannot write outside dest.  (The original test was
    # inverted: 'not os.path.isabs(...)' raised on every ordinary
    # relative entry and let absolute paths through.)
    if os.path.isabs(info.filename) or '..' in info.filename:
      raise Exception('Unacceptable zip filename %s' % info.filename)
    tpath = os.path.join(dest, info.filename)
    tdir = os.path.dirname(tpath)
    if not os.path.exists(tdir):
      os.makedirs(tdir)
    zf.extract(info, dest)
  zf.close()
| 280 |
| 281 |
def DefaultCacheDirectory():
  """Decide a default cache directory.

  Preference order: /b (for the bots), then scons-out, then the current
  user's home directory.
  Returns:
    Default to use for a corpus cache directory.
  """
  cache_dir = '/b'
  for fallback in (os.path.join(NACL_DIR, 'scons-out'),
                   os.path.expanduser('~/')):
    if os.path.isdir(cache_dir):
      break
    cache_dir = fallback
  cache_dir = os.path.realpath(cache_dir)
  # The final choice must exist and must not be the working directory.
  assert os.path.isdir(cache_dir)
  assert os.path.realpath('.') != cache_dir
  return cache_dir
| 301 |
| 302 |
def NexeArchitecture(filename):
  """Decide the architecture of a nexe.

  Args:
    filename: filename of the nexe.
  Returns:
    Architecture string (x86-32 / x86-64) or None.
  """
  # 'with' guarantees the handle is closed (it previously leaked).
  with open(filename, 'rb') as fh:
    head = fh.read(20)
  # Must not be too short: e_ident (16) + e_type (2) + e_machine (2).
  if len(head) != 20:
    print('ERROR - header too short')
    return None
  # Must have ELF magic.  A bytes literal keeps this comparison correct
  # whether the read yields str (Python 2) or bytes (Python 3).
  if head[0:4] != b'\x7fELF':
    print('ERROR - no elf header')
    return None
  # Decode e_machine: little-endian uint16 at offset 18.
  machine = struct.unpack('<H', head[18:])[0]
  return {
      3: 'x86-32',
      #40: 'arm', # TODO(bradnelson): handle arm.
      62: 'x86-64',
  }.get(machine)
| 328 |
| 329 |
class Progress(object):
  """Tracks progress, ETA, and pass/fail tallies across a test run."""

  def __init__(self, total):
    # total: number of items that will be processed.
    self.total = total
    self.count = 0
    self.successes = 0
    self.failures = 0
    self.start = time.time()

  def Tally(self):
    """Note the start of one item, printing progress and an ETA."""
    if self.count > 0:
      tm = time.time()
      # Estimate remaining time from the average pace so far.
      eta = (self.total - self.count) * (tm - self.start) / self.count
      eta_minutes = int(eta / 60)
      eta_seconds = int(eta - eta_minutes * 60)
      eta_str = ' (ETA %d:%02d)' % (eta_minutes, eta_seconds)
    else:
      eta_str = ''
    self.count += 1
    print('Processing %d of %d%s...' % (self.count, self.total, eta_str))

  def Result(self, success):
    """Record the outcome of one item."""
    if success:
      self.successes += 1
    else:
      self.failures += 1

  def Summary(self, warn_only=False):
    """Print a final summary; warn or raise if anything failed.

    Args:
      warn_only: emit a buildbot warning instead of raising on failure.
    Raises:
      FailedTests: if there were failures and warn_only is False.
    """
    print('Ran tests on %d of %d items.' % (
        self.successes + self.failures, self.total))
    if self.failures:
      # Our alternate validators don't currently cover everything.
      # For now, don't fail just emit warning (and a tally of failures).
      # Use float math: Python 2 integer division truncated the
      # percentage (1 of 3 printed as 33.0% instead of 33.3%).
      percent = self.failures * 100.0 / (self.successes + self.failures)
      print('@@@STEP_TEXT@FAILED %d times (%.1f%% are incorrect)@@@' % (
          self.failures, percent))
      if warn_only:
        print('@@@STEP_WARNINGS@@@')
      else:
        raise FailedTests('FAILED %d tests' % self.failures)
    else:
      print('SUCCESS')
OLD | NEW |