OLD | NEW |
1 # Copyright (c) 2013 The Chromium Authors. All rights reserved. | 1 # Copyright (c) 2013 The Chromium Authors. All rights reserved. |
2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
4 | 4 |
5 """Wrappers for gsutil, for basic interaction with Google Cloud Storage.""" | 5 """Wrappers for gsutil, for basic interaction with Google Cloud Storage.""" |
6 | 6 |
7 import cStringIO | 7 import cStringIO |
| 8 import hashlib |
8 import logging | 9 import logging |
9 import os | 10 import os |
10 import subprocess | 11 import subprocess |
11 import sys | 12 import sys |
12 import tarfile | 13 import tarfile |
13 import urllib2 | 14 import urllib2 |
14 | 15 |
15 from telemetry.core import util | 16 from telemetry.core import util |
16 | 17 |
17 | 18 |
18 _GSUTIL_URL = 'http://storage.googleapis.com/pub/gsutil.tar.gz' | 19 _GSUTIL_URL = 'http://storage.googleapis.com/pub/gsutil.tar.gz' |
19 _DOWNLOAD_PATH = os.path.join(util.GetTelemetryDir(), 'third_party', 'gsutil') | 20 _DOWNLOAD_PATH = os.path.join(util.GetTelemetryDir(), 'third_party', 'gsutil') |
20 | 21 |
21 | 22 |
class CloudStorageError(Exception):
  """Error raised when an underlying gsutil command fails."""
| 25 |
| 26 |
def _DownloadGsutil():
  """Downloads and extracts the gsutil tarball into _DOWNLOAD_PATH.

  Returns:
    The path of the gsutil executable inside the extracted directory.

  Raises:
    urllib2.URLError: If downloading the tarball fails.
  """
  logging.info('Downloading gsutil')
  response = urllib2.urlopen(_GSUTIL_URL)
  # NOTE(review): extractall() trusts member paths in the archive; this is
  # acceptable only because the tarball comes from a Google-controlled URL.
  with tarfile.open(fileobj=cStringIO.StringIO(response.read())) as tar_file:
    tar_file.extractall(os.path.dirname(_DOWNLOAD_PATH))
  # Lazy %-args: formatting is skipped entirely when INFO logging is off.
  logging.info('Downloaded gsutil to %s', _DOWNLOAD_PATH)

  return os.path.join(_DOWNLOAD_PATH, 'gsutil')
30 | 35 |
31 | 36 |
(...skipping 17 matching lines...) Expand all Loading... |
49 return _DownloadGsutil() | 54 return _DownloadGsutil() |
50 | 55 |
51 | 56 |
def _RunCommand(args):
  """Runs gsutil with the given arguments and returns its stdout.

  Args:
    args: A list of command-line arguments to pass to gsutil.

  Returns:
    The stdout produced by the gsutil invocation.

  Raises:
    CloudStorageError: If gsutil exits with a nonzero status.
  """
  gsutil_path = _FindGsutil()
  gsutil = subprocess.Popen([sys.executable, gsutil_path] + args,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
  stdout, stderr = gsutil.communicate()

  if gsutil.returncode:
    # stderr may be empty (e.g. if the process was killed); guard the [-1]
    # index so the failure surfaces as CloudStorageError, not IndexError.
    error_lines = stderr.splitlines()
    if error_lines:
      raise CloudStorageError(error_lines[-1])
    raise CloudStorageError(
        'gsutil failed with exit code %d' % gsutil.returncode)

  return stdout
62 | 67 |
63 | 68 |
def List(bucket):
  """Returns the names of all objects in the given Cloud Storage bucket."""
  listing = _RunCommand(['ls', 'gs://%s' % bucket])
  names = []
  for line in listing.splitlines():
    names.append(line.split('/')[-1])
  return names
67 | 72 |
68 | 73 |
def Delete(bucket, remote_path):
  """Deletes the object at gs://<bucket>/<remote_path>.

  Args:
    bucket: Name of the Cloud Storage bucket.
    remote_path: Path of the object within the bucket.

  Raises:
    CloudStorageError: If the underlying gsutil command fails.
  """
  url = 'gs://%s/%s' % (bucket, remote_path)
  # Lazy %-args: the message is only formatted when DEBUG logging is enabled.
  logging.debug('Deleting %s', url)
  _RunCommand(['rm', url])
73 | 78 |
74 | 79 |
def Get(bucket, remote_path, local_path):
  """Downloads gs://<bucket>/<remote_path> to local_path.

  Args:
    bucket: Name of the Cloud Storage bucket.
    remote_path: Path of the object within the bucket.
    local_path: Local filesystem path to write the object to.

  Raises:
    CloudStorageError: If the underlying gsutil command fails.
  """
  url = 'gs://%s/%s' % (bucket, remote_path)
  # Lazy %-args: the message is only formatted when DEBUG logging is enabled.
  logging.debug('Downloading %s to %s', url, local_path)
  _RunCommand(['cp', url, local_path])
79 | 84 |
80 | 85 |
def Insert(bucket, remote_path, local_path):
  """Uploads the file at local_path to gs://<bucket>/<remote_path>.

  Args:
    bucket: Name of the Cloud Storage bucket.
    remote_path: Destination path of the object within the bucket.
    local_path: Local filesystem path of the file to upload.

  Raises:
    CloudStorageError: If the underlying gsutil command fails.
  """
  url = 'gs://%s/%s' % (bucket, remote_path)
  # Lazy %-args: the message is only formatted when DEBUG logging is enabled.
  logging.debug('Uploading %s to %s', local_path, url)
  _RunCommand(['cp', local_path, url])
| 90 |
| 91 |
def GetHash(file_path):
  """Returns the hex SHA-1 digest of the file at file_path."""
  digest = hashlib.sha1()
  with open(file_path, 'rb') as stream:
    # Stream the file in 1mb chunks so large files never sit fully in memory.
    for chunk in iter(lambda: stream.read(1024*1024), b''):
      digest.update(chunk)
  return digest.hexdigest()
OLD | NEW |