| OLD | NEW |
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
| 2 # Copyright 2013 The Chromium Authors. All rights reserved. | 2 # Copyright 2013 The Chromium Authors. All rights reserved. |
| 3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
| 4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
| 5 | 5 |
| 6 # pylint: disable=W0212 |
| 6 # pylint: disable=W0223 | 7 # pylint: disable=W0223 |
| 7 # pylint: disable=W0231 | 8 # pylint: disable=W0231 |
| 8 | 9 |
| 9 import binascii | |
| 10 import hashlib | 10 import hashlib |
| 11 import json | 11 import json |
| 12 import logging | 12 import logging |
| 13 import os | 13 import os |
| 14 import random | |
| 15 import shutil | 14 import shutil |
| 16 import StringIO | 15 import StringIO |
| 17 import sys | 16 import sys |
| 18 import tempfile | 17 import tempfile |
| 19 import threading | 18 import threading |
| 20 import unittest | 19 import unittest |
| 20 import urllib |
| 21 import zlib | 21 import zlib |
| 22 | 22 |
| 23 BASE_PATH = os.path.dirname(os.path.abspath(__file__)) | 23 BASE_PATH = os.path.dirname(os.path.abspath(__file__)) |
| 24 ROOT_DIR = os.path.dirname(BASE_PATH) | 24 ROOT_DIR = os.path.dirname(BASE_PATH) |
| 25 sys.path.insert(0, ROOT_DIR) | 25 sys.path.insert(0, ROOT_DIR) |
| 26 | 26 |
| 27 import auto_stub | 27 import auto_stub |
| 28 import isolateserver | 28 import isolateserver |
| 29 | 29 |
| 30 from utils import threading_utils | 30 from utils import threading_utils |
| (...skipping 26 matching lines...) |
| 57 for i, n in enumerate(self._requests): | 57 for i, n in enumerate(self._requests): |
| 58 if n[0] == url: | 58 if n[0] == url: |
| 59 _, expected_kwargs, result = self._requests.pop(i) | 59 _, expected_kwargs, result = self._requests.pop(i) |
| 60 self.assertEqual(expected_kwargs, kwargs) | 60 self.assertEqual(expected_kwargs, kwargs) |
| 61 if result is not None: | 61 if result is not None: |
| 62 return isolateserver.net.HttpResponse.get_fake_response(result, url) | 62 return isolateserver.net.HttpResponse.get_fake_response(result, url) |
| 63 return None | 63 return None |
| 64 self.fail('Unknown request %s' % url) | 64 self.fail('Unknown request %s' % url) |
| 65 | 65 |
| 66 | 66 |
| 67 class TestZipCompression(TestCase): |
| 68 """Test zip_compress and zip_decompress generators.""" |
| 69 |
| 70 def test_compress_and_decompress(self): |
| 71 """Test data === decompress(compress(data)).""" |
| 72 original = [str(x) for x in xrange(0, 1000)] |
| 73 processed = isolateserver.zip_decompress( |
| 74 isolateserver.zip_compress(original)) |
| 75 self.assertEqual(''.join(original), ''.join(processed)) |
| 76 |
| 77 def test_zip_bomb(self): |
| 78 """Verify zip_decompress always returns small chunks.""" |
| 79 original = '\x00' * 100000 |
| 80 bomb = ''.join(isolateserver.zip_compress(original)) |
| 81 decompressed = [] |
| 82 chunk_size = 1000 |
| 83 for chunk in isolateserver.zip_decompress([bomb], chunk_size): |
| 84 self.assertLessEqual(len(chunk), chunk_size) |
| 85 decompressed.append(chunk) |
| 86 self.assertEqual(original, ''.join(decompressed)) |
| 87 |
| 88 def test_bad_zip_file(self): |
| 89 """Verify decompressing broken file raises IOError.""" |
| 90 with self.assertRaises(IOError): |
| 91 ''.join(isolateserver.zip_decompress(['Im not a zip file'])) |
| 92 |
| 93 |
| 94 class FakeItem(isolateserver.Item): |
| 95 def __init__(self, data, is_isolated=False): |
| 96 super(FakeItem, self).__init__( |
| 97 ALGO(data).hexdigest(), len(data), is_isolated) |
| 98 self.data = data |
| 99 |
| 100 def content(self, _chunk_size): |
| 101 return [self.data] |
| 102 |
| 103 @property |
| 104 def zipped(self): |
| 105 return zlib.compress(self.data, self.compression_level) |
| 106 |
| 107 |
| 67 class StorageTest(TestCase): | 108 class StorageTest(TestCase): |
| 68 """Tests for Storage methods.""" | 109 """Tests for Storage methods.""" |
| 69 | 110 |
| 70 @staticmethod | 111 @staticmethod |
| 71 def mock_push(side_effect=None): | 112 def mock_push(side_effect=None): |
| 72 """Returns StorageApi subclass with mocked 'push' method.""" | 113 """Returns StorageApi subclass with mocked 'push' method.""" |
| 73 class MockedStorageApi(isolateserver.StorageApi): | 114 class MockedStorageApi(isolateserver.StorageApi): |
| 74 def __init__(self): | 115 def __init__(self): |
| 75 self.pushed = [] | 116 self.pushed = [] |
| 76 def push(self, item, expected_size, content_generator, push_urls=None): | 117 def push(self, item, content, size): |
| 77 self.pushed.append( | 118 self.pushed.append((item, ''.join(content), size)) |
| 78 (item, expected_size, ''.join(content_generator), push_urls)) | |
| 79 if side_effect: | 119 if side_effect: |
| 80 side_effect() | 120 side_effect() |
| 81 return MockedStorageApi() | 121 return MockedStorageApi() |
| 82 | 122 |
| 83 def test_batch_files_for_check(self): | 123 def test_batch_items_for_check(self): |
| 84 items = { | 124 items = [ |
| 85 'foo': {'s': 12}, | 125 isolateserver.Item('foo', 12), |
| 86 'bar': {}, | 126 isolateserver.Item('blow', 0), |
| 87 'blow': {'s': 0}, | 127 isolateserver.Item('bizz', 1222), |
| 88 'bizz': {'s': 1222}, | 128 isolateserver.Item('buzz', 1223), |
| 89 'buzz': {'s': 1223}, | 129 ] |
| 90 } | |
| 91 expected = [ | 130 expected = [ |
| 92 [ | 131 [items[3], items[2], items[0], items[1]], |
| 93 ('buzz', {'s': 1223}), | |
| 94 ('bizz', {'s': 1222}), | |
| 95 ('foo', {'s': 12}), | |
| 96 ('blow', {'s': 0}), | |
| 97 ], | |
| 98 ] | 132 ] |
| 99 batches = list(isolateserver.Storage.batch_files_for_check(items)) | 133 batches = list(isolateserver.Storage.batch_items_for_check(items)) |
| 100 self.assertEqual(batches, expected) | 134 self.assertEqual(batches, expected) |
| 101 | 135 |
| 102 def test_get_missing_files(self): | 136 def test_get_missing_items(self): |
| 103 items = { | 137 items = [ |
| 104 'foo': {'s': 12}, | 138 isolateserver.Item('foo', 12), |
| 105 'bar': {}, | 139 isolateserver.Item('blow', 0), |
| 106 'blow': {'s': 0}, | 140 isolateserver.Item('bizz', 1222), |
| 107 'bizz': {'s': 1222}, | 141 isolateserver.Item('buzz', 1223), |
| 108 'buzz': {'s': 1223}, | 142 ] |
| 109 } | 143 missing = [ |
| 110 missing = { | 144 [items[2], items[3]], |
| 111 'bizz': {'s': 1222}, | 145 ] |
| 112 'buzz': {'s': 1223}, | |
| 113 } | |
| 114 fake_upload_urls = ('a', 'b') | |
| 115 | 146 |
| 116 class MockedStorageApi(isolateserver.StorageApi): | 147 class MockedStorageApi(isolateserver.StorageApi): |
| 117 def contains(self, files): | 148 def contains(self, _items): |
| 118 return [f + (fake_upload_urls,) for f in files if f[0] in missing] | 149 return missing |
| 119 storage = isolateserver.Storage(MockedStorageApi(), use_zip=False) | 150 storage = isolateserver.Storage(MockedStorageApi(), use_zip=False) |
| 120 | 151 |
| 121 # 'get_missing_files' is a generator, materialize its result in a list. | 152 # 'get_missing_items' is a generator, materialize its result in a list. |
| 122 result = list(storage.get_missing_files(items)) | 153 result = list(storage.get_missing_items(items)) |
| 123 | 154 self.assertEqual(missing, result) |
| 124 # Ensure it's a list of triplets. | |
| 125 self.assertTrue(all(len(x) == 3 for x in result)) | |
| 126 # Verify upload urls are set. | |
| 127 self.assertTrue(all(x[2] == fake_upload_urls for x in result)) | |
| 128 # 'get_missing_files' doesn't guarantee order of its results, so convert | |
| 129 # it to unordered dict and compare dicts. | |
| 130 self.assertEqual(dict(x[:2] for x in result), missing) | |
| 131 | 155 |
| 132 def test_async_push(self): | 156 def test_async_push(self): |
| 133 data_to_push = '1234567' | |
| 134 digest = ALGO(data_to_push).hexdigest() | |
| 135 compression_level = 5 | |
| 136 zipped = zlib.compress(data_to_push, compression_level) | |
| 137 push_urls = ('fake1', 'fake2') | |
| 138 | |
| 139 for use_zip in (False, True): | 157 for use_zip in (False, True): |
| 158 item = FakeItem('1234567') |
| 140 storage_api = self.mock_push() | 159 storage_api = self.mock_push() |
| 141 storage = isolateserver.Storage(storage_api, use_zip) | 160 storage = isolateserver.Storage(storage_api, use_zip) |
| 142 channel = threading_utils.TaskChannel() | 161 channel = threading_utils.TaskChannel() |
| 143 storage.async_push( | 162 storage.async_push(channel, 0, item) |
| 144 channel, 0, digest, len(data_to_push), [data_to_push], | |
| 145 compression_level, push_urls) | |
| 146 # Wait for push to finish. | 163 # Wait for push to finish. |
| 147 pushed_item = channel.pull() | 164 pushed_item = channel.pull() |
| 148 self.assertEqual(digest, pushed_item) | 165 self.assertEqual(item, pushed_item) |
| 149 # StorageApi.push was called with correct arguments. | 166 # StorageApi.push was called with correct arguments. |
| 150 if use_zip: | 167 if use_zip: |
| 151 expected_data = zipped | 168 expected_data = item.zipped |
| 152 expected_size = isolateserver.UNKNOWN_FILE_SIZE | 169 expected_size = isolateserver.UNKNOWN_FILE_SIZE |
| 153 else: | 170 else: |
| 154 expected_data = data_to_push | 171 expected_data = item.data |
| 155 expected_size = len(data_to_push) | 172 expected_size = len(item.data) |
| 156 self.assertEqual( | 173 self.assertEqual( |
| 157 [(digest, expected_size, expected_data, push_urls)], | 174 [(item, expected_data, expected_size)], |
| 158 storage_api.pushed) | 175 storage_api.pushed) |
| 159 | 176 |
| 160 def test_async_push_generator_errors(self): | 177 def test_async_push_generator_errors(self): |
| 161 class FakeException(Exception): | 178 class FakeException(Exception): |
| 162 pass | 179 pass |
| 163 | 180 |
| 164 def faulty_generator(): | 181 def faulty_generator(_chunk_size): |
| 165 yield 'Hi!' | 182 yield 'Hi!' |
| 166 raise FakeException('fake exception') | 183 raise FakeException('fake exception') |
| 167 | 184 |
| 168 for use_zip in (False, True): | 185 for use_zip in (False, True): |
| 186 item = FakeItem('') |
| 187 self.mock(item, 'content', faulty_generator) |
| 169 storage_api = self.mock_push() | 188 storage_api = self.mock_push() |
| 170 storage = isolateserver.Storage(storage_api, use_zip) | 189 storage = isolateserver.Storage(storage_api, use_zip) |
| 171 channel = threading_utils.TaskChannel() | 190 channel = threading_utils.TaskChannel() |
| 172 storage.async_push( | 191 storage.async_push(channel, 0, item) |
| 173 channel, 0, 'item', isolateserver.UNKNOWN_FILE_SIZE, | |
| 174 faulty_generator(), 0, None) | |
| 175 with self.assertRaises(FakeException): | 192 with self.assertRaises(FakeException): |
| 176 channel.pull() | 193 channel.pull() |
| 177 # StorageApi's push should never complete when data can not be read. | 194 # StorageApi's push should never complete when data can not be read. |
| 178 self.assertEqual(0, len(storage_api.pushed)) | 195 self.assertEqual(0, len(storage_api.pushed)) |
| 179 | 196 |
| 180 def test_async_push_upload_errors(self): | 197 def test_async_push_upload_errors(self): |
| 181 chunk = 'data_chunk' | 198 chunk = 'data_chunk' |
| 182 compression_level = 5 | |
| 183 zipped = zlib.compress(chunk, compression_level) | |
| 184 | 199 |
| 185 def _generator(): | 200 def _generator(_chunk_size): |
| 186 yield chunk | 201 yield chunk |
| 187 | 202 |
| 188 def push_side_effect(): | 203 def push_side_effect(): |
| 189 raise IOError('Nope') | 204 raise IOError('Nope') |
| 190 | 205 |
| 191 # TODO(vadimsh): Retrying push when fetching data from a generator is | 206 # TODO(vadimsh): Retrying push when fetching data from a generator is |
| 192 # broken now (it reuses same generator instance when retrying). | 207 # broken now (it reuses same generator instance when retrying). |
| 193 content_sources = ( | 208 content_sources = ( |
| 194 # generator(), | 209 # generator(), |
| 195 [chunk], | 210 lambda _chunk_size: [chunk], |
| 196 ) | 211 ) |
| 197 | 212 |
| 198 for use_zip in (False, True): | 213 for use_zip in (False, True): |
| 199 for source in content_sources: | 214 for source in content_sources: |
| 215 item = FakeItem(chunk) |
| 216 self.mock(item, 'content', source) |
| 200 storage_api = self.mock_push(push_side_effect) | 217 storage_api = self.mock_push(push_side_effect) |
| 201 storage = isolateserver.Storage(storage_api, use_zip) | 218 storage = isolateserver.Storage(storage_api, use_zip) |
| 202 channel = threading_utils.TaskChannel() | 219 channel = threading_utils.TaskChannel() |
| 203 storage.async_push( | 220 storage.async_push(channel, 0, item) |
| 204 channel, 0, 'item', isolateserver.UNKNOWN_FILE_SIZE, | |
| 205 source, compression_level, None) | |
| 206 with self.assertRaises(IOError): | 221 with self.assertRaises(IOError): |
| 207 channel.pull() | 222 channel.pull() |
| 208 # First initial attempt + all retries. | 223 # First initial attempt + all retries. |
| 209 attempts = 1 + isolateserver.WorkerPool.RETRIES | 224 attempts = 1 + isolateserver.WorkerPool.RETRIES |
| 210 # Single push attempt parameters. | 225 # Single push attempt parameters. |
| 211 expected_push = ( | 226 expected_push = ( |
| 212 'item', isolateserver.UNKNOWN_FILE_SIZE, | 227 item, |
| 213 zipped if use_zip else chunk, None) | 228 item.zipped if use_zip else item.data, |
| 229 isolateserver.UNKNOWN_FILE_SIZE if use_zip else item.size) |
| 214 # Ensure all pushes are attempted. | 230 # Ensure all pushes are attempted. |
| 215 self.assertEqual( | 231 self.assertEqual( |
| 216 [expected_push] * attempts, storage_api.pushed) | 232 [expected_push] * attempts, storage_api.pushed) |
| 217 | 233 |
| 218 def test_upload_tree(self): | 234 def test_upload_tree(self): |
| 219 root = 'root' | 235 root = 'root' |
| 220 files = { | 236 files = { |
| 221 'a': { | 237 'a': { |
| 222 's': 100, | 238 's': 100, |
| 223 'h': 'hash_a', | 239 'h': 'hash_a', |
| 224 }, | 240 }, |
| 225 'b': { | 241 'b': { |
| 226 's': 200, | 242 's': 200, |
| 227 'h': 'hash_b', | 243 'h': 'hash_b', |
| 228 }, | 244 }, |
| 229 'c': { | 245 'c': { |
| 230 's': 300, | 246 's': 300, |
| 231 'h': 'hash_c', | 247 'h': 'hash_c', |
| 232 }, | 248 }, |
| 233 } | 249 } |
| 234 push_urls = { | 250 files_data = dict((k, 'x' * files[k]['s']) for k in files) |
| 235 'a': ('upload_a', 'finalize_a'), | |
| 236 'b': ('upload_b', None), | |
| 237 'c': ('upload_c', None), | |
| 238 } | |
| 239 files_data = dict((k, k * files[k]['s']) for k in files) | |
| 240 missing = set(['a', 'b']) | 251 missing = set(['a', 'b']) |
| 241 | 252 |
| 242 # Files read by mocked_file_read. | 253 # Files read by mocked_file_read. |
| 243 read_calls = [] | 254 read_calls = [] |
| 244 # 'contains' calls. | 255 # 'contains' calls. |
| 245 contains_calls = [] | 256 contains_calls = [] |
| 246 # 'push' calls. | 257 # 'push' calls. |
| 247 push_calls = [] | 258 push_calls = [] |
| 248 | 259 |
| 249 def mocked_file_read(filepath, _chunk_size=0): | 260 def mocked_file_read(filepath, _chunk_size=0): |
| 250 self.assertEqual(root, os.path.dirname(filepath)) | 261 self.assertEqual(root, os.path.dirname(filepath)) |
| 251 filename = os.path.basename(filepath) | 262 filename = os.path.basename(filepath) |
| 252 self.assertIn(filename, files_data) | 263 self.assertIn(filename, files_data) |
| 253 read_calls.append(filename) | 264 read_calls.append(filename) |
| 254 return files_data[filename] | 265 return files_data[filename] |
| 255 self.mock(isolateserver, 'file_read', mocked_file_read) | 266 self.mock(isolateserver, 'file_read', mocked_file_read) |
| 256 | 267 |
| 257 class MockedStorageApi(isolateserver.StorageApi): | 268 class MockedStorageApi(isolateserver.StorageApi): |
| 258 def contains(self, files): | 269 def contains(self, items): |
| 259 contains_calls.append(files) | 270 contains_calls.append(items) |
| 260 return [f + (push_urls[f[0]],) for f in files if f[0] in missing] | 271 return [i for i in items if os.path.basename(i.path) in missing] |
| 261 | 272 |
| 262 def push(self, item, expected_size, content_generator, push_urls=None): | 273 def push(self, item, content, size): |
| 263 push_calls.append( | 274 push_calls.append((item, ''.join(content), size)) |
| 264 (item, expected_size, ''.join(content_generator), push_urls)) | |
| 265 | 275 |
| 266 storage_api = MockedStorageApi() | 276 storage_api = MockedStorageApi() |
| 267 storage = isolateserver.Storage(storage_api, use_zip=False) | 277 storage = isolateserver.Storage(storage_api, use_zip=False) |
| 268 storage.upload_tree(root, files) | 278 storage.upload_tree(root, files) |
| 269 | 279 |
| 270 # Was reading only missing files. | 280 # Was reading only missing files. |
| 271 self.assertEqual(missing, set(read_calls)) | 281 self.assertEqual(missing, set(read_calls)) |
| 272 # 'contains' checked for existence of all files. | 282 # 'contains' checked for existence of all files. |
| 273 self.assertEqual(files, dict(sum(contains_calls, []))) | 283 self.assertEqual( |
| 284 set(f['h'] for f in files.itervalues()), |
| 285 set(i.digest for i in sum(contains_calls, []))) |
| 274 # Pushed only missing files. | 286 # Pushed only missing files. |
| 275 self.assertEqual( | 287 self.assertEqual( |
| 276 set(files[name]['h'] for name in missing), | 288 set(files[name]['h'] for name in missing), |
| 277 set(call[0] for call in push_calls)) | 289 set(call[0].digest for call in push_calls)) |
| 278 # Pushing with correct data, size and push urls. | 290 # Pushing with correct data, size and push urls. |
| 279 for push_call in push_calls: | 291 for pushed_item, pushed_content, pushed_size in push_calls: |
| 280 digest = push_call[0] | |
| 281 filenames = [ | 292 filenames = [ |
| 282 name for name, metadata in files.iteritems() | 293 name for name, metadata in files.iteritems() |
| 283 if metadata['h'] == digest | 294 if metadata['h'] == pushed_item.digest |
| 284 ] | 295 ] |
| 285 self.assertEqual(1, len(filenames)) | 296 self.assertEqual(1, len(filenames)) |
| 286 filename = filenames[0] | 297 filename = filenames[0] |
| 287 data = files_data[filename] | 298 self.assertEqual(os.path.join(root, filename), pushed_item.path) |
| 288 self.assertEqual( | 299 self.assertEqual(files_data[filename], pushed_content) |
| 289 (digest, len(data), data, push_urls[filename]), | 300 self.assertEqual(len(files_data[filename]), pushed_size) |
| 290 push_call) | 301 |
| 291 | 302 |
| 292 | 303 class IsolateServerStorageApiTest(TestCase): |
| 293 class IsolateServerArchiveTest(TestCase): | 304 @staticmethod |
| 294 def setUp(self): | 305 def mock_handshake_request(server, token='fake token', error=None): |
| 295 super(IsolateServerArchiveTest, self).setUp() | 306 handshake_request = { |
| 296 self.mock(isolateserver, 'randomness', lambda: 'not_really_random') | 307 'client_app_version': isolateserver.__version__, |
| 297 self.mock(sys, 'stdout', StringIO.StringIO()) | 308 'fetcher': True, |
| 298 | 309 'protocol_version': isolateserver.ISOLATE_PROTOCOL_VERSION, |
| 299 def test_present(self): | 310 'pusher': True, |
| 311 } |
| 312 handshake_response = { |
| 313 'access_token': token, |
| 314 'error': error, |
| 315 'protocol_version': isolateserver.ISOLATE_PROTOCOL_VERSION, |
| 316 'server_app_version': 'mocked server T1000', |
| 317 } |
| 318 return ( |
| 319 server + '/content-gs/handshake', |
| 320 { |
| 321 'content_type': 'application/json', |
| 322 'method': 'POST', |
| 323 'data': json.dumps(handshake_request, separators=(',', ':')), |
| 324 }, |
| 325 json.dumps(handshake_response), |
| 326 ) |
| 327 |
| 328 @staticmethod |
| 329 def mock_fetch_request(server, namespace, item, data): |
| 330 return ( |
| 331 server + '/content-gs/retrieve/%s/%s' % (namespace, item), |
| 332 {'retry_404': True, 'read_timeout': 60}, |
| 333 data, |
| 334 ) |
| 335 |
| 336 @staticmethod |
| 337 def mock_contains_request(server, namespace, token, request, response): |
| 338 url = server + '/content-gs/pre-upload/%s?token=%s' % ( |
| 339 namespace, urllib.quote(token)) |
| 340 return ( |
| 341 url, |
| 342 { |
| 343 'data': json.dumps(request, separators=(',', ':')), |
| 344 'content_type': 'application/json', |
| 345 'method': 'POST', |
| 346 }, |
| 347 json.dumps(response), |
| 348 ) |
| 349 |
| 350 def test_server_capabilities_success(self): |
| 351 server = 'http://example.com' |
| 352 namespace = 'default' |
| 353 access_token = 'fake token' |
| 354 self._requests = [ |
| 355 self.mock_handshake_request(server, access_token), |
| 356 ] |
| 357 storage = isolateserver.IsolateServer(server, namespace) |
| 358 caps = storage._server_capabilities |
| 359 self.assertEqual(access_token, caps['access_token']) |
| 360 |
| 361 def test_server_capabilities_network_failure(self): |
| 362 self.mock(isolateserver.net, 'url_open', lambda *_args, **_kwargs: None) |
| 363 with self.assertRaises(isolateserver.MappingError): |
| 364 storage = isolateserver.IsolateServer('http://example.com', 'default') |
| 365 _ = storage._server_capabilities |
| 366 |
| 367 def test_server_capabilities_format_failure(self): |
| 368 server = 'http://example.com' |
| 369 namespace = 'default' |
| 370 handshake_req = self.mock_handshake_request(server) |
| 371 self._requests = [ |
| 372 (handshake_req[0], handshake_req[1], 'Im a bad response'), |
| 373 ] |
| 374 storage = isolateserver.IsolateServer(server, namespace) |
| 375 with self.assertRaises(isolateserver.MappingError): |
| 376 _ = storage._server_capabilities |
| 377 |
| 378 def test_server_capabilities_respects_error(self): |
| 379 server = 'http://example.com' |
| 380 namespace = 'default' |
| 381 error = 'Im sorry, Dave. Im afraid I cant do that.' |
| 382 self._requests = [ |
| 383 self.mock_handshake_request(server, error=error) |
| 384 ] |
| 385 storage = isolateserver.IsolateServer(server, namespace) |
| 386 with self.assertRaises(isolateserver.MappingError) as context: |
| 387 _ = storage._server_capabilities |
| 388 # Server error message should be reported to user. |
| 389 self.assertIn(error, str(context.exception)) |
| 390 |
| 391 def test_fetch_success_default(self): |
| 392 server = 'http://example.com' |
| 393 namespace = 'default' |
| 394 data = ''.join(str(x) for x in xrange(1000)) |
| 395 item = ALGO(data).hexdigest() |
| 396 self._requests = [ |
| 397 self.mock_fetch_request(server, namespace, item, data), |
| 398 ] |
| 399 storage = isolateserver.IsolateServer(server, namespace) |
| 400 fetched = ''.join(storage.fetch(item, len(data))) |
| 401 self.assertEqual(data, fetched) |
| 402 |
| 403 def test_fetch_success_default_gzip(self): |
| 404 server = 'http://example.com' |
| 405 namespace = 'default-gzip' |
| 406 data = ''.join(str(x) for x in xrange(1000)) |
| 407 item = ALGO(data).hexdigest() |
| 408 self._requests = [ |
| 409 self.mock_fetch_request(server, namespace, item, zlib.compress(data)), |
| 410 ] |
| 411 storage = isolateserver.IsolateServer(server, namespace) |
| 412 fetched = ''.join(storage.fetch(item, len(data))) |
| 413 self.assertEqual(data, fetched) |
| 414 |
| 415 def test_fetch_failure_missing(self): |
| 416 server = 'http://example.com' |
| 417 namespace = 'default' |
| 418 item = ALGO('something').hexdigest() |
| 419 self._requests = [ |
| 420 self.mock_fetch_request(server, namespace, item, None), |
| 421 ] |
| 422 storage = isolateserver.IsolateServer(server, namespace) |
| 423 with self.assertRaises(IOError): |
| 424 _ = ''.join(storage.fetch(item, isolateserver.UNKNOWN_FILE_SIZE)) |
| 425 |
| 426 def test_fetch_failure_bad_size(self): |
| 427 server = 'http://example.com' |
| 428 namespace = 'default' |
| 429 data = ''.join(str(x) for x in xrange(1000)) |
| 430 expected_size = len(data) |
| 431 item = ALGO(data).hexdigest() |
| 432 self._requests = [ |
| 433 self.mock_fetch_request(server, namespace, item, data[:100]), |
| 434 ] |
| 435 storage = isolateserver.IsolateServer(server, namespace) |
| 436 with self.assertRaises(IOError): |
| 437 _ = ''.join(storage.fetch(item, expected_size)) |
| 438 |
| 439 def test_fetch_failure_bad_zip(self): |
| 440 server = 'http://example.com' |
| 441 namespace = 'default-gzip' |
| 442 item = ALGO('something').hexdigest() |
| 443 self._requests = [ |
| 444 self.mock_fetch_request(server, namespace, item, 'Im not a zip'), |
| 445 ] |
| 446 storage = isolateserver.IsolateServer(server, namespace) |
| 447 with self.assertRaises(IOError): |
| 448 _ = ''.join(storage.fetch(item, isolateserver.UNKNOWN_FILE_SIZE)) |
| 449 |
| 450 def test_push_success(self): |
| 451 server = 'http://example.com' |
| 452 namespace = 'default' |
| 453 token = 'fake token' |
| 454 data = ''.join(str(x) for x in xrange(1000)) |
| 455 item = FakeItem(data) |
| 456 push_urls = (server + '/push_here', server + '/call_this') |
| 457 contains_request = [{'h': item.digest, 's': item.size, 'i': 0}] |
| 458 contains_response = [push_urls] |
| 459 self._requests = [ |
| 460 self.mock_handshake_request(server, token), |
| 461 self.mock_contains_request( |
| 462 server, namespace, token, contains_request, contains_response), |
| 463 ( |
| 464 push_urls[0], |
| 465 { |
| 466 'data': data, |
| 467 'content_type': 'application/octet-stream', |
| 468 'method': 'PUT', |
| 469 }, |
| 470 '' |
| 471 ), |
| 472 ( |
| 473 push_urls[1], |
| 474 { |
| 475 'data': '', |
| 476 'content_type': 'application/json', |
| 477 'method': 'POST', |
| 478 }, |
| 479 '' |
| 480 ), |
| 481 ] |
| 482 storage = isolateserver.IsolateServer(server, namespace) |
| 483 missing = storage.contains([item]) |
| 484 self.assertEqual([item], missing) |
| 485 storage.push(item, [data], len(data)) |
| 486 self.assertTrue(item.push_state.uploaded) |
| 487 self.assertTrue(item.push_state.finalized) |
| 488 |
| 489 def test_push_failure_upload(self): |
| 490 server = 'http://example.com' |
| 491 namespace = 'default' |
| 492 token = 'fake token' |
| 493 data = ''.join(str(x) for x in xrange(1000)) |
| 494 item = FakeItem(data) |
| 495 push_urls = (server + '/push_here', server + '/call_this') |
| 496 contains_request = [{'h': item.digest, 's': item.size, 'i': 0}] |
| 497 contains_response = [push_urls] |
| 498 self._requests = [ |
| 499 self.mock_handshake_request(server, token), |
| 500 self.mock_contains_request( |
| 501 server, namespace, token, contains_request, contains_response), |
| 502 ( |
| 503 push_urls[0], |
| 504 { |
| 505 'data': data, |
| 506 'content_type': 'application/octet-stream', |
| 507 'method': 'PUT', |
| 508 }, |
| 509 None |
| 510 ), |
| 511 ] |
| 512 storage = isolateserver.IsolateServer(server, namespace) |
| 513 missing = storage.contains([item]) |
| 514 self.assertEqual([item], missing) |
| 515 with self.assertRaises(IOError): |
| 516 storage.push(item, [data], len(data)) |
| 517 self.assertFalse(item.push_state.uploaded) |
| 518 self.assertFalse(item.push_state.finalized) |
| 519 |
| 520 def test_push_failure_finalize(self): |
| 521 server = 'http://example.com' |
| 522 namespace = 'default' |
| 523 token = 'fake token' |
| 524 data = ''.join(str(x) for x in xrange(1000)) |
| 525 item = FakeItem(data) |
| 526 push_urls = (server + '/push_here', server + '/call_this') |
| 527 contains_request = [{'h': item.digest, 's': item.size, 'i': 0}] |
| 528 contains_response = [push_urls] |
| 529 self._requests = [ |
| 530 self.mock_handshake_request(server, token), |
| 531 self.mock_contains_request( |
| 532 server, namespace, token, contains_request, contains_response), |
| 533 ( |
| 534 push_urls[0], |
| 535 { |
| 536 'data': data, |
| 537 'content_type': 'application/octet-stream', |
| 538 'method': 'PUT', |
| 539 }, |
| 540 '' |
| 541 ), |
| 542 ( |
| 543 push_urls[1], |
| 544 { |
| 545 'data': '', |
| 546 'content_type': 'application/json', |
| 547 'method': 'POST', |
| 548 }, |
| 549 None |
| 550 ), |
| 551 ] |
| 552 storage = isolateserver.IsolateServer(server, namespace) |
| 553 missing = storage.contains([item]) |
| 554 self.assertEqual([item], missing) |
| 555 with self.assertRaises(IOError): |
| 556 storage.push(item, [data], len(data)) |
| 557 self.assertTrue(item.push_state.uploaded) |
| 558 self.assertFalse(item.push_state.finalized) |
| 559 |
| 560 def test_contains_success(self): |
| 561 server = 'http://example.com' |
| 562 namespace = 'default' |
| 563 token = 'fake token' |
| 300 files = [ | 564 files = [ |
| 301 os.path.join(BASE_PATH, 'isolateserver', f) | 565 FakeItem('1', is_isolated=True), |
| 302 for f in ('small_file.txt', 'empty_file.txt') | 566 FakeItem('2' * 100), |
| 303 ] | 567 FakeItem('3' * 200), |
| 304 hash_encoded = ''.join( | 568 ] |
| 305 binascii.unhexlify(isolateserver.hash_file(f, ALGO)) for f in files) | 569 request = [ |
| 306 path = 'http://random/' | 570 {'h': files[0].digest, 's': files[0].size, 'i': 1}, |
| 307 self._requests = [ | 571 {'h': files[1].digest, 's': files[1].size, 'i': 0}, |
| 308 (path + 'content/get_token', {}, 'foo bar'), | 572 {'h': files[2].digest, 's': files[2].size, 'i': 0}, |
| 309 ( | 573 ] |
| 310 path + 'content/contains/default-gzip?token=foo%20bar', | 574 response = [ |
| 311 {'data': hash_encoded, 'content_type': 'application/octet-stream'}, | 575 None, |
| 312 '\1\1', | 576 ['http://example/upload_here_1', None], |
| 313 ), | 577 ['http://example/upload_here_2', 'http://example/call_this'], |
| 314 ] | 578 ] |
| 315 result = isolateserver.main(['archive', '--isolate-server', path] + files) | 579 missing = [ |
| 316 self.assertEqual(0, result) | 580 files[1], |
| 317 | 581 files[2], |
| 318 def test_missing(self): | 582 ] |
| 319 files = [ | 583 self._requests = [ |
| 320 os.path.join(BASE_PATH, 'isolateserver', f) | 584 self.mock_handshake_request(server, token), |
| 321 for f in ('small_file.txt', 'empty_file.txt') | 585 self.mock_contains_request(server, namespace, token, request, response), |
| 322 ] | 586 ] |
| 323 hashes = [isolateserver.hash_file(f, ALGO) for f in files] | 587 storage = isolateserver.IsolateServer(server, namespace) |
| 324 hash_encoded = ''.join(map(binascii.unhexlify, hashes)) | 588 result = storage.contains(files) |
| 325 compressed = [ | 589 self.assertEqual(missing, result) |
| 326 zlib.compress( | 590 self.assertEqual( |
| 327 open(f, 'rb').read(), | 591 [x for x in response if x], |
| 328 isolateserver.get_zip_compression_level(f)) | 592 [[i.push_state.upload_url, i.push_state.finalize_url] for i in missing]) |
| 329 for f in files | 593 |
| 330 ] | 594 def test_contains_network_failure(self): |
| 331 path = 'http://random/' | 595 server = 'http://example.com' |
| 332 self._requests = [ | 596 namespace = 'default' |
| 333 (path + 'content/get_token', {}, 'foo bar'), | 597 token = 'fake token' |
| 334 ( | 598 req = self.mock_contains_request(server, namespace, token, [], []) |
| 335 path + 'content/contains/default-gzip?token=foo%20bar', | 599 self._requests = [ |
| 336 {'data': hash_encoded, 'content_type': 'application/octet-stream'}, | 600 self.mock_handshake_request(server, token), |
| 337 '\0\0', | 601 (req[0], req[1], None), |
| 338 ), | 602 ] |
| 339 ( | 603 storage = isolateserver.IsolateServer(server, namespace) |
| 340 path + 'content/store/default-gzip/%s?token=foo%%20bar' % hashes[0], | 604 with self.assertRaises(isolateserver.MappingError): |
| 341 {'data': compressed[0], 'content_type': 'application/octet-stream'}, | 605 storage.contains([]) |
| 342 'ok', | 606 |
| 343 ), | 607 def test_contains_format_failure(self): |
| 344 ( | 608 server = 'http://example.com' |
| 345 path + 'content/store/default-gzip/%s?token=foo%%20bar' % hashes[1], | 609 namespace = 'default' |
| 346 {'data': compressed[1], 'content_type': 'application/octet-stream'}, | 610 token = 'fake token' |
| 347 'ok', | 611 self._requests = [ |
| 348 ), | 612 self.mock_handshake_request(server, token), |
| 349 ] | 613 self.mock_contains_request(server, namespace, token, [], [1, 2, 3]) |
| 350 result = isolateserver.main(['archive', '--isolate-server', path] + files) | 614 ] |
| 351 self.assertEqual(0, result) | 615 storage = isolateserver.IsolateServer(server, namespace) |
| 352 | 616 with self.assertRaises(isolateserver.MappingError): |
| 353 def test_large(self): | 617 storage.contains([]) |
| 354 content = '' | |
| 355 compressed = '' | |
| 356 while ( | |
| 357 len(compressed) <= isolateserver.MIN_SIZE_FOR_DIRECT_BLOBSTORE): | |
| 358 # The goal here is to generate a file, once compressed, is at least | |
| 359 # MIN_SIZE_FOR_DIRECT_BLOBSTORE. | |
| 360 content += ''.join(chr(random.randint(0, 255)) for _ in xrange(20*1024)) | |
| 361 compressed = zlib.compress( | |
| 362 content, isolateserver.get_zip_compression_level('foo.txt')) | |
| 363 | |
| 364 s = ALGO(content).hexdigest() | |
| 365 infiles = { | |
| 366 'foo.txt': { | |
| 367 's': len(content), | |
| 368 'h': s, | |
| 369 }, | |
| 370 } | |
| 371 path = 'http://random/' | |
| 372 hash_encoded = binascii.unhexlify(s) | |
| 373 content_type, body = isolateserver.encode_multipart_formdata( | |
| 374 [('token', 'foo bar')], [('content', s, compressed)]) | |
| 375 | |
| 376 self._requests = [ | |
| 377 (path + 'content/get_token', {}, 'foo bar'), | |
| 378 ( | |
| 379 path + 'content/contains/default-gzip?token=foo%20bar', | |
| 380 {'data': hash_encoded, 'content_type': 'application/octet-stream'}, | |
| 381 '\0', | |
| 382 ), | |
| 383 ( | |
| 384 path + 'content/generate_blobstore_url/default-gzip/%s' % s, | |
| 385 {'data': [('token', 'foo bar')]}, | |
| 386 'an_url/', | |
| 387 ), | |
| 388 ( | |
| 389 'an_url/', | |
| 390 {'data': body, 'content_type': content_type, 'retry_50x': False}, | |
| 391 'ok', | |
| 392 ), | |
| 393 ] | |
| 394 | |
| 395 # Setup mocks for zip_compress to return |compressed|. | |
| 396 self.mock(isolateserver, 'file_read', lambda *_: None) | |
| 397 self.mock(isolateserver, 'zip_compress', lambda *_: [compressed]) | |
| 398 result = isolateserver.upload_tree( | |
| 399 base_url=path, | |
| 400 indir=os.getcwd(), | |
| 401 infiles=infiles, | |
| 402 namespace='default-gzip') | |
| 403 | |
| 404 self.assertEqual(0, result) | |
| 405 | |
| 406 def test_upload_blobstore_simple(self): | |
| 407 # A tad over 20kb so it triggers uploading to the blob store. | |
| 408 content = '0123456789' * 21*1024 | |
| 409 s = ALGO(content).hexdigest() | |
| 410 path = 'http://example.com:80/' | |
| 411 data = [('token', 'a_token')] | |
| 412 content_type, body = isolateserver.encode_multipart_formdata( | |
| 413 data, [('content', s, content)]) | |
| 414 self._requests = [ | |
| 415 ( | |
| 416 path + 'content/get_token', | |
| 417 {}, | |
| 418 'a_token', | |
| 419 ), | |
| 420 ( | |
| 421 path + 'content/generate_blobstore_url/x/' + s, | |
| 422 {'data': data[:]}, | |
| 423 'http://example.com/an_url/', | |
| 424 ), | |
| 425 ( | |
| 426 'http://example.com/an_url/', | |
| 427 {'data': body, 'content_type': content_type, 'retry_50x': False}, | |
| 428 'ok42', | |
| 429 ), | |
| 430 ] | |
| 431 # |size| is currently ignored. | |
| 432 result = isolateserver.IsolateServer(path, 'x').push(s, -2, [content]) | |
| 433 self.assertEqual('ok42', result) | |
| 434 | |
| 435 def test_upload_blobstore_retry_500(self): | |
| 436 # A tad over 20kb so it triggers uploading to the blob store. | |
| 437 content = '0123456789' * 21*1024 | |
| 438 s = ALGO(content).hexdigest() | |
| 439 path = 'http://example.com:80/' | |
| 440 data = [('token', 'a_token')] | |
| 441 content_type, body = isolateserver.encode_multipart_formdata( | |
| 442 data, [('content', s, content)]) | |
| 443 self._requests = [ | |
| 444 ( | |
| 445 path + 'content/get_token', | |
| 446 {}, | |
| 447 'a_token', | |
| 448 ), | |
| 449 ( | |
| 450 path + 'content/generate_blobstore_url/x/' + s, | |
| 451 {'data': data[:]}, | |
| 452 'http://example.com/an_url/', | |
| 453 ), | |
| 454 ( | |
| 455 'http://example.com/an_url/', | |
| 456 {'data': body, 'content_type': content_type, 'retry_50x': False}, | |
| 457 # Let's say an HTTP 500 was returned. | |
| 458 None, | |
| 459 ), | |
| 460 # In that case, a new url must be generated since the last one may have | |
| 461 # been "consumed". | |
| 462 ( | |
| 463 path + 'content/generate_blobstore_url/x/' + s, | |
| 464 {'data': data[:]}, | |
| 465 'http://example.com/an_url_2/', | |
| 466 ), | |
| 467 ( | |
| 468 'http://example.com/an_url_2/', | |
| 469 {'data': body, 'content_type': content_type, 'retry_50x': False}, | |
| 470 'ok42', | |
| 471 ), | |
| 472 ] | |
| 473 # |size| is currently ignored. | |
| 474 result = isolateserver.IsolateServer(path, 'x').push(s, -2, [content]) | |
| 475 self.assertEqual('ok42', result) | |
| 476 | 618 |
| 477 | 619 |
| 478 class IsolateServerDownloadTest(TestCase): | 620 class IsolateServerDownloadTest(TestCase): |
| 479 tempdir = None | 621 tempdir = None |
| 480 | 622 |
| 481 def tearDown(self): | 623 def tearDown(self): |
| 482 try: | 624 try: |
| 483 if self.tempdir: | 625 if self.tempdir: |
| 484 shutil.rmtree(self.tempdir) | 626 shutil.rmtree(self.tempdir) |
| 485 finally: | 627 finally: |
| 486 super(IsolateServerDownloadTest, self).tearDown() | 628 super(IsolateServerDownloadTest, self).tearDown() |
| 487 | 629 |
| 488 def test_download_two_files(self): | 630 def test_download_two_files(self): |
| 489 # Test downloading two files. | 631 # Test downloading two files. |
| 490 actual = {} | 632 actual = {} |
| 491 def out(key, generator): | 633 def out(key, generator): |
| 492 actual[key] = ''.join(generator) | 634 actual[key] = ''.join(generator) |
| 493 self.mock(isolateserver, 'file_write', out) | 635 self.mock(isolateserver, 'file_write', out) |
| 494 server = 'http://example.com' | 636 server = 'http://example.com' |
| 495 self._requests = [ | 637 self._requests = [ |
| 496 ( | 638 ( |
| 497 server + '/content/retrieve/default-gzip/sha-1', | 639 server + '/content-gs/retrieve/default-gzip/sha-1', |
| 498 {'read_timeout': 60, 'retry_404': True}, | 640 {'read_timeout': 60, 'retry_404': True}, |
| 499 zlib.compress('Coucou'), | 641 zlib.compress('Coucou'), |
| 500 ), | 642 ), |
| 501 ( | 643 ( |
| 502 server + '/content/retrieve/default-gzip/sha-2', | 644 server + '/content-gs/retrieve/default-gzip/sha-2', |
| 503 {'read_timeout': 60, 'retry_404': True}, | 645 {'read_timeout': 60, 'retry_404': True}, |
| 504 zlib.compress('Bye Bye'), | 646 zlib.compress('Bye Bye'), |
| 505 ), | 647 ), |
| 506 ] | 648 ] |
| 507 cmd = [ | 649 cmd = [ |
| 508 'download', | 650 'download', |
| 509 '--isolate-server', server, | 651 '--isolate-server', server, |
| 510 '--target', ROOT_DIR, | 652 '--target', ROOT_DIR, |
| 511 '--file', 'sha-1', 'path/to/a', | 653 '--file', 'sha-1', 'path/to/a', |
| 512 '--file', 'sha-2', 'path/to/b', | 654 '--file', 'sha-2', 'path/to/b', |
| (...skipping 27 matching lines...) Expand all Loading... |
| 540 'files': dict( | 682 'files': dict( |
| 541 (k, {'h': ALGO(v).hexdigest(), 's': len(v)}) | 683 (k, {'h': ALGO(v).hexdigest(), 's': len(v)}) |
| 542 for k, v in files.iteritems()), | 684 for k, v in files.iteritems()), |
| 543 } | 685 } |
| 544 isolated_data = json.dumps(isolated, sort_keys=True, separators=(',',':')) | 686 isolated_data = json.dumps(isolated, sort_keys=True, separators=(',',':')) |
| 545 isolated_hash = ALGO(isolated_data).hexdigest() | 687 isolated_hash = ALGO(isolated_data).hexdigest() |
| 546 requests = [(v['h'], files[k]) for k, v in isolated['files'].iteritems()] | 688 requests = [(v['h'], files[k]) for k, v in isolated['files'].iteritems()] |
| 547 requests.append((isolated_hash, isolated_data)) | 689 requests.append((isolated_hash, isolated_data)) |
| 548 self._requests = [ | 690 self._requests = [ |
| 549 ( | 691 ( |
| 550 server + '/content/retrieve/default-gzip/' + h, | 692 server + '/content-gs/retrieve/default-gzip/' + h, |
| 551 { | 693 { |
| 552 'read_timeout': isolateserver.DOWNLOAD_READ_TIMEOUT, | 694 'read_timeout': isolateserver.DOWNLOAD_READ_TIMEOUT, |
| 553 'retry_404': True, | 695 'retry_404': True, |
| 554 }, | 696 }, |
| 555 zlib.compress(v), | 697 zlib.compress(v), |
| 556 ) for h, v in requests | 698 ) for h, v in requests |
| 557 ] | 699 ] |
| 558 cmd = [ | 700 cmd = [ |
| 559 'download', | 701 'download', |
| 560 '--isolate-server', server, | 702 '--isolate-server', server, |
| (...skipping 88 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 649 expected = gen_data(os.path.sep) | 791 expected = gen_data(os.path.sep) |
| 650 self.assertEqual(expected, actual) | 792 self.assertEqual(expected, actual) |
| 651 | 793 |
| 652 | 794 |
| 653 if __name__ == '__main__': | 795 if __name__ == '__main__': |
| 654 if '-v' in sys.argv: | 796 if '-v' in sys.argv: |
| 655 unittest.TestCase.maxDiff = None | 797 unittest.TestCase.maxDiff = None |
| 656 logging.basicConfig( | 798 logging.basicConfig( |
| 657 level=(logging.DEBUG if '-v' in sys.argv else logging.ERROR)) | 799 level=(logging.DEBUG if '-v' in sys.argv else logging.ERROR)) |
| 658 unittest.main() | 800 unittest.main() |
| OLD | NEW |