OLD | NEW |
(Empty) | |
| 1 # Copyright 2010 Google Inc. |
| 2 # |
| 3 # Permission is hereby granted, free of charge, to any person obtaining a |
| 4 # copy of this software and associated documentation files (the |
| 5 # "Software"), to deal in the Software without restriction, including |
| 6 # without limitation the rights to use, copy, modify, merge, publish, dis- |
| 7 # tribute, sublicense, and/or sell copies of the Software, and to permit |
| 8 # persons to whom the Software is furnished to do so, subject to the fol- |
| 9 # lowing conditions: |
| 10 # |
| 11 # The above copyright notice and this permission notice shall be included |
| 12 # in all copies or substantial portions of the Software. |
| 13 # |
| 14 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS |
| 15 # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- |
| 16 # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT |
| 17 # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, |
| 18 # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
| 19 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS |
| 20 # IN THE SOFTWARE. |
| 21 |
| 22 """ |
| 23 Provides basic mocks of core storage service classes for unit testing: |
| 24 ACL, Key, Bucket, Connection, and StorageUri. We implement a subset of |
| 25 the interfaces defined in the real boto classes, but don't handle most |
| 26 of the optional params (which we indicate with the constant "NOT_IMPL"). |
| 27 """ |
| 28 |
| 29 import copy |
| 30 import boto |
| 31 import base64 |
| 32 import re |
| 33 |
| 34 from boto.utils import compute_md5 |
| 35 from boto.s3.prefix import Prefix |
| 36 |
| 37 try: |
| 38 from hashlib import md5 |
| 39 except ImportError: |
| 40 from md5 import md5 |
| 41 |
| 42 NOT_IMPL = None |
| 43 |
| 44 |
| 45 class MockAcl(object): |
| 46 |
| 47 def __init__(self, parent=NOT_IMPL): |
| 48 pass |
| 49 |
| 50 def startElement(self, name, attrs, connection): |
| 51 pass |
| 52 |
| 53 def endElement(self, name, value, connection): |
| 54 pass |
| 55 |
| 56 def to_xml(self): |
| 57 return '<mock_ACL_XML/>' |
| 58 |
| 59 |
| 60 class MockKey(object): |
| 61 |
| 62 def __init__(self, bucket=None, name=None): |
| 63 self.bucket = bucket |
| 64 self.name = name |
| 65 self.data = None |
| 66 self.etag = None |
| 67 self.size = None |
| 68 self.closed = True |
| 69 self.content_encoding = None |
| 70 self.content_language = None |
| 71 self.content_type = None |
| 72 self.last_modified = 'Wed, 06 Oct 2010 05:11:54 GMT' |
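| # Chunk size used by set_contents_from_stream; 8192 matches the default |
| # BufferSize of the real boto Key class. |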
| 73 self.BufferSize = 8192 |
| 74 |
| 75 def __repr__(self): |
| 76 if self.bucket: |
| 77 return '<MockKey: %s,%s>' % (self.bucket.name, self.name) |
| 78 else: |
| 79 return '<MockKey: %s>' % self.name |
| 80 |
| 81 def get_contents_as_string(self, headers=NOT_IMPL, |
| 82 cb=NOT_IMPL, num_cb=NOT_IMPL, |
| 83 torrent=NOT_IMPL, |
| 84 version_id=NOT_IMPL): |
| 85 return self.data |
| 86 |
| 87 def get_contents_to_file(self, fp, headers=NOT_IMPL, |
| 88 cb=NOT_IMPL, num_cb=NOT_IMPL, |
| 89 torrent=NOT_IMPL, |
| 90 version_id=NOT_IMPL, |
| 91 res_download_handler=NOT_IMPL): |
| 92 fp.write(self.data) |
| 93 |
| 94 def get_file(self, fp, headers=NOT_IMPL, cb=NOT_IMPL, num_cb=NOT_IMPL, |
| 95 torrent=NOT_IMPL, version_id=NOT_IMPL, |
| 96 override_num_retries=NOT_IMPL): |
| 97 fp.write(self.data) |
| 98 |
| 99 def _handle_headers(self, headers): |
| 100 if not headers: |
| 101 return |
| 102 if 'Content-Encoding' in headers: |
| 103 self.content_encoding = headers['Content-Encoding'] |
| 104 if 'Content-Type' in headers: |
| 105 self.content_type = headers['Content-Type'] |
| 106 if 'Content-Language' in headers: |
| 107 self.content_language = headers['Content-Language'] |
| 108 |
| 109 # Simplistic partial implementation for headers: Just supports range GETs |
| 110 # of flavor 'Range: bytes=xyz-'. |
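| # For example, after set_contents_from_string('0123456789'), calling |
| # open_read(headers={'Range': 'bytes=4-'}) and then read() returns '456789'. |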
| 111 def open_read(self, headers=None, query_args=NOT_IMPL, |
| 112 override_num_retries=NOT_IMPL): |
| 113 if self.closed: |
| 114 self.read_pos = 0 |
| 115 self.closed = False |
| 116 if headers and 'Range' in headers: |
| 117 match = re.match('bytes=([0-9]+)-$', headers['Range']) |
| 118 if match: |
| 119 self.read_pos = int(match.group(1)) |
| 120 |
| 121 def close(self, fast=NOT_IMPL): |
| 122 self.closed = True |
| 123 |
| 124 def read(self, size=0): |
| 125 self.open_read() |
| 126 if size == 0: |
| 127 data = self.data[self.read_pos:] |
| 128 self.read_pos = self.size |
| 129 else: |
| 130 data = self.data[self.read_pos:self.read_pos+size] |
| 131 self.read_pos += size |
| 132 if not data: |
| 133 self.close() |
| 134 return data |
| 135 |
| 136 def set_contents_from_file(self, fp, headers=None, replace=NOT_IMPL, |
| 137 cb=NOT_IMPL, num_cb=NOT_IMPL, |
| 138 policy=NOT_IMPL, md5=NOT_IMPL, |
| 139 res_upload_handler=NOT_IMPL): |
| 140 self.data = fp.read() |
| 141 self.set_etag() |
| 142 self.size = len(self.data) |
| 143 self._handle_headers(headers) |
| 144 |
| 145 def set_contents_from_stream(self, fp, headers=None, replace=NOT_IMPL, |
| 146 cb=NOT_IMPL, num_cb=NOT_IMPL, policy=NOT_IMPL, |
| 147 reduced_redundancy=NOT_IMPL, query_args=NOT_IMPL, |
| 148 size=NOT_IMPL): |
| 149 self.data = '' |
| 150 chunk = fp.read(self.BufferSize) |
| 151 while chunk: |
| 152 self.data += chunk |
| 153 chunk = fp.read(self.BufferSize) |
| 154 self.set_etag() |
| 155 self.size = len(self.data) |
| 156 self._handle_headers(headers) |
| 157 |
| 158 def set_contents_from_string(self, s, headers=NOT_IMPL, replace=NOT_IMPL, |
| 159 cb=NOT_IMPL, num_cb=NOT_IMPL, policy=NOT_IMPL, |
| 160 md5=NOT_IMPL, reduced_redundancy=NOT_IMPL): |
| 161 self.data = copy.copy(s) |
| 162 self.set_etag() |
| 163 self.size = len(s) |
| 164 self._handle_headers(headers) |
| 165 |
| 166 def set_contents_from_filename(self, filename, headers=None, |
| 167 replace=NOT_IMPL, cb=NOT_IMPL, |
| 168 num_cb=NOT_IMPL, policy=NOT_IMPL, |
| 169 md5=NOT_IMPL, res_upload_handler=NOT_IMPL): |
| 170 fp = open(filename, 'rb') |
| 171 self.set_contents_from_file(fp, headers, replace, cb, num_cb, |
| 172 policy, md5, res_upload_handler) |
| 173 fp.close() |
| 174 |
| 175 def copy(self, dst_bucket_name, dst_key, metadata=NOT_IMPL, |
| 176 reduced_redundancy=NOT_IMPL, preserve_acl=NOT_IMPL): |
| 177 dst_bucket = self.bucket.connection.get_bucket(dst_bucket_name) |
| 178 return dst_bucket.copy_key(dst_key, self.bucket.name, |
| 179 self.name, metadata) |
| 180 |
| 181 @property |
| 182 def provider(self): |
| 183 provider = None |
| 184 if self.bucket and self.bucket.connection: |
| 185 provider = self.bucket.connection.provider |
| 186 return provider |
| 187 |
| 188 def set_etag(self): |
| 189 """ |
| 190 Set etag attribute by generating hex MD5 checksum on current |
| 191 contents of mock key. |
| 192 """ |
| 193 m = md5() |
| 194 m.update(self.data) |
| 195 hex_md5 = m.hexdigest() |
| 196 self.etag = hex_md5 |
| 197 |
| 198 def compute_md5(self, fp): |
| 199 """ |
| 200 :type fp: file |
| 201 :param fp: File pointer to the file to MD5 hash. The file pointer |
| 202 will be reset to the beginning of the file before the |
| 203 method returns. |
| 204 |
| 205 :rtype: tuple |
| 206 :return: A tuple containing the hex digest version of the MD5 hash |
| 207 as the first element and the base64 encoded version of the |
| 208 plain digest as the second element. |
| 209 """ |
| 210 tup = compute_md5(fp) |
| 211 # Returned values are MD5 hash, base64 encoded MD5 hash, and file size. |
| 212 # The internal implementation of compute_md5() needs to return the |
| 213 # file size but we don't want to return that value to the external |
| 214 # caller because it changes the class interface (i.e. it might |
| 215 # break some code) so we consume the third tuple value here and |
| 216 # return the remainder of the tuple to the caller, thereby preserving |
| 217 # the existing interface. |
| 218 self.size = tup[2] |
| 219 return tup[0:2] |
| 220 |
| 221 class MockBucket(object): |
| 222 |
| 223 def __init__(self, connection=None, name=None, key_class=NOT_IMPL): |
| 224 self.name = name |
| 225 self.keys = {} |
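| # Bucket and per-key ACLs share this dict; the bucket's own ACL is |
| # stored under the bucket name. |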
| 226 self.acls = {name: MockAcl()} |
| 227 # Default object ACLs are one per bucket and not supported for keys. |
| 228 self.def_acl = MockAcl() |
| 229 self.subresources = {} |
| 230 self.connection = connection |
| 231 self.logging = False |
| 232 |
| 233 def __repr__(self): |
| 234 return 'MockBucket: %s' % self.name |
| 235 |
| 236 def copy_key(self, new_key_name, src_bucket_name, |
| 237 src_key_name, metadata=NOT_IMPL, src_version_id=NOT_IMPL, |
| 238 storage_class=NOT_IMPL, preserve_acl=NOT_IMPL, |
| 239 encrypt_key=NOT_IMPL, headers=NOT_IMPL, query_args=NOT_IMPL): |
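| # Only the key contents are copied; the metadata, ACL and storage class |
| # arguments are accepted but ignored by this mock. |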
| 240 new_key = self.new_key(key_name=new_key_name) |
| 241 src_key = self.connection.get_bucket( |
| 242 src_bucket_name).get_key(src_key_name) |
| 243 new_key.data = copy.copy(src_key.data) |
| 244 new_key.size = len(new_key.data) |
| 245 return new_key |
| 246 |
| 247 def disable_logging(self): |
| 248 self.logging = False |
| 249 |
| 250 def enable_logging(self, target_bucket_prefix): |
| 251 self.logging = True |
| 252 |
| 253 def get_acl(self, key_name='', headers=NOT_IMPL, version_id=NOT_IMPL): |
| 254 if key_name: |
| 255 # Return ACL for the key. |
| 256 return self.acls[key_name] |
| 257 else: |
| 258 # Return ACL for the bucket. |
| 259 return self.acls[self.name] |
| 260 |
| 261 def get_def_acl(self, key_name=NOT_IMPL, headers=NOT_IMPL, |
| 262 version_id=NOT_IMPL): |
| 263 # Return default ACL for the bucket. |
| 264 return self.def_acl |
| 265 |
| 266 def get_subresource(self, subresource, key_name=NOT_IMPL, headers=NOT_IMPL, |
| 267 version_id=NOT_IMPL): |
| 268 if subresource in self.subresources: |
| 269 return self.subresources[subresource] |
| 270 else: |
| 271 return '<Subresource/>' |
| 272 |
| 273 def new_key(self, key_name=None): |
| 274 mock_key = MockKey(self, key_name) |
| 275 self.keys[key_name] = mock_key |
| 276 self.acls[key_name] = MockAcl() |
| 277 return mock_key |
| 278 |
| 279 def delete_key(self, key_name, headers=NOT_IMPL, |
| 280 version_id=NOT_IMPL, mfa_token=NOT_IMPL): |
| 281 if key_name not in self.keys: |
| 282 raise boto.exception.StorageResponseError(404, 'Not Found') |
| 283 del self.keys[key_name] |
| 284 |
| 285 def get_all_keys(self, headers=NOT_IMPL): |
| 286 return self.keys.itervalues() |
| 287 |
| 288 def get_key(self, key_name, headers=NOT_IMPL, version_id=NOT_IMPL): |
| 289 # Emulate behavior of boto when get_key called with non-existent key. |
| 290 if key_name not in self.keys: |
| 291 return None |
| 292 return self.keys[key_name] |
| 293 |
| 294 def list(self, prefix='', delimiter='', marker=NOT_IMPL, |
| 295 headers=NOT_IMPL): |
| 296 prefix = prefix or '' # Turn None into '' for prefix match. |
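| # E.g., with prefix='' and delimiter='/', keys 'a/b' and 'a/c' both roll |
| # up into a single Prefix entry named 'a/'. |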
| 297 # Return list instead of using a generator so we don't get |
| 298 # 'dictionary changed size during iteration' error when performing |
| 299 # deletions while iterating (e.g., during test cleanup). |
| 300 result = [] |
| 301 key_name_set = set() |
| 302 for k in self.keys.itervalues(): |
| 303 if k.name.startswith(prefix): |
| 304 k_name_past_prefix = k.name[len(prefix):] |
| 305 if delimiter: |
| 306 pos = k_name_past_prefix.find(delimiter) |
| 307 else: |
| 308 pos = -1 |
| 309 if pos != -1: |
| 310 key_or_prefix = Prefix( |
| 311 bucket=self, name=k.name[:len(prefix)+pos+1]) |
| 312 else: |
| 313 key_or_prefix = MockKey(bucket=self, name=k.name) |
| 314 if key_or_prefix.name not in key_name_set: |
| 315 key_name_set.add(key_or_prefix.name) |
| 316 result.append(key_or_prefix) |
| 317 return result |
| 318 |
| 319 def set_acl(self, acl_or_str, key_name='', headers=NOT_IMPL, |
| 320 version_id=NOT_IMPL): |
| 321 # We only handle setting ACL XML here; if you pass a canned ACL |
| 322 # the get_acl call will just return that string name. |
| 323 if key_name: |
| 324 # Set ACL for the key. |
| 325 self.acls[key_name] = MockAcl(acl_or_str) |
| 326 else: |
| 327 # Set ACL for the bucket. |
| 328 self.acls[self.name] = MockAcl(acl_or_str) |
| 329 |
| 330 def set_def_acl(self, acl_or_str, key_name=NOT_IMPL, headers=NOT_IMPL, |
| 331 version_id=NOT_IMPL): |
| 332 # We only handle setting ACL XML here; if you pass a canned ACL |
| 333 # the get_acl call will just return that string name. |
| 334 # Set default ACL for the bucket. |
| 335 self.def_acl = acl_or_str |
| 336 |
| 337 def set_subresource(self, subresource, value, key_name=NOT_IMPL, |
| 338 headers=NOT_IMPL, version_id=NOT_IMPL): |
| 339 self.subresources[subresource] = value |
| 340 |
| 341 |
| 342 class MockProvider(object): |
| 343 |
| 344 def __init__(self, provider): |
| 345 self.provider = provider |
| 346 |
| 347 def get_provider_name(self): |
| 348 return self.provider |
| 349 |
| 350 |
| 351 class MockConnection(object): |
| 352 |
| 353 def __init__(self, aws_access_key_id=NOT_IMPL, |
| 354 aws_secret_access_key=NOT_IMPL, is_secure=NOT_IMPL, |
| 355 port=NOT_IMPL, proxy=NOT_IMPL, proxy_port=NOT_IMPL, |
| 356 proxy_user=NOT_IMPL, proxy_pass=NOT_IMPL, |
| 357 host=NOT_IMPL, debug=NOT_IMPL, |
| 358 https_connection_factory=NOT_IMPL, |
| 359 calling_format=NOT_IMPL, |
| 360 path=NOT_IMPL, provider='s3', |
| 361 bucket_class=NOT_IMPL): |
| 362 self.buckets = {} |
| 363 self.provider = MockProvider(provider) |
| 364 |
| 365 def create_bucket(self, bucket_name, headers=NOT_IMPL, location=NOT_IMPL, |
| 366 policy=NOT_IMPL, storage_class=NOT_IMPL): |
| 367 if bucket_name in self.buckets: |
| 368 raise boto.exception.StorageCreateError( |
| 369 409, 'BucketAlreadyOwnedByYou', |
| 370 "<Message>Your previous request to create the named bucket " |
| 371 "succeeded and you already own it.</Message>") |
| 372 mock_bucket = MockBucket(name=bucket_name, connection=self) |
| 373 self.buckets[bucket_name] = mock_bucket |
| 374 return mock_bucket |
| 375 |
| 376 def delete_bucket(self, bucket, headers=NOT_IMPL): |
| 377 if bucket not in self.buckets: |
| 378 raise boto.exception.StorageResponseError( |
| 379 404, 'NoSuchBucket', '<Message>no such bucket</Message>') |
| 380 del self.buckets[bucket] |
| 381 |
| 382 def get_bucket(self, bucket_name, validate=NOT_IMPL, headers=NOT_IMPL): |
| 383 if bucket_name not in self.buckets: |
| 384 raise boto.exception.StorageResponseError(404, 'NoSuchBucket', |
| 385 'Not Found') |
| 386 return self.buckets[bucket_name] |
| 387 |
| 388 def get_all_buckets(self, headers=NOT_IMPL): |
| 389 return self.buckets.itervalues() |
| 390 |
| 391 |
| 392 # We only mock a single provider/connection. |
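| # All MockBucketStorageUri instances connect() to it, so buckets and keys |
| # created through one URI are visible to every other URI in the process. |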
| 393 mock_connection = MockConnection() |
| 394 |
| 395 |
| 396 class MockBucketStorageUri(object): |
| 397 |
| 398 delim = '/' |
| 399 |
| 400 def __init__(self, scheme, bucket_name=None, object_name=None, |
| 401 debug=NOT_IMPL, suppress_consec_slashes=NOT_IMPL, |
| 402 version_id=None, generation=None, is_latest=False): |
| 403 self.scheme = scheme |
| 404 self.bucket_name = bucket_name |
| 405 self.object_name = object_name |
| 406 self.suppress_consec_slashes = suppress_consec_slashes |
| 407 if self.bucket_name and self.object_name: |
| 408 self.uri = ('%s://%s/%s' % (self.scheme, self.bucket_name, |
| 409 self.object_name)) |
| 410 elif self.bucket_name: |
| 411 self.uri = ('%s://%s/' % (self.scheme, self.bucket_name)) |
| 412 else: |
| 413 self.uri = ('%s://' % self.scheme) |
| 414 |
| 415 self.version_id = version_id |
| 416 self.generation = generation and int(generation) |
| 417 self.is_version_specific = (bool(self.generation) |
| 418 or bool(self.version_id)) |
| 419 self.is_latest = is_latest |
| 420 if bucket_name and object_name: |
| 421 self.versionless_uri = '%s://%s/%s' % (scheme, bucket_name, |
| 422 object_name) |
| 423 |
| 424 def __repr__(self): |
| 425 """Returns string representation of URI.""" |
| 426 return self.uri |
| 427 |
| 428 def acl_class(self): |
| 429 return MockAcl |
| 430 |
| 431 def canned_acls(self): |
| 432 return boto.provider.Provider('aws').canned_acls |
| 433 |
| 434 def clone_replace_name(self, new_name): |
| 435 return self.__class__(self.scheme, self.bucket_name, new_name) |
| 436 |
| 437 def clone_replace_key(self, key): |
| 438 return self.__class__( |
| 439 key.provider.get_provider_name(), |
| 440 bucket_name=key.bucket.name, |
| 441 object_name=key.name, |
| 442 suppress_consec_slashes=self.suppress_consec_slashes, |
| 443 version_id=getattr(key, 'version_id', None), |
| 444 generation=getattr(key, 'generation', None), |
| 445 is_latest=getattr(key, 'is_latest', None)) |
| 446 |
| 447 def connect(self, access_key_id=NOT_IMPL, secret_access_key=NOT_IMPL): |
| 448 return mock_connection |
| 449 |
| 450 def create_bucket(self, headers=NOT_IMPL, location=NOT_IMPL, |
| 451 policy=NOT_IMPL, storage_class=NOT_IMPL): |
| 452 return self.connect().create_bucket(self.bucket_name) |
| 453 |
| 454 def delete_bucket(self, headers=NOT_IMPL): |
| 455 return self.connect().delete_bucket(self.bucket_name) |
| 456 |
| 457 def has_version(self): |
| 458 return (issubclass(type(self), MockBucketStorageUri) |
| 459 and ((self.version_id is not None) |
| 460 or (self.generation is not None))) |
| 461 |
| 462 def delete_key(self, validate=NOT_IMPL, headers=NOT_IMPL, |
| 463 version_id=NOT_IMPL, mfa_token=NOT_IMPL): |
| 464 self.get_bucket().delete_key(self.object_name) |
| 465 |
| 466 def disable_logging(self, validate=NOT_IMPL, headers=NOT_IMPL, |
| 467 version_id=NOT_IMPL): |
| 468 self.get_bucket().disable_logging() |
| 469 |
| 470 def enable_logging(self, target_bucket, target_prefix, validate=NOT_IMPL, |
| 471 headers=NOT_IMPL, version_id=NOT_IMPL): |
| 472 self.get_bucket().enable_logging(target_bucket) |
| 473 |
| 474 def equals(self, uri): |
| 475 return self.uri == uri.uri |
| 476 |
| 477 def get_acl(self, validate=NOT_IMPL, headers=NOT_IMPL, version_id=NOT_IMPL): |
| 478 return self.get_bucket().get_acl(self.object_name) |
| 479 |
| 480 def get_def_acl(self, validate=NOT_IMPL, headers=NOT_IMPL, |
| 481 version_id=NOT_IMPL): |
| 482 return self.get_bucket().get_def_acl(self.object_name) |
| 483 |
| 484 def get_subresource(self, subresource, validate=NOT_IMPL, headers=NOT_IMPL, |
| 485 version_id=NOT_IMPL): |
| 486 return self.get_bucket().get_subresource(subresource, self.object_name) |
| 487 |
| 488 def get_all_buckets(self, headers=NOT_IMPL): |
| 489 return self.connect().get_all_buckets() |
| 490 |
| 491 def get_all_keys(self, validate=NOT_IMPL, headers=NOT_IMPL): |
| 492 return self.get_bucket().get_all_keys() |
| 493 |
| 494 def list_bucket(self, prefix='', delimiter='', headers=NOT_IMPL, |
| 495 all_versions=NOT_IMPL): |
| 496 return self.get_bucket().list(prefix=prefix, delimiter=delimiter) |
| 497 |
| 498 def get_bucket(self, validate=NOT_IMPL, headers=NOT_IMPL): |
| 499 return self.connect().get_bucket(self.bucket_name) |
| 500 |
| 501 def get_key(self, validate=NOT_IMPL, headers=NOT_IMPL, |
| 502 version_id=NOT_IMPL): |
| 503 return self.get_bucket().get_key(self.object_name) |
| 504 |
| 505 def is_file_uri(self): |
| 506 return False |
| 507 |
| 508 def is_cloud_uri(self): |
| 509 return True |
| 510 |
| 511 def names_container(self): |
| 512 return bool(not self.object_name) |
| 513 |
| 514 def names_singleton(self): |
| 515 return bool(self.object_name) |
| 516 |
| 517 def names_directory(self): |
| 518 return False |
| 519 |
| 520 def names_provider(self): |
| 521 return bool(not self.bucket_name) |
| 522 |
| 523 def names_bucket(self): |
| 524 return self.names_container() |
| 525 |
| 526 def names_file(self): |
| 527 return False |
| 528 |
| 529 def names_object(self): |
| 530 return not self.names_container() |
| 531 |
| 532 def is_stream(self): |
| 533 return False |
| 534 |
| 535 def new_key(self, validate=NOT_IMPL, headers=NOT_IMPL): |
| 536 bucket = self.get_bucket() |
| 537 return bucket.new_key(self.object_name) |
| 538 |
| 539 def set_acl(self, acl_or_str, key_name='', validate=NOT_IMPL, |
| 540 headers=NOT_IMPL, version_id=NOT_IMPL): |
| 541 self.get_bucket().set_acl(acl_or_str, key_name) |
| 542 |
| 543 def set_def_acl(self, acl_or_str, key_name=NOT_IMPL, validate=NOT_IMPL, |
| 544 headers=NOT_IMPL, version_id=NOT_IMPL): |
| 545 self.get_bucket().set_def_acl(acl_or_str) |
| 546 |
| 547 def set_subresource(self, subresource, value, validate=NOT_IMPL, |
| 548 headers=NOT_IMPL, version_id=NOT_IMPL): |
| 549 self.get_bucket().set_subresource(subresource, value, self.object_name) |
| 550 |
| 551 def copy_key(self, src_bucket_name, src_key_name, metadata=NOT_IMPL, |
| 552 src_version_id=NOT_IMPL, storage_class=NOT_IMPL, |
| 553 preserve_acl=NOT_IMPL, encrypt_key=NOT_IMPL, headers=NOT_IMPL, |
| 554 query_args=NOT_IMPL, src_generation=NOT_IMPL): |
| 555 dst_bucket = self.get_bucket() |
| 556 return dst_bucket.copy_key(new_key_name=self.object_name, |
| 557 src_bucket_name=src_bucket_name, |
| 558 src_key_name=src_key_name) |
| 559 |
| 560 def set_contents_from_string(self, s, headers=NOT_IMPL, replace=NOT_IMPL, |
| 561 cb=NOT_IMPL, num_cb=NOT_IMPL, policy=NOT_IMPL, |
| 562 md5=NOT_IMPL, reduced_redundancy=NOT_IMPL): |
| 563 key = self.new_key() |
| 564 key.set_contents_from_string(s) |
| 565 |
| 566 def set_contents_from_file(self, fp, headers=None, replace=NOT_IMPL, |
| 567 cb=NOT_IMPL, num_cb=NOT_IMPL, policy=NOT_IMPL, |
| 568 md5=NOT_IMPL, size=NOT_IMPL, rewind=NOT_IMPL, |
| 569 res_upload_handler=NOT_IMPL): |
| 570 key = self.new_key() |
| 571 return key.set_contents_from_file(fp, headers=headers) |
| 572 |
| 573 def set_contents_from_stream(self, fp, headers=NOT_IMPL, replace=NOT_IMPL, |
| 574 cb=NOT_IMPL, num_cb=NOT_IMPL, policy=NOT_IMPL, |
| 575 reduced_redundancy=NOT_IMPL, |
| 576 query_args=NOT_IMPL, size=NOT_IMPL): |
| 577 self.new_key().set_contents_from_stream(fp) |
| 578 |
| 579 def get_contents_to_file(self, fp, headers=NOT_IMPL, cb=NOT_IMPL, |
| 580 num_cb=NOT_IMPL, torrent=NOT_IMPL, |
| 581 version_id=NOT_IMPL, res_download_handler=NOT_IMPL, |
| 582 response_headers=NOT_IMPL): |
| 583 key = self.get_key() |
| 584 key.get_contents_to_file(fp) |
| 585 |
| 586 def get_contents_to_stream(self, fp, headers=NOT_IMPL, cb=NOT_IMPL, |
| 587 num_cb=NOT_IMPL, version_id=NOT_IMPL): |
| 588 key = self.get_key() |
| 589 return key.get_contents_to_file(fp) |
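| |
| |
| # A minimal usage sketch showing how the pieces fit together, guarded so it |
| # only runs when this module is executed directly. The 'gs' scheme and the |
| # bucket/object names below are arbitrary example values. |
| if __name__ == '__main__': |
|     uri = MockBucketStorageUri('gs', bucket_name='mybucket', |
|                                object_name='hello.txt') |
|     uri.create_bucket() |
|     uri.set_contents_from_string('hello world') |
|     assert uri.get_key().get_contents_as_string() == 'hello world' |
|     assert uri.get_key().etag == md5('hello world').hexdigest() |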