Chromium Code Reviews

Side by Side Diff: third_party/boto/storage_uri.py

Issue 12633019: Added boto/ to depot_tools/third_party (Closed)
Base URL: svn://svn.chromium.org/chrome/trunk/tools/depot_tools
Patch Set: Moved boto down by one
Created: 7 years, 9 months ago
1 # Copyright 2010 Google Inc.
2 # Copyright (c) 2011, Nexenta Systems Inc.
3 #
4 # Permission is hereby granted, free of charge, to any person obtaining a
5 # copy of this software and associated documentation files (the
6 # "Software"), to deal in the Software without restriction, including
7 # without limitation the rights to use, copy, modify, merge, publish, dis-
8 # tribute, sublicense, and/or sell copies of the Software, and to permit
9 # persons to whom the Software is furnished to do so, subject to the fol-
10 # lowing conditions:
11 #
12 # The above copyright notice and this permission notice shall be included
13 # in all copies or substantial portions of the Software.
14 #
15 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
16 # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
17 # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
18 # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
19 # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 # IN THE SOFTWARE.
22
23 import boto
24 import os
25 import sys
26 import textwrap
27 from boto.s3.deletemarker import DeleteMarker
28 from boto.exception import BotoClientError
29 from boto.exception import InvalidUriError
30
31
32 class StorageUri(object):
33 """
34 Base class for representing storage provider-independent bucket and
35 object name with a shorthand URI-like syntax.
36
37 This is an abstract class: the constructor cannot be called (throws an
38 exception if you try).
39 """
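# Illustrative usage (not part of the original boto source): concrete
# subclasses are normally obtained through boto.storage_uri() rather than by
# calling a constructor. A minimal sketch, assuming credentials in the boto
# config file and a hypothetical bucket/object name:
#
#   import boto
#   uri = boto.storage_uri('gs://example-bucket/example-object')
#   data = uri.get_contents_as_string()   # see get_contents_as_string below
#
# The same shorthand covers 's3://bucket/object' and 'file:///path' URIs.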
40
41 connection = None
42 # Optional args that can be set from one of the concrete subclass
43 # constructors, to change connection behavior (e.g., to override
44 # https_connection_factory).
45 connection_args = None
46
47 # Map of provider scheme ('s3' or 'gs') to AWSAuthConnection object. We
48 # maintain a pool here in addition to the connection pool implemented
49 # in AWSAuthConnection because the latter re-creates its connection pool
50 # every time that class is instantiated (so the current pool is used to
51 # avoid re-instantiating AWSAuthConnection).
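# Illustrative shape (not in the original comments): once both providers have
# been connected to, provider_pool might look like
# {'s3': <S3Connection>, 'gs': <GSConnection>}.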
52 provider_pool = {}
53
54 def __init__(self):
55 """Uncallable constructor on abstract base StorageUri class.
56 """
57 raise BotoClientError('Attempt to instantiate abstract StorageUri '
58 'class')
59
60 def __repr__(self):
61 """Returns string representation of URI."""
62 return self.uri
63
64 def equals(self, uri):
65 """Returns true if two URIs are equal."""
66 return self.uri == uri.uri
67
68 def check_response(self, resp, level, uri):
69 if resp is None:
70 raise InvalidUriError('\n'.join(textwrap.wrap(
71 'Attempt to get %s for "%s" failed. This can happen if '
72 'the URI refers to a non-existent object or if you meant to '
73 'operate on a directory (e.g., leaving off -R option on gsutil '
74 'cp, mv, or ls of a bucket)' % (level, uri), 80)))
75
76 def _check_bucket_uri(self, function_name):
77 if issubclass(type(self), BucketStorageUri) and not self.bucket_name:
78 raise InvalidUriError(
79 '%s on bucket-less URI (%s)' % (function_name, self.uri))
80
81 def _check_object_uri(self, function_name):
82 if issubclass(type(self), BucketStorageUri) and not self.object_name:
83 raise InvalidUriError('%s on object-less URI (%s)' %
84 (function_name, self.uri))
85
86 def _warn_about_args(self, function_name, **args):
87 for arg in args:
88 if args[arg]:
89 sys.stderr.write(
90 'Warning: %s ignores argument: %s=%s\n' %
91 (function_name, arg, str(args[arg])))
92
93 def connect(self, access_key_id=None, secret_access_key=None, **kwargs):
94 """
95 Opens a connection to appropriate provider, depending on provider
96 portion of URI. Requires Credentials defined in boto config file (see
97 boto/pyami/config.py).
98 @type storage_uri: StorageUri
99 @param storage_uri: StorageUri specifying a bucket or a bucket+object
100 @rtype: L{AWSAuthConnection<boto.gs.connection.AWSAuthConnection>}
101 @return: A connection to storage service provider of the given URI.
102 """
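# Illustrative sketch (not part of the original source): connect() is normally
# invoked implicitly by get_bucket()/get_key(), but it can be called directly,
# e.g. with hypothetical credentials:
#
#   conn = uri.connect(access_key_id='AKIAEXAMPLE',
#                      secret_access_key='example-secret')
#
# For 's3' and 'gs' URIs the resulting connection is cached in provider_pool,
# so later URIs with the same scheme reuse it; 'file' URIs are not pooled
# (a FileConnection is created per URI).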
103 connection_args = dict(self.connection_args or ())
104 # Use OrdinaryCallingFormat instead of boto-default
105 # SubdomainCallingFormat because the latter changes the hostname
106 # that's checked during cert validation for HTTPS connections,
107 # which will fail cert validation (when cert validation is enabled).
108 # Note: the following import can't be moved up to the start of
109 # this file else it causes a config import failure when run from
110 # the resumable upload/download tests.
111 from boto.s3.connection import OrdinaryCallingFormat
112 connection_args['calling_format'] = OrdinaryCallingFormat()
113 if (hasattr(self, 'suppress_consec_slashes') and
114 'suppress_consec_slashes' not in connection_args):
115 connection_args['suppress_consec_slashes'] = (
116 self.suppress_consec_slashes)
117 connection_args.update(kwargs)
118 if not self.connection:
119 if self.scheme in self.provider_pool:
120 self.connection = self.provider_pool[self.scheme]
121 elif self.scheme == 's3':
122 from boto.s3.connection import S3Connection
123 self.connection = S3Connection(access_key_id,
124 secret_access_key,
125 **connection_args)
126 self.provider_pool[self.scheme] = self.connection
127 elif self.scheme == 'gs':
128 from boto.gs.connection import GSConnection
129 self.connection = GSConnection(access_key_id,
130 secret_access_key,
131 **connection_args)
132 self.provider_pool[self.scheme] = self.connection
133 elif self.scheme == 'file':
134 from boto.file.connection import FileConnection
135 self.connection = FileConnection(self)
136 else:
137 raise InvalidUriError('Unrecognized scheme "%s"' %
138 self.scheme)
139 self.connection.debug = self.debug
140 return self.connection
141
142 def has_version(self):
143 return (issubclass(type(self), BucketStorageUri)
144 and ((self.version_id is not None)
145 or (self.generation is not None)))
146
147 def delete_key(self, validate=False, headers=None, version_id=None,
148 mfa_token=None):
149 self._check_object_uri('delete_key')
150 bucket = self.get_bucket(validate, headers)
151 return bucket.delete_key(self.object_name, headers, version_id,
152 mfa_token)
153
154 def list_bucket(self, prefix='', delimiter='', headers=None,
155 all_versions=False):
156 self._check_bucket_uri('list_bucket')
157 bucket = self.get_bucket(headers=headers)
158 if all_versions:
159 return (v for v in bucket.list_versions(
160 prefix=prefix, delimiter=delimiter, headers=headers)
161 if not isinstance(v, DeleteMarker))
162 else:
163 return bucket.list(prefix=prefix, delimiter=delimiter,
164 headers=headers)
165
166 def get_all_keys(self, validate=False, headers=None, prefix=None):
167 bucket = self.get_bucket(validate, headers)
168 return bucket.get_all_keys(headers)
169
170 def get_bucket(self, validate=False, headers=None):
171 self._check_bucket_uri('get_bucket')
172 conn = self.connect()
173 bucket = conn.get_bucket(self.bucket_name, validate, headers)
174 self.check_response(bucket, 'bucket', self.uri)
175 return bucket
176
177 def get_key(self, validate=False, headers=None, version_id=None):
178 self._check_object_uri('get_key')
179 bucket = self.get_bucket(validate, headers)
180 key = bucket.get_key(self.object_name, headers, version_id)
181 self.check_response(key, 'key', self.uri)
182 return key
183
184 def new_key(self, validate=False, headers=None):
185 self._check_object_uri('new_key')
186 bucket = self.get_bucket(validate, headers)
187 return bucket.new_key(self.object_name)
188
189 def get_contents_to_stream(self, fp, headers=None, version_id=None):
190 self._check_object_uri('get_key')
191 self._warn_about_args('get_key', validate=False)
192 key = self.get_key(None, headers)
193 self.check_response(key, 'key', self.uri)
194 return key.get_contents_to_file(fp, headers, version_id=version_id)
195
196 def get_contents_to_file(self, fp, headers=None, cb=None, num_cb=10,
197 torrent=False, version_id=None,
198 res_download_handler=None, response_headers=None):
199 self._check_object_uri('get_contents_to_file')
200 key = self.get_key(None, headers)
201 self.check_response(key, 'key', self.uri)
202 key.get_contents_to_file(fp, headers, cb, num_cb, torrent, version_id,
203 res_download_handler, response_headers)
204
205 def get_contents_as_string(self, validate=False, headers=None, cb=None,
206 num_cb=10, torrent=False, version_id=None):
207 self._check_object_uri('get_contents_as_string')
208 key = self.get_key(validate, headers)
209 self.check_response(key, 'key', self.uri)
210 return key.get_contents_as_string(headers, cb, num_cb, torrent,
211 version_id)
212
213 def acl_class(self):
214 conn = self.connect()
215 acl_class = conn.provider.acl_class
216 self.check_response(acl_class, 'acl_class', self.uri)
217 return acl_class
218
219 def canned_acls(self):
220 conn = self.connect()
221 canned_acls = conn.provider.canned_acls
222 self.check_response(canned_acls, 'canned_acls', self.uri)
223 return canned_acls
224
225
226 class BucketStorageUri(StorageUri):
227 """
228 StorageUri subclass that handles bucket storage providers.
229 Callers should instantiate this class by calling boto.storage_uri().
230 """
231
232 delim = '/'
233 capabilities = set([]) # A set of additional capabilities.
234
235 def __init__(self, scheme, bucket_name=None, object_name=None,
236 debug=0, connection_args=None, suppress_consec_slashes=True,
237 version_id=None, generation=None, is_latest=False):
238 """Instantiate a BucketStorageUri from scheme,bucket,object tuple.
239
240 @type scheme: string
241 @param scheme: URI scheme naming the storage provider (gs, s3, etc.)
242 @type bucket_name: string
243 @param bucket_name: bucket name
244 @type object_name: string
245 @param object_name: object name, excluding generation/version.
246 @type debug: int
247 @param debug: debug level to pass in to connection (range 0..2)
248 @type connection_args: map
249 @param connection_args: optional map containing args to be
250 passed to {S3,GS}Connection constructor (e.g., to override
251 https_connection_factory).
252 @param suppress_consec_slashes: If provided, controls whether
253 consecutive slashes will be suppressed in key paths.
254 @param version_id: Object version id (S3-specific).
255 @param generation: Object generation number (GCS-specific).
256 @param is_latest: boolean indicating that a versioned object is the
257 current version
258
259 After instantiation the components are available in the following
260 fields: scheme, bucket_name, object_name, version_id, generation,
261 is_latest, versionless_uri, version_specific_uri, uri.
262 Note: If instantiated without version info, the string representation
263 for a URI stays versionless; similarly, if instantiated with version
264 info, the string representation for a URI stays version-specific. If you
265 call one of the uri.set_contents_from_xyz() methods, a specific object
266 version will be created, and its version-specific URI string can be
267 retrieved from version_specific_uri even if the URI was instantiated
268 without version info.
269 """
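# Illustrative sketch (not part of the original source), using a hypothetical
# GCS object and generation number:
#
#   suri = BucketStorageUri('gs', bucket_name='example-bucket',
#                           object_name='example-object',
#                           generation=1360887697105000)
#
# gives suri.versionless_uri == 'gs://example-bucket/example-object' and
# suri.version_specific_uri == suri.uri ==
# 'gs://example-bucket/example-object#1360887697105000'
# (see _build_uri_strings below; an S3 version_id is appended the same way).
# After a set_contents_from_xyz() call, _update_from_key() refreshes these
# fields so version_specific_uri names the newly written version.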
270
271 self.scheme = scheme
272 self.bucket_name = bucket_name
273 self.object_name = object_name
274 self.debug = debug
275 if connection_args:
276 self.connection_args = connection_args
277 self.suppress_consec_slashes = suppress_consec_slashes
278 self.version_id = version_id
279 self.generation = generation and int(generation)
280 self.is_latest = is_latest
281 self.is_version_specific = bool(self.generation) or bool(version_id)
282 self._build_uri_strings()
283
284 def _build_uri_strings(self):
285 if self.bucket_name and self.object_name:
286 self.versionless_uri = '%s://%s/%s' % (self.scheme, self.bucket_name,
287 self.object_name)
288 if self.generation:
289 self.version_specific_uri = '%s#%s' % (self.versionless_uri,
290 self.generation)
291 elif self.version_id:
292 self.version_specific_uri = '%s#%s' % (
293 self.versionless_uri, self.version_id)
294 if self.is_version_specific:
295 self.uri = self.version_specific_uri
296 else:
297 self.uri = self.versionless_uri
298 elif self.bucket_name:
299 self.uri = ('%s://%s/' % (self.scheme, self.bucket_name))
300 else:
301 self.uri = ('%s://' % self.scheme)
302
303 def _update_from_key(self, key):
304 self._update_from_values(
305 getattr(key, 'version_id', None),
306 getattr(key, 'generation', None),
307 getattr(key, 'is_latest', None))
308
309 def _update_from_values(self, version_id, generation, is_latest):
310 self.version_id = version_id
311 self.generation = generation
312 self.is_latest = is_latest
313 self._build_uri_strings()
314
315 def get_key(self, validate=False, headers=None, version_id=None):
316 self._check_object_uri('get_key')
317 bucket = self.get_bucket(validate, headers)
318 if self.get_provider().name == 'aws':
319 key = bucket.get_key(self.object_name, headers,
320 version_id=(version_id or self.version_id))
321 elif self.get_provider().name == 'google':
322 key = bucket.get_key(self.object_name, headers,
323 generation=self.generation)
324 self.check_response(key, 'key', self.uri)
325 return key
326
327 def delete_key(self, validate=False, headers=None, version_id=None,
328 mfa_token=None):
329 self._check_object_uri('delete_key')
330 bucket = self.get_bucket(validate, headers)
331 if self.get_provider().name == 'aws':
332 version_id = version_id or self.version_id
333 return bucket.delete_key(self.object_name, headers, version_id,
334 mfa_token)
335 elif self.get_provider().name == 'google':
336 return bucket.delete_key(self.object_name, headers,
337 generation=self.generation)
338
339 def clone_replace_name(self, new_name):
340 """Instantiate a BucketStorageUri from the current BucketStorageUri,
341 but replacing the object_name.
342
343 @type new_name: string
344 @param new_name: new object name
345 """
346 self._check_bucket_uri('clone_replace_name')
347 return BucketStorageUri(
348 self.scheme, bucket_name=self.bucket_name, object_name=new_name,
349 debug=self.debug,
350 suppress_consec_slashes=self.suppress_consec_slashes)
351
352 def clone_replace_key(self, key):
353 """Instantiate a BucketStorageUri from the current BucketStorageUri, by
354 replacing the object name with the object name and other metadata found
355 in the given Key object (including generation).
356
357 @type key: Key
358 @param key: key for the new StorageUri to represent
359 """
360 self._check_bucket_uri('clone_replace_key')
361 version_id = None
362 generation = None
363 is_latest = False
364 if hasattr(key, 'version_id'):
365 version_id = key.version_id
366 if hasattr(key, 'generation'):
367 generation = key.generation
368 if hasattr(key, 'is_latest'):
369 is_latest = key.is_latest
370
371 return BucketStorageUri(
372 key.provider.get_provider_name(),
373 bucket_name=key.bucket.name,
374 object_name=key.name,
375 debug=self.debug,
376 suppress_consec_slashes=self.suppress_consec_slashes,
377 version_id=version_id,
378 generation=generation,
379 is_latest=is_latest)
380
381 def get_acl(self, validate=False, headers=None, version_id=None):
382 """returns a bucket's acl"""
383 self._check_bucket_uri('get_acl')
384 bucket = self.get_bucket(validate, headers)
385 # This works for both bucket- and object- level ACLs (former passes
386 # key_name=None):
387 key_name = self.object_name or ''
388 if self.get_provider().name == 'aws':
389 version_id = version_id or self.version_id
390 acl = bucket.get_acl(key_name, headers, version_id)
391 else:
392 acl = bucket.get_acl(key_name, headers, generation=self.generation)
393 self.check_response(acl, 'acl', self.uri)
394 return acl
395
396 def get_def_acl(self, validate=False, headers=None):
397 """returns a bucket's default object acl"""
398 self._check_bucket_uri('get_def_acl')
399 bucket = self.get_bucket(validate, headers)
400 acl = bucket.get_def_acl(headers)
401 self.check_response(acl, 'acl', self.uri)
402 return acl
403
404 def get_cors(self, validate=False, headers=None):
405 """returns a bucket's CORS XML"""
406 self._check_bucket_uri('get_cors')
407 bucket = self.get_bucket(validate, headers)
408 cors = bucket.get_cors(headers)
409 self.check_response(cors, 'cors', self.uri)
410 return cors
411
412 def set_cors(self, cors, validate=False, headers=None):
413 """sets or updates a bucket's CORS XML"""
414 self._check_bucket_uri('set_cors ')
415 bucket = self.get_bucket(validate, headers)
416 bucket.set_cors(cors.to_xml(), headers)
417
418 def get_location(self, validate=False, headers=None):
419 self._check_bucket_uri('get_location')
420 bucket = self.get_bucket(validate, headers)
421 return bucket.get_location()
422
423 def get_storage_class(self, validate=False, headers=None):
424 self._check_bucket_uri('get_storage_class')
425 # StorageClass is defined as a bucket param for GCS, but as a key
426 # param for S3.
427 if self.scheme != 'gs':
428 raise ValueError('get_storage_class() not supported for %s '
429 'URIs.' % self.scheme)
430 bucket = self.get_bucket(validate, headers)
431 return bucket.get_storage_class()
432
433 def get_subresource(self, subresource, validate=False, headers=None,
434 version_id=None):
435 self._check_bucket_uri('get_subresource')
436 bucket = self.get_bucket(validate, headers)
437 return bucket.get_subresource(subresource, self.object_name, headers,
438 version_id)
439
440 def add_group_email_grant(self, permission, email_address, recursive=False,
441 validate=False, headers=None):
442 self._check_bucket_uri('add_group_email_grant')
443 if self.scheme != 'gs':
444 raise ValueError('add_group_email_grant() not supported for %s '
445 'URIs.' % self.scheme)
446 if self.object_name:
447 if recursive:
448 raise ValueError('add_group_email_grant() on key-ful URI cannot '
449 'specify recursive=True')
450 key = self.get_key(validate, headers)
451 self.check_response(key, 'key', self.uri)
452 key.add_group_email_grant(permission, email_address, headers)
453 elif self.bucket_name:
454 bucket = self.get_bucket(validate, headers)
455 bucket.add_group_email_grant(permission, email_address, recursive,
456 headers)
457 else:
458 raise InvalidUriError('add_group_email_grant() on bucket-less URI '
459 '%s' % self.uri)
460
461 def add_email_grant(self, permission, email_address, recursive=False,
462 validate=False, headers=None):
463 self._check_bucket_uri('add_email_grant')
464 if not self.object_name:
465 bucket = self.get_bucket(validate, headers)
466 bucket.add_email_grant(permission, email_address, recursive,
467 headers)
468 else:
469 key = self.get_key(validate, headers)
470 self.check_response(key, 'key', self.uri)
471 key.add_email_grant(permission, email_address)
472
473 def add_user_grant(self, permission, user_id, recursive=False,
474 validate=False, headers=None):
475 self._check_bucket_uri('add_user_grant')
476 if not self.object_name:
477 bucket = self.get_bucket(validate, headers)
478 bucket.add_user_grant(permission, user_id, recursive, headers)
479 else:
480 key = self.get_key(validate, headers)
481 self.check_response(key, 'key', self.uri)
482 key.add_user_grant(permission, user_id)
483
484 def list_grants(self, headers=None):
485 self._check_bucket_uri('list_grants ')
486 bucket = self.get_bucket(headers)
487 return bucket.list_grants(headers)
488
489 def is_file_uri(self):
490 """Returns True if this URI names a file or directory."""
491 return False
492
493 def is_cloud_uri(self):
494 """Returns True if this URI names a bucket or object."""
495 return True
496
497 def names_container(self):
498 """
499 Returns True if this URI names a directory or bucket. Will return
500 False for bucket subdirs; providing bucket subdir semantics needs to
501 be done by the caller (like gsutil does).
502 """
503 return bool(not self.object_name)
504
505 def names_singleton(self):
506 """Returns True if this URI names a file or object."""
507 return bool(self.object_name)
508
509 def names_directory(self):
510 """Returns True if this URI names a directory."""
511 return False
512
513 def names_provider(self):
514 """Returns True if this URI names a provider."""
515 return bool(not self.bucket_name)
516
517 def names_bucket(self):
518 """Returns True if this URI names a bucket."""
519 return bool(self.bucket_name) and bool(not self.object_name)
520
521 def names_file(self):
522 """Returns True if this URI names a file."""
523 return False
524
525 def names_object(self):
526 """Returns True if this URI names an object."""
527 return self.names_singleton()
528
529 def is_stream(self):
530 """Returns True if this URI represents input/output stream."""
531 return False
532
533 def create_bucket(self, headers=None, location='', policy=None,
534 storage_class=None):
535 self._check_bucket_uri('create_bucket ')
536 conn = self.connect()
537 # Pass storage_class param only if this is a GCS bucket. (In S3 the
538 # storage class is specified on the key object.)
539 if self.scheme == 'gs':
540 return conn.create_bucket(self.bucket_name, headers, location, policy,
541 storage_class)
542 else:
543 return conn.create_bucket(self.bucket_name, headers, location, policy)
544
545 def delete_bucket(self, headers=None):
546 self._check_bucket_uri('delete_bucket')
547 conn = self.connect()
548 return conn.delete_bucket(self.bucket_name, headers)
549
550 def get_all_buckets(self, headers=None):
551 conn = self.connect()
552 return conn.get_all_buckets(headers)
553
554 def get_provider(self):
555 conn = self.connect()
556 provider = conn.provider
557 self.check_response(provider, 'provider', self.uri)
558 return provider
559
560 def set_acl(self, acl_or_str, key_name='', validate=False, headers=None,
561 version_id=None, if_generation=None, if_metageneration=None):
562 """Sets or updates a bucket's ACL."""
563 self._check_bucket_uri('set_acl')
564 key_name = key_name or self.object_name or ''
565 bucket = self.get_bucket(validate, headers)
566 if self.generation:
567 bucket.set_acl(
568 acl_or_str, key_name, headers, generation=self.generation,
569 if_generation=if_generation, if_metageneration=if_metageneration)
570 else:
571 version_id = version_id or self.version_id
572 bucket.set_acl(acl_or_str, key_name, headers, version_id)
573
574 def set_xml_acl(self, xmlstring, key_name='', validate=False, headers=None,
575 version_id=None, if_generation=None, if_metageneration=None):
576 """Sets or updates a bucket's ACL with an XML string."""
577 self._check_bucket_uri('set_xml_acl')
578 key_name = key_name or self.object_name or ''
579 bucket = self.get_bucket(validate, headers)
580 if self.generation:
581 bucket.set_xml_acl(
582 xmlstring, key_name, headers, generation=self.generation,
583 if_generation=if_generation, if_metageneration=if_metageneration)
584 else:
585 version_id = version_id or self.version_id
586 bucket.set_xml_acl(xmlstring, key_name, headers,
587 version_id=version_id)
588
589 def set_def_xml_acl(self, xmlstring, validate=False, headers=None):
590 """Sets or updates a bucket's default object ACL with an XML string."""
591 self._check_bucket_uri('set_def_xml_acl')
592 self.get_bucket(validate, headers).set_def_xml_acl(xmlstring, headers)
593
594 def set_def_acl(self, acl_or_str, validate=False, headers=None,
595 version_id=None):
596 """Sets or updates a bucket's default object ACL."""
597 self._check_bucket_uri('set_def_acl')
598 self.get_bucket(validate, headers).set_def_acl(acl_or_str, headers)
599
600 def set_canned_acl(self, acl_str, validate=False, headers=None,
601 version_id=None):
602 """Sets or updates a bucket's acl to a predefined (canned) value."""
603 self._check_object_uri('set_canned_acl')
604 self._warn_about_args('set_canned_acl', version_id=version_id)
605 key = self.get_key(validate, headers)
606 self.check_response(key, 'key', self.uri)
607 key.set_canned_acl(acl_str, headers)
608
609 def set_def_canned_acl(self, acl_str, validate=False, headers=None,
610 version_id=None):
611 """Sets or updates a bucket's default object acl to a predefined
612 (canned) value."""
613 self._check_bucket_uri('set_def_canned_acl ')
614 key = self.get_key(validate, headers)
615 self.check_response(key, 'key', self.uri)
616 key.set_def_canned_acl(acl_str, headers, version_id)
617
618 def set_subresource(self, subresource, value, validate=False, headers=None,
619 version_id=None):
620 self._check_bucket_uri('set_subresource')
621 bucket = self.get_bucket(validate, headers)
622 bucket.set_subresource(subresource, value, self.object_name, headers,
623 version_id)
624
625 def set_contents_from_string(self, s, headers=None, replace=True,
626 cb=None, num_cb=10, policy=None, md5=None,
627 reduced_redundancy=False):
628 self._check_object_uri('set_contents_from_string')
629 key = self.new_key(headers=headers)
630 if self.scheme == 'gs':
631 if reduced_redundancy:
632 sys.stderr.write('Warning: GCS does not support '
633 'reduced_redundancy; argument ignored by '
634 'set_contents_from_string')
635 result = key.set_contents_from_string(
636 s, headers, replace, cb, num_cb, policy, md5)
637 else:
638 result = key.set_contents_from_string(
639 s, headers, replace, cb, num_cb, policy, md5,
640 reduced_redundancy)
641 self._update_from_key(key)
642 return result
643
644 def set_contents_from_file(self, fp, headers=None, replace=True, cb=None,
645 num_cb=10, policy=None, md5=None, size=None,
646 rewind=False, res_upload_handler=None):
647 self._check_object_uri('set_contents_from_file')
648 key = self.new_key(headers=headers)
649 if self.scheme == 'gs':
650 result = key.set_contents_from_file(
651 fp, headers, replace, cb, num_cb, policy, md5, size=size,
652 rewind=rewind, res_upload_handler=res_upload_handler)
653 if res_upload_handler:
654 self._update_from_values(None, res_upload_handler.generation,
655 None)
656 else:
657 self._warn_about_args('set_contents_from_file',
658 res_upload_handler=res_upload_handler)
659 result = key.set_contents_from_file(
660 fp, headers, replace, cb, num_cb, policy, md5, size=size,
661 rewind=rewind)
662 self._update_from_key(key)
663 return result
664
665 def set_contents_from_stream(self, fp, headers=None, replace=True, cb=None,
666 policy=None, reduced_redundancy=False):
667 self._check_object_uri('set_contents_from_stream')
668 dst_key = self.new_key(False, headers)
669 result = dst_key.set_contents_from_stream(
670 fp, headers, replace, cb, policy=policy,
671 reduced_redundancy=reduced_redundancy)
672 self._update_from_key(dst_key)
673 return result
674
675 def copy_key(self, src_bucket_name, src_key_name, metadata=None,
676 src_version_id=None, storage_class='STANDARD',
677 preserve_acl=False, encrypt_key=False, headers=None,
678 query_args=None, src_generation=None):
679 """Returns newly created key."""
680 self._check_object_uri('copy_key')
681 dst_bucket = self.get_bucket(validate=False, headers=headers)
682 if src_generation:
683 return dst_bucket.copy_key(new_key_name=self.object_name,
684 src_bucket_name=src_bucket_name,
685 src_key_name=src_key_name, metadata=metadata,
686 storage_class=storage_class, preserve_acl=preserve_acl,
687 encrypt_key=encrypt_key, headers=headers, query_args=query_args,
688 src_generation=src_generation)
689 else:
690 return dst_bucket.copy_key(new_key_name=self.object_name,
691 src_bucket_name=src_bucket_name, src_key_name=src_key_name,
692 metadata=metadata, src_version_id=src_version_id,
693 storage_class=storage_class, preserve_acl=preserve_acl,
694 encrypt_key=encrypt_key, headers=headers, query_args=query_args)
695
696 def enable_logging(self, target_bucket, target_prefix=None, validate=False,
697 headers=None, version_id=None):
698 self._check_bucket_uri('enable_logging')
699 bucket = self.get_bucket(validate, headers)
700 bucket.enable_logging(target_bucket, target_prefix, headers=headers)
701
702 def disable_logging(self, validate=False, headers=None, version_id=None):
703 self._check_bucket_uri('disable_logging')
704 bucket = self.get_bucket(validate, headers)
705 bucket.disable_logging(headers=headers)
706
707 def set_website_config(self, main_page_suffix=None, error_key=None,
708 validate=False, headers=None):
709 bucket = self.get_bucket(validate, headers)
710 if not (main_page_suffix or error_key):
711 bucket.delete_website_configuration(headers)
712 else:
713 bucket.configure_website(main_page_suffix, error_key, headers)
714
715 def get_website_config(self, validate=False, headers=None):
716 bucket = self.get_bucket(validate, headers)
717 return bucket.get_website_configuration_with_xml(headers)
718
719 def get_versioning_config(self, headers=None):
720 bucket = self.get_bucket(False, headers)
721 return bucket.get_versioning_status(headers)
722
723 def configure_versioning(self, enabled, headers=None):
724 self._check_bucket_uri('configure_versioning')
725 bucket = self.get_bucket(False, headers)
726 return bucket.configure_versioning(enabled, headers)
727
728 def set_metadata(self, metadata_plus, metadata_minus, preserve_acl,
729 headers=None):
730 return self.get_key(False).set_remote_metadata(metadata_plus,
731 metadata_minus,
732 preserve_acl,
733 headers=headers)
734
735 def exists(self, headers=None):
736 """Returns True if the object exists or False if it doesn't"""
737 if not self.object_name:
738 raise InvalidUriError('exists on object-less URI (%s)' % self.uri)
739 bucket = self.get_bucket()
740 key = bucket.get_key(self.object_name, headers=headers)
741 return bool(key)
742
743 class FileStorageUri(StorageUri):
744 """
745 StorageUri subclass that handles files in the local file system.
746 Callers should instantiate this class by calling boto.storage_uri().
747
748 See file/README about how we map StorageUri operations onto a file system.
749 """
750
751 delim = os.sep
752
753 def __init__(self, object_name, debug, is_stream=False):
754 """Instantiate a FileStorageUri from a path name.
755
756 @type object_name: string
757 @param object_name: object name
758 @type debug: boolean
759 @param debug: whether to enable debugging on this StorageUri
760
761 After instantiation the components are available in the following
762 fields: uri, scheme, bucket_name (always blank for this "anonymous"
763 bucket), object_name.
764 """
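# Illustrative sketch (not part of the original source), using a hypothetical
# local path:
#
#   fsuri = FileStorageUri('/tmp/example.txt', debug=0)
#
# gives fsuri.uri == 'file:///tmp/example.txt', fsuri.bucket_name == '' and
# fsuri.object_name == '/tmp/example.txt'. names_directory() simply checks
# os.path.isdir() on that path, and is_stream() reports the is_stream flag.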
765
766 self.scheme = 'file'
767 self.bucket_name = ''
768 self.object_name = object_name
769 self.uri = 'file://' + object_name
770 self.debug = debug
771 self.stream = is_stream
772
773 def clone_replace_name(self, new_name):
774 """Instantiate a FileStorageUri from the current FileStorageUri,
775 but replacing the object_name.
776
777 @type new_name: string
778 @param new_name: new object name
779 """
780 return FileStorageUri(new_name, self.debug, self.stream)
781
782 def is_file_uri(self):
783 """Returns True if this URI names a file or directory."""
784 return True
785
786 def is_cloud_uri(self):
787 """Returns True if this URI names a bucket or object."""
788 return False
789
790 def names_container(self):
791 """Returns True if this URI names a directory or bucket."""
792 return self.names_directory()
793
794 def names_singleton(self):
795 """Returns True if this URI names a file (or stream) or object."""
796 return not self.names_container()
797
798 def names_directory(self):
799 """Returns True if this URI names a directory."""
800 if self.stream:
801 return False
802 return os.path.isdir(self.object_name)
803
804 def names_provider(self):
805 """Returns True if this URI names a provider."""
806 return False
807
808 def names_bucket(self):
809 """Returns True if this URI names a bucket."""
810 return False
811
812 def names_file(self):
813 """Returns True if this URI names a file."""
814 return self.names_singleton()
815
816 def names_object(self):
817 """Returns True if this URI names an object."""
818 return False
819
820 def is_stream(self):
821 """Returns True if this URI represents input/output stream.
822 """
823 return bool(self.stream)
824
825 def close(self):
826 """Closes the underlying file.
827 """
828 self.get_key().close()
829
830 def exists(self, _headers_not_used=None):
831 """Returns True if the file exists or False if it doesn't"""
832 # The _headers_not_used parameter is ignored. It is only there to ensure
833 # that this method's signature is identical to the exists method on the
834 # BucketStorageUri class.
835 return os.path.exists(self.object_name)