Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(71)

Side by Side Diff: third_party/boto/s3/connection.py

Issue 12633019: Added boto/ to depot_tools/third_party (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/tools/depot_tools
Patch Set: Moved boto down by one Created 7 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « third_party/boto/s3/bucketlogging.py ('k') | third_party/boto/s3/cors.py » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
(Empty)
1 # Copyright (c) 2006-2012 Mitch Garnaat http://garnaat.org/
2 # Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
3 # Copyright (c) 2010, Eucalyptus Systems, Inc.
4 # All rights reserved.
5 #
6 # Permission is hereby granted, free of charge, to any person obtaining a
7 # copy of this software and associated documentation files (the
8 # "Software"), to deal in the Software without restriction, including
9 # without limitation the rights to use, copy, modify, merge, publish, dis-
10 # tribute, sublicense, and/or sell copies of the Software, and to permit
11 # persons to whom the Software is furnished to do so, subject to the fol-
12 # lowing conditions:
13 #
14 # The above copyright notice and this permission notice shall be included
15 # in all copies or substantial portions of the Software.
16 #
17 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
18 # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
19 # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
20 # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
21 # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
23 # IN THE SOFTWARE.
24
25 import xml.sax
26 import urllib
27 import base64
28 import time
29
30 import boto.utils
31 from boto.connection import AWSAuthConnection
32 from boto import handler
33 from boto.s3.bucket import Bucket
34 from boto.s3.key import Key
35 from boto.resultset import ResultSet
36 from boto.exception import BotoClientError, S3ResponseError
37
38
def check_lowercase_bucketname(n):
    """
    Validate that a bucket name contains no upper-case characters.

    Appending a lowercase character before calling islower() ensures
    that purely-numeric names (e.g. "1234-5678") are accepted too.

    >>> check_lowercase_bucketname("Aaaa")
    Traceback (most recent call last):
    ...
    BotoClientError: S3Error: Bucket names cannot contain upper-case
    characters when using either the sub-domain or virtual hosting calling
    format.

    >>> check_lowercase_bucketname("1234-5678-9123")
    True
    >>> check_lowercase_bucketname("abcdefg1234")
    True
    """
    if (n + 'a').islower():
        return True
    raise BotoClientError("Bucket names cannot contain upper-case "
                          "characters when using either the sub-domain or virtual "
                          "hosting calling format.")
62
63
def assert_case_insensitive(f):
    """
    Decorator that rejects upper-case bucket names before calling *f*.

    Only methods invoked as ``(self, server, bucket)`` — i.e. with
    exactly three positional arguments — are checked; the bucket name
    (``args[2]``) is passed to check_lowercase_bucketname, which raises
    BotoClientError on a violation.
    """
    def wrapper(*args, **kwargs):
        if len(args) == 3:
            # Raises on an invalid name; the True return is ignored.
            check_lowercase_bucketname(args[2])
        return f(*args, **kwargs)
    return wrapper
70
71
72 class _CallingFormat(object):
73
74 def get_bucket_server(self, server, bucket):
75 return ''
76
77 def build_url_base(self, connection, protocol, server, bucket, key=''):
78 url_base = '%s://' % protocol
79 url_base += self.build_host(server, bucket)
80 url_base += connection.get_path(self.build_path_base(bucket, key))
81 return url_base
82
83 def build_host(self, server, bucket):
84 if bucket == '':
85 return server
86 else:
87 return self.get_bucket_server(server, bucket)
88
89 def build_auth_path(self, bucket, key=''):
90 key = boto.utils.get_utf8_value(key)
91 path = ''
92 if bucket != '':
93 path = '/' + bucket
94 return path + '/%s' % urllib.quote(key)
95
96 def build_path_base(self, bucket, key=''):
97 key = boto.utils.get_utf8_value(key)
98 return '/%s' % urllib.quote(key)
99
100
class SubdomainCallingFormat(_CallingFormat):
    """Address buckets as a sub-domain: ``<bucket>.<server>``."""

    @assert_case_insensitive
    def get_bucket_server(self, server, bucket):
        # DNS labels are case-sensitive-hostile, hence the decorator.
        return bucket + '.' + server
106
107
class VHostCallingFormat(_CallingFormat):
    """
    Virtual-host style: the bucket name itself is the host.  The
    caller's DNS (e.g. a CNAME record) is responsible for routing the
    name to S3.
    """

    @assert_case_insensitive
    def get_bucket_server(self, server, bucket):
        return bucket
113
114
class OrdinaryCallingFormat(_CallingFormat):
    """Path-style addressing: the bucket appears in the URL path."""

    def get_bucket_server(self, server, bucket):
        # The host never varies with the bucket in path-style requests.
        return server

    def build_path_base(self, bucket, key=''):
        """Return '/<bucket>/<quoted-key>' (or '/<quoted-key>' with no bucket)."""
        quoted_key = urllib.quote(boto.utils.get_utf8_value(key))
        if bucket:
            return '/%s/%s' % (bucket, quoted_key)
        return '/' + quoted_key
126
127
class ProtocolIndependentOrdinaryCallingFormat(OrdinaryCallingFormat):
    """
    Path-style URLs with a scheme-relative prefix ('//host/path'), so
    the browser/client reuses whatever protocol the embedding page used.
    """

    def build_url_base(self, connection, protocol, server, bucket, key=''):
        # 'protocol' is accepted for interface compatibility but unused.
        host = self.build_host(server, bucket)
        path = connection.get_path(self.build_path_base(bucket, key))
        return '//' + host + path
135
136
class Location:
    """
    Named constants for the S3 region (``LocationConstraint``) values
    accepted by :meth:`S3Connection.create_bucket`.
    """

    DEFAULT = '' # US Classic Region
    EU = 'EU'
    USWest = 'us-west-1'
    USWest2 = 'us-west-2'
    SAEast = 'sa-east-1'
    APNortheast = 'ap-northeast-1'
    APSoutheast = 'ap-southeast-1'
    APSoutheast2 = 'ap-southeast-2'
147
148
class S3Connection(AWSAuthConnection):
    """
    A connection to the Amazon S3 service.

    Bucket addressing (sub-domain, virtual-host or path style) is
    delegated to the ``calling_format`` object; see the
    ``*CallingFormat`` classes in this module.
    """

    DefaultHost = 's3.amazonaws.com'
    # Template for the query-string authentication portion of a
    # pre-signed URL (see generate_url).
    QueryString = 'Signature=%s&Expires=%d&AWSAccessKeyId=%s'

    def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,
                 is_secure=True, port=None, proxy=None, proxy_port=None,
                 proxy_user=None, proxy_pass=None,
                 host=DefaultHost, debug=0, https_connection_factory=None,
                 calling_format=SubdomainCallingFormat(), path='/',
                 provider='aws', bucket_class=Bucket, security_token=None,
                 suppress_consec_slashes=True, anon=False,
                 validate_certs=None):
        self.calling_format = calling_format
        self.bucket_class = bucket_class
        self.anon = anon
        AWSAuthConnection.__init__(self, host,
                aws_access_key_id, aws_secret_access_key,
                is_secure, port, proxy, proxy_port, proxy_user, proxy_pass,
                debug=debug, https_connection_factory=https_connection_factory,
                path=path, provider=provider, security_token=security_token,
                suppress_consec_slashes=suppress_consec_slashes,
                validate_certs=validate_certs)

    def _required_auth_capability(self):
        # Anonymous connections must not sign requests at all.
        if self.anon:
            return ['anon']
        else:
            return ['s3']

    def __iter__(self):
        # Iterating the connection yields every bucket in the account.
        for bucket in self.get_all_buckets():
            yield bucket

    def __contains__(self, bucket_name):
        # 'is not None' instead of the original double negative
        # 'not (... is None)'.
        return self.lookup(bucket_name) is not None

    def set_bucket_class(self, bucket_class):
        """
        Set the Bucket class associated with this bucket.  By default, this
        would be the boto.s3.bucket.Bucket class but if you want to
        subclass that for some reason this allows you to associate your
        new class.

        :type bucket_class: class
        :param bucket_class: A subclass of Bucket that can be more specific
        """
        self.bucket_class = bucket_class

    def build_post_policy(self, expiration_time, conditions):
        """
        Build the JSON policy document used for browser-based POST uploads.

        Taken from the AWS book Python examples and modified for use
        with boto.

        :type expiration_time: time.struct_time
        :param expiration_time: UTC time at which the policy expires.

        :type conditions: list
        :param conditions: Pre-rendered JSON condition fragments (strings).

        :rtype: string
        :return: The policy document as a JSON string.
        """
        assert isinstance(expiration_time, time.struct_time), \
            'Policy document must include a valid expiration Time object'

        # Convert conditions object mappings to condition statements

        return '{"expiration": "%s",\n"conditions": [%s]}' % \
            (time.strftime(boto.utils.ISO8601, expiration_time),
             ",".join(conditions))

    def build_post_form_args(self, bucket_name, key, expires_in=6000,
                             acl=None, success_action_redirect=None,
                             max_content_length=None,
                             http_method='http', fields=None,
                             conditions=None, storage_class='STANDARD',
                             server_side_encryption=None):
        """
        Taken from the AWS book Python examples and modified for use with
        boto.  This only returns the arguments required for the post form,
        not the actual form.  This does not return the file input field
        which also needs to be added.

        :type bucket_name: string
        :param bucket_name: Bucket to submit to

        :type key: string
        :param key: Key name, optionally add ${filename} to the end to
            attach the submitted filename

        :type expires_in: integer
        :param expires_in: Time (in seconds) before this expires, defaults
            to 6000

        :type acl: string
        :param acl: A canned ACL.  One of:
            * private
            * public-read
            * public-read-write
            * authenticated-read
            * bucket-owner-read
            * bucket-owner-full-control

        :type success_action_redirect: string
        :param success_action_redirect: URL to redirect to on success

        :type max_content_length: integer
        :param max_content_length: Maximum size for this file

        :type http_method: string
        :param http_method: HTTP Method to use, "http" or "https"

        :type storage_class: string
        :param storage_class: Storage class to use for storing the object.
            Valid values: STANDARD | REDUCED_REDUNDANCY

        :type server_side_encryption: string
        :param server_side_encryption: Specifies server-side encryption
            algorithm to use when Amazon S3 creates an object.
            Valid values: None | AES256

        :rtype: dict
        :return: A dictionary containing field names/values as well as
            a url to POST to
        """
        # Identity comparison for None; also avoids mutating a shared
        # default list.
        if fields is None:
            fields = []
        if conditions is None:
            conditions = []
        expiration = time.gmtime(int(time.time() + expires_in))

        # Generate policy document
        conditions.append('{"bucket": "%s"}' % bucket_name)
        if key.endswith("${filename}"):
            # The browser substitutes ${filename} with the uploaded
            # file's name, so the policy can only pin the key's prefix.
            # (Bug fix: the sentinel checked here must match the token
            # stripped below.)
            conditions.append('["starts-with", "$key", "%s"]' %
                              key[:-len("${filename}")])
        else:
            conditions.append('{"key": "%s"}' % key)
        if acl:
            conditions.append('{"acl": "%s"}' % acl)
            fields.append({"name": "acl", "value": acl})
        if success_action_redirect:
            conditions.append('{"success_action_redirect": "%s"}' %
                              success_action_redirect)
            fields.append({"name": "success_action_redirect",
                           "value": success_action_redirect})
        if max_content_length:
            conditions.append('["content-length-range", 0, %i]' %
                              max_content_length)

        if self.provider.security_token:
            # Temporary (STS) credentials require the token in both the
            # form fields and the signed policy.
            fields.append({'name': 'x-amz-security-token',
                           'value': self.provider.security_token})
            conditions.append('{"x-amz-security-token": "%s"}' %
                              self.provider.security_token)

        if storage_class:
            fields.append({'name': 'x-amz-storage-class',
                           'value': storage_class})
            conditions.append('{"x-amz-storage-class": "%s"}' % storage_class)

        if server_side_encryption:
            fields.append({'name': 'x-amz-server-side-encryption',
                           'value': server_side_encryption})
            conditions.append('{"x-amz-server-side-encryption": "%s"}' %
                              server_side_encryption)

        policy = self.build_post_policy(expiration, conditions)

        # Add the base64-encoded policy document as the 'policy' field
        policy_b64 = base64.b64encode(policy)
        fields.append({"name": "policy", "value": policy_b64})

        # Add the AWS access key as the 'AWSAccessKeyId' field
        fields.append({"name": "AWSAccessKeyId",
                       "value": self.aws_access_key_id})

        # Add signature for encoded policy document as the
        # 'signature' field
        signature = self._auth_handler.sign_string(policy_b64)
        fields.append({"name": "signature", "value": signature})
        fields.append({"name": "key", "value": key})

        # HTTPS protocol will be used if the secure HTTP option is enabled.
        url = '%s://%s/' % (http_method,
                            self.calling_format.build_host(self.server_name(),
                                                           bucket_name))

        return {"action": url, "fields": fields}

    def generate_url(self, expires_in, method, bucket='', key='', headers=None,
                     query_auth=True, force_http=False, response_headers=None,
                     expires_in_absolute=False, version_id=None):
        """
        Generate a (possibly pre-signed) URL for an S3 object.

        :type expires_in: int
        :param expires_in: Expiry in seconds from now, or an absolute
            epoch timestamp when ``expires_in_absolute`` is True.

        :type method: string
        :param method: The HTTP method the URL will be used with.

        :type query_auth: bool
        :param query_auth: If True, embed Signature/Expires/AWSAccessKeyId
            in the query string.

        :type force_http: bool
        :param force_http: If True, emit an http:// URL on port 80
            regardless of this connection's settings.

        :type response_headers: dict
        :param response_headers: Extra response-override query parameters
            to include (they are signed as part of the auth path).

        :type version_id: string
        :param version_id: If given, a versionId query parameter is added.

        :rtype: string
        :return: The generated URL.
        """
        headers = headers or {}
        if expires_in_absolute:
            expires = int(expires_in)
        else:
            expires = int(time.time() + expires_in)
        auth_path = self.calling_format.build_auth_path(bucket, key)
        auth_path = self.get_path(auth_path)
        # optional version_id and response_headers need to be added to
        # the query param list.
        extra_qp = []
        if version_id is not None:
            extra_qp.append("versionId=%s" % version_id)
        if response_headers:
            for k, v in response_headers.items():
                extra_qp.append("%s=%s" % (k, urllib.quote(v)))
        if self.provider.security_token:
            headers['x-amz-security-token'] = self.provider.security_token
        if extra_qp:
            delimiter = '?' if '?' not in auth_path else '&'
            auth_path += delimiter + '&'.join(extra_qp)
        c_string = boto.utils.canonical_string(method, auth_path, headers,
                                               expires, self.provider)
        b64_hmac = self._auth_handler.sign_string(c_string)
        encoded_canonical = urllib.quote(b64_hmac, safe='')
        # NOTE(review): the result of this call is unused; retained in
        # case a custom calling_format's build_path_base has side effects.
        self.calling_format.build_path_base(bucket, key)
        if query_auth:
            query_part = '?' + self.QueryString % (encoded_canonical, expires,
                                                   self.aws_access_key_id)
        else:
            query_part = ''
        if headers:
            hdr_prefix = self.provider.header_prefix
            for k, v in headers.items():
                if k.startswith(hdr_prefix):
                    # headers used for sig generation must be
                    # included in the url also.
                    extra_qp.append("%s=%s" % (k, urllib.quote(v)))
        if extra_qp:
            delimiter = '?' if not query_part else '&'
            query_part += delimiter + '&'.join(extra_qp)
        if force_http:
            protocol = 'http'
            port = 80
        else:
            protocol = self.protocol
            port = self.port
        return self.calling_format.build_url_base(self, protocol,
                                                  self.server_name(port),
                                                  bucket, key) + query_part

    def get_all_buckets(self, headers=None):
        """
        Return a ResultSet of all buckets owned by this account.

        :type headers: dict
        :param headers: Additional headers to pass along with the request
            to AWS.

        :rtype: :class:`boto.resultset.ResultSet`
        :return: The buckets, parsed from the service's XML response.
        """
        response = self.make_request('GET', headers=headers)
        body = response.read()
        if response.status > 300:
            raise self.provider.storage_response_error(
                response.status, response.reason, body)
        rs = ResultSet([('Bucket', self.bucket_class)])
        h = handler.XmlHandler(rs, self)
        xml.sax.parseString(body, h)
        return rs

    def get_canonical_user_id(self, headers=None):
        """
        Convenience method that returns the "CanonicalUserID" of the
        user whose credentials are associated with the connection.
        The only way to get this value is to do a GET request on the
        service which returns all buckets associated with the account.
        As part of that response, the canonical userid is returned.
        This method simply does all of that and then returns just the
        user id.

        :rtype: string
        :return: A string containing the canonical user id.
        """
        rs = self.get_all_buckets(headers=headers)
        return rs.owner.id

    def get_bucket(self, bucket_name, validate=True, headers=None):
        """
        Retrieves a bucket by name.

        If the bucket does not exist, an ``S3ResponseError`` will be
        raised.  If you are unsure if the bucket exists or not, you can
        use the ``S3Connection.lookup`` method, which will either return
        a valid bucket or ``None``.

        :type bucket_name: string
        :param bucket_name: The name of the bucket

        :type headers: dict
        :param headers: Additional headers to pass along with the request
            to AWS.

        :type validate: boolean
        :param validate: If ``True``, it will try to fetch all keys within
            the given bucket. (Default: ``True``)
        """
        bucket = self.bucket_class(self, bucket_name)
        if validate:
            # A zero-key listing is the cheapest request that still
            # confirms the bucket exists and is accessible.
            bucket.get_all_keys(headers, maxkeys=0)
        return bucket

    def lookup(self, bucket_name, validate=True, headers=None):
        """
        Attempts to get a bucket from S3.

        Works identically to ``S3Connection.get_bucket``, save for that it
        will return ``None`` if the bucket does not exist instead of
        throwing an exception.

        :type bucket_name: string
        :param bucket_name: The name of the bucket

        :type headers: dict
        :param headers: Additional headers to pass along with the request
            to AWS.

        :type validate: boolean
        :param validate: If ``True``, it will try to fetch all keys within
            the given bucket. (Default: ``True``)
        """
        try:
            bucket = self.get_bucket(bucket_name, validate, headers=headers)
        except Exception:
            # Was a bare 'except:', which also trapped SystemExit and
            # KeyboardInterrupt; 'except Exception' keeps the best-effort
            # behaviour without swallowing interpreter-exit signals.
            bucket = None
        return bucket

    def create_bucket(self, bucket_name, headers=None,
                      location=Location.DEFAULT, policy=None):
        """
        Creates a new located bucket.  By default it's in the USA.  You
        can pass Location.EU to create a European bucket.

        :type bucket_name: string
        :param bucket_name: The name of the new bucket

        :type headers: dict
        :param headers: Additional headers to pass along with the request
            to AWS.

        :type location: str
        :param location: The location of the new bucket.  You can use one
            of the constants in :class:`boto.s3.connection.Location` (e.g.
            Location.EU, Location.USWest, etc.).

        :type policy: :class:`boto.s3.acl.CannedACLStrings`
        :param policy: A canned ACL policy that will be applied to the
            new key in S3.
        """
        check_lowercase_bucketname(bucket_name)

        if policy:
            if headers:
                headers[self.provider.acl_header] = policy
            else:
                headers = {self.provider.acl_header: policy}
        if location == Location.DEFAULT:
            data = ''
        else:
            data = ('<CreateBucketConfiguration><LocationConstraint>' +
                    location +
                    '</LocationConstraint></CreateBucketConfiguration>')
        response = self.make_request('PUT', bucket_name, headers=headers,
                                     data=data)
        body = response.read()
        if response.status == 409:
            # 409 Conflict: the bucket name is already taken.
            raise self.provider.storage_create_error(
                response.status, response.reason, body)
        if response.status == 200:
            return self.bucket_class(self, bucket_name)
        else:
            raise self.provider.storage_response_error(
                response.status, response.reason, body)

    def delete_bucket(self, bucket, headers=None):
        """
        Removes an S3 bucket.

        In order to remove the bucket, it must first be empty. If the
        bucket is not empty, an ``S3ResponseError`` will be raised.

        :type bucket: string
        :param bucket: The name of the bucket (a Bucket instance is also
            accepted; see make_request)

        :type headers: dict
        :param headers: Additional headers to pass along with the request
            to AWS.
        """
        response = self.make_request('DELETE', bucket, headers=headers)
        body = response.read()
        # S3 answers 204 No Content on a successful delete.
        if response.status != 204:
            raise self.provider.storage_response_error(
                response.status, response.reason, body)

    def make_request(self, method, bucket='', key='', headers=None, data='',
                     query_args=None, sender=None, override_num_retries=None):
        """
        Build the wire path, auth path and host for *bucket*/*key* via
        the configured calling format, then delegate to
        AWSAuthConnection.make_request.  Bucket and Key objects are
        accepted in place of their string names.
        """
        if isinstance(bucket, self.bucket_class):
            bucket = bucket.name
        if isinstance(key, Key):
            key = key.name
        path = self.calling_format.build_path_base(bucket, key)
        boto.log.debug('path=%s' % path)
        auth_path = self.calling_format.build_auth_path(bucket, key)
        boto.log.debug('auth_path=%s' % auth_path)
        host = self.calling_format.build_host(self.server_name(), bucket)
        if query_args:
            # Query args are appended verbatim to both paths so they are
            # part of the signature as well as the request.
            path += '?' + query_args
            boto.log.debug('path=%s' % path)
            auth_path += '?' + query_args
            boto.log.debug('auth_path=%s' % auth_path)
        return AWSAuthConnection.make_request(self, method, path, headers,
                data, host, auth_path, sender,
                override_num_retries=override_num_retries)
OLDNEW
« no previous file with comments | « third_party/boto/s3/bucketlogging.py ('k') | third_party/boto/s3/cors.py » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698