OLD | NEW |
(Empty) | |
| 1 # Copyright (c) 2006-2012 Mitch Garnaat http://garnaat.org/ |
| 2 # Copyright (c) 2010, Eucalyptus Systems, Inc. |
| 3 # Copyright (c) 2012 Amazon.com, Inc. or its affiliates. |
| 4 # All rights reserved. |
| 5 # |
| 6 # Permission is hereby granted, free of charge, to any person obtaining a |
| 7 # copy of this software and associated documentation files (the |
| 8 # "Software"), to deal in the Software without restriction, including |
| 9 # without limitation the rights to use, copy, modify, merge, publish, dis- |
| 10 # tribute, sublicense, and/or sell copies of the Software, and to permit |
| 11 # persons to whom the Software is furnished to do so, subject to the fol- |
| 12 # lowing conditions: |
| 13 # |
| 14 # The above copyright notice and this permission notice shall be included |
| 15 # in all copies or substantial portions of the Software. |
| 16 # |
| 17 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS |
| 18 # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- |
| 19 # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT |
| 20 # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, |
| 21 # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
| 22 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS |
| 23 # IN THE SOFTWARE. |
| 24 |
| 25 # |
| 26 # Parts of this code were copied or derived from sample code supplied by AWS. |
| 27 # The following notice applies to that code. |
| 28 # |
| 29 # This software code is made available "AS IS" without warranties of any |
| 30 # kind. You may copy, display, modify and redistribute the software |
| 31 # code either by itself or as incorporated into your code; provided that |
| 32 # you do not remove any proprietary notices. Your use of this software |
| 33 # code is at your own risk and you waive any claim against Amazon |
| 34 # Digital Services, Inc. or its affiliates with respect to your use of |
| 35 # this software code. (c) 2006 Amazon Digital Services, Inc. or its |
| 36 # affiliates. |
| 37 |
| 38 """ |
| 39 Some handy utility functions used by several classes. |
| 40 """ |
| 41 |
| 42 import socket |
| 43 import urllib |
| 44 import urllib2 |
| 45 import imp |
| 46 import subprocess |
| 47 import StringIO |
| 48 import time |
| 49 import logging.handlers |
| 50 import boto |
| 51 import boto.provider |
| 52 import tempfile |
| 53 import smtplib |
| 54 import datetime |
| 55 import re |
| 56 import email.mime.multipart |
| 57 import email.mime.base |
| 58 import email.mime.text |
| 59 import email.utils |
| 60 import email.encoders |
| 61 import gzip |
| 62 import base64 |
| 63 try: |
| 64 from hashlib import md5 |
| 65 except ImportError: |
| 66 from md5 import md5 |
| 67 |
| 68 |
| 69 try: |
| 70 import hashlib |
| 71 _hashfn = hashlib.sha512 |
| 72 except ImportError: |
| 73     # use a distinct name so the md5 function imported above is not shadowed
| 74     import md5 as _md5_module
| 75     _hashfn = _md5_module.md5
| 75 |
| 76 from boto.compat import json |
| 77 |
| 78 # List of Query String Arguments of Interest |
| 79 qsa_of_interest = ['acl', 'cors', 'defaultObjectAcl', 'location', 'logging', |
| 80 'partNumber', 'policy', 'requestPayment', 'torrent', |
| 81 'versioning', 'versionId', 'versions', 'website', |
| 82 'uploads', 'uploadId', 'response-content-type', |
| 83 'response-content-language', 'response-expires', |
| 84 'response-cache-control', 'response-content-disposition', |
| 85 'response-content-encoding', 'delete', 'lifecycle', |
| 86 'tagging', 'restore', |
| 87 # storageClass is a QSA for buckets in Google Cloud Storage. |
| 88 # (StorageClass is associated to individual keys in S3, but |
| 89 # having it listed here should cause no problems because |
| 90 # GET bucket?storageClass is not part of the S3 API.) |
| 91 'storageClass', |
| 92 # websiteConfig is a QSA for buckets in Google Cloud Storage. |
| 93 'websiteConfig'] |
| 94 |
| 95 |
| 96 _first_cap_regex = re.compile('(.)([A-Z][a-z]+)') |
| 97 _number_cap_regex = re.compile('([a-z])([0-9]+)') |
| 98 _end_cap_regex = re.compile('([a-z0-9])([A-Z])') |
| 99 |
| 100 |
| 101 def unquote_v(nv): |
| 102 if len(nv) == 1: |
| 103 return nv |
| 104 else: |
| 105 return (nv[0], urllib.unquote(nv[1])) |
| 106 |
| 107 |
| 108 def canonical_string(method, path, headers, expires=None, |
| 109 provider=None): |
| 110 """ |
| 111     Generates the AWS canonical string for the given parameters.
| 112 """ |
| 113 if not provider: |
| 114 provider = boto.provider.get_default() |
| 115 interesting_headers = {} |
| 116 for key in headers: |
| 117 lk = key.lower() |
| 118         if headers[key] is not None and (lk in ['content-md5', 'content-type', 'date'] or
| 119                 lk.startswith(provider.header_prefix)):
| 120 interesting_headers[lk] = str(headers[key]).strip() |
| 121 |
| 122 # these keys get empty strings if they don't exist |
| 123 if 'content-type' not in interesting_headers: |
| 124 interesting_headers['content-type'] = '' |
| 125 if 'content-md5' not in interesting_headers: |
| 126 interesting_headers['content-md5'] = '' |
| 127 |
| 128 # just in case someone used this. it's not necessary in this lib. |
| 129 if provider.date_header in interesting_headers: |
| 130 interesting_headers['date'] = '' |
| 131 |
| 132 # if you're using expires for query string auth, then it trumps date |
| 133 # (and provider.date_header) |
| 134 if expires: |
| 135 interesting_headers['date'] = str(expires) |
| 136 |
| 137 sorted_header_keys = sorted(interesting_headers.keys()) |
| 138 |
| 139 buf = "%s\n" % method |
| 140 for key in sorted_header_keys: |
| 141 val = interesting_headers[key] |
| 142 if key.startswith(provider.header_prefix): |
| 143 buf += "%s:%s\n" % (key, val) |
| 144 else: |
| 145 buf += "%s\n" % val |
| 146 |
| 147 # don't include anything after the first ? in the resource... |
| 148 # unless it is one of the QSA of interest, defined above |
| 149 t = path.split('?') |
| 150 buf += t[0] |
| 151 |
| 152 if len(t) > 1: |
| 153 qsa = t[1].split('&') |
| 154 qsa = [a.split('=', 1) for a in qsa] |
| 155 qsa = [unquote_v(a) for a in qsa if a[0] in qsa_of_interest] |
| 156 if len(qsa) > 0: |
| 157 qsa.sort(cmp=lambda x, y:cmp(x[0], y[0])) |
| 158 qsa = ['='.join(a) for a in qsa] |
| 159 buf += '?' |
| 160 buf += '&'.join(qsa) |
| 161 |
| 162 return buf |
| 163 |
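| # Illustrative usage of canonical_string (editor's addition, not part of the
| # original module).  Assuming the default AWS provider, a GET of
| # '/bucket/key?acl' with a Date header and one x-amz- header signs as:
| #
| #   hdrs = {'Date': 'Thu, 17 Nov 2005 18:49:58 GMT', 'x-amz-meta-color': 'blue'}
| #   canonical_string('GET', '/bucket/key?acl', hdrs)
| #   # -> 'GET\n\n\nThu, 17 Nov 2005 18:49:58 GMT\nx-amz-meta-color:blue\n/bucket/key?acl'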
| 164 |
| 165 def merge_meta(headers, metadata, provider=None): |
| 166 if not provider: |
| 167 provider = boto.provider.get_default() |
| 168 metadata_prefix = provider.metadata_prefix |
| 169 final_headers = headers.copy() |
| 170 for k in metadata.keys(): |
| 171 if k.lower() in ['cache-control', 'content-md5', 'content-type', |
| 172 'content-encoding', 'content-disposition', |
| 173 'date', 'expires']: |
| 174 final_headers[k] = metadata[k] |
| 175 else: |
| 176 final_headers[metadata_prefix + k] = metadata[k] |
| 177 |
| 178 return final_headers |
| 179 |
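| # Illustrative sketch (editor's addition): with the default AWS provider,
| # whose metadata_prefix is 'x-amz-meta-', user metadata keys are prefixed
| # while recognized HTTP headers pass through unchanged.
| #
| #   merge_meta({'Content-Type': 'text/plain'}, {'color': 'blue'})
| #   # -> {'Content-Type': 'text/plain', 'x-amz-meta-color': 'blue'}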
| 180 |
| 181 def get_aws_metadata(headers, provider=None): |
| 182 if not provider: |
| 183 provider = boto.provider.get_default() |
| 184 metadata_prefix = provider.metadata_prefix |
| 185 metadata = {} |
| 186 for hkey in headers.keys(): |
| 187 if hkey.lower().startswith(metadata_prefix): |
| 188 val = urllib.unquote_plus(headers[hkey]) |
| 189 try: |
| 190 metadata[hkey[len(metadata_prefix):]] = unicode(val, 'utf-8') |
| 191 except UnicodeDecodeError: |
| 192 metadata[hkey[len(metadata_prefix):]] = val |
| 193 del headers[hkey] |
| 194 return metadata |
| 195 |
| 196 |
| 197 def retry_url(url, retry_on_404=True, num_retries=10): |
| 198 """ |
| 199 Retry a url. This is specifically used for accessing the metadata |
| 200 service on an instance. Since this address should never be proxied |
| 201 (for security reasons), we create a ProxyHandler with a NULL |
| 202 dictionary to override any proxy settings in the environment. |
| 203 """ |
| 204 for i in range(0, num_retries): |
| 205 try: |
| 206 proxy_handler = urllib2.ProxyHandler({}) |
| 207 opener = urllib2.build_opener(proxy_handler) |
| 208 req = urllib2.Request(url) |
| 209 r = opener.open(req) |
| 210 result = r.read() |
| 211             return result
| 213 except urllib2.HTTPError, e: |
| 214 # in 2.6 you use getcode(), in 2.5 and earlier you use code |
| 215 if hasattr(e, 'getcode'): |
| 216 code = e.getcode() |
| 217 else: |
| 218 code = e.code |
| 219 if code == 404 and not retry_on_404: |
| 220 return '' |
| 221 except urllib2.URLError, e: |
| 222 raise e |
| 223 except Exception, e: |
| 224 pass |
| 225 boto.log.exception('Caught exception reading instance data') |
| 226 time.sleep(2 ** i) |
| 227 boto.log.error('Unable to read instance data, giving up') |
| 228 return '' |
| 229 |
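| # Illustrative usage (editor's addition): fetch a single metadata value with
| # exponential backoff; an empty string is returned if every attempt fails.
| #
| #   instance_id = retry_url(
| #       'http://169.254.169.254/latest/meta-data/instance-id', num_retries=3)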
| 230 |
| 231 def _get_instance_metadata(url, num_retries): |
| 232 return LazyLoadMetadata(url, num_retries) |
| 233 |
| 234 |
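| # Editor's note on the class below: LazyLoadMetadata behaves like a dict keyed
| # by metadata names, but each value is only fetched from the metadata service
| # the first time its key is accessed; nested categories (names ending in '/')
| # become nested LazyLoadMetadata instances, and public-keys entries of the
| # form 'index=name' are resolved via the 'openssh-key' resource.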
| 235 class LazyLoadMetadata(dict): |
| 236 def __init__(self, url, num_retries): |
| 237 self._url = url |
| 238 self._num_retries = num_retries |
| 239 self._leaves = {} |
| 240 self._dicts = [] |
| 241 data = boto.utils.retry_url(self._url, num_retries=self._num_retries) |
| 242 if data: |
| 243 fields = data.split('\n') |
| 244 for field in fields: |
| 245 if field.endswith('/'): |
| 246 key = field[0:-1] |
| 247 self._dicts.append(key) |
| 248 else: |
| 249 p = field.find('=') |
| 250 if p > 0: |
| 251 key = field[p + 1:] |
| 252 resource = field[0:p] + '/openssh-key' |
| 253 else: |
| 254 key = resource = field |
| 255 self._leaves[key] = resource |
| 256 self[key] = None |
| 257 |
| 258 def _materialize(self): |
| 259 for key in self: |
| 260 self[key] |
| 261 |
| 262 def __getitem__(self, key): |
| 263 if key not in self: |
| 264 # allow dict to throw the KeyError |
| 265 return super(LazyLoadMetadata, self).__getitem__(key) |
| 266 |
| 267 # already loaded |
| 268 val = super(LazyLoadMetadata, self).__getitem__(key) |
| 269 if val is not None: |
| 270 return val |
| 271 |
| 272 if key in self._leaves: |
| 273 resource = self._leaves[key] |
| 274 val = boto.utils.retry_url(self._url + urllib.quote(resource, |
| 275 safe="/:"), |
| 276 num_retries=self._num_retries) |
| 277 if val and val[0] == '{': |
| 278 val = json.loads(val) |
| 279 else: |
| 280 p = val.find('\n') |
| 281 if p > 0: |
| 282 val = val.split('\n') |
| 283 self[key] = val |
| 284 elif key in self._dicts: |
| 285 self[key] = LazyLoadMetadata(self._url + key + '/', |
| 286 self._num_retries) |
| 287 |
| 288 return super(LazyLoadMetadata, self).__getitem__(key) |
| 289 |
| 290 def get(self, key, default=None): |
| 291 try: |
| 292 return self[key] |
| 293 except KeyError: |
| 294 return default |
| 295 |
| 296 def values(self): |
| 297 self._materialize() |
| 298 return super(LazyLoadMetadata, self).values() |
| 299 |
| 300 def items(self): |
| 301 self._materialize() |
| 302 return super(LazyLoadMetadata, self).items() |
| 303 |
| 304 def __str__(self): |
| 305 self._materialize() |
| 306 return super(LazyLoadMetadata, self).__str__() |
| 307 |
| 308 def __repr__(self): |
| 309 self._materialize() |
| 310 return super(LazyLoadMetadata, self).__repr__() |
| 311 |
| 312 |
| 313 def get_instance_metadata(version='latest', url='http://169.254.169.254', |
| 314 timeout=None, num_retries=5): |
| 315 """ |
| 316 Returns the instance metadata as a nested Python dictionary. |
| 317 Simple values (e.g. local_hostname, hostname, etc.) will be |
| 318 stored as string values. Values such as ancestor-ami-ids will |
| 319 be stored in the dict as a list of string values. More complex |
| 320 fields such as public-keys and will be stored as nested dicts. |
| 321 |
| 322 If the timeout is specified, the connection to the specified url |
| 323 will time out after the specified number of seconds. |
| 324 |
| 325 """ |
| 326 if timeout is not None: |
| 327 original = socket.getdefaulttimeout() |
| 328 socket.setdefaulttimeout(timeout) |
| 329 try: |
| 330 return _get_instance_metadata('%s/%s/meta-data/' % (url, version), |
| 331 num_retries=num_retries) |
| 332 except urllib2.URLError, e: |
| 333 return None |
| 334 finally: |
| 335 if timeout is not None: |
| 336 socket.setdefaulttimeout(original) |
| 337 |
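| # Illustrative usage (editor's addition); only meaningful when run on an EC2
| # instance, otherwise None is returned.
| #
| #   md = get_instance_metadata(timeout=2, num_retries=1)
| #   if md is not None:
| #       print md['instance-id']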
| 338 |
| 339 def get_instance_identity(version='latest', url='http://169.254.169.254', |
| 340 timeout=None, num_retries=5): |
| 341 """ |
| 342 Returns the instance identity as a nested Python dictionary. |
| 343 """ |
| 344 iid = {} |
| 345     base_url = '%s/%s/dynamic/instance-identity' % (url, version)
| 346 if timeout is not None: |
| 347 original = socket.getdefaulttimeout() |
| 348 socket.setdefaulttimeout(timeout) |
| 349 try: |
| 350 data = retry_url(base_url, num_retries=num_retries) |
| 351 fields = data.split('\n') |
| 352 for field in fields: |
| 353 val = retry_url(base_url + '/' + field + '/') |
| 354             if val and val[0] == '{':
| 355 val = json.loads(val) |
| 356 if field: |
| 357 iid[field] = val |
| 358 return iid |
| 359 except urllib2.URLError, e: |
| 360 return None |
| 361 finally: |
| 362 if timeout is not None: |
| 363 socket.setdefaulttimeout(original) |
| 364 |
| 365 |
| 366 def get_instance_userdata(version='latest', sep=None, |
| 367 url='http://169.254.169.254'): |
| 368 ud_url = '%s/%s/user-data' % (url, version) |
| 369 user_data = retry_url(ud_url, retry_on_404=False) |
| 370 if user_data: |
| 371 if sep: |
| 372 l = user_data.split(sep) |
| 373 user_data = {} |
| 374 for nvpair in l: |
| 375                 t = nvpair.split('=', 1)
| 376 user_data[t[0].strip()] = t[1].strip() |
| 377 return user_data |
| 378 |
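| # Illustrative usage (editor's addition): raw user data of 'foo=1|bar=2'
| # parsed with the optional separator.
| #
| #   ud = get_instance_userdata(sep='|')
| #   # ud == {'foo': '1', 'bar': '2'}
|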
| 379 ISO8601 = '%Y-%m-%dT%H:%M:%SZ' |
| 380 ISO8601_MS = '%Y-%m-%dT%H:%M:%S.%fZ' |
| 381 |
| 382 |
| 383 def get_ts(ts=None): |
| 384 if not ts: |
| 385 ts = time.gmtime() |
| 386 return time.strftime(ISO8601, ts) |
| 387 |
| 388 |
| 389 def parse_ts(ts): |
| 390 ts = ts.strip() |
| 391 try: |
| 392 dt = datetime.datetime.strptime(ts, ISO8601) |
| 393 return dt |
| 394 except ValueError: |
| 395 dt = datetime.datetime.strptime(ts, ISO8601_MS) |
| 396 return dt |
| 397 |
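| # Illustrative usage (editor's addition):
| #
| #   get_ts()                               # e.g. '2012-06-01T12:00:00Z'
| #   parse_ts('2012-06-01T12:00:00.123Z')   # the millisecond form also parses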
| 398 |
| 399 def find_class(module_name, class_name=None): |
| 400 if class_name: |
| 401 module_name = "%s.%s" % (module_name, class_name) |
| 402 modules = module_name.split('.') |
| 403 c = None |
| 404 |
| 405 try: |
| 406 for m in modules[1:]: |
| 407 if c: |
| 408 c = getattr(c, m) |
| 409 else: |
| 410 c = getattr(__import__(".".join(modules[0:-1])), m) |
| 411 return c |
| 412 except: |
| 413 return None |
| 414 |
| 415 |
| 416 def update_dme(username, password, dme_id, ip_address): |
| 417 """ |
| 418 Update your Dynamic DNS record with DNSMadeEasy.com |
| 419 """ |
| 420 dme_url = 'https://www.dnsmadeeasy.com/servlet/updateip' |
| 421 dme_url += '?username=%s&password=%s&id=%s&ip=%s' |
| 422 s = urllib2.urlopen(dme_url % (username, password, dme_id, ip_address)) |
| 423 return s.read() |
| 424 |
| 425 |
| 426 def fetch_file(uri, file=None, username=None, password=None): |
| 427 """ |
| 428     Fetch a file based on the URI provided.  If you do not pass in a file
| 429     pointer, a tempfile.NamedTemporaryFile is returned (or None if the file
| 430     could not be retrieved).
| 431     The URI can be either an HTTP url or "s3://bucket_name/key_name".
| 432 """ |
| 433 boto.log.info('Fetching %s' % uri) |
| 434     if file is None:
| 435 file = tempfile.NamedTemporaryFile() |
| 436 try: |
| 437 if uri.startswith('s3://'): |
| 438 bucket_name, key_name = uri[len('s3://'):].split('/', 1) |
| 439 c = boto.connect_s3(aws_access_key_id=username, |
| 440 aws_secret_access_key=password) |
| 441 bucket = c.get_bucket(bucket_name) |
| 442 key = bucket.get_key(key_name) |
| 443 key.get_contents_to_file(file) |
| 444 else: |
| 445 if username and password: |
| 446 passman = urllib2.HTTPPasswordMgrWithDefaultRealm() |
| 447 passman.add_password(None, uri, username, password) |
| 448 authhandler = urllib2.HTTPBasicAuthHandler(passman) |
| 449 opener = urllib2.build_opener(authhandler) |
| 450 urllib2.install_opener(opener) |
| 451 s = urllib2.urlopen(uri) |
| 452 file.write(s.read()) |
| 453 file.seek(0) |
| 454     except:
| 456         boto.log.exception('Problem Retrieving file: %s' % uri)
| 457         file = None
| 458 return file |
| 459 |
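| # Illustrative usage (editor's addition; the bucket and key names are made up):
| #
| #   fp = fetch_file('s3://my-bucket/config/app.cfg')
| #   if fp is not None:
| #       print fp.read()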
| 460 |
| 461 class ShellCommand(object): |
| 462 |
| 463 def __init__(self, command, wait=True, fail_fast=False, cwd=None): |
| 464 self.exit_code = 0 |
| 465 self.command = command |
| 466 self.log_fp = StringIO.StringIO() |
| 467 self.wait = wait |
| 468 self.fail_fast = fail_fast |
| 469 self.run(cwd=cwd) |
| 470 |
| 471 def run(self, cwd=None): |
| 472 boto.log.info('running:%s' % self.command) |
| 473 self.process = subprocess.Popen(self.command, shell=True, |
| 474 stdin=subprocess.PIPE, |
| 475 stdout=subprocess.PIPE, |
| 476 stderr=subprocess.PIPE, |
| 477 cwd=cwd) |
| 478         if self.wait:
| 479             while self.process.poll() is None:
| 480 time.sleep(1) |
| 481 t = self.process.communicate() |
| 482 self.log_fp.write(t[0]) |
| 483 self.log_fp.write(t[1]) |
| 484 boto.log.info(self.log_fp.getvalue()) |
| 485 self.exit_code = self.process.returncode |
| 486 |
| 487 if self.fail_fast and self.exit_code != 0: |
| 488                 raise Exception("Command %s failed with status %s" % (self.command, self.exit_code))
| 489 |
| 490 return self.exit_code |
| 491 |
| 492 def setReadOnly(self, value): |
| 493 raise AttributeError |
| 494 |
| 495 def getStatus(self): |
| 496 return self.exit_code |
| 497 |
| 498     status = property(getStatus, setReadOnly, None, 'The exit code for the command')
| 499 |
| 500 def getOutput(self): |
| 501 return self.log_fp.getvalue() |
| 502 |
| 503     output = property(getOutput, setReadOnly, None, 'The STDOUT and STDERR output of the command')
| 504 |
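| # Illustrative usage (editor's addition): the command runs on construction,
| # so its exit status and captured output are available immediately.
| #
| #   cmd = ShellCommand('ls -l /tmp')
| #   print cmd.status     # exit code
| #   print cmd.output     # combined stdout and stderr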
| 505 |
| 506 class AuthSMTPHandler(logging.handlers.SMTPHandler): |
| 507 """ |
| 508 This class extends the SMTPHandler in the standard Python logging module |
| 509 to accept a username and password on the constructor and to then use those |
| 510 credentials to authenticate with the SMTP server. To use this, you could |
| 511 add something like this in your boto config file: |
| 512 |
| 513 [handler_hand07] |
| 514 class=boto.utils.AuthSMTPHandler |
| 515 level=WARN |
| 516 formatter=form07 |
| 517     args=('localhost', 'username', 'password', 'from@abc', ['user1@abc', 'user2@xyz'], 'Logger Subject')
| 518 """ |
| 519 |
| 520 def __init__(self, mailhost, username, password, |
| 521 fromaddr, toaddrs, subject): |
| 522 """ |
| 523 Initialize the handler. |
| 524 |
| 525 We have extended the constructor to accept a username/password |
| 526 for SMTP authentication. |
| 527 """ |
| 528 logging.handlers.SMTPHandler.__init__(self, mailhost, fromaddr, |
| 529 toaddrs, subject) |
| 530 self.username = username |
| 531 self.password = password |
| 532 |
| 533 def emit(self, record): |
| 534 """ |
| 535 Emit a record. |
| 536 |
| 537 Format the record and send it to the specified addressees. |
| 538 It would be really nice if I could add authorization to this class |
| 539 without having to resort to cut and paste inheritance but, no. |
| 540 """ |
| 541 try: |
| 542 port = self.mailport |
| 543 if not port: |
| 544 port = smtplib.SMTP_PORT |
| 545 smtp = smtplib.SMTP(self.mailhost, port) |
| 546 smtp.login(self.username, self.password) |
| 547 msg = self.format(record) |
| 548 msg = "From: %s\r\nTo: %s\r\nSubject: %s\r\nDate: %s\r\n\r\n%s" % ( |
| 549 self.fromaddr, |
| 550 ','.join(self.toaddrs), |
| 551 self.getSubject(record), |
| 552 email.utils.formatdate(), msg) |
| 553 smtp.sendmail(self.fromaddr, self.toaddrs, msg) |
| 554 smtp.quit() |
| 555 except (KeyboardInterrupt, SystemExit): |
| 556 raise |
| 557 except: |
| 558 self.handleError(record) |
| 559 |
| 560 |
| 561 class LRUCache(dict): |
| 562 """A dictionary-like object that stores only a certain number of items, and |
| 563 discards its least recently used item when full. |
| 564 |
| 565 >>> cache = LRUCache(3) |
| 566 >>> cache['A'] = 0 |
| 567 >>> cache['B'] = 1 |
| 568 >>> cache['C'] = 2 |
| 569 >>> len(cache) |
| 570 3 |
| 571 |
| 572 >>> cache['A'] |
| 573 0 |
| 574 |
| 575 Adding new items to the cache does not increase its size. Instead, the least |
| 576 recently used item is dropped: |
| 577 |
| 578 >>> cache['D'] = 3 |
| 579 >>> len(cache) |
| 580 3 |
| 581 >>> 'B' in cache |
| 582 False |
| 583 |
| 584 Iterating over the cache returns the keys, starting with the most recently |
| 585 used: |
| 586 |
| 587 >>> for key in cache: |
| 588 ... print key |
| 589 D |
| 590 A |
| 591 C |
| 592 |
| 593 This code is based on the LRUCache class from Genshi which is based on |
| 594 `Myghty <http://www.myghty.org>`_'s LRUCache from ``myghtyutils.util``, |
| 595 written by Mike Bayer and released under the MIT license (Genshi uses the |
| 596 BSD License). |
| 597 """ |
| 598 |
| 599 class _Item(object): |
| 600 def __init__(self, key, value): |
| 601 self.previous = self.next = None |
| 602 self.key = key |
| 603 self.value = value |
| 604 |
| 605 def __repr__(self): |
| 606 return repr(self.value) |
| 607 |
| 608 def __init__(self, capacity): |
| 609 self._dict = dict() |
| 610 self.capacity = capacity |
| 611 self.head = None |
| 612 self.tail = None |
| 613 |
| 614 def __contains__(self, key): |
| 615 return key in self._dict |
| 616 |
| 617 def __iter__(self): |
| 618 cur = self.head |
| 619 while cur: |
| 620 yield cur.key |
| 621 cur = cur.next |
| 622 |
| 623 def __len__(self): |
| 624 return len(self._dict) |
| 625 |
| 626 def __getitem__(self, key): |
| 627 item = self._dict[key] |
| 628 self._update_item(item) |
| 629 return item.value |
| 630 |
| 631 def __setitem__(self, key, value): |
| 632 item = self._dict.get(key) |
| 633 if item is None: |
| 634 item = self._Item(key, value) |
| 635 self._dict[key] = item |
| 636 self._insert_item(item) |
| 637 else: |
| 638 item.value = value |
| 639 self._update_item(item) |
| 640 self._manage_size() |
| 641 |
| 642 def __repr__(self): |
| 643 return repr(self._dict) |
| 644 |
| 645 def _insert_item(self, item): |
| 646 item.previous = None |
| 647 item.next = self.head |
| 648 if self.head is not None: |
| 649 self.head.previous = item |
| 650 else: |
| 651 self.tail = item |
| 652 self.head = item |
| 653 self._manage_size() |
| 654 |
| 655 def _manage_size(self): |
| 656 while len(self._dict) > self.capacity: |
| 657 del self._dict[self.tail.key] |
| 658 if self.tail != self.head: |
| 659 self.tail = self.tail.previous |
| 660 self.tail.next = None |
| 661 else: |
| 662 self.head = self.tail = None |
| 663 |
| 664 def _update_item(self, item): |
| 665 if self.head == item: |
| 666 return |
| 667 |
| 668 previous = item.previous |
| 669 previous.next = item.next |
| 670 if item.next is not None: |
| 671 item.next.previous = previous |
| 672 else: |
| 673 self.tail = previous |
| 674 |
| 675 item.previous = None |
| 676 item.next = self.head |
| 677 self.head.previous = self.head = item |
| 678 |
| 679 |
| 680 class Password(object): |
| 681 """ |
| 682 Password object that stores itself as hashed. |
| 683 Hash defaults to SHA512 if available, MD5 otherwise. |
| 684 """ |
| 685 hashfunc = _hashfn |
| 686 |
| 687 def __init__(self, str=None, hashfunc=None): |
| 688 """ |
| 689 Load the string from an initial value, this should be the |
| 690 raw hashed password. |
| 691 """ |
| 692 self.str = str |
| 693 if hashfunc: |
| 694 self.hashfunc = hashfunc |
| 695 |
| 696 def set(self, value): |
| 697 self.str = self.hashfunc(value).hexdigest() |
| 698 |
| 699 def __str__(self): |
| 700 return str(self.str) |
| 701 |
| 702 def __eq__(self, other): |
| 703         if other is None:
| 704 return False |
| 705 return str(self.hashfunc(other).hexdigest()) == str(self.str) |
| 706 |
| 707 def __len__(self): |
| 708 if self.str: |
| 709 return len(self.str) |
| 710 else: |
| 711 return 0 |
| 712 |
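| # Illustrative usage (editor's addition): only the digest is stored, and
| # comparison re-hashes the candidate string.
| #
| #   p = Password()
| #   p.set('my secret')
| #   p == 'my secret'     # True
| #   p == 'letmein'       # False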
| 713 |
| 714 def notify(subject, body=None, html_body=None, to_string=None, |
| 715 attachments=None, append_instance_id=True): |
| 716 attachments = attachments or [] |
| 717 if append_instance_id: |
| 718         subject = "[%s] %s" % (boto.config.get_value("Instance", "instance-id"), subject)
| 719 if not to_string: |
| 720 to_string = boto.config.get_value('Notification', 'smtp_to', None) |
| 721 if to_string: |
| 722 try: |
| 723             from_string = boto.config.get_value('Notification', 'smtp_from', 'boto')
| 724 msg = email.mime.multipart.MIMEMultipart() |
| 725 msg['From'] = from_string |
| 726 msg['Reply-To'] = from_string |
| 727 msg['To'] = to_string |
| 728 msg['Date'] = email.utils.formatdate(localtime=True) |
| 729 msg['Subject'] = subject |
| 730 |
| 731 if body: |
| 732 msg.attach(email.mime.text.MIMEText(body)) |
| 733 |
| 734 if html_body: |
| 735 part = email.mime.base.MIMEBase('text', 'html') |
| 736 part.set_payload(html_body) |
| 737 email.encoders.encode_base64(part) |
| 738 msg.attach(part) |
| 739 |
| 740 for part in attachments: |
| 741 msg.attach(part) |
| 742 |
| 743             smtp_host = boto.config.get_value('Notification', 'smtp_host', 'localhost')
| 744 |
| 745 # Alternate port support |
| 746 if boto.config.get_value("Notification", "smtp_port"): |
| 747                 server = smtplib.SMTP(smtp_host, int(boto.config.get_value("Notification", "smtp_port")))
| 748 else: |
| 749 server = smtplib.SMTP(smtp_host) |
| 750 |
| 751 # TLS support |
| 752 if boto.config.getbool("Notification", "smtp_tls"): |
| 753 server.ehlo() |
| 754 server.starttls() |
| 755 server.ehlo() |
| 756 smtp_user = boto.config.get_value('Notification', 'smtp_user', '') |
| 757 smtp_pass = boto.config.get_value('Notification', 'smtp_pass', '') |
| 758 if smtp_user: |
| 759 server.login(smtp_user, smtp_pass) |
| 760 server.sendmail(from_string, to_string, msg.as_string()) |
| 761 server.quit() |
| 762 except: |
| 763 boto.log.exception('notify failed') |
| 764 |
| 765 |
| 766 def get_utf8_value(value): |
| 767 if not isinstance(value, str) and not isinstance(value, unicode): |
| 768 value = str(value) |
| 769 if isinstance(value, unicode): |
| 770 return value.encode('utf-8') |
| 771 else: |
| 772 return value |
| 773 |
| 774 |
| 775 def mklist(value): |
| 776 if not isinstance(value, list): |
| 777 if isinstance(value, tuple): |
| 778 value = list(value) |
| 779 else: |
| 780 value = [value] |
| 781 return value |
| 782 |
| 783 |
| 784 def pythonize_name(name): |
| 785 """Convert camel case to a "pythonic" name. |
| 786 |
| 787 Examples:: |
| 788 |
| 789 pythonize_name('CamelCase') -> 'camel_case' |
| 790 pythonize_name('already_pythonized') -> 'already_pythonized' |
| 791 pythonize_name('HTTPRequest') -> 'http_request' |
| 792 pythonize_name('HTTPStatus200Ok') -> 'http_status_200_ok' |
| 793 pythonize_name('UPPER') -> 'upper' |
| 794 pythonize_name('') -> '' |
| 795 |
| 796 """ |
| 797 s1 = _first_cap_regex.sub(r'\1_\2', name) |
| 798 s2 = _number_cap_regex.sub(r'\1_\2', s1) |
| 799 return _end_cap_regex.sub(r'\1_\2', s2).lower() |
| 800 |
| 801 |
| 802 def write_mime_multipart(content, compress=False, deftype='text/plain', delimiter=':'):
| 803 """Description: |
| 804 :param content: A list of tuples of name-content pairs. This is used |
| 805 instead of a dict to ensure that scripts run in order |
| 806 :type list of tuples: |
| 807 |
| 808 :param compress: Use gzip to compress the scripts, defaults to no compressio
n |
| 809 :type bool: |
| 810 |
| 811 :param deftype: The type that should be assumed if nothing else can be figur
ed out |
| 812 :type str: |
| 813 |
| 814 :param delimiter: mime delimiter |
| 815 :type str: |
| 816 |
| 817 :return: Final mime multipart |
| 818 :rtype: str: |
| 819 """ |
| 820 wrapper = email.mime.multipart.MIMEMultipart() |
| 821 for name, con in content: |
| 822 definite_type = guess_mime_type(con, deftype) |
| 823 maintype, subtype = definite_type.split('/', 1) |
| 824 if maintype == 'text': |
| 825 mime_con = email.mime.text.MIMEText(con, _subtype=subtype) |
| 826 else: |
| 827 mime_con = email.mime.base.MIMEBase(maintype, subtype) |
| 828 mime_con.set_payload(con) |
| 829 # Encode the payload using Base64 |
| 830 email.encoders.encode_base64(mime_con) |
| 831 mime_con.add_header('Content-Disposition', 'attachment', filename=name) |
| 832 wrapper.attach(mime_con) |
| 833 rcontent = wrapper.as_string() |
| 834 |
| 835 if compress: |
| 836 buf = StringIO.StringIO() |
| 837 gz = gzip.GzipFile(mode='wb', fileobj=buf) |
| 838 try: |
| 839 gz.write(rcontent) |
| 840 finally: |
| 841 gz.close() |
| 842 rcontent = buf.getvalue() |
| 843 |
| 844 return rcontent |
| 845 |
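| # Illustrative usage (editor's addition): combining a cloud-config part and a
| # shell script into one user-data payload; the part bodies are placeholders.
| #
| #   parts = [('config.txt', '#cloud-config\nrepo_update: true'),
| #            ('setup.sh', '#!/bin/bash\necho hello')]
| #   user_data = write_mime_multipart(parts, compress=True)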
| 846 |
| 847 def guess_mime_type(content, deftype): |
| 848 """Description: Guess the mime type of a block of text |
| 849 :param content: content we're finding the type of |
| 850 :type str: |
| 851 |
| 852 :param deftype: Default mime type |
| 853 :type str: |
| 854 |
| 855 :rtype: <type>: |
| 856 :return: <description> |
| 857 """ |
| 858 #Mappings recognized by cloudinit |
| 859 starts_with_mappings = { |
| 860 '#include': 'text/x-include-url', |
| 861 '#!': 'text/x-shellscript', |
| 862 '#cloud-config': 'text/cloud-config', |
| 863 '#upstart-job': 'text/upstart-job', |
| 864 '#part-handler': 'text/part-handler', |
| 865 '#cloud-boothook': 'text/cloud-boothook' |
| 866 } |
| 867 rtype = deftype |
| 868 for possible_type, mimetype in starts_with_mappings.items(): |
| 869 if content.startswith(possible_type): |
| 870 rtype = mimetype |
| 871 break |
| 872     return rtype
| 873 |
| 874 |
| 875 def compute_md5(fp, buf_size=8192, size=None): |
| 876 """ |
| 877 Compute MD5 hash on passed file and return results in a tuple of values. |
| 878 |
| 879 :type fp: file |
| 880 :param fp: File pointer to the file to MD5 hash. The file pointer |
| 881 will be reset to its current location before the |
| 882 method returns. |
| 883 |
| 884 :type buf_size: integer |
| 885 :param buf_size: Number of bytes per read request. |
| 886 |
| 887 :type size: int |
| 888     :param size: (optional) The maximum number of bytes to read from
| 889                  the file pointer (fp). This is useful when uploading
| 890                  a file in multiple parts where the file is being
| 891                  split in place into different parts. Fewer bytes may
| 892                  be available.
| 893 |
| 894 :rtype: tuple |
| 895 :return: A tuple containing the hex digest version of the MD5 hash |
| 896 as the first element, the base64 encoded version of the |
| 897 plain digest as the second element and the data size as |
| 898 the third element. |
| 899 """ |
| 900 return compute_hash(fp, buf_size, size, hash_algorithm=md5) |
| 901 |
| 902 |
| 903 def compute_hash(fp, buf_size=8192, size=None, hash_algorithm=md5): |
| 904 hash_obj = hash_algorithm() |
| 905 spos = fp.tell() |
| 906 if size and size < buf_size: |
| 907 s = fp.read(size) |
| 908 else: |
| 909 s = fp.read(buf_size) |
| 910 while s: |
| 911 hash_obj.update(s) |
| 912 if size: |
| 913 size -= len(s) |
| 914 if size <= 0: |
| 915 break |
| 916 if size and size < buf_size: |
| 917 s = fp.read(size) |
| 918 else: |
| 919 s = fp.read(buf_size) |
| 920 hex_digest = hash_obj.hexdigest() |
| 921 base64_digest = base64.encodestring(hash_obj.digest()) |
| 922 if base64_digest[-1] == '\n': |
| 923 base64_digest = base64_digest[0:-1] |
| 924 # data_size based on bytes read. |
| 925 data_size = fp.tell() - spos |
| 926 fp.seek(spos) |
| 927 return (hex_digest, base64_digest, data_size) |
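|
| # Illustrative usage (editor's addition):
| #
| #   fp = open('somefile', 'rb')
| #   hex_md5, b64_md5, size = compute_md5(fp)
| #   # hex_md5 suits an ETag comparison, b64_md5 a Content-MD5 header,
| #   # and the file position is restored before the function returns.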