Mercurial > repos > bcclaywell > argo_navis
comparison venv/lib/python2.7/site-packages/boto/utils.py @ 0:d67268158946 draft
planemo upload commit a3f181f5f126803c654b3a66dd4e83a48f7e203b
author | bcclaywell |
---|---|
date | Mon, 12 Oct 2015 17:43:33 -0400 |
parents | |
children |
comparison
equal
deleted
inserted
replaced
-1:000000000000 | 0:d67268158946 |
---|---|
1 # Copyright (c) 2006-2012 Mitch Garnaat http://garnaat.org/ | |
2 # Copyright (c) 2010, Eucalyptus Systems, Inc. | |
3 # Copyright (c) 2012 Amazon.com, Inc. or its affiliates. | |
4 # All rights reserved. | |
5 # | |
6 # Permission is hereby granted, free of charge, to any person obtaining a | |
7 # copy of this software and associated documentation files (the | |
8 # "Software"), to deal in the Software without restriction, including | |
9 # without limitation the rights to use, copy, modify, merge, publish, dis- | |
10 # tribute, sublicense, and/or sell copies of the Software, and to permit | |
11 # persons to whom the Software is furnished to do so, subject to the fol- | |
12 # lowing conditions: | |
13 # | |
14 # The above copyright notice and this permission notice shall be included | |
15 # in all copies or substantial portions of the Software. | |
16 # | |
17 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
18 # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- | |
19 # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT | |
20 # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, | |
21 # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | |
22 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS | |
23 # IN THE SOFTWARE. | |
24 | |
25 # | |
26 # Parts of this code were copied or derived from sample code supplied by AWS. | |
27 # The following notice applies to that code. | |
28 # | |
29 # This software code is made available "AS IS" without warranties of any | |
30 # kind. You may copy, display, modify and redistribute the software | |
31 # code either by itself or as incorporated into your code; provided that | |
32 # you do not remove any proprietary notices. Your use of this software | |
33 # code is at your own risk and you waive any claim against Amazon | |
34 # Digital Services, Inc. or its affiliates with respect to your use of | |
35 # this software code. (c) 2006 Amazon Digital Services, Inc. or its | |
36 # affiliates. | |
37 | |
38 """ | |
39 Some handy utility functions used by several classes. | |
40 """ | |
41 | |
42 import subprocess | |
43 import time | |
44 import logging.handlers | |
45 import boto | |
46 import boto.provider | |
47 import tempfile | |
48 import random | |
49 import smtplib | |
50 import datetime | |
51 import re | |
52 import email.mime.multipart | |
53 import email.mime.base | |
54 import email.mime.text | |
55 import email.utils | |
56 import email.encoders | |
57 import gzip | |
58 import threading | |
59 import locale | |
60 from boto.compat import six, StringIO, urllib, encodebytes | |
61 | |
62 from contextlib import contextmanager | |
63 | |
64 from hashlib import md5, sha512 | |
65 _hashfn = sha512 | |
66 | |
67 from boto.compat import json | |
68 | |
69 try: | |
70 from boto.compat.json import JSONDecodeError | |
71 except ImportError: | |
72 JSONDecodeError = ValueError | |
73 | |
# List of Query String Arguments of Interest: only these survive into the
# canonical string built by canonical_string() for request signing.
qsa_of_interest = ['acl', 'cors', 'defaultObjectAcl', 'location', 'logging',
                   'partNumber', 'policy', 'requestPayment', 'torrent',
                   'versioning', 'versionId', 'versions', 'website',
                   'uploads', 'uploadId', 'response-content-type',
                   'response-content-language', 'response-expires',
                   'response-cache-control', 'response-content-disposition',
                   'response-content-encoding', 'delete', 'lifecycle',
                   'tagging', 'restore',
                   # storageClass is a QSA for buckets in Google Cloud Storage.
                   # (StorageClass is associated to individual keys in S3, but
                   # having it listed here should cause no problems because
                   # GET bucket?storageClass is not part of the S3 API.)
                   'storageClass',
                   # websiteConfig is a QSA for buckets in Google Cloud
                   # Storage.
                   'websiteConfig',
                   # compose is a QSA for objects in Google Cloud Storage.
                   'compose']


# Regexes used by pythonize_name() to turn CamelCase into snake_case:
_first_cap_regex = re.compile('(.)([A-Z][a-z]+)')  # boundary before "Xx" runs
_number_cap_regex = re.compile('([a-z])([0-9]+)')  # letter -> digit boundary
_end_cap_regex = re.compile('([a-z0-9])([A-Z])')   # lower/digit -> upper boundary
98 | |
99 | |
def unquote_v(nv):
    """Percent-decode the value of a (name, value) query-string pair.

    A single-element tuple (a bare query-string argument with no value)
    is returned unchanged.
    """
    if len(nv) != 1:
        name, value = nv[0], nv[1]
        return (name, urllib.parse.unquote(value))
    return nv
105 | |
106 | |
def canonical_string(method, path, headers, expires=None,
                     provider=None):
    """
    Build the canonical string that AWS signs for a request.

    The string consists of the HTTP method, the values of the signed
    standard headers (content-md5, content-type, date -- empty when
    absent), each provider-specific (e.g. ``x-amz-``) header as a
    ``name:value`` line, and finally the resource path with only those
    query-string arguments that participate in signing.
    """
    if not provider:
        provider = boto.provider.get_default()

    # Collect the headers that take part in the signature, keyed by
    # their lower-cased names.
    signed = {}
    for key in headers:
        lk = key.lower()
        if headers[key] is not None and \
                (lk in ['content-md5', 'content-type', 'date'] or
                 lk.startswith(provider.header_prefix)):
            signed[lk] = str(headers[key]).strip()

    # content-type and content-md5 always occupy a slot, even if empty.
    for required in ('content-type', 'content-md5'):
        if required not in signed:
            signed[required] = ''

    # A provider-specific date header supersedes the standard Date.
    if provider.date_header in signed:
        signed['date'] = ''
    # Query-string auth: the expiry timestamp takes the place of the date.
    if expires:
        signed['date'] = str(expires)

    lines = [method]
    for lk in sorted(signed.keys()):
        if lk.startswith(provider.header_prefix):
            lines.append('%s:%s' % (lk, signed[lk]))
        else:
            lines.append(signed[lk])
    buf = '\n'.join(lines) + '\n'

    # Only the resource up to the first '?' is signed, plus any
    # query-string arguments of interest (sorted by name).
    t = path.split('?')
    buf += t[0]

    if len(t) > 1:
        pairs = [a.split('=', 1) for a in t[1].split('&')]
        pairs = [unquote_v(a) for a in pairs if a[0] in qsa_of_interest]
        if len(pairs) > 0:
            pairs.sort(key=lambda x: x[0])
            buf += '?' + '&'.join('='.join(a) for a in pairs)

    return buf
163 | |
164 | |
def merge_meta(headers, metadata, provider=None):
    """Return a copy of *headers* with the *metadata* entries folded in.

    Keys that are base user-settable fields go in verbatim; every other
    key is namespaced with the provider's metadata prefix.  The input
    ``headers`` dict is not modified.
    """
    if not provider:
        provider = boto.provider.get_default()
    prefix = provider.metadata_prefix
    merged = headers.copy()
    for name in metadata.keys():
        if name.lower() in boto.s3.key.Key.base_user_settable_fields:
            merged[name] = metadata[name]
        else:
            merged[prefix + name] = metadata[name]
    return merged
177 | |
178 | |
def get_aws_metadata(headers, provider=None):
    """
    Extract provider metadata headers into a plain dict.

    Headers whose (case-insensitive) name starts with the provider's
    metadata prefix are removed from *headers*; their percent-decoded
    values are returned keyed by the name with the prefix stripped.
    """
    if not provider:
        provider = boto.provider.get_default()
    metadata_prefix = provider.metadata_prefix
    metadata = {}
    # Iterate over a snapshot of the keys: the loop deletes from
    # ``headers``, which raises RuntimeError on a live dict view
    # under Python 3.
    for hkey in list(headers.keys()):
        if hkey.lower().startswith(metadata_prefix):
            val = urllib.parse.unquote(headers[hkey])
            if isinstance(val, bytes):
                try:
                    val = val.decode('utf-8')
                except UnicodeDecodeError:
                    # Just leave the value as-is
                    pass
            metadata[hkey[len(metadata_prefix):]] = val
            del headers[hkey]
    return metadata
196 | |
197 | |
def retry_url(url, retry_on_404=True, num_retries=10, timeout=None):
    """
    Retry a url. This is specifically used for accessing the metadata
    service on an instance. Since this address should never be proxied
    (for security reasons), we create a ProxyHandler with a NULL
    dictionary to override any proxy settings in the environment.

    :param url: URL to fetch.
    :param retry_on_404: When False, a 404 response returns '' immediately
        instead of being retried.
    :param num_retries: Maximum number of attempts; the sleep between
        attempts grows exponentially, capped by Boto's max_retry_delay.
    :param timeout: Optional per-request socket timeout in seconds.
    :return: The response body as text, or '' on failure.
    """
    for i in range(0, num_retries):
        try:
            # An empty ProxyHandler guarantees the (link-local) metadata
            # address is never routed through a proxy.
            proxy_handler = urllib.request.ProxyHandler({})
            opener = urllib.request.build_opener(proxy_handler)
            req = urllib.request.Request(url)
            r = opener.open(req, timeout=timeout)
            result = r.read()

            # Python 3 returns bytes; normalize to text when possible.
            if(not isinstance(result, six.string_types) and
               hasattr(result, 'decode')):
                result = result.decode('utf-8')

            return result
        except urllib.error.HTTPError as e:
            code = e.getcode()
            if code == 404 and not retry_on_404:
                return ''
        except Exception as e:
            # Fall through to the logging/backoff below and retry.
            pass
        boto.log.exception('Caught exception reading instance data')
        # If not on the last iteration of the loop then sleep.
        if i + 1 != num_retries:
            time.sleep(min(2 ** i,
                           boto.config.get('Boto', 'max_retry_delay', 60)))
    boto.log.error('Unable to read instance data, giving up')
    return ''
231 | |
232 | |
def _get_instance_metadata(url, num_retries, timeout=None):
    """Factory wrapper: build a lazily-populated metadata mapping for *url*."""
    return LazyLoadMetadata(url, num_retries, timeout=timeout)
235 | |
236 | |
class LazyLoadMetadata(dict):
    """
    Mapping over the EC2 instance-metadata tree that fetches values on
    first access rather than up front.

    On construction only the listing at ``url`` is fetched.  Each entry
    becomes either a leaf (value fetched lazily by ``__getitem__``) or,
    for names ending in '/', a nested ``LazyLoadMetadata`` subtree.
    """

    def __init__(self, url, num_retries, timeout=None):
        self._url = url
        self._num_retries = num_retries
        self._leaves = {}  # key -> resource path fetched for its value
        self._dicts = []   # keys that are subtrees (listing names ending '/')
        self._timeout = timeout
        data = boto.utils.retry_url(self._url, num_retries=self._num_retries, timeout=self._timeout)
        if data:
            fields = data.split('\n')
            for field in fields:
                if field.endswith('/'):
                    # Trailing slash marks a subtree; materialized lazily.
                    key = field[0:-1]
                    self._dicts.append(key)
                else:
                    p = field.find('=')
                    if p > 0:
                        # "index=name" style listing (e.g. public-keys):
                        # the name is the key and the value lives at
                        # "<index>/openssh-key".
                        key = field[p + 1:]
                        resource = field[0:p] + '/openssh-key'
                    else:
                        key = resource = field
                    self._leaves[key] = resource
                # None is the "not fetched yet" placeholder.
                self[key] = None

    def _materialize(self):
        # Force every lazy value to be fetched (indexing triggers the load).
        for key in self:
            self[key]

    def __getitem__(self, key):
        if key not in self:
            # allow dict to throw the KeyError
            return super(LazyLoadMetadata, self).__getitem__(key)

        # already loaded
        val = super(LazyLoadMetadata, self).__getitem__(key)
        if val is not None:
            return val

        if key in self._leaves:
            resource = self._leaves[key]
            last_exception = None

            # Retry loop; the for/else below runs only when every
            # attempt failed (no break).
            for i in range(0, self._num_retries):
                try:
                    val = boto.utils.retry_url(
                        self._url + urllib.parse.quote(resource,
                                                       safe="/:"),
                        num_retries=self._num_retries,
                        timeout=self._timeout)
                    if val and val[0] == '{':
                        # JSON document (e.g. identity documents).
                        val = json.loads(val)
                        break
                    else:
                        # Multi-line answers become lists of strings.
                        p = val.find('\n')
                        if p > 0:
                            val = val.split('\n')
                        break

                except JSONDecodeError as e:
                    # Corrupted JSON is considered transient; retry.
                    boto.log.debug(
                        "encountered '%s' exception: %s" % (
                            e.__class__.__name__, e))
                    boto.log.debug(
                        'corrupted JSON data found: %s' % val)
                    last_exception = e

                except Exception as e:
                    # Anything else is not retryable; propagate.
                    boto.log.debug("encountered unretryable" +
                                   " '%s' exception, re-raising" % (
                                       e.__class__.__name__))
                    last_exception = e
                    raise

                boto.log.error("Caught exception reading meta data" +
                               " for the '%s' try" % (i + 1))

                if i + 1 != self._num_retries:
                    # Randomized exponential backoff, capped by config.
                    next_sleep = min(
                        random.random() * 2 ** i,
                        boto.config.get('Boto', 'max_retry_delay', 60))
                    time.sleep(next_sleep)
            else:
                boto.log.error('Unable to read meta data, giving up')
                boto.log.error(
                    "encountered '%s' exception: %s" % (
                        last_exception.__class__.__name__, last_exception))
                raise last_exception

            self[key] = val
        elif key in self._dicts:
            # NOTE(review): the timeout is not propagated to subtrees;
            # presumably intentional, but worth confirming.
            self[key] = LazyLoadMetadata(self._url + key + '/',
                                         self._num_retries)

        return super(LazyLoadMetadata, self).__getitem__(key)

    def get(self, key, default=None):
        try:
            return self[key]
        except KeyError:
            return default

    def values(self):
        self._materialize()
        return super(LazyLoadMetadata, self).values()

    def items(self):
        self._materialize()
        return super(LazyLoadMetadata, self).items()

    def __str__(self):
        self._materialize()
        return super(LazyLoadMetadata, self).__str__()

    def __repr__(self):
        self._materialize()
        return super(LazyLoadMetadata, self).__repr__()
353 | |
354 | |
355 def _build_instance_metadata_url(url, version, path): | |
356 """ | |
357 Builds an EC2 metadata URL for fetching information about an instance. | |
358 | |
359 Example: | |
360 | |
361 >>> _build_instance_metadata_url('http://169.254.169.254', 'latest', 'meta-data/') | |
362 http://169.254.169.254/latest/meta-data/ | |
363 | |
364 :type url: string | |
365 :param url: URL to metadata service, e.g. 'http://169.254.169.254' | |
366 | |
367 :type version: string | |
368 :param version: Version of the metadata to get, e.g. 'latest' | |
369 | |
370 :type path: string | |
371 :param path: Path of the metadata to get, e.g. 'meta-data/'. If a trailing | |
372 slash is required it must be passed in with the path. | |
373 | |
374 :return: The full metadata URL | |
375 """ | |
376 return '%s/%s/%s' % (url, version, path) | |
377 | |
378 | |
def get_instance_metadata(version='latest', url='http://169.254.169.254',
                          data='meta-data/', timeout=None, num_retries=5):
    """
    Returns the instance metadata as a nested Python dictionary.
    Simple values (e.g. local_hostname, hostname, etc.) will be
    stored as string values. Values such as ancestor-ami-ids will
    be stored in the dict as a list of string values. More complex
    fields such as public-keys and will be stored as nested dicts.

    If the timeout is specified, the connection to the specified url
    will time out after the specified number of seconds.
    """
    metadata_url = _build_instance_metadata_url(url, version, data)
    try:
        return _get_instance_metadata(metadata_url, num_retries=num_retries,
                                      timeout=timeout)
    except urllib.error.URLError:
        # Metadata service unreachable -- most likely not on EC2.
        return None
397 | |
398 | |
def get_instance_identity(version='latest', url='http://169.254.169.254',
                          timeout=None, num_retries=5):
    """
    Returns the instance identity as a nested Python dictionary.

    JSON fields (such as the identity document itself) are parsed into
    dicts; everything else is kept as the raw string value.  Returns
    None if the metadata service cannot be reached.
    """
    iid = {}
    base_url = _build_instance_metadata_url(url, version,
                                            'dynamic/instance-identity/')
    try:
        data = retry_url(base_url, num_retries=num_retries, timeout=timeout)
        fields = data.split('\n')
        for field in fields:
            val = retry_url(base_url + '/' + field + '/',
                            num_retries=num_retries, timeout=timeout)
            # retry_url returns '' after exhausting its retries; guard
            # before indexing so a failed field can't raise IndexError.
            if val and val[0] == '{':
                val = json.loads(val)
            if field:
                iid[field] = val
        return iid
    except urllib.error.URLError:
        return None
419 | |
420 | |
def get_instance_userdata(version='latest', sep=None,
                          url='http://169.254.169.254', timeout=None,
                          num_retries=5):
    """
    Fetch the raw EC2 user-data for this instance.

    If *sep* is given the user data is parsed as *sep*-separated
    ``name=value`` pairs and returned as a dict; otherwise the raw
    string is returned ('' when there is no user data).
    """
    ud_url = _build_instance_metadata_url(url, version, 'user-data')
    user_data = retry_url(ud_url, retry_on_404=False,
                          num_retries=num_retries, timeout=timeout)
    if user_data:
        if sep:
            pairs = user_data.split(sep)
            user_data = {}
            for nvpair in pairs:
                # Split on the first '=' only so values may themselves
                # contain '=' (a plain split() truncated such values).
                t = nvpair.split('=', 1)
                user_data[t[0].strip()] = t[1].strip()
    return user_data
433 | |
# Timestamp formats understood/produced by get_ts()/parse_ts().
ISO8601 = '%Y-%m-%dT%H:%M:%SZ'
ISO8601_MS = '%Y-%m-%dT%H:%M:%S.%fZ'  # ISO 8601 with fractional seconds
RFC1123 = '%a, %d %b %Y %H:%M:%S %Z'  # HTTP-style date
# Serializes setlocale(): the process locale is global, shared state.
LOCALE_LOCK = threading.Lock()
438 | |
439 | |
@contextmanager
def setlocale(name):
    """
    A context manager that temporarily switches LC_ALL to *name* and
    restores the previous locale on exit.  Guarded by LOCALE_LOCK
    because the process locale is global state shared by all threads.
    """
    with LOCALE_LOCK:
        previous = locale.setlocale(locale.LC_ALL)
        try:
            yield locale.setlocale(locale.LC_ALL, name)
        finally:
            locale.setlocale(locale.LC_ALL, previous)
452 | |
453 | |
def get_ts(ts=None):
    """Format a ``time.struct_time`` (default: the current UTC time) as
    an ISO 8601 timestamp string."""
    return time.strftime(ISO8601, ts if ts else time.gmtime())
458 | |
459 | |
def parse_ts(ts):
    """
    Parse a timestamp string in ISO 8601 (with or without fractional
    seconds) or RFC 1123 format into a naive ``datetime``.

    Parsing runs under the 'C' locale so the month/day names in RFC
    1123 dates match regardless of the process locale.
    """
    with setlocale('C'):
        ts = ts.strip()
        for fmt in (ISO8601, ISO8601_MS):
            try:
                return datetime.datetime.strptime(ts, fmt)
            except ValueError:
                continue
        # Last resort; a ValueError here propagates to the caller,
        # matching the original fall-through behavior.
        return datetime.datetime.strptime(ts, RFC1123)
473 | |
474 | |
def find_class(module_name, class_name=None):
    """
    Import and return the named attribute (usually a class) from a
    dotted module path, e.g. ``find_class('json.decoder', 'JSONDecoder')``.

    :param module_name: Dotted module path; may already include the
        attribute name if *class_name* is omitted.
    :param class_name: Optional attribute name appended to *module_name*.
    :return: The resolved object, or None if it cannot be imported/found.
    """
    if class_name:
        module_name = "%s.%s" % (module_name, class_name)
    modules = module_name.split('.')
    c = None

    try:
        for m in modules[1:]:
            if c:
                c = getattr(c, m)
            else:
                # __import__ returns the top-level package; walk down.
                c = getattr(__import__(".".join(modules[0:-1])), m)
        return c
    except (ImportError, AttributeError):
        # Narrowed from a bare ``except``, which also swallowed
        # KeyboardInterrupt/SystemExit.
        return None
490 | |
491 | |
def update_dme(username, password, dme_id, ip_address):
    """
    Update your Dynamic DNS record with DNSMadeEasy.com

    :param username: DNSMadeEasy account name.
    :param password: DNSMadeEasy account password.
    :param dme_id: The identifier of the record to update.
    :param ip_address: The new IP address for the record.
    :return: The raw response body from the update endpoint.

    NOTE(review): the credentials travel as query-string parameters;
    the request is HTTPS, but they may still appear in server logs.
    """
    dme_url = 'https://www.dnsmadeeasy.com/servlet/updateip'
    dme_url += '?username=%s&password=%s&id=%s&ip=%s'
    s = urllib.request.urlopen(dme_url % (username, password, dme_id, ip_address))
    return s.read()
500 | |
501 | |
def fetch_file(uri, file=None, username=None, password=None):
    """
    Fetch a file based on the URI provided.
    If you do not pass in a file pointer a tempfile.NamedTemporaryFile,
    or None if the file could not be retrieved is returned.
    The URI can be either an HTTP url, or "s3://bucket_name/key_name"

    :param username: For s3 URIs, the AWS access key id; for HTTP URIs,
        the basic-auth user name.
    :param password: For s3 URIs, the AWS secret key; for HTTP URIs,
        the basic-auth password.
    """
    boto.log.info('Fetching %s' % uri)
    if file is None:
        file = tempfile.NamedTemporaryFile()
    try:
        if uri.startswith('s3://'):
            bucket_name, key_name = uri[len('s3://'):].split('/', 1)
            c = boto.connect_s3(aws_access_key_id=username,
                                aws_secret_access_key=password)
            bucket = c.get_bucket(bucket_name)
            key = bucket.get_key(key_name)
            key.get_contents_to_file(file)
        else:
            if username and password:
                passman = urllib.request.HTTPPasswordMgrWithDefaultRealm()
                passman.add_password(None, uri, username, password)
                authhandler = urllib.request.HTTPBasicAuthHandler(passman)
                opener = urllib.request.build_opener(authhandler)
                urllib.request.install_opener(opener)
            s = urllib.request.urlopen(uri)
            file.write(s.read())
        file.seek(0)
    except Exception:
        # A stray ``raise`` used to sit at the top of this handler,
        # making the logging and the documented "return None on
        # failure" behavior unreachable.
        boto.log.exception('Problem Retrieving file: %s' % uri)
        file = None
    return file
535 | |
536 | |
class ShellCommand(object):
    """
    Run a command through the shell via subprocess, optionally waiting
    for it to finish and capturing its output.

    :ivar command: The shell command string that was executed.
    :ivar exit_code: The command's exit status (0 until it completes).
    :ivar log_fp: Buffer accumulating the command's stdout and stderr.
    """

    def __init__(self, command, wait=True, fail_fast=False, cwd=None):
        self.exit_code = 0
        self.command = command
        self.log_fp = StringIO()
        self.wait = wait
        self.fail_fast = fail_fast
        self.run(cwd=cwd)

    def run(self, cwd=None):
        """
        Execute self.command in a shell.  When ``self.wait`` is set,
        block until it completes, capture its output, and (if
        ``fail_fast``) raise on a non-zero exit status.

        :return: The exit code when waiting, otherwise the initial 0.
        """
        boto.log.info('running:%s' % self.command)
        self.process = subprocess.Popen(self.command, shell=True,
                                        stdin=subprocess.PIPE,
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.PIPE,
                                        cwd=cwd)
        if self.wait:
            # communicate() waits for the process while draining both
            # pipes; the old poll()/sleep() loop could deadlock once a
            # pipe buffer filled up (see subprocess docs).
            t = self.process.communicate()
            self.log_fp.write(t[0])
            self.log_fp.write(t[1])
            boto.log.info(self.log_fp.getvalue())
            self.exit_code = self.process.returncode

            if self.fail_fast and self.exit_code != 0:
                # str() the exit code: concatenating the raw int raised
                # TypeError instead of the intended error message.
                raise Exception("Command " + self.command +
                                " failed with status " + str(self.exit_code))

        return self.exit_code

    def setReadOnly(self, value):
        raise AttributeError

    def getStatus(self):
        return self.exit_code

    status = property(getStatus, setReadOnly, None,
                      'The exit code for the command')

    def getOutput(self):
        return self.log_fp.getvalue()

    output = property(getOutput, setReadOnly, None,
                      'The STDOUT and STDERR output of the command')
583 | |
584 | |
class AuthSMTPHandler(logging.handlers.SMTPHandler):
    """
    An SMTPHandler that additionally authenticates with the SMTP server.

    It accepts a username and password on the constructor and uses them
    to log in before sending.  To use this, you could add something like
    this in your boto config file:

    [handler_hand07]
    class=boto.utils.AuthSMTPHandler
    level=WARN
    formatter=form07
    args=('localhost', 'username', 'password', 'from@abc', ['user1@abc', 'user2@xyz'], 'Logger Subject')
    """

    def __init__(self, mailhost, username, password,
                 fromaddr, toaddrs, subject):
        """
        Initialize the handler.

        Extends the standard SMTPHandler constructor with a
        username/password pair used for SMTP authentication.
        """
        super(AuthSMTPHandler, self).__init__(mailhost, fromaddr,
                                              toaddrs, subject)
        self.username = username
        self.password = password

    def emit(self, record):
        """
        Format *record* and mail it to the configured recipients,
        logging in to the SMTP server first.  Errors are routed through
        ``handleError`` like any other logging handler.
        """
        try:
            smtp = smtplib.SMTP(self.mailhost,
                                self.mailport or smtplib.SMTP_PORT)
            smtp.login(self.username, self.password)
            body = self.format(record)
            message = ("From: %s\r\nTo: %s\r\nSubject: %s\r\n"
                       "Date: %s\r\n\r\n%s" % (
                           self.fromaddr,
                           ','.join(self.toaddrs),
                           self.getSubject(record),
                           email.utils.formatdate(),
                           body))
            smtp.sendmail(self.fromaddr, self.toaddrs, message)
            smtp.quit()
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            self.handleError(record)
638 | |
639 | |
class LRUCache(dict):
    """A dictionary-like object that stores only a certain number of items, and
    discards its least recently used item when full.

    >>> cache = LRUCache(3)
    >>> cache['A'] = 0
    >>> cache['B'] = 1
    >>> cache['C'] = 2
    >>> len(cache)
    3

    >>> cache['A']
    0

    Adding new items to the cache does not increase its size. Instead, the least
    recently used item is dropped:

    >>> cache['D'] = 3
    >>> len(cache)
    3
    >>> 'B' in cache
    False

    Iterating over the cache returns the keys, starting with the most recently
    used:

    >>> for key in cache:
    ...     print key
    D
    A
    C

    This code is based on the LRUCache class from Genshi which is based on
    `Myghty <http://www.myghty.org>`_'s LRUCache from ``myghtyutils.util``,
    written by Mike Bayer and released under the MIT license (Genshi uses the
    BSD License).
    """

    class _Item(object):
        # Doubly-linked-list node pairing a key with its cached value.
        def __init__(self, key, value):
            self.previous = self.next = None
            self.key = key
            self.value = value

        def __repr__(self):
            return repr(self.value)

    def __init__(self, capacity):
        # _dict maps key -> _Item; the linked list orders items from
        # head (most recently used) to tail (least recently used).
        self._dict = dict()
        self.capacity = capacity
        self.head = None
        self.tail = None

    def __contains__(self, key):
        return key in self._dict

    def __iter__(self):
        # Walk the list from most- to least-recently used.
        cur = self.head
        while cur:
            yield cur.key
            cur = cur.next

    def __len__(self):
        return len(self._dict)

    def __getitem__(self, key):
        item = self._dict[key]
        # A successful lookup counts as a "use": move to the front.
        self._update_item(item)
        return item.value

    def __setitem__(self, key, value):
        item = self._dict.get(key)
        if item is None:
            item = self._Item(key, value)
            self._dict[key] = item
            self._insert_item(item)
        else:
            # Existing key: refresh the value and its recency.
            item.value = value
            self._update_item(item)
            self._manage_size()

    def __repr__(self):
        return repr(self._dict)

    def _insert_item(self, item):
        # Push item onto the front (MRU end) of the list.
        item.previous = None
        item.next = self.head
        if self.head is not None:
            self.head.previous = item
        else:
            self.tail = item
        self.head = item
        self._manage_size()

    def _manage_size(self):
        # Evict from the tail until back within capacity.
        while len(self._dict) > self.capacity:
            del self._dict[self.tail.key]
            if self.tail != self.head:
                self.tail = self.tail.previous
                self.tail.next = None
            else:
                self.head = self.tail = None

    def _update_item(self, item):
        # Move item to the front of the list; no-op if already there.
        if self.head == item:
            return

        # Unlink item from its current position...
        previous = item.previous
        previous.next = item.next
        if item.next is not None:
            item.next.previous = previous
        else:
            self.tail = previous

        # ...and relink it at the head.  Chained assignment runs
        # left-to-right: old head's .previous first, then self.head.
        item.previous = None
        item.next = self.head
        self.head.previous = self.head = item
757 | |
758 | |
class Password(object):
    """
    Password object that stores itself as hashed.
    Hash defaults to SHA512 if available, MD5 otherwise.
    """
    hashfunc = _hashfn  # class-level default; overridable per instance

    def __init__(self, str=None, hashfunc=None):
        """
        Load the string from an initial value; this should be the
        raw hashed password.
        """
        self.str = str
        if hashfunc:
            self.hashfunc = hashfunc

    def set(self, value):
        """Hash *value* (encoding text as UTF-8) and store its hex digest."""
        if not isinstance(value, bytes):
            value = value.encode('utf-8')
        self.str = self.hashfunc(value).hexdigest()

    def __str__(self):
        return str(self.str)

    def __eq__(self, other):
        """Compare by hashing *other* with the same hash function."""
        if other is None:
            return False
        if not isinstance(other, bytes):
            other = other.encode('utf-8')
        return str(self.hashfunc(other).hexdigest()) == str(self.str)

    def __len__(self):
        return len(self.str) if self.str else 0
795 | |
796 | |
def notify(subject, body=None, html_body=None, to_string=None,
           attachments=None, append_instance_id=True):
    """
    Send a notification email using settings from the [Notification]
    section of the boto config (smtp_to, smtp_from, smtp_host,
    smtp_port, smtp_tls, smtp_user, smtp_pass).

    :param subject: Message subject; prefixed with the instance id
        unless *append_instance_id* is False.
    :param body: Optional plain-text body.
    :param html_body: Optional HTML body, sent base64-encoded.
    :param to_string: Recipient(s); defaults to config smtp_to.  If
        neither is set, no mail is sent.
    :param attachments: Optional list of pre-built MIME parts to attach.
    """
    attachments = attachments or []
    if append_instance_id:
        subject = "[%s] %s" % (
            boto.config.get_value("Instance", "instance-id"), subject)
    if not to_string:
        to_string = boto.config.get_value('Notification', 'smtp_to', None)
    if to_string:
        try:
            from_string = boto.config.get_value('Notification',
                                                'smtp_from', 'boto')
            msg = email.mime.multipart.MIMEMultipart()
            msg['From'] = from_string
            msg['Reply-To'] = from_string
            msg['To'] = to_string
            msg['Date'] = email.utils.formatdate(localtime=True)
            msg['Subject'] = subject

            if body:
                msg.attach(email.mime.text.MIMEText(body))

            if html_body:
                part = email.mime.base.MIMEBase('text', 'html')
                part.set_payload(html_body)
                email.encoders.encode_base64(part)
                msg.attach(part)

            for part in attachments:
                msg.attach(part)

            smtp_host = boto.config.get_value('Notification',
                                              'smtp_host', 'localhost')

            # Alternate port support
            if boto.config.get_value("Notification", "smtp_port"):
                server = smtplib.SMTP(smtp_host, int(
                    boto.config.get_value("Notification", "smtp_port")))
            else:
                server = smtplib.SMTP(smtp_host)

            # TLS support
            if boto.config.getbool("Notification", "smtp_tls"):
                server.ehlo()
                server.starttls()
                server.ehlo()
            smtp_user = boto.config.get_value('Notification', 'smtp_user', '')
            smtp_pass = boto.config.get_value('Notification', 'smtp_pass', '')
            if smtp_user:
                server.login(smtp_user, smtp_pass)
            server.sendmail(from_string, to_string, msg.as_string())
            server.quit()
        except:
            # Notification is best-effort: log the failure and carry on
            # rather than letting a mail error take down the caller.
            boto.log.exception('notify failed')
851 | |
852 | |
def get_utf8_value(value):
    """
    Return *value* as UTF-8 encoded bytes.

    On Python 3, bytes pass through untouched.  Non-string values are
    stringified first; text is then encoded as UTF-8.
    """
    if not six.PY2 and isinstance(value, bytes):
        return value
    if not isinstance(value, six.string_types):
        value = six.text_type(value)
    if isinstance(value, six.text_type):
        value = value.encode('utf-8')
    return value
864 | |
865 | |
def mklist(value):
    """Coerce *value* to a list: lists pass through unchanged, tuples
    are converted, and anything else is wrapped in a one-element list."""
    if isinstance(value, list):
        return value
    if isinstance(value, tuple):
        return list(value)
    return [value]
873 | |
874 | |
def pythonize_name(name):
    """Convert camel case to a "pythonic" name.

    Examples::

        pythonize_name('CamelCase') -> 'camel_case'
        pythonize_name('already_pythonized') -> 'already_pythonized'
        pythonize_name('HTTPRequest') -> 'http_request'
        pythonize_name('HTTPStatus200Ok') -> 'http_status_200_ok'
        pythonize_name('UPPER') -> 'upper'
        pythonize_name('') -> ''

    """
    # Apply the three boundary regexes in sequence, then lower-case.
    step1 = _first_cap_regex.sub(r'\1_\2', name)
    step2 = _number_cap_regex.sub(r'\1_\2', step1)
    return _end_cap_regex.sub(r'\1_\2', step2).lower()
891 | |
892 | |
def write_mime_multipart(content, compress=False, deftype='text/plain', delimiter=':'):
    """Build a MIME multipart document from named script parts.

    :param content: A list of tuples of name-content pairs. This is used
                    instead of a dict to ensure that scripts run in order
    :type list of tuples:

    :param compress: Use gzip to compress the scripts, defaults to no compression
    :type bool:

    :param deftype: The type that should be assumed if nothing else can be figured out
    :type str:

    :param delimiter: mime delimiter (currently unused by this function)
    :type str:

    :return: Final mime multipart
    :rtype: str:
    """
    wrapper = email.mime.multipart.MIMEMultipart()
    for name, con in content:
        # Each part's MIME type is guessed from its leading characters
        # (cloudinit conventions); text parts stay readable, everything
        # else is base64-encoded.
        definite_type = guess_mime_type(con, deftype)
        maintype, subtype = definite_type.split('/', 1)
        if maintype == 'text':
            mime_con = email.mime.text.MIMEText(con, _subtype=subtype)
        else:
            mime_con = email.mime.base.MIMEBase(maintype, subtype)
            mime_con.set_payload(con)
            # Encode the payload using Base64
            email.encoders.encode_base64(mime_con)
        mime_con.add_header('Content-Disposition', 'attachment', filename=name)
        wrapper.attach(mime_con)
    rcontent = wrapper.as_string()

    if compress:
        # NOTE(review): GzipFile is written into a StringIO here, which
        # only works where gzip accepts text buffers (Python 2);
        # presumably this path is not exercised under Python 3 --
        # confirm before relying on compress=True.
        buf = StringIO()
        gz = gzip.GzipFile(mode='wb', fileobj=buf)
        try:
            gz.write(rcontent)
        finally:
            gz.close()
        rcontent = buf.getvalue()

    return rcontent
936 | |
937 | |
def guess_mime_type(content, deftype):
    """Guess the MIME type of a block of text from its leading characters.

    :param content: content we're finding the type of
    :param deftype: Default mime type returned when no prefix matches
    :return: The matching cloudinit MIME type, or *deftype*
    """
    # Mappings recognized by cloudinit
    starts_with_mappings = {
        '#include': 'text/x-include-url',
        '#!': 'text/x-shellscript',
        '#cloud-config': 'text/cloud-config',
        '#upstart-job': 'text/upstart-job',
        '#part-handler': 'text/part-handler',
        '#cloud-boothook': 'text/cloud-boothook'
    }
    for prefix, mimetype in starts_with_mappings.items():
        if content.startswith(prefix):
            return mimetype
    return deftype
964 | |
965 | |
def compute_md5(fp, buf_size=8192, size=None):
    """
    Compute the MD5 hash of the file-like object *fp*.

    The file position is restored to where it was before the call.

    :type fp: file
    :param fp: File pointer to the file to MD5 hash.

    :type buf_size: integer
    :param buf_size: Number of bytes per read request.

    :type size: int
    :param size: (optional) Maximum number of bytes to read from *fp*;
        useful for multi-part uploads where the file is split in place.
        Fewer bytes may be available.

    :rtype: tuple
    :return: ``(hex_digest, base64_digest, data_size)``.
    """
    return compute_hash(fp, buf_size=buf_size, size=size, hash_algorithm=md5)
992 | |
993 | |
def compute_hash(fp, buf_size=8192, size=None, hash_algorithm=md5):
    """
    Hash the contents of the file-like object *fp* in chunks.

    Reads at most *size* bytes when given, restores the file position
    before returning, and yields both hex and (newline-stripped)
    base64 digests.

    :return: ``(hex_digest, base64_digest, bytes_read)``.
    """
    hash_obj = hash_algorithm()
    spos = fp.tell()

    def _next_chunk(remaining):
        # Read buf_size bytes, or fewer when a smaller cap remains.
        if remaining and remaining < buf_size:
            return fp.read(remaining)
        return fp.read(buf_size)

    s = _next_chunk(size)
    while s:
        if not isinstance(s, bytes):
            s = s.encode('utf-8')
        hash_obj.update(s)
        if size:
            size -= len(s)
            if size <= 0:
                break
        s = _next_chunk(size)

    hex_digest = hash_obj.hexdigest()
    base64_digest = encodebytes(hash_obj.digest()).decode('utf-8')
    # encodebytes appends a trailing newline; drop it.
    if base64_digest[-1] == '\n':
        base64_digest = base64_digest[0:-1]
    # data_size based on bytes read.
    data_size = fp.tell() - spos
    fp.seek(spos)
    return (hex_digest, base64_digest, data_size)
1021 | |
1022 | |
def find_matching_headers(name, headers):
    """
    Takes a specific header name and a dict of headers {"name": "value"}.
    Returns a list of matching header names, case-insensitive.
    """
    target = name.lower()
    return [header for header in headers if header.lower() == target]
1030 | |
1031 | |
def merge_headers_by_name(name, headers):
    """
    Takes a specific header name and a dict of headers {"name": "value"}.
    Collapses every header whose name matches *name* (case-insensitive)
    into one comma-separated string of their non-None values.
    """
    merged = []
    for header in find_matching_headers(name, headers):
        if headers[header] is not None:
            merged.append(str(headers[header]))
    return ','.join(merged)
1042 | |
1043 | |
class RequestHook(object):
    """
    This can be extended and supplied to the connection object
    to gain access to request and response object after the request completes.
    One use for this would be to implement some specific request logging.
    """
    def handle_request_data(self, request, response, error=False):
        # Default implementation is a no-op; subclasses override this to
        # observe each completed request/response pair (error=True when
        # the request failed).
        pass