
/monitor_batch/pymodules/python2.7/lib/python/boto-2.19.0-py2.7.egg/boto/utils.py

https://gitlab.com/pooja043/Globus_Docker_4
# Copyright (c) 2006-2012 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2010, Eucalyptus Systems, Inc.
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
# Parts of this code were copied or derived from sample code supplied by AWS.
# The following notice applies to that code.
#
# This software code is made available "AS IS" without warranties of any
# kind. You may copy, display, modify and redistribute the software
# code either by itself or as incorporated into your code; provided that
# you do not remove any proprietary notices. Your use of this software
# code is at your own risk and you waive any claim against Amazon
# Digital Services, Inc. or its affiliates with respect to your use of
# this software code. (c) 2006 Amazon Digital Services, Inc. or its
# affiliates.

"""
Some handy utility functions used by several classes.
"""
import socket
import urllib
import urllib2
import imp
import subprocess
import StringIO
import time
import logging.handlers
import boto
import boto.provider
import tempfile
import smtplib
import datetime
import re
import email.mime.multipart
import email.mime.base
import email.mime.text
import email.utils
import email.encoders
import gzip
import base64
try:
    from hashlib import md5
except ImportError:
    from md5 import md5

try:
    import hashlib
    _hashfn = hashlib.sha512
except ImportError:
    import md5
    _hashfn = md5.md5

from boto.compat import json

# List of Query String Arguments of Interest
qsa_of_interest = ['acl', 'cors', 'defaultObjectAcl', 'location', 'logging',
                   'partNumber', 'policy', 'requestPayment', 'torrent',
                   'versioning', 'versionId', 'versions', 'website',
                   'uploads', 'uploadId', 'response-content-type',
                   'response-content-language', 'response-expires',
                   'response-cache-control', 'response-content-disposition',
                   'response-content-encoding', 'delete', 'lifecycle',
                   'tagging', 'restore',
                   # storageClass is a QSA for buckets in Google Cloud Storage.
                   # (StorageClass is associated to individual keys in S3, but
                   # having it listed here should cause no problems because
                   # GET bucket?storageClass is not part of the S3 API.)
                   'storageClass',
                   # websiteConfig is a QSA for buckets in Google Cloud Storage.
                   'websiteConfig',
                   # compose is a QSA for objects in Google Cloud Storage.
                   'compose']

_first_cap_regex = re.compile('(.)([A-Z][a-z]+)')
_number_cap_regex = re.compile('([a-z])([0-9]+)')
_end_cap_regex = re.compile('([a-z0-9])([A-Z])')

def unquote_v(nv):
    if len(nv) == 1:
        return nv
    else:
        return (nv[0], urllib.unquote(nv[1]))

def canonical_string(method, path, headers, expires=None,
                     provider=None):
    """
    Generates the aws canonical string for the given parameters
    """
    if not provider:
        provider = boto.provider.get_default()
    interesting_headers = {}
    for key in headers:
        lk = key.lower()
        if headers[key] != None and (lk in ['content-md5', 'content-type', 'date'] or
                                     lk.startswith(provider.header_prefix)):
            interesting_headers[lk] = str(headers[key]).strip()

    # these keys get empty strings if they don't exist
    if 'content-type' not in interesting_headers:
        interesting_headers['content-type'] = ''
    if 'content-md5' not in interesting_headers:
        interesting_headers['content-md5'] = ''

    # just in case someone used this. it's not necessary in this lib.
    if provider.date_header in interesting_headers:
        interesting_headers['date'] = ''

    # if you're using expires for query string auth, then it trumps date
    # (and provider.date_header)
    if expires:
        interesting_headers['date'] = str(expires)

    sorted_header_keys = sorted(interesting_headers.keys())

    buf = "%s\n" % method
    for key in sorted_header_keys:
        val = interesting_headers[key]
        if key.startswith(provider.header_prefix):
            buf += "%s:%s\n" % (key, val)
        else:
            buf += "%s\n" % val

    # don't include anything after the first ? in the resource...
    # unless it is one of the QSA of interest, defined above
    t = path.split('?')
    buf += t[0]

    if len(t) > 1:
        qsa = t[1].split('&')
        qsa = [a.split('=', 1) for a in qsa]
        qsa = [unquote_v(a) for a in qsa if a[0] in qsa_of_interest]
        if len(qsa) > 0:
            qsa.sort(cmp=lambda x, y: cmp(x[0], y[0]))
            qsa = ['='.join(a) for a in qsa]
            buf += '?'
            buf += '&'.join(qsa)

    return buf
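
# Usage sketch (illustrative; the header names and path below are examples,
# not values taken from this module): canonical_string builds the
# string-to-sign used by the old-style S3/GS request signing.
#
#     hdrs = {'Date': 'Thu, 17 Nov 2005 18:49:58 GMT',
#             'Content-Type': 'text/plain',
#             'x-amz-meta-author': 'me'}
#     to_sign = canonical_string('PUT', '/bucket/key?acl', hdrs)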

def merge_meta(headers, metadata, provider=None):
    if not provider:
        provider = boto.provider.get_default()
    metadata_prefix = provider.metadata_prefix
    final_headers = headers.copy()
    for k in metadata.keys():
        if k.lower() in ['cache-control', 'content-md5', 'content-type',
                         'content-encoding', 'content-disposition',
                         'expires']:
            final_headers[k] = metadata[k]
        else:
            final_headers[metadata_prefix + k] = metadata[k]

    return final_headers


def get_aws_metadata(headers, provider=None):
    if not provider:
        provider = boto.provider.get_default()
    metadata_prefix = provider.metadata_prefix
    metadata = {}
    for hkey in headers.keys():
        if hkey.lower().startswith(metadata_prefix):
            val = urllib.unquote_plus(headers[hkey])
            try:
                metadata[hkey[len(metadata_prefix):]] = unicode(val, 'utf-8')
            except UnicodeDecodeError:
                metadata[hkey[len(metadata_prefix):]] = val
            del headers[hkey]
    return metadata

def retry_url(url, retry_on_404=True, num_retries=10):
    """
    Retry a url. This is specifically used for accessing the metadata
    service on an instance. Since this address should never be proxied
    (for security reasons), we create a ProxyHandler with a NULL
    dictionary to override any proxy settings in the environment.
    """
    for i in range(0, num_retries):
        try:
            proxy_handler = urllib2.ProxyHandler({})
            opener = urllib2.build_opener(proxy_handler)
            req = urllib2.Request(url)
            r = opener.open(req)
            result = r.read()
            return result
        except urllib2.HTTPError, e:
            # in 2.6 you use getcode(), in 2.5 and earlier you use code
            if hasattr(e, 'getcode'):
                code = e.getcode()
            else:
                code = e.code
            if code == 404 and not retry_on_404:
                return ''
        except Exception, e:
            pass
        boto.log.exception('Caught exception reading instance data')
        # If not on the last iteration of the loop then sleep.
        if i + 1 != num_retries:
            time.sleep(2 ** i)
    boto.log.error('Unable to read instance data, giving up')
    return ''
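
# Usage sketch (illustrative; the URL is the standard EC2 metadata endpoint
# and num_retries=3 is an arbitrary choice): retry_url sleeps 2 ** i seconds
# between attempts, so a small num_retries keeps the worst-case wait short.
#
#     body = retry_url('http://169.254.169.254/latest/meta-data/instance-id',
#                      num_retries=3)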

def _get_instance_metadata(url, num_retries):
    return LazyLoadMetadata(url, num_retries)


class LazyLoadMetadata(dict):
    def __init__(self, url, num_retries):
        self._url = url
        self._num_retries = num_retries
        self._leaves = {}
        self._dicts = []
        data = boto.utils.retry_url(self._url, num_retries=self._num_retries)
        if data:
            fields = data.split('\n')
            for field in fields:
                if field.endswith('/'):
                    key = field[0:-1]
                    self._dicts.append(key)
                else:
                    p = field.find('=')
                    if p > 0:
                        key = field[p + 1:]
                        resource = field[0:p] + '/openssh-key'
                    else:
                        key = resource = field
                    self._leaves[key] = resource
                    self[key] = None

    def _materialize(self):
        for key in self:
            self[key]

    def __getitem__(self, key):
        if key not in self:
            # allow dict to throw the KeyError
            return super(LazyLoadMetadata, self).__getitem__(key)

        # already loaded
        val = super(LazyLoadMetadata, self).__getitem__(key)
        if val is not None:
            return val

        if key in self._leaves:
            resource = self._leaves[key]
            val = boto.utils.retry_url(self._url + urllib.quote(resource,
                                                                safe="/:"),
                                       num_retries=self._num_retries)
            if val and val[0] == '{':
                val = json.loads(val)
            else:
                p = val.find('\n')
                if p > 0:
                    val = val.split('\n')
            self[key] = val
        elif key in self._dicts:
            self[key] = LazyLoadMetadata(self._url + key + '/',
                                         self._num_retries)

        return super(LazyLoadMetadata, self).__getitem__(key)

    def get(self, key, default=None):
        try:
            return self[key]
        except KeyError:
            return default

    def values(self):
        self._materialize()
        return super(LazyLoadMetadata, self).values()

    def items(self):
        self._materialize()
        return super(LazyLoadMetadata, self).items()

    def __str__(self):
        self._materialize()
        return super(LazyLoadMetadata, self).__str__()

    def __repr__(self):
        self._materialize()
        return super(LazyLoadMetadata, self).__repr__()
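
# Usage sketch (illustrative; the URL and retry count are examples):
# LazyLoadMetadata fetches a value only on first access; values(), items(),
# __str__ and __repr__ force every key to be materialized.
#
#     md = LazyLoadMetadata('http://169.254.169.254/latest/meta-data/', 5)
#     md['instance-id']   # first access triggers an HTTP fetch
#     md['instance-id']   # now served straight from the dict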

def _build_instance_metadata_url(url, version, path):
    """
    Builds an EC2 metadata URL for fetching information about an instance.
    Requires the following arguments: a URL, a version and a path.

    Example:

        >>> _build_instance_metadata_url('http://169.254.169.254', 'latest', 'meta-data')
        http://169.254.169.254/latest/meta-data/

    """
    return '%s/%s/%s/' % (url, version, path)

def get_instance_metadata(version='latest', url='http://169.254.169.254',
                          data='meta-data', timeout=None, num_retries=5):
    """
    Returns the instance metadata as a nested Python dictionary.
    Simple values (e.g. local_hostname, hostname, etc.) will be
    stored as string values. Values such as ancestor-ami-ids will
    be stored in the dict as a list of string values. More complex
    fields such as public-keys will be stored as nested dicts.

    If the timeout is specified, the connection to the specified url
    will time out after the specified number of seconds.
    """
    if timeout is not None:
        original = socket.getdefaulttimeout()
        socket.setdefaulttimeout(timeout)
    try:
        metadata_url = _build_instance_metadata_url(url, version, data)
        return _get_instance_metadata(metadata_url, num_retries=num_retries)
    except urllib2.URLError, e:
        return None
    finally:
        if timeout is not None:
            socket.setdefaulttimeout(original)
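
# Usage sketch (illustrative; timeout and num_retries values are arbitrary,
# the keys shown are standard EC2 metadata names):
#
#     md = get_instance_metadata(timeout=1, num_retries=2)
#     if md is not None:
#         instance_id = md['instance-id']
#         ami_id = md['ami-id']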

def get_instance_identity(version='latest', url='http://169.254.169.254',
                          timeout=None, num_retries=5):
    """
    Returns the instance identity as a nested Python dictionary.
    """
    iid = {}
    base_url = _build_instance_metadata_url(url, version,
                                            'dynamic/instance-identity')
    if timeout is not None:
        original = socket.getdefaulttimeout()
        socket.setdefaulttimeout(timeout)
    try:
        data = retry_url(base_url, num_retries=num_retries)
        fields = data.split('\n')
        for field in fields:
            val = retry_url(base_url + '/' + field + '/')
            if val[0] == '{':
                val = json.loads(val)
            if field:
                iid[field] = val
        return iid
    except urllib2.URLError, e:
        return None
    finally:
        if timeout is not None:
            socket.setdefaulttimeout(original)

def get_instance_userdata(version='latest', sep=None,
                          url='http://169.254.169.254'):
    ud_url = _build_instance_metadata_url(url, version, 'user-data')
    user_data = retry_url(ud_url, retry_on_404=False)
    if user_data:
        if sep:
            l = user_data.split(sep)
            user_data = {}
            for nvpair in l:
                t = nvpair.split('=')
                user_data[t[0].strip()] = t[1].strip()
    return user_data
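
# Usage sketch (illustrative; the '|' delimiter and the sample string are
# examples): with sep set, name=value pairs are split into a dict.
#
#     raw = get_instance_userdata()            # raw string, '' if none
#     parsed = get_instance_userdata(sep='|')  # 'a=1|b=2' -> {'a': '1', 'b': '2'}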

ISO8601 = '%Y-%m-%dT%H:%M:%SZ'
ISO8601_MS = '%Y-%m-%dT%H:%M:%S.%fZ'
RFC1123 = '%a, %d %b %Y %H:%M:%S %Z'


def get_ts(ts=None):
    if not ts:
        ts = time.gmtime()
    return time.strftime(ISO8601, ts)


def parse_ts(ts):
    ts = ts.strip()
    try:
        dt = datetime.datetime.strptime(ts, ISO8601)
        return dt
    except ValueError:
        try:
            dt = datetime.datetime.strptime(ts, ISO8601_MS)
            return dt
        except ValueError:
            dt = datetime.datetime.strptime(ts, RFC1123)
            return dt
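
# Usage sketch (illustrative timestamps): parse_ts tries ISO 8601 first, then
# the microsecond variant, then RFC 1123.
#
#     parse_ts('2013-08-27T20:58:23Z')           # basic ISO 8601
#     parse_ts('2013-08-27T20:58:23.123Z')       # ISO 8601 with microseconds
#     parse_ts('Tue, 27 Aug 2013 20:58:23 GMT')  # RFC 1123 fallback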

def find_class(module_name, class_name=None):
    if class_name:
        module_name = "%s.%s" % (module_name, class_name)
    modules = module_name.split('.')
    c = None

    try:
        for m in modules[1:]:
            if c:
                c = getattr(c, m)
            else:
                c = getattr(__import__(".".join(modules[0:-1])), m)
        return c
    except:
        return None


def update_dme(username, password, dme_id, ip_address):
    """
    Update your Dynamic DNS record with DNSMadeEasy.com
    """
    dme_url = 'https://www.dnsmadeeasy.com/servlet/updateip'
    dme_url += '?username=%s&password=%s&id=%s&ip=%s'
    s = urllib2.urlopen(dme_url % (username, password, dme_id, ip_address))
    return s.read()

def fetch_file(uri, file=None, username=None, password=None):
    """
    Fetch a file based on the URI provided. If you do not pass in a file
    pointer, a tempfile.NamedTemporaryFile is returned, or None if the file
    could not be retrieved.
    The URI can be either an HTTP url, or "s3://bucket_name/key_name"
    """
    boto.log.info('Fetching %s' % uri)
    if file == None:
        file = tempfile.NamedTemporaryFile()
    try:
        if uri.startswith('s3://'):
            bucket_name, key_name = uri[len('s3://'):].split('/', 1)
            c = boto.connect_s3(aws_access_key_id=username,
                                aws_secret_access_key=password)
            bucket = c.get_bucket(bucket_name)
            key = bucket.get_key(key_name)
            key.get_contents_to_file(file)
        else:
            if username and password:
                passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
                passman.add_password(None, uri, username, password)
                authhandler = urllib2.HTTPBasicAuthHandler(passman)
                opener = urllib2.build_opener(authhandler)
                urllib2.install_opener(opener)
            s = urllib2.urlopen(uri)
            file.write(s.read())
        file.seek(0)
    except:
        raise
        boto.log.exception('Problem Retrieving file: %s' % uri)
        file = None
    return file
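
# Usage sketch (illustrative; the bucket, key and credential variables are
# placeholders): fetch_file accepts an HTTP(S) URL or an s3:// URI.
#
#     fp = fetch_file('s3://my-bucket/config/app.cfg',
#                     username=access_key, password=secret_key)
#     if fp is not None:
#         data = fp.read()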

class ShellCommand(object):
    def __init__(self, command, wait=True, fail_fast=False, cwd=None):
        self.exit_code = 0
        self.command = command
        self.log_fp = StringIO.StringIO()
        self.wait = wait
        self.fail_fast = fail_fast
        self.run(cwd=cwd)

    def run(self, cwd=None):
        boto.log.info('running:%s' % self.command)
        self.process = subprocess.Popen(self.command, shell=True,
                                        stdin=subprocess.PIPE,
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.PIPE,
                                        cwd=cwd)
        if(self.wait):
            while self.process.poll() == None:
                time.sleep(1)
                t = self.process.communicate()
                self.log_fp.write(t[0])
                self.log_fp.write(t[1])
            boto.log.info(self.log_fp.getvalue())
            self.exit_code = self.process.returncode

            if self.fail_fast and self.exit_code != 0:
                raise Exception("Command " + self.command + " failed with status " + self.exit_code)

            return self.exit_code

    def setReadOnly(self, value):
        raise AttributeError

    def getStatus(self):
        return self.exit_code

    status = property(getStatus, setReadOnly, None, 'The exit code for the command')

    def getOutput(self):
        return self.log_fp.getvalue()

    output = property(getOutput, setReadOnly, None, 'The STDIN and STDERR output of the command')
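
# Usage sketch (illustrative; the command string is an example): the command
# runs on construction, and with wait=True the exit code and captured output
# are available afterwards.
#
#     cmd = ShellCommand('ls -l /tmp')
#     if cmd.status == 0:
#         print cmd.output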

class AuthSMTPHandler(logging.handlers.SMTPHandler):
    """
    This class extends the SMTPHandler in the standard Python logging module
    to accept a username and password on the constructor and to then use those
    credentials to authenticate with the SMTP server. To use this, you could
    add something like this in your boto config file:

    [handler_hand07]
    class=boto.utils.AuthSMTPHandler
    level=WARN
    formatter=form07
    args=('localhost', 'username', 'password', 'from@abc', ['user1@abc', 'user2@xyz'], 'Logger Subject')
    """

    def __init__(self, mailhost, username, password,
                 fromaddr, toaddrs, subject):
        """
        Initialize the handler.

        We have extended the constructor to accept a username/password
        for SMTP authentication.
        """
        logging.handlers.SMTPHandler.__init__(self, mailhost, fromaddr,
                                              toaddrs, subject)
        self.username = username
        self.password = password

    def emit(self, record):
        """
        Emit a record.

        Format the record and send it to the specified addressees.
        It would be really nice if I could add authorization to this class
        without having to resort to cut and paste inheritance but, no.
        """
        try:
            port = self.mailport
            if not port:
                port = smtplib.SMTP_PORT
            smtp = smtplib.SMTP(self.mailhost, port)
            smtp.login(self.username, self.password)
            msg = self.format(record)
            msg = "From: %s\r\nTo: %s\r\nSubject: %s\r\nDate: %s\r\n\r\n%s" % (
                self.fromaddr,
                ','.join(self.toaddrs),
                self.getSubject(record),
                email.utils.formatdate(), msg)
            smtp.sendmail(self.fromaddr, self.toaddrs, msg)
            smtp.quit()
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            self.handleError(record)

class LRUCache(dict):
    """A dictionary-like object that stores only a certain number of items, and
    discards its least recently used item when full.

    >>> cache = LRUCache(3)
    >>> cache['A'] = 0
    >>> cache['B'] = 1
    >>> cache['C'] = 2
    >>> len(cache)
    3

    >>> cache['A']
    0

    Adding new items to the cache does not increase its size. Instead, the least
    recently used item is dropped:

    >>> cache['D'] = 3
    >>> len(cache)
    3
    >>> 'B' in cache
    False

    Iterating over the cache returns the keys, starting with the most recently
    used:

    >>> for key in cache:
    ...     print key
    D
    A
    C

    This code is based on the LRUCache class from Genshi which is based on
    `Myghty <http://www.myghty.org>`_'s LRUCache from ``myghtyutils.util``,
    written by Mike Bayer and released under the MIT license (Genshi uses the
    BSD License).
    """

    class _Item(object):
        def __init__(self, key, value):
            self.previous = self.next = None
            self.key = key
            self.value = value

        def __repr__(self):
            return repr(self.value)

    def __init__(self, capacity):
        self._dict = dict()
        self.capacity = capacity
        self.head = None
        self.tail = None

    def __contains__(self, key):
        return key in self._dict

    def __iter__(self):
        cur = self.head
        while cur:
            yield cur.key
            cur = cur.next

    def __len__(self):
        return len(self._dict)

    def __getitem__(self, key):
        item = self._dict[key]
        self._update_item(item)
        return item.value

    def __setitem__(self, key, value):
        item = self._dict.get(key)
        if item is None:
            item = self._Item(key, value)
            self._dict[key] = item
            self._insert_item(item)
        else:
            item.value = value
            self._update_item(item)
            self._manage_size()

    def __repr__(self):
        return repr(self._dict)

    def _insert_item(self, item):
        item.previous = None
        item.next = self.head
        if self.head is not None:
            self.head.previous = item
        else:
            self.tail = item
        self.head = item
        self._manage_size()

    def _manage_size(self):
        while len(self._dict) > self.capacity:
            del self._dict[self.tail.key]
            if self.tail != self.head:
                self.tail = self.tail.previous
                self.tail.next = None
            else:
                self.head = self.tail = None

    def _update_item(self, item):
        if self.head == item:
            return

        previous = item.previous
        previous.next = item.next
        if item.next is not None:
            item.next.previous = previous
        else:
            self.tail = previous

        item.previous = None
        item.next = self.head
        self.head.previous = self.head = item

class Password(object):
    """
    Password object that stores itself as hashed.
    Hash defaults to SHA512 if available, MD5 otherwise.
    """
    hashfunc = _hashfn

    def __init__(self, str=None, hashfunc=None):
        """
        Load the string from an initial value, this should be the
        raw hashed password.
        """
        self.str = str
        if hashfunc:
            self.hashfunc = hashfunc

    def set(self, value):
        self.str = self.hashfunc(value).hexdigest()

    def __str__(self):
        return str(self.str)

    def __eq__(self, other):
        if other == None:
            return False
        return str(self.hashfunc(other).hexdigest()) == str(self.str)

    def __len__(self):
        if self.str:
            return len(self.str)
        else:
            return 0
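
# Usage sketch (illustrative): Password stores only the hex digest, and
# __eq__ hashes the candidate string before comparing.
#
#     p = Password()
#     p.set('secret')
#     p == 'secret'      # True
#     p == 'not-secret'  # False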

def notify(subject, body=None, html_body=None, to_string=None,
           attachments=None, append_instance_id=True):
    attachments = attachments or []
    if append_instance_id:
        subject = "[%s] %s" % (boto.config.get_value("Instance", "instance-id"), subject)
    if not to_string:
        to_string = boto.config.get_value('Notification', 'smtp_to', None)
    if to_string:
        try:
            from_string = boto.config.get_value('Notification', 'smtp_from', 'boto')
            msg = email.mime.multipart.MIMEMultipart()
            msg['From'] = from_string
            msg['Reply-To'] = from_string
            msg['To'] = to_string
            msg['Date'] = email.utils.formatdate(localtime=True)
            msg['Subject'] = subject

            if body:
                msg.attach(email.mime.text.MIMEText(body))

            if html_body:
                part = email.mime.base.MIMEBase('text', 'html')
                part.set_payload(html_body)
                email.encoders.encode_base64(part)
                msg.attach(part)

            for part in attachments:
                msg.attach(part)

            smtp_host = boto.config.get_value('Notification', 'smtp_host', 'localhost')

            # Alternate port support
            if boto.config.get_value("Notification", "smtp_port"):
                server = smtplib.SMTP(smtp_host, int(boto.config.get_value("Notification", "smtp_port")))
            else:
                server = smtplib.SMTP(smtp_host)

            # TLS support
            if boto.config.getbool("Notification", "smtp_tls"):
                server.ehlo()
                server.starttls()
                server.ehlo()
            smtp_user = boto.config.get_value('Notification', 'smtp_user', '')
            smtp_pass = boto.config.get_value('Notification', 'smtp_pass', '')
            if smtp_user:
                server.login(smtp_user, smtp_pass)
            server.sendmail(from_string, to_string, msg.as_string())
            server.quit()
        except:
            boto.log.exception('notify failed')

def get_utf8_value(value):
    if not isinstance(value, str) and not isinstance(value, unicode):
        value = str(value)
    if isinstance(value, unicode):
        return value.encode('utf-8')
    else:
        return value


def mklist(value):
    if not isinstance(value, list):
        if isinstance(value, tuple):
            value = list(value)
        else:
            value = [value]
    return value


def pythonize_name(name):
    """Convert camel case to a "pythonic" name.

    Examples::

        pythonize_name('CamelCase') -> 'camel_case'
        pythonize_name('already_pythonized') -> 'already_pythonized'
        pythonize_name('HTTPRequest') -> 'http_request'
        pythonize_name('HTTPStatus200Ok') -> 'http_status_200_ok'
        pythonize_name('UPPER') -> 'upper'
        pythonize_name('') -> ''

    """
    s1 = _first_cap_regex.sub(r'\1_\2', name)
    s2 = _number_cap_regex.sub(r'\1_\2', s1)
    return _end_cap_regex.sub(r'\1_\2', s2).lower()

def write_mime_multipart(content, compress=False, deftype='text/plain', delimiter=':'):
    """Description:
    :param content: A list of tuples of name-content pairs. This is used
                    instead of a dict to ensure that scripts run in order
    :type list of tuples:

    :param compress: Use gzip to compress the scripts, defaults to no compression
    :type bool:

    :param deftype: The type that should be assumed if nothing else can be figured out
    :type str:

    :param delimiter: mime delimiter
    :type str:

    :return: Final mime multipart
    :rtype: str:
    """
    wrapper = email.mime.multipart.MIMEMultipart()
    for name, con in content:
        definite_type = guess_mime_type(con, deftype)
        maintype, subtype = definite_type.split('/', 1)
        if maintype == 'text':
            mime_con = email.mime.text.MIMEText(con, _subtype=subtype)
        else:
            mime_con = email.mime.base.MIMEBase(maintype, subtype)
            mime_con.set_payload(con)
            # Encode the payload using Base64
            email.encoders.encode_base64(mime_con)
        mime_con.add_header('Content-Disposition', 'attachment', filename=name)
        wrapper.attach(mime_con)
    rcontent = wrapper.as_string()

    if compress:
        buf = StringIO.StringIO()
        gz = gzip.GzipFile(mode='wb', fileobj=buf)
        try:
            gz.write(rcontent)
        finally:
            gz.close()
        rcontent = buf.getvalue()

    return rcontent
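
# Usage sketch (illustrative; the part names and script bodies are made up):
# building a cloud-init style multipart user-data blob from two parts.
#
#     parts = [('boot.sh', '#!/bin/bash\necho hello'),
#              ('config', '#cloud-config\nhostname: example')]
#     user_data = write_mime_multipart(parts, compress=True)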

def guess_mime_type(content, deftype):
    """Description: Guess the mime type of a block of text
    :param content: content we're finding the type of
    :type str:

    :param deftype: Default mime type
    :type str:

    :rtype: str:
    :return: The guessed MIME type of the content, or deftype if no
             known prefix matches
    """
    # Mappings recognized by cloudinit
    starts_with_mappings = {
        '#include': 'text/x-include-url',
        '#!': 'text/x-shellscript',
        '#cloud-config': 'text/cloud-config',
        '#upstart-job': 'text/upstart-job',
        '#part-handler': 'text/part-handler',
        '#cloud-boothook': 'text/cloud-boothook'
    }
    rtype = deftype
    for possible_type, mimetype in starts_with_mappings.items():
        if content.startswith(possible_type):
            rtype = mimetype
            break
    return(rtype)

def compute_md5(fp, buf_size=8192, size=None):
    """
    Compute MD5 hash on passed file and return results in a tuple of values.

    :type fp: file
    :param fp: File pointer to the file to MD5 hash. The file pointer
               will be reset to its current location before the
               method returns.

    :type buf_size: integer
    :param buf_size: Number of bytes per read request.

    :type size: int
    :param size: (optional) The Maximum number of bytes to read from
                 the file pointer (fp). This is useful when uploading
                 a file in multiple parts where the file is being
                 split inplace into different parts. Less bytes may
                 be available.

    :rtype: tuple
    :return: A tuple containing the hex digest version of the MD5 hash
             as the first element, the base64 encoded version of the
             plain digest as the second element and the data size as
             the third element.
    """
    return compute_hash(fp, buf_size, size, hash_algorithm=md5)

def compute_hash(fp, buf_size=8192, size=None, hash_algorithm=md5):
    hash_obj = hash_algorithm()
    spos = fp.tell()
    if size and size < buf_size:
        s = fp.read(size)
    else:
        s = fp.read(buf_size)
    while s:
        hash_obj.update(s)
        if size:
            size -= len(s)
            if size <= 0:
                break
        if size and size < buf_size:
            s = fp.read(size)
        else:
            s = fp.read(buf_size)
    hex_digest = hash_obj.hexdigest()
    base64_digest = base64.encodestring(hash_obj.digest())
    if base64_digest[-1] == '\n':
        base64_digest = base64_digest[0:-1]
    # data_size based on bytes read.
    data_size = fp.tell() - spos
    fp.seek(spos)
    return (hex_digest, base64_digest, data_size)
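
# Usage sketch (illustrative; the file name is an example): compute_md5
# returns (hex_digest, base64_digest, data_size) and restores the file
# pointer to where it started.
#
#     fp = open('/tmp/example.dat', 'rb')
#     hex_md5, b64_md5, size = compute_md5(fp)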

def find_matching_headers(name, headers):
    """
    Takes a specific header name and a dict of headers {"name": "value"}.
    Returns a list of matching header names, case-insensitive.
    """
    return [h for h in headers if h.lower() == name.lower()]


def merge_headers_by_name(name, headers):
    """
    Takes a specific header name and a dict of headers {"name": "value"}.
    Returns a string of all header values, comma-separated, that match the
    input header name, case-insensitive.
    """
    matching_headers = find_matching_headers(name, headers)
    return ','.join(str(headers[h]) for h in matching_headers
                    if headers[h] is not None)
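
# Usage sketch (illustrative header names/values): merging duplicate headers
# that differ only in case.  The join order follows dict iteration order.
#
#     hdrs = {'x-amz-meta-tag': 'a', 'X-Amz-Meta-Tag': 'b'}
#     merge_headers_by_name('x-amz-meta-tag', hdrs)   # e.g. 'a,b'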