PageRenderTime 48ms CodeModel.GetById 19ms RepoModel.GetById 1ms app.codeStats 0ms

/storages/backends/s3.py

https://bitbucket.org/nguyenivan/topsnam
Python | 288 lines | 284 code | 4 blank | 0 comment | 4 complexity | 2fa506f84ea44a3a5708ffa2174dc065 MD5 | raw file
import os
import mimetypes
import warnings
# Prefer the C-accelerated StringIO; fall back to the pure-Python module
# on interpreters that lack it (this is Python 2 era code).
try:
    from cStringIO import StringIO
except ImportError:
    from StringIO import StringIO
from django.conf import settings
from django.core.files.base import File
from django.core.files.storage import Storage
from django.core.exceptions import ImproperlyConfigured
try:
    from S3 import AWSAuthConnection, QueryStringAuthGenerator, CallingFormat
except ImportError:
    raise ImproperlyConfigured("Could not load amazon's S3 bindings.\nSee "
        "http://developer.amazonwebservices.com/connect/entry.jspa?externalID=134")
# Backend configuration, all overridable via the Django settings module.
# NOTE: ACCESS_KEY_NAME / SECRET_KEY_NAME hold the key *values* read from
# settings, not environment-variable names.
ACCESS_KEY_NAME = getattr(settings, 'AWS_ACCESS_KEY_ID', None)
SECRET_KEY_NAME = getattr(settings, 'AWS_SECRET_ACCESS_KEY', None)
HEADERS = getattr(settings, 'AWS_HEADERS', {})  # extra headers sent with every PUT
DEFAULT_ACL = getattr(settings, 'AWS_DEFAULT_ACL', 'public-read')
QUERYSTRING_ACTIVE = getattr(settings, 'AWS_QUERYSTRING_ACTIVE', False)  # signed URLs on/off
QUERYSTRING_EXPIRE = getattr(settings, 'AWS_QUERYSTRING_EXPIRE', 60)  # signed-URL lifetime (seconds)
SECURE_URLS = getattr(settings, 'AWS_S3_SECURE_URLS', False)
BUCKET_PREFIX = getattr(settings, 'AWS_BUCKET_PREFIX', '')
CALLING_FORMAT = getattr(settings, 'AWS_CALLING_FORMAT', CallingFormat.PATH)
PRELOAD_METADATA = getattr(settings, 'AWS_PRELOAD_METADATA', False)
IS_GZIPPED = getattr(settings, 'AWS_IS_GZIPPED', False)
GZIP_CONTENT_TYPES = getattr(settings, 'GZIP_CONTENT_TYPES', (
    'text/css',
    'application/javascript',
    'application/x-javascript'
))
# GzipFile is only required when gzip support is switched on.
if IS_GZIPPED:
    from gzip import GzipFile
  35. class S3Storage(Storage):
  36. """Amazon Simple Storage Service"""
  37. def __init__(self, bucket=settings.AWS_STORAGE_BUCKET_NAME,
  38. access_key=None, secret_key=None, acl=DEFAULT_ACL,
  39. calling_format=CALLING_FORMAT, encrypt=False,
  40. gzip=IS_GZIPPED, gzip_content_types=GZIP_CONTENT_TYPES,
  41. preload_metadata=PRELOAD_METADATA):
  42. warnings.warn(
  43. "The s3 backend is deprecated and will be removed in version 1.2. "
  44. "Use the s3boto backend instead.",
  45. PendingDeprecationWarning
  46. )
  47. self.bucket = bucket
  48. self.acl = acl
  49. self.encrypt = encrypt
  50. self.gzip = gzip
  51. self.gzip_content_types = gzip_content_types
  52. self.preload_metadata = preload_metadata
  53. if encrypt:
  54. try:
  55. import ezPyCrypto
  56. except ImportError:
  57. raise ImproperlyConfigured("Could not load ezPyCrypto.\nSee "
  58. "http://www.freenet.org.nz/ezPyCrypto/ to install it.")
  59. self.crypto_key = ezPyCrypto.key
  60. if not access_key and not secret_key:
  61. access_key, secret_key = self._get_access_keys()
  62. self.connection = AWSAuthConnection(access_key, secret_key,
  63. calling_format=calling_format)
  64. self.generator = QueryStringAuthGenerator(access_key, secret_key,
  65. calling_format=calling_format,
  66. is_secure=SECURE_URLS)
  67. self.generator.set_expires_in(QUERYSTRING_EXPIRE)
  68. self.headers = HEADERS
  69. self._entries = {}
  70. def _get_access_keys(self):
  71. access_key = ACCESS_KEY_NAME
  72. secret_key = SECRET_KEY_NAME
  73. if (access_key or secret_key) and (not access_key or not secret_key):
  74. access_key = os.environ.get(ACCESS_KEY_NAME)
  75. secret_key = os.environ.get(SECRET_KEY_NAME)
  76. if access_key and secret_key:
  77. # Both were provided, so use them
  78. return access_key, secret_key
  79. return None, None
  80. @property
  81. def entries(self):
  82. if self.preload_metadata and not self._entries:
  83. self._entries = dict((entry.key, entry)
  84. for entry in self.connection.list_bucket(self.bucket).entries)
  85. return self._entries
  86. def _get_connection(self):
  87. return AWSAuthConnection(*self._get_access_keys())
  88. def _clean_name(self, name):
  89. # Useful for windows' paths
  90. return os.path.join(BUCKET_PREFIX, os.path.normpath(name).replace('\\', '/'))
  91. def _compress_string(self, s):
  92. """Gzip a given string."""
  93. zbuf = StringIO()
  94. zfile = GzipFile(mode='wb', compresslevel=6, fileobj=zbuf)
  95. zfile.write(s)
  96. zfile.close()
  97. return zbuf.getvalue()
  98. def _put_file(self, name, content):
  99. if self.encrypt:
  100. # Create a key object
  101. key = self.crypto_key()
  102. # Read in a public key
  103. fd = open(settings.CRYPTO_KEYS_PUBLIC, "rb")
  104. public_key = fd.read()
  105. fd.close()
  106. # import this public key
  107. key.importKey(public_key)
  108. # Now encrypt some text against this public key
  109. content = key.encString(content)
  110. content_type = mimetypes.guess_type(name)[0] or "application/x-octet-stream"
  111. if self.gzip and content_type in self.gzip_content_types:
  112. content = self._compress_string(content)
  113. self.headers.update({'Content-Encoding': 'gzip'})
  114. self.headers.update({
  115. 'x-amz-acl': self.acl,
  116. 'Content-Type': content_type,
  117. 'Content-Length' : str(len(content)),
  118. })
  119. response = self.connection.put(self.bucket, name, content, self.headers)
  120. if response.http_response.status not in (200, 206):
  121. raise IOError("S3StorageError: %s" % response.message)
  122. def _open(self, name, mode='rb'):
  123. name = self._clean_name(name)
  124. remote_file = S3StorageFile(name, self, mode=mode)
  125. return remote_file
  126. def _read(self, name, start_range=None, end_range=None):
  127. name = self._clean_name(name)
  128. if start_range is None:
  129. headers = {}
  130. else:
  131. headers = {'Range': 'bytes=%s-%s' % (start_range, end_range)}
  132. response = self.connection.get(self.bucket, name, headers)
  133. if response.http_response.status not in (200, 206):
  134. raise IOError("S3StorageError: %s" % response.message)
  135. headers = response.http_response.msg
  136. if self.encrypt:
  137. # Read in a private key
  138. fd = open(settings.CRYPTO_KEYS_PRIVATE, "rb")
  139. private_key = fd.read()
  140. fd.close()
  141. # Create a key object, and auto-import private key
  142. key = self.crypto_key(private_key)
  143. # Decrypt this file
  144. response.object.data = key.decString(response.object.data)
  145. return response.object.data, headers.get('etag', None), headers.get('content-range', None)
  146. def _save(self, name, content):
  147. name = self._clean_name(name)
  148. content.open()
  149. if hasattr(content, 'chunks'):
  150. content_str = ''.join(chunk for chunk in content.chunks())
  151. else:
  152. content_str = content.read()
  153. self._put_file(name, content_str)
  154. return name
  155. def delete(self, name):
  156. name = self._clean_name(name)
  157. response = self.connection.delete(self.bucket, name)
  158. if response.http_response.status != 204:
  159. raise IOError("S3StorageError: %s" % response.message)
  160. def exists(self, name):
  161. name = self._clean_name(name)
  162. if self.entries:
  163. return name in self.entries
  164. response = self.connection._make_request('HEAD', self.bucket, name)
  165. return response.status == 200
  166. def size(self, name):
  167. name = self._clean_name(name)
  168. if self.entries:
  169. entry = self.entries.get(name)
  170. if entry:
  171. return entry.size
  172. return 0
  173. response = self.connection._make_request('HEAD', self.bucket, name)
  174. content_length = response.getheader('Content-Length')
  175. return content_length and int(content_length) or 0
  176. def url(self, name):
  177. name = self._clean_name(name)
  178. if QUERYSTRING_ACTIVE:
  179. return self.generator.generate_url('GET', self.bucket, name)
  180. else:
  181. return self.generator.make_bare_url(self.bucket, name)
  182. def modified_time(self, name):
  183. try:
  184. from dateutil import parser, tz
  185. except ImportError:
  186. raise NotImplementedError()
  187. name = self._clean_name(name)
  188. if self.entries:
  189. last_modified = self.entries.get(name).last_modified
  190. else:
  191. response = self.connection._make_request('HEAD', self.bucket, name)
  192. last_modified = response.getheader('Last-Modified')
  193. # convert to string to date
  194. last_modified_date = parser.parse(last_modified)
  195. # if the date has no timzone, assume UTC
  196. if last_modified_date.tzinfo == None:
  197. last_modified_date = last_modified_date.replace(tzinfo=tz.tzutc())
  198. # convert date to local time w/o timezone
  199. return last_modified_date.astimezone(tz.tzlocal()).replace(tzinfo=None)
  200. ## UNCOMMENT BELOW IF NECESSARY
  201. #def get_available_name(self, name):
  202. # """ Overwrite existing file with the same name. """
  203. # name = self._clean_name(name)
  204. # return name
class PreloadingS3Storage(S3Storage):
    """Backwards-compatible alias for S3Storage.

    Metadata preloading is controlled by the AWS_PRELOAD_METADATA setting
    (or the ``preload_metadata`` constructor argument) on the base class,
    so this subclass adds nothing.
    """
    pass
  207. class S3StorageFile(File):
  208. def __init__(self, name, storage, mode):
  209. self._name = name
  210. self._storage = storage
  211. self._mode = mode
  212. self._is_dirty = False
  213. self.file = StringIO()
  214. self.start_range = 0
  215. @property
  216. def size(self):
  217. if not hasattr(self, '_size'):
  218. self._size = self._storage.size(self._name)
  219. return self._size
  220. def read(self, num_bytes=None):
  221. if num_bytes is None:
  222. args = []
  223. self.start_range = 0
  224. else:
  225. args = [self.start_range, self.start_range+num_bytes-1]
  226. data, etags, content_range = self._storage._read(self._name, *args)
  227. if content_range is not None:
  228. current_range, size = content_range.split(' ', 1)[1].split('/', 1)
  229. start_range, end_range = current_range.split('-', 1)
  230. self._size, self.start_range = int(size), int(end_range)+1
  231. self.file = StringIO(data)
  232. return self.file.getvalue()
  233. def write(self, content):
  234. if 'w' not in self._mode:
  235. raise AttributeError("File was opened for read-only access.")
  236. self.file = StringIO(content)
  237. self._is_dirty = True
  238. def close(self):
  239. if self._is_dirty:
  240. self._storage._put_file(self._name, self.file.getvalue())
  241. self.file.close()