
/thirdparty/google_appengine/google/appengine/api/blobstore/blobstore_stub.py

https://code.google.com/
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""Datastore backed Blobstore API stub.

Class:
  BlobstoreServiceStub: BlobstoreService stub backed by datastore.
"""


import base64
import os
import time
import urlparse

from google.appengine.api import apiproxy_stub
from google.appengine.api import datastore
from google.appengine.api import datastore_errors
from google.appengine.api import datastore_types
from google.appengine.api import users
from google.appengine.api import blobstore
from google.appengine.api.blobstore import blobstore_service_pb
from google.appengine.runtime import apiproxy_errors


__all__ = ['BlobStorage',
           'BlobstoreServiceStub',
           'ConfigurationError',
           'CreateUploadSession',
           'Error',
          ]


class Error(Exception):
  """Base blobstore error type."""


class ConfigurationError(Error):
  """Raised when environment is not correctly configured."""


_UPLOAD_SESSION_KIND = '__BlobUploadSession__'

_GS_INFO_KIND = '__GsFileInfo__'


def CreateUploadSession(creation,
                        success_path,
                        user,
                        max_bytes_per_blob,
                        max_bytes_total):
  """Create upload session in datastore.

  Creates an upload session and puts it in Datastore to be referenced by
  the upload handler later.

  Args:
    creation: Creation timestamp.
    success_path: Path in the user's application to call upon success.
    user: User that initiated this upload, if any.
    max_bytes_per_blob: Maximum number of bytes for any blob in the upload.
    max_bytes_total: Maximum aggregate bytes for all blobs in the upload.

  Returns:
    String encoded key of new Datastore entity.
  """
  entity = datastore.Entity(_UPLOAD_SESSION_KIND, namespace='')
  entity.update({'creation': creation,
                 'success_path': success_path,
                 'user': user,
                 'state': 'init',
                 'max_bytes_per_blob': max_bytes_per_blob,
                 'max_bytes_total': max_bytes_total})
  datastore.Put(entity)
  return str(entity.key())
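

# Illustrative sketch (not part of the original SDK file): a development-only
# helper showing how an upload handler could load the session entity created
# above by its string-encoded key and mark it consumed. The helper name and
# the 'complete' state value are assumptions for illustration; the stub itself
# only ever writes the 'init' state.
def _MarkUploadSessionComplete(session_key_str):
  """Load an upload session by its encoded key and flag it as complete."""
  session = datastore.Get(datastore_types.Key(session_key_str))
  if session['state'] == 'init':
    session['state'] = 'complete'
    datastore.Put(session)
  return session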


class BlobStorage(object):
  """Base class for defining how blobs are stored.

  This base class merely defines an interface that all stub blob-storage
  mechanisms must implement.
  """

  def StoreBlob(self, blob_key, blob_stream):
    """Store blob stream.

    Implement this method to persist blob data.

    Args:
      blob_key: Blob key of blob to store.
      blob_stream: Stream or stream-like object that will generate blob
        content.
    """
    raise NotImplementedError('Storage class must override StoreBlob method.')

  def OpenBlob(self, blob_key):
    """Open blob for streaming.

    Args:
      blob_key: Blob-key of existing blob to open for reading.

    Returns:
      Open file stream for reading blob. Caller is responsible for closing
      the file.
    """
    raise NotImplementedError('Storage class must override OpenBlob method.')

  def DeleteBlob(self, blob_key):
    """Delete blob data from storage.

    Args:
      blob_key: Blob-key of existing blob to delete.
    """
    raise NotImplementedError('Storage class must override DeleteBlob method.')
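

# Illustrative sketch (not part of the original SDK file): a minimal in-memory
# BlobStorage implementation of the interface above, the kind of object a unit
# test might hand to BlobstoreServiceStub. The class name is an assumption for
# illustration. CreateBlob is included only because BlobstoreServiceStub's
# CreateBlob helper below delegates to storage.CreateBlob.
import StringIO  # Used only by this illustrative sketch.


class _InMemoryBlobStorage(BlobStorage):
  """Keeps blob contents in a plain dict keyed by string blob key."""

  def __init__(self):
    self._blobs = {}

  def StoreBlob(self, blob_key, blob_stream):
    self._blobs[str(blob_key)] = blob_stream.read()

  def OpenBlob(self, blob_key):
    return StringIO.StringIO(self._blobs[str(blob_key)])

  def DeleteBlob(self, blob_key):
    self._blobs.pop(str(blob_key), None)

  def CreateBlob(self, blob_key, blob):
    self._blobs[str(blob_key)] = blob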


class BlobstoreServiceStub(apiproxy_stub.APIProxyStub):
  """Datastore backed Blobstore service stub.

  This stub stores and manages upload sessions in the Datastore and must be
  provided with a blob_storage object to know where the actual blob
  records can be found after having been uploaded.

  This stub does not handle the actual creation of blobs, neither the BlobInfo
  in the Datastore nor the creation of blob data in the blob_storage. It does,
  however, assume that another part of the system has created these and
  uses these objects for deletion.

  An upload session is created when the CreateUploadURL request is handled and
  put in the Datastore under the __BlobUploadSession__ kind. There is no
  analog for this kind on a production server. Other than creation, this stub
  does not work with session objects. The URLs created by this service stub
  are:

    http://<appserver-host>:<appserver-port>/<uploader-path>/<session-info>

  This is very similar to what the URL is on a production server. The session
  info is the string encoded version of the session entity.
  """

  _ACCEPTS_REQUEST_ID = True

  GS_BLOBKEY_PREFIX = 'encoded_gs_file:'

  def __init__(self,
               blob_storage,
               time_function=time.time,
               service_name='blobstore',
               uploader_path='_ah/upload/'):
    """Constructor.

    Args:
      blob_storage: BlobStorage class instance used for blob storage.
      time_function: Used for dependency injection in tests.
      service_name: Service name expected for all calls.
      uploader_path: Path to upload handler pointed to by URLs generated
        by this service stub.
    """
    super(BlobstoreServiceStub, self).__init__(service_name)
    self.__storage = blob_storage
    self.__time_function = time_function
    self.__next_session_id = 1
    self.__uploader_path = uploader_path

  @property
  def storage(self):
    """Access BlobStorage used by service stub.

    Returns:
      BlobStorage instance used by blobstore service stub.
    """
    return self.__storage

  def _GetEnviron(self, name):
    """Helper method that ensures the environment is configured as expected.

    Args:
      name: Name of environment variable to get.

    Returns:
      Environment variable associated with name.

    Raises:
      ConfigurationError if required environment variable is not found.
    """
    try:
      return os.environ[name]
    except KeyError:
      raise ConfigurationError('%s is not set in environment.' % name)

  def _CreateSession(self,
                     success_path,
                     user,
                     max_bytes_per_blob=None,
                     max_bytes_total=None):
    """Create new upload session.

    Args:
      success_path: Application path to call upon successful POST.
      user: User that initiated the upload session.
      max_bytes_per_blob: Maximum number of bytes for any blob in the upload.
      max_bytes_total: Maximum aggregate bytes for all blobs in the upload.

    Returns:
      String encoded key of a new upload session created in the datastore.
    """
    return CreateUploadSession(self.__time_function(),
                               success_path,
                               user,
                               max_bytes_per_blob,
                               max_bytes_total)

  def _Dynamic_CreateUploadURL(self, request, response, request_id):
    """Create upload URL implementation.

    Create a new upload session. The upload session key is encoded in the
    resulting POST URL. This URL is embedded in a POST form by the application
    which contacts the uploader when the user posts.

    Args:
      request: A fully initialized CreateUploadURLRequest instance.
      response: A CreateUploadURLResponse instance.
      request_id: A unique string identifying the request associated with the
        API call.
    """
    max_bytes_per_blob = None
    max_bytes_total = None

    if request.has_max_upload_size_per_blob_bytes():
      max_bytes_per_blob = request.max_upload_size_per_blob_bytes()

    if request.has_max_upload_size_bytes():
      max_bytes_total = request.max_upload_size_bytes()

    session = self._CreateSession(request.success_path(),
                                  users.get_current_user(),
                                  max_bytes_per_blob,
                                  max_bytes_total)

    protocol, host, _, _, _, _ = urlparse.urlparse(
        self.request_data.get_request_url(request_id))

    response.set_url('%s://%s/%s%s' % (protocol, host, self.__uploader_path,
                                       session))

  def _Dynamic_DeleteBlob(self, request, response, unused_request_id):
    """Delete a blob by its blob-key.

    Delete a blob from the blobstore using its blob-key. Deleting blobs that
    do not exist is a no-op.

    Args:
      request: A fully initialized DeleteBlobRequest instance.
      response: Not used but should be a VoidProto.
    """
    for blob_key in request.blob_key_list():
      if blob_key.startswith(self.GS_BLOBKEY_PREFIX):
        key = datastore_types.Key.from_path(_GS_INFO_KIND,
                                            str(blob_key),
                                            namespace='')
      else:
        key = datastore_types.Key.from_path(blobstore.BLOB_INFO_KIND,
                                            str(blob_key),
                                            namespace='')

      datastore.Delete(key)
      self.__storage.DeleteBlob(blob_key)

  def _Dynamic_FetchData(self, request, response, unused_request_id):
    """Fetch a blob fragment from a blob by its blob-key.

    Fetches a blob fragment using its blob-key. Start index is inclusive,
    end index is inclusive. Valid requests for information outside of
    the range of the blob return a partial string, or an empty string if
    entirely out of range.

    Args:
      request: A fully initialized FetchDataRequest instance.
      response: A FetchDataResponse instance.

    Raises:
      ApplicationError when application has the following errors:
        INDEX_OUT_OF_RANGE: Index is negative or end < start.
        BLOB_FETCH_SIZE_TOO_LARGE: Requested blob fragment is larger than
          MAX_BLOB_FETCH_SIZE.
        BLOB_NOT_FOUND: If an invalid blob-key is provided or it is not found.
    """
    start_index = request.start_index()
    if start_index < 0:
      raise apiproxy_errors.ApplicationError(
          blobstore_service_pb.BlobstoreServiceError.DATA_INDEX_OUT_OF_RANGE)

    end_index = request.end_index()
    if end_index < start_index:
      raise apiproxy_errors.ApplicationError(
          blobstore_service_pb.BlobstoreServiceError.DATA_INDEX_OUT_OF_RANGE)

    fetch_size = end_index - start_index + 1
    if fetch_size > blobstore.MAX_BLOB_FETCH_SIZE:
      raise apiproxy_errors.ApplicationError(
          blobstore_service_pb.BlobstoreServiceError.BLOB_FETCH_SIZE_TOO_LARGE)

    blob_key = request.blob_key()
    blob_info_key = datastore.Key.from_path(blobstore.BLOB_INFO_KIND,
                                            blob_key,
                                            namespace='')
    try:
      datastore.Get(blob_info_key)
    except datastore_errors.EntityNotFoundError:
      raise apiproxy_errors.ApplicationError(
          blobstore_service_pb.BlobstoreServiceError.BLOB_NOT_FOUND)

    blob_file = self.__storage.OpenBlob(blob_key)
    blob_file.seek(start_index)
    response.set_data(blob_file.read(fetch_size))
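
  # Illustrative note (not part of the original SDK file): because both
  # indexes are inclusive, a request with start_index=0 and end_index=5 reads
  # bytes 0..5 of the blob, i.e. fetch_size = 5 - 0 + 1 = 6, and every request
  # must satisfy fetch_size <= blobstore.MAX_BLOB_FETCH_SIZE.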

  def _Dynamic_DecodeBlobKey(self, request, response, unused_request_id):
    """Decode a given blob key: data is simply base64-decoded.

    Args:
      request: A fully-initialized DecodeBlobKeyRequest instance.
      response: A DecodeBlobKeyResponse instance.
    """
    for blob_key in request.blob_key_list():
      response.add_decoded(blob_key.decode('base64'))

  def _Dynamic_CreateEncodedGoogleStorageKey(self, request, response,
                                             unused_request_id):
    """Create an encoded blob key that represents a bigstore file.

    For now we'll just base64 encode the bigstore filename. APIs that accept
    encoded blob keys will need to be able to support Google Storage files or
    blobstore files based on decoding this key.

    Args:
      request: A fully-initialized CreateEncodedGoogleStorageKeyRequest
        instance.
      response: A CreateEncodedGoogleStorageKeyResponse instance.
    """
    filename = request.filename()
    response.set_blob_key(self.GS_BLOBKEY_PREFIX +
                          base64.urlsafe_b64encode(filename))
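
  # Illustrative note (not part of the original SDK file): the encoded key is
  # just the prefixed urlsafe base64 of the Google Storage filename. With the
  # hypothetical filename '/gs/some_bucket/some_object':
  #
  #   base64.urlsafe_b64encode('/gs/some_bucket/some_object')
  #   == 'L2dzL3NvbWVfYnVja2V0L3NvbWVfb2JqZWN0'
  #
  # so the resulting blob key would be
  # 'encoded_gs_file:L2dzL3NvbWVfYnVja2V0L3NvbWVfb2JqZWN0'.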

  def CreateBlob(self, blob_key, content):
    """Create a new blob and put it in storage and the Datastore.

    This is useful in testing where you have access to the stub.

    Args:
      blob_key: String blob-key of new blob.
      content: Content of new blob as a string.

    Returns:
      New Datastore entity without blob meta-data fields.
    """
    entity = datastore.Entity(blobstore.BLOB_INFO_KIND,
                              name=blob_key, namespace='')
    entity['size'] = len(content)
    datastore.Put(entity)
    self.storage.CreateBlob(blob_key, content)
    return entity
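

# Illustrative sketch (not part of the original SDK file): how a test might
# wire the stub together with the hypothetical _InMemoryBlobStorage defined
# above. Note that CreateBlob also writes a __BlobInfo__ entity, so a
# datastore stub must be registered as well (e.g. via
# google.appengine.ext.testbed) before running this.
#
#   from google.appengine.api import apiproxy_stub_map
#
#   storage = _InMemoryBlobStorage()
#   stub = BlobstoreServiceStub(storage)
#   apiproxy_stub_map.apiproxy.RegisterStub('blobstore', stub)
#
#   entity = stub.CreateBlob('my-blob-key', 'hello world')   # entity['size'] == 11
#   print stub.storage.OpenBlob('my-blob-key').read()        # 'hello world'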