# App Engine Python SDK version 1.8.9
# python/google/appengine/api/blobstore/blobstore_stub.py
1 #!/usr/bin/env python
3 # Copyright 2007 Google Inc.
5 # Licensed under the Apache License, Version 2.0 (the "License");
6 # you may not use this file except in compliance with the License.
7 # You may obtain a copy of the License at
9 # http://www.apache.org/licenses/LICENSE-2.0
11 # Unless required by applicable law or agreed to in writing, software
12 # distributed under the License is distributed on an "AS IS" BASIS,
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
21 """Datastore backed Blobstore API stub.
23 Class:
24 BlobstoreServiceStub: BlobstoreService stub backed by datastore.
25 """
36 import base64
37 import os
38 import time
39 import urlparse
41 from google.appengine.api import apiproxy_stub
42 from google.appengine.api import blobstore
43 from google.appengine.api import datastore
44 from google.appengine.api import datastore_errors
45 from google.appengine.api import datastore_types
46 from google.appengine.api import users
47 from google.appengine.api.blobstore import blobstore_service_pb
48 from google.appengine.runtime import apiproxy_errors
# Public API of this module. NOTE: the closing bracket was missing in the
# damaged source; restored here.
__all__ = ['BlobStorage',
           'BlobstoreServiceStub',
           'ConfigurationError',
           'CreateUploadSession',
           'Error',
          ]
class Error(Exception):
  """Base class for all errors raised by the blobstore stub."""
class ConfigurationError(Error):
  """Signals that a required environment variable is missing or wrong."""
# Datastore kind used to persist upload sessions created by
# CreateUploadURL. There is no analog for this kind in production.
_UPLOAD_SESSION_KIND = '__BlobUploadSession__'

# Datastore kind holding metadata entities for blobs that represent
# Google Storage files (see BlobstoreServiceStub.ToDatastoreBlobKey).
_GS_INFO_KIND = '__GsFileInfo__'
def CreateUploadSession(creation,
                        success_path,
                        user,
                        max_bytes_per_blob,
                        max_bytes_total,
                        bucket_name=None):
  """Persist a new upload session entity in the Datastore.

  The entity is referenced later by the dev-appserver upload handler when
  the user actually POSTs the files.

  Args:
    creation: Creation timestamp.
    success_path: Path in the user's application to call upon success.
    user: User that initiated this upload, if any.
    max_bytes_per_blob: Maximum number of bytes for any blob in the upload.
    max_bytes_total: Maximum aggregate bytes for all blobs in the upload.
    bucket_name: Name of the Google Storage bucket to upload the files to.

  Returns:
    String encoded key of the new Datastore entity.
  """
  session = datastore.Entity(_UPLOAD_SESSION_KIND, namespace='')
  session['creation'] = creation
  session['success_path'] = success_path
  session['user'] = user
  session['state'] = 'init'
  session['max_bytes_per_blob'] = max_bytes_per_blob
  session['max_bytes_total'] = max_bytes_total
  # Only record a bucket when one was explicitly requested.
  if bucket_name:
    session['gs_bucket_name'] = bucket_name
  datastore.Put(session)
  return str(session.key())
class BlobStorage(object):
  """Base class for defining how blobs are stored.

  This base class merely defines an interface that all stub blob-storage
  mechanisms must implement.
  """

  def StoreBlob(self, blob_key, blob_stream):
    """Store blob stream.

    Implement this method to persist blob data.

    Args:
      blob_key: Blob key of blob to store.
      blob_stream: Stream or stream-like object that will generate blob
        content.

    Raises:
      NotImplementedError: Always; subclasses must override this method.
    """
    raise NotImplementedError('Storage class must override StoreBlob method.')

  def OpenBlob(self, blob_key):
    """Open blob for streaming.

    Args:
      blob_key: Blob-key of existing blob to open for reading.

    Returns:
      Open file stream for reading blob. Caller is responsible for closing
      the file.

    Raises:
      NotImplementedError: Always; subclasses must override this method.
    """
    raise NotImplementedError('Storage class must override OpenBlob method.')

  def DeleteBlob(self, blob_key):
    """Delete blob data from storage.

    Args:
      blob_key: Blob-key of existing blob to delete.

    Raises:
      NotImplementedError: Always; subclasses must override this method.
    """
    raise NotImplementedError('Storage class must override DeleteBlob method.')
class BlobstoreServiceStub(apiproxy_stub.APIProxyStub):
  """Datastore backed Blobstore service stub.

  This stub manages upload sessions in the Datastore and must be
  provided with a blob_storage object to know where the actual blob
  records can be found after having been uploaded.

  This stub does not handle the actual creation of blobs, neither the BlobInfo
  in the Datastore nor creation of blob data in the blob_storage. It does,
  however, assume that another part of the system has created these and
  uses these objects for deletion.

  An upload session is created when the CreateUploadURL request is handled and
  put in the Datastore under the __BlobUploadSession__ kind. There is no
  analog for this kind on a production server. Other than creation, this stub
  does not work with session objects. The URLs created by this service stub
  are:

    http://<appserver-host>:<appserver-port>/<uploader-path>/<session-info>

  This is very similar to what the URL is on a production server. The session
  info is the string encoded version of the session entity.
  """

  # Ask APIProxyStub to pass a request_id into each _Dynamic_* method.
  _ACCEPTS_REQUEST_ID = True

  # Encoded blob keys carrying this prefix refer to Google Storage files
  # rather than regular blobstore blobs (see ToDatastoreBlobKey).
  GS_BLOBKEY_PREFIX = 'encoded_gs_file:'

  def __init__(self,
               blob_storage,
               time_function=time.time,
               service_name='blobstore',
               uploader_path='_ah/upload/',
               request_data=None):
    """Constructor.

    Args:
      blob_storage: BlobStorage class instance used for blob storage.
      time_function: Used for dependency injection in tests.
      service_name: Service name expected for all calls.
      uploader_path: Path to upload handler pointed to by URLs generated
        by this service stub.
      request_data: A apiproxy_stub.RequestData instance used to look up state
        associated with the request that generated an API call.
    """
    super(BlobstoreServiceStub, self).__init__(service_name,
                                               request_data=request_data)
    self.__storage = blob_storage
    self.__time_function = time_function
    # NOTE(review): __next_session_id is never read in the code visible in
    # this file — presumably a leftover from an earlier naming scheme.
    self.__next_session_id = 1
    self.__uploader_path = uploader_path

  @classmethod
  def ToDatastoreBlobKey(cls, blobkey):
    """Given a string blobkey, return its db.Key.

    Keys starting with GS_BLOBKEY_PREFIX map to the __GsFileInfo__ kind;
    all other keys map to the regular BlobInfo kind.
    """
    kind = blobstore.BLOB_INFO_KIND
    if blobkey.startswith(cls.GS_BLOBKEY_PREFIX):
      kind = _GS_INFO_KIND
    return datastore_types.Key.from_path(kind,
                                         blobkey,
                                         namespace='')

  @property
  def storage(self):
    """Access BlobStorage used by service stub.

    Returns:
      BlobStorage instance used by blobstore service stub.
    """
    return self.__storage

  def _GetEnviron(self, name):
    """Helper method ensures environment configured as expected.

    Args:
      name: Name of environment variable to get.

    Returns:
      Environment variable associated with name.

    Raises:
      ConfigurationError if required environment variable is not found.
    """
    try:
      return os.environ[name]
    except KeyError:
      raise ConfigurationError('%s is not set in environment.' % name)

  def _CreateSession(self,
                     success_path,
                     user,
                     max_bytes_per_blob=None,
                     max_bytes_total=None,
                     bucket_name=None):
    """Create new upload session.

    Args:
      success_path: Application path to call upon successful POST.
      user: User that initiated the upload session.
      max_bytes_per_blob: Maximum number of bytes for any blob in the upload.
      max_bytes_total: Maximum aggregate bytes for all blobs in the upload.
      bucket_name: The name of the Cloud Storage bucket where the files will
        be uploaded.

    Returns:
      String encoded key of a new upload session created in the datastore.
    """
    return CreateUploadSession(self.__time_function(),
                               success_path,
                               user,
                               max_bytes_per_blob,
                               max_bytes_total,
                               bucket_name)

  def _Dynamic_CreateUploadURL(self, request, response, request_id):
    """Create upload URL implementation.

    Create a new upload session. The upload session key is encoded in the
    resulting POST URL. This URL is embedded in a POST form by the application
    which contacts the uploader when the user posts.

    Args:
      request: A fully initialized CreateUploadURLRequest instance.
      response: A CreateUploadURLResponse instance.
      request_id: A unique string identifying the request associated with the
        API call.
    """
    max_bytes_per_blob = None
    max_bytes_total = None
    bucket_name = None

    if request.has_max_upload_size_per_blob_bytes():
      max_bytes_per_blob = request.max_upload_size_per_blob_bytes()

    if request.has_max_upload_size_bytes():
      max_bytes_total = request.max_upload_size_bytes()

    if request.has_gs_bucket_name():
      bucket_name = request.gs_bucket_name()

    session = self._CreateSession(request.success_path(),
                                  users.get_current_user(),
                                  max_bytes_per_blob,
                                  max_bytes_total,
                                  bucket_name)

    # Derive scheme and host from the URL of the request that triggered this
    # API call, so the upload URL points back at the same appserver.
    protocol, host, _, _, _, _ = urlparse.urlparse(
        self.request_data.get_request_url(request_id))

    response.set_url('%s://%s/%s%s' % (protocol, host, self.__uploader_path,
                                       session))

  @classmethod
  def DeleteBlob(cls, blobkey, storage):
    """Delete a blob.

    Args:
      blobkey: blobkey in str.
      storage: blobstore storage stub.
    """
    # Delete the info entity: a __GsFileInfo__ for Google Storage keys,
    # otherwise the regular BlobInfo entity.
    datastore.Delete(cls.ToDatastoreBlobKey(blobkey))

    # Also delete under the plain BlobInfo kind. For non-GS keys this is the
    # same key as the one deleted above.
    blobinfo = datastore_types.Key.from_path(blobstore.BLOB_INFO_KIND,
                                             blobkey,
                                             namespace='')
    datastore.Delete(blobinfo)
    storage.DeleteBlob(blobkey)

  def _Dynamic_DeleteBlob(self, request, response, unused_request_id):
    """Delete a blob by its blob-key.

    Delete a blob from the blobstore using its blob-key. Deleting blobs that
    do not exist is a no-op.

    Args:
      request: A fully initialized DeleteBlobRequest instance.
      response: Not used but should be a VoidProto.
    """
    for blobkey in request.blob_key_list():
      self.DeleteBlob(blobkey, self.__storage)

  def _Dynamic_FetchData(self, request, response, unused_request_id):
    """Fetch a blob fragment from a blob by its blob-key.

    Fetches a blob fragment using its blob-key. Start index is inclusive,
    end index is inclusive. Valid requests for information outside of
    the range of the blob return a partial string or empty string if entirely
    out of range.

    Args:
      request: A fully initialized FetchDataRequest instance.
      response: A FetchDataResponse instance.

    Raises:
      ApplicationError when application has the following errors:
        DATA_INDEX_OUT_OF_RANGE: Start index is negative or end index is
          less than start index.
        BLOB_FETCH_SIZE_TOO_LARGE: Requested blob fragment is larger than
          MAX_BLOB_FETCH_SIZE.
        BLOB_NOT_FOUND: If invalid blob-key is provided or is not found.
    """
    # The start index must be non-negative.
    start_index = request.start_index()
    if start_index < 0:
      raise apiproxy_errors.ApplicationError(
          blobstore_service_pb.BlobstoreServiceError.DATA_INDEX_OUT_OF_RANGE)

    # The end index must not precede the start index (both inclusive).
    end_index = request.end_index()
    if end_index < start_index:
      raise apiproxy_errors.ApplicationError(
          blobstore_service_pb.BlobstoreServiceError.DATA_INDEX_OUT_OF_RANGE)

    # +1 because end_index is inclusive.
    fetch_size = end_index - start_index + 1
    if fetch_size > blobstore.MAX_BLOB_FETCH_SIZE:
      raise apiproxy_errors.ApplicationError(
          blobstore_service_pb.BlobstoreServiceError.BLOB_FETCH_SIZE_TOO_LARGE)

    # Verify the blob's metadata entity exists before touching storage.
    blobkey = request.blob_key()
    info_key = self.ToDatastoreBlobKey(blobkey)
    try:
      datastore.Get(info_key)
    except datastore_errors.EntityNotFoundError:
      raise apiproxy_errors.ApplicationError(
          blobstore_service_pb.BlobstoreServiceError.BLOB_NOT_FOUND)

    # read() past end-of-file yields a short or empty string, which produces
    # the documented partial-result behavior.
    # NOTE(review): blob_file is not explicitly closed here.
    blob_file = self.__storage.OpenBlob(blobkey)
    blob_file.seek(start_index)
    response.set_data(blob_file.read(fetch_size))

  def _Dynamic_DecodeBlobKey(self, request, response, unused_request_id):
    """Decode a given blob key: data is simply base64-decoded.

    Args:
      request: A fully-initialized DecodeBlobKeyRequest instance
      response: A DecodeBlobKeyResponse instance.
    """
    for blob_key in request.blob_key_list():
      # Python 2 idiom: str.decode('base64') == base64.b64decode(blob_key).
      response.add_decoded(blob_key.decode('base64'))

  @classmethod
  def CreateEncodedGoogleStorageKey(cls, filename):
    """Create an encoded blob key that represents a Google Storage file.

    For now we'll just base64 encode the Google Storage filename, APIs that
    accept encoded blob keys will need to be able to support Google Storage
    files or blobstore files based on decoding this key.

    Any stub that creates GS files should use this function to convert
    a gs filename to a blobkey. The created blobkey should be used both
    as its _GS_FILE_INFO entity's key name and as the storage key to
    store its content in blobstore. This ensures the GS files created
    can be operated by other APIs.

    Note this encoding is easily reversible and is not encryption.

    Args:
      filename: gs filename of form 'bucket/filename'

    Returns:
      blobkey string of encoded filename.
    """
    return cls.GS_BLOBKEY_PREFIX + base64.urlsafe_b64encode(filename)

  def _Dynamic_CreateEncodedGoogleStorageKey(self, request, response,
                                             unused_request_id):
    """Create an encoded blob key that represents a Google Storage file.

    For now we'll just base64 encode the Google Storage filename, APIs that
    accept encoded blob keys will need to be able to support Google Storage
    files or blobstore files based on decoding this key.

    Args:
      request: A fully-initialized CreateEncodedGoogleStorageKeyRequest
        instance.
      response: A CreateEncodedGoogleStorageKeyResponse instance.
    """
    # Strip the Google Storage path prefix (blobstore.GS_PREFIX) so that only
    # the 'bucket/filename' part is encoded.
    filename = request.filename()[len(blobstore.GS_PREFIX):]
    response.set_blob_key(
        self.CreateEncodedGoogleStorageKey(filename))

  def CreateBlob(self, blob_key, content):
    """Create new blob and put in storage and Datastore.

    This is useful in testing where you have access to the stub.

    Args:
      blob_key: String blob-key of new blob.
      content: Content of new blob as a string.

    Returns:
      New Datastore entity without blob meta-data fields.
    """
    entity = datastore.Entity(blobstore.BLOB_INFO_KIND,
                              name=blob_key, namespace='')
    entity['size'] = len(content)
    datastore.Put(entity)
    # NOTE(review): relies on the storage object providing a CreateBlob
    # method, which is not part of the BlobStorage base interface in this
    # file — confirm the concrete storage implementation supports it.
    self.storage.CreateBlob(blob_key, content)
    return entity