3 # Copyright 2007 Google Inc.
5 # Licensed under the Apache License, Version 2.0 (the "License");
6 # you may not use this file except in compliance with the License.
7 # You may obtain a copy of the License at
9 # http://www.apache.org/licenses/LICENSE-2.0
11 # Unless required by applicable law or agreed to in writing, software
12 # distributed under the License is distributed on an "AS IS" BASIS,
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
21 """Datastore backed Blobstore API stub.
24 BlobstoreServiceStub: BlobstoreService stub backed by datastore.
41 from google
.appengine
.api
import apiproxy_stub
42 from google
.appengine
.api
import blobstore
43 from google
.appengine
.api
import datastore
44 from google
.appengine
.api
import datastore_errors
45 from google
.appengine
.api
import datastore_types
46 from google
.appengine
.api
import users
47 from google
.appengine
.api
.blobstore
import blobstore_service_pb
48 from google
.appengine
.runtime
import apiproxy_errors
# Public API of this module.
# NOTE(review): several entries were elided in the garbled source; the list
# below restores the visible names plus the module's own exception types —
# confirm against the original SDK file.
__all__ = ['BlobStorage',
           'BlobstoreServiceStub',
           'ConfigurationError',
           'CreateUploadSession',
           'Error']
class Error(Exception):
  """Base blobstore error type."""
class ConfigurationError(Error):
  """Raised when environment is not correctly configured."""
# Datastore kind used to persist upload sessions created by
# CreateUploadSession.  There is no analog for this kind on a production
# server.
_UPLOAD_SESSION_KIND = '__BlobUploadSession__'

# Datastore kind used to store metadata for Google Storage files that are
# addressed through encoded blob keys (see GS_BLOBKEY_PREFIX below).
_GS_INFO_KIND = '__GsFileInfo__'
def CreateUploadSession(creation,
                        success_path,
                        user,
                        max_bytes_per_blob,
                        max_bytes_total,
                        bucket_name=None):
  """Create upload session in datastore.

  Creates an upload session and puts it in Datastore to be referenced by
  the upload handler.

  Args:
    creation: Creation timestamp.
    success_path: Path in users application to call upon success.
    user: User that initiated this upload, if any.
    max_bytes_per_blob: Maximum number of bytes for any blob in the upload.
    max_bytes_total: Maximum aggregate bytes for all blobs in the upload.
    bucket_name: Name of the Google Storage bucket to upload the files.

  Returns:
    String encoded key of new Datastore entity.
  """
  entity = datastore.Entity(_UPLOAD_SESSION_KIND, namespace='')
  # NOTE(review): the 'user' and 'state' entries were elided in the garbled
  # source; they are restored from the documented parameters — confirm
  # against the original SDK file.
  entity_dict = {'creation': creation,
                 'success_path': success_path,
                 'user': user,
                 'state': 'init',
                 'max_bytes_per_blob': max_bytes_per_blob,
                 'max_bytes_total': max_bytes_total}
  if bucket_name:
    entity_dict['gs_bucket_name'] = bucket_name

  entity.update(entity_dict)
  datastore.Put(entity)
  return str(entity.key())
class BlobStorage(object):
  """Base class for defining how blobs are stored.

  This base class merely defines an interface that all stub blob-storage
  mechanisms must implement.
  """

  def StoreBlob(self, blob_key, blob_stream):
    """Store blob stream.

    Implement this method to persist blob data.

    Args:
      blob_key: Blob key of blob to store.
      blob_stream: Stream or stream-like object that will generate blob content.

    Raises:
      NotImplementedError: always; subclasses must override.
    """
    raise NotImplementedError('Storage class must override StoreBlob method.')

  def OpenBlob(self, blob_key):
    """Open blob for streaming.

    Args:
      blob_key: Blob-key of existing blob to open for reading.

    Returns:
      Open file stream for reading blob. Caller is responsible for closing
      the stream.

    Raises:
      NotImplementedError: always; subclasses must override.
    """
    raise NotImplementedError('Storage class must override OpenBlob method.')

  def DeleteBlob(self, blob_key):
    """Delete blob data from storage.

    Args:
      blob_key: Blob-key of existing blob to delete.

    Raises:
      NotImplementedError: always; subclasses must override.
    """
    raise NotImplementedError('Storage class must override DeleteBlob method.')
class BlobstoreServiceStub(apiproxy_stub.APIProxyStub):
  """Datastore backed Blobstore service stub.

  This stub stores manages upload sessions in the Datastore and must be
  provided with a blob_storage object to know where the actual blob
  records can be found after having been uploaded.

  This stub does not handle the actual creation of blobs, neither the BlobInfo
  in the Datastore nor creation of blob data in the blob_storage. It does,
  however, assume that another part of the system has created these and
  uses these objects for deletion.

  An upload session is created when the CreateUploadURL request is handled and
  put in the Datastore under the __BlobUploadSession__ kind. There is no
  analog for this kind on a production server. Other than creation, this stub
  not work with session objects. The URLs created by this service stub are:

    http://<appserver-host>:<appserver-port>/<uploader-path>/<session-info>

  This is very similar to what the URL is on a production server. The session
  info is the string encoded version of the session entity
  """

  # Tells the stub framework to pass the request_id into _Dynamic_* methods.
  _ACCEPTS_REQUEST_ID = True

  # Prefix marking an encoded blob key that actually addresses a Google
  # Storage file rather than a blobstore blob.
  GS_BLOBKEY_PREFIX = 'encoded_gs_file:'
176 time_function
=time
.time
,
177 service_name
='blobstore',
178 uploader_path
='_ah/upload/',
183 blob_storage: BlobStorage class instance used for blob storage.
184 time_function: Used for dependency injection in tests.
185 service_name: Service name expected for all calls.
186 uploader_path: Path to upload handler pointed to by URLs generated
187 by this service stub.
188 request_data: A apiproxy_stub.RequestData instance used to look up state
189 associated with the request that generated an API call.
191 super(BlobstoreServiceStub
, self
).__init
__(service_name
,
192 request_data
=request_data
)
193 self
.__storage
= blob_storage
194 self
.__time
_function
= time_function
195 self
.__next
_session
_id
= 1
196 self
.__uploader
_path
= uploader_path
199 def ToDatastoreBlobKey(cls
, blobkey
):
200 """Given a string blobkey, return its db.Key."""
201 kind
= blobstore
.BLOB_INFO_KIND
202 if blobkey
.startswith(cls
.GS_BLOBKEY_PREFIX
):
204 return datastore_types
.Key
.from_path(kind
,
209 """Access BlobStorage used by service stub.
212 BlobStorage instance used by blobstore service stub.
214 return self
.__storage
216 def _GetEnviron(self
, name
):
217 """Helper method ensures environment configured as expected.
220 name: Name of environment variable to get.
223 Environment variable associated with name.
226 ConfigurationError if required environment variable is not found.
229 return os
.environ
[name
]
231 raise ConfigurationError('%s is not set in environment.' % name
)
233 def _CreateSession(self
,
236 max_bytes_per_blob
=None,
237 max_bytes_total
=None,
239 """Create new upload session.
242 success_path: Application path to call upon successful POST.
243 user: User that initiated the upload session.
244 max_bytes_per_blob: Maximum number of bytes for any blob in the upload.
245 max_bytes_total: Maximum aggregate bytes for all blobs in the upload.
246 bucket_name: The name of the Cloud Storage bucket where the files will be
250 String encoded key of a new upload session created in the datastore.
252 return CreateUploadSession(self
.__time
_function
(),
259 def _Dynamic_CreateUploadURL(self
, request
, response
, request_id
):
260 """Create upload URL implementation.
262 Create a new upload session. The upload session key is encoded in the
263 resulting POST URL. This URL is embedded in a POST form by the application
264 which contacts the uploader when the user posts.
267 request: A fully initialized CreateUploadURLRequest instance.
268 response: A CreateUploadURLResponse instance.
269 request_id: A unique string identifying the request associated with the
272 max_bytes_per_blob
= None
273 max_bytes_total
= None
276 if request
.has_max_upload_size_per_blob_bytes():
277 max_bytes_per_blob
= request
.max_upload_size_per_blob_bytes()
279 if request
.has_max_upload_size_bytes():
280 max_bytes_total
= request
.max_upload_size_bytes()
282 if request
.has_gs_bucket_name():
283 bucket_name
= request
.gs_bucket_name()
285 session
= self
._CreateSession
(request
.success_path(),
286 users
.get_current_user(),
291 protocol
, host
, _
, _
, _
, _
= urlparse
.urlparse(
292 self
.request_data
.get_request_url(request_id
))
294 response
.set_url('%s://%s/%s%s' % (protocol
, host
, self
.__uploader
_path
,
298 def DeleteBlob(cls
, blobkey
, storage
):
302 blobkey: blobkey in str.
303 storage: blobstore storage stub.
305 datastore
.Delete(cls
.ToDatastoreBlobKey(blobkey
))
307 blobinfo
= datastore_types
.Key
.from_path(blobstore
.BLOB_INFO_KIND
,
310 datastore
.Delete(blobinfo
)
311 storage
.DeleteBlob(blobkey
)
313 def _Dynamic_DeleteBlob(self
, request
, response
, unused_request_id
):
314 """Delete a blob by its blob-key.
316 Delete a blob from the blobstore using its blob-key. Deleting blobs that
317 do not exist is a no-op.
320 request: A fully initialized DeleteBlobRequest instance.
321 response: Not used but should be a VoidProto.
323 for blobkey
in request
.blob_key_list():
324 self
.DeleteBlob(blobkey
, self
.__storage
)
326 def _Dynamic_FetchData(self
, request
, response
, unused_request_id
):
327 """Fetch a blob fragment from a blob by its blob-key.
329 Fetches a blob fragment using its blob-key. Start index is inclusive,
330 end index is inclusive. Valid requests for information outside of
331 the range of the blob return a partial string or empty string if entirely
335 request: A fully initialized FetchDataRequest instance.
336 response: A FetchDataResponse instance.
339 ApplicationError when application has the following errors:
340 INDEX_OUT_OF_RANGE: Index is negative or end > start.
341 BLOB_FETCH_SIZE_TOO_LARGE: Request blob fragment is larger than
342 MAX_BLOB_FRAGMENT_SIZE.
343 BLOB_NOT_FOUND: If invalid blob-key is provided or is not found.
346 start_index
= request
.start_index()
348 raise apiproxy_errors
.ApplicationError(
349 blobstore_service_pb
.BlobstoreServiceError
.DATA_INDEX_OUT_OF_RANGE
)
352 end_index
= request
.end_index()
353 if end_index
< start_index
:
354 raise apiproxy_errors
.ApplicationError(
355 blobstore_service_pb
.BlobstoreServiceError
.DATA_INDEX_OUT_OF_RANGE
)
358 fetch_size
= end_index
- start_index
+ 1
359 if fetch_size
> blobstore
.MAX_BLOB_FETCH_SIZE
:
360 raise apiproxy_errors
.ApplicationError(
361 blobstore_service_pb
.BlobstoreServiceError
.BLOB_FETCH_SIZE_TOO_LARGE
)
364 blobkey
= request
.blob_key()
365 info_key
= self
.ToDatastoreBlobKey(blobkey
)
367 datastore
.Get(info_key
)
368 except datastore_errors
.EntityNotFoundError
:
369 raise apiproxy_errors
.ApplicationError(
370 blobstore_service_pb
.BlobstoreServiceError
.BLOB_NOT_FOUND
)
373 blob_file
= self
.__storage
.OpenBlob(blobkey
)
374 blob_file
.seek(start_index
)
375 response
.set_data(blob_file
.read(fetch_size
))
377 def _Dynamic_DecodeBlobKey(self
, request
, response
, unused_request_id
):
378 """Decode a given blob key: data is simply base64-decoded.
381 request: A fully-initialized DecodeBlobKeyRequest instance
382 response: A DecodeBlobKeyResponse instance.
384 for blob_key
in request
.blob_key_list():
385 response
.add_decoded(blob_key
.decode('base64'))
388 def CreateEncodedGoogleStorageKey(cls
, filename
):
389 """Create an encoded blob key that represents a Google Storage file.
391 For now we'll just base64 encode the Google Storage filename, APIs that
392 accept encoded blob keys will need to be able to support Google Storage
393 files or blobstore files based on decoding this key.
395 Any stub that creates GS files should use this function to convert
396 a gs filename to a blobkey. The created blobkey should be used both
397 as its _GS_FILE_INFO entity's key name and as the storage key to
398 store its content in blobstore. This ensures the GS files created
399 can be operated by other APIs.
401 Note this encoding is easily reversible and is not encryption.
404 filename: gs filename of form 'bucket/filename'
407 blobkey string of encoded filename.
409 return cls
.GS_BLOBKEY_PREFIX
+ base64
.urlsafe_b64encode(filename
)
411 def _Dynamic_CreateEncodedGoogleStorageKey(self
, request
, response
,
413 """Create an encoded blob key that represents a Google Storage file.
415 For now we'll just base64 encode the Google Storage filename, APIs that
416 accept encoded blob keys will need to be able to support Google Storage
417 files or blobstore files based on decoding this key.
420 request: A fully-initialized CreateEncodedGoogleStorageKeyRequest
422 response: A CreateEncodedGoogleStorageKeyResponse instance.
424 filename
= request
.filename()[len(blobstore
.GS_PREFIX
):]
425 response
.set_blob_key(
426 self
.CreateEncodedGoogleStorageKey(filename
))
428 def CreateBlob(self
, blob_key
, content
):
429 """Create new blob and put in storage and Datastore.
431 This is useful in testing where you have access to the stub.
434 blob_key: String blob-key of new blob.
435 content: Content of new blob as a string.
438 New Datastore entity without blob meta-data fields.
440 entity
= datastore
.Entity(blobstore
.BLOB_INFO_KIND
,
441 name
=blob_key
, namespace
='')
442 entity
['size'] = len(content
)
443 datastore
.Put(entity
)
444 self
.storage
.CreateBlob(blob_key
, content
)