App Engine Python SDK version 1.8.9
[gae.git] / python / google / appengine / api / blobstore / blobstore.py
bloba435122429859af9d5f0e1594ab0628c254ac375
1 #!/usr/bin/env python
3 # Copyright 2007 Google Inc.
5 # Licensed under the Apache License, Version 2.0 (the "License");
6 # you may not use this file except in compliance with the License.
7 # You may obtain a copy of the License at
9 # http://www.apache.org/licenses/LICENSE-2.0
11 # Unless required by applicable law or agreed to in writing, software
12 # distributed under the License is distributed on an "AS IS" BASIS,
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
21 """A Python blobstore API used by app developers.
Contains methods used to interface with the Blobstore API. Defines a
db.Key-like class representing a blob-key. Contains the parts of the API that
forward to the apiproxy.
25 """
34 import datetime
35 import time
37 from google.appengine.api import apiproxy_stub_map
38 from google.appengine.api import datastore
39 from google.appengine.api import datastore_errors
40 from google.appengine.api import datastore_types
41 from google.appengine.api import api_base_pb
42 from google.appengine.api.blobstore import blobstore_service_pb
43 from google.appengine.runtime import apiproxy_errors
46 __all__ = ['BLOB_INFO_KIND',
47 'BLOB_KEY_HEADER',
48 'BLOB_MIGRATION_KIND',
49 'BLOB_RANGE_HEADER',
50 'MAX_BLOB_FETCH_SIZE',
51 'UPLOAD_INFO_CREATION_HEADER',
52 'CLOUD_STORAGE_OBJECT_HEADER',
53 'GS_PREFIX',
54 'BlobFetchSizeTooLargeError',
55 'BlobKey',
56 'BlobNotFoundError',
57 'DataIndexOutOfRangeError',
58 'PermissionDeniedError',
59 'Error',
60 'InternalError',
61 'create_rpc',
62 'create_upload_url',
63 'create_upload_url_async',
64 'delete',
65 'delete_async',
66 'fetch_data',
67 'fetch_data_async',
68 'create_gs_key',
69 'create_gs_key_async',
# Re-exported so callers can reference blobstore.BlobKey directly.
BlobKey = datastore_types.BlobKey

# Datastore kind for blob metadata entities.
BLOB_INFO_KIND = '__BlobInfo__'

# HTTP header carrying a blob key.
BLOB_KEY_HEADER = 'X-AppEngine-BlobKey'

# Datastore kind for blob migration records.
BLOB_MIGRATION_KIND = '__BlobMigration__'

# HTTP header carrying a blob byte range.
BLOB_RANGE_HEADER = 'X-AppEngine-BlobRange'

# Upper bound enforced by fetch_data_async():
# 1 MB (1 << 20) minus 32 KB (1 << 15) of slack = 1015808 bytes.
MAX_BLOB_FETCH_SIZE = (1 << 20) - (1 << 15)

# Filename prefix that marks Google Storage objects (see create_gs_key_async).
GS_PREFIX = '/gs/'

# HTTP header carrying the upload creation timestamp, in the format produced
# by _format_creation and consumed by _parse_creation.
UPLOAD_INFO_CREATION_HEADER = 'X-AppEngine-Upload-Creation'
# HTTP header carrying the Cloud Storage object information for an upload.
CLOUD_STORAGE_OBJECT_HEADER = 'X-AppEngine-Cloud-Storage-Object'

# strftime/strptime format of the creation timestamp, without microseconds
# (microseconds are appended separately — see _format_creation).
_BASE_CREATION_HEADER_FORMAT = '%Y-%m-%d %H:%M:%S'
class Error(Exception):
  """Base blobstore error type.

  All exceptions raised by this module derive from this class.
  """
class InternalError(Error):
  """Raised when an internal error occurs within the API."""
class BlobNotFoundError(Error):
  """Raised when attempting to access blob data for a non-existent blob."""
class DataIndexOutOfRangeError(Error):
  """Raised when blob data indexes are out of range or in the wrong order."""
class BlobFetchSizeTooLargeError(Error):
  """Raised when attempting to fetch too large a block from a blob."""
class _CreationFormatError(Error):
  """Raised when attempting to parse a bad creation date format."""
class PermissionDeniedError(Error):
  """Raised when permissions are lacking for a requested operation."""
def _ToBlobstoreError(error):
  """Translate an application error to a blobstore error, if possible.

  Args:
    error: An ApplicationError to translate.

  Returns:
    The corresponding blobstore Error subclass (constructed from the
    error detail) when the application error code is recognized;
    otherwise the original error unchanged.
  """
  error_map = {
      blobstore_service_pb.BlobstoreServiceError.INTERNAL_ERROR:
          InternalError,
      blobstore_service_pb.BlobstoreServiceError.BLOB_NOT_FOUND:
          BlobNotFoundError,
      blobstore_service_pb.BlobstoreServiceError.DATA_INDEX_OUT_OF_RANGE:
          DataIndexOutOfRangeError,
      blobstore_service_pb.BlobstoreServiceError.BLOB_FETCH_SIZE_TOO_LARGE:
          BlobFetchSizeTooLargeError,
      blobstore_service_pb.BlobstoreServiceError.PERMISSION_DENIED:
          PermissionDeniedError,
  }
  desired_exc = error_map.get(error.application_error)
  return desired_exc(error.error_detail) if desired_exc else error
def _format_creation(stamp):
  """Format an upload creation timestamp with microseconds.

  This method is necessary to format a timestamp with microseconds on Python
  versions before 2.6.

  Cannot simply convert datetime objects to str because the microseconds are
  stripped from the format when set to 0.  The upload creation date format
  will always have microseconds padded out to 6 places.

  Args:
    stamp: datetime.datetime object to format.

  Returns:
    Formatted datetime as Python 2.6 format '%Y-%m-%d %H:%M:%S.%f'.
  """
  # Append the zero-padded microseconds manually, since %f is unavailable
  # before Python 2.6.
  return '%s.%06d' % (stamp.strftime(_BASE_CREATION_HEADER_FORMAT),
                      stamp.microsecond)
def _parse_creation(creation_string, field_name):
  """Parse an upload creation string in header format.

  The expected creation date format is:

    YYYY-mm-dd HH:MM:SS.ffffff

  where Y is the year, m the month (01-12), d the day (01-31), H the hour
  (00-24), M the minute (00-59), S the second (00-59), and f the microsecond.

  Args:
    creation_string: String creation date format.
    field_name: Name of the field being parsed, used in error messages.

  Returns:
    datetime object parsed from creation_string.

  Raises:
    _CreationFormatError when the creation string is formatted incorrectly.
  """
  pieces = creation_string.split('.', 1)
  if len(pieces) != 2:
    raise _CreationFormatError(
        'Could not parse creation %s in field %s.' % (creation_string,
                                                      field_name))
  timestamp_part, microsecond_part = pieces

  try:
    parsed = time.strptime(timestamp_part, _BASE_CREATION_HEADER_FORMAT)
    microseconds = int(microsecond_part)
  except ValueError:
    raise _CreationFormatError('Could not parse creation %s in field %s.'
                               % (creation_string, field_name))

  # Rebuild a datetime from the six strptime fields plus the microseconds.
  return datetime.datetime(*(parsed[:6] + (microseconds,)))
def create_rpc(deadline=None, callback=None):
  """Create an RPC object for use with the blobstore API.

  Args:
    deadline: Optional deadline in seconds for the operation; the default
      is a system-specific deadline (typically 5 seconds).
    callback: Optional callable to invoke on completion.

  Returns:
    An apiproxy_stub_map.UserRPC object specialized for this service.
  """
  rpc = apiproxy_stub_map.UserRPC('blobstore', deadline, callback)
  return rpc
def _make_async_call(rpc, method, request, response,
                     get_result_hook, user_data):
  """Start an asynchronous blobstore call on rpc, creating one if needed."""
  rpc = create_rpc() if rpc is None else rpc
  rpc.make_call(method, request, response, get_result_hook, user_data)
  return rpc
def _get_result_hook(rpc):
  """Check RPC success and delegate result construction to the user hook.

  The user hook stored in rpc.user_data is called with the completed RPC
  and its return value becomes the RPC result.

  Raises:
    A blobstore error translated from the underlying ApplicationError when
    the RPC failed.
  """
  try:
    rpc.check_success()
  # 'as' form (Python 2.6+) instead of the legacy 'except X, err' syntax.
  except apiproxy_errors.ApplicationError as err:
    raise _ToBlobstoreError(err)
  hook = rpc.user_data
  return hook(rpc)
def create_upload_url(success_path,
                      max_bytes_per_blob=None,
                      max_bytes_total=None,
                      rpc=None,
                      gs_bucket_name=None):
  """Create upload URL for POST form.

  Synchronous wrapper around create_upload_url_async.

  Args:
    success_path: Path within application to call when POST is successful
      and upload is complete.
    max_bytes_per_blob: The maximum size in bytes that any one blob in the
      upload can be or None for no maximum size.
    max_bytes_total: The maximum size in bytes that the aggregate sizes of
      all of the blobs in the upload can be or None for no maximum size.
    rpc: Optional UserRPC object.
    gs_bucket_name: The Google Storage bucket name that the blobs should be
      uploaded to. The application's service account must have the correct
      permissions to write to this bucket. The bucket name may be of the
      format 'bucket/path/', in which case the included path will be
      prepended to the uploaded object name.

  Returns:
    The upload URL.

  Raises:
    TypeError: If max_bytes_per_blob or max_bytes_total are not integral
      types.
    ValueError: If max_bytes_per_blob or max_bytes_total are not
      positive values.
  """
  async_rpc = create_upload_url_async(success_path,
                                      max_bytes_per_blob=max_bytes_per_blob,
                                      max_bytes_total=max_bytes_total,
                                      rpc=rpc,
                                      gs_bucket_name=gs_bucket_name)
  return async_rpc.get_result()
def create_upload_url_async(success_path,
                            max_bytes_per_blob=None,
                            max_bytes_total=None,
                            rpc=None,
                            gs_bucket_name=None):
  """Create upload URL for POST form -- async version.

  Args:
    success_path: Path within application to call when POST is successful
      and upload is complete.
    max_bytes_per_blob: The maximum size in bytes that any one blob in the
      upload can be or None for no maximum size.
    max_bytes_total: The maximum size in bytes that the aggregate sizes of
      all of the blobs in the upload can be or None for no maximum size.
    rpc: Optional UserRPC object.
    gs_bucket_name: The Google Storage bucket name that the blobs should be
      uploaded to. The application's service account must have the correct
      permissions to write to this bucket. The bucket name may be of the
      format 'bucket/path/', in which case the included path will be
      prepended to the uploaded object name.

  Returns:
    A UserRPC whose result will be the upload URL.

  Raises:
    TypeError: If max_bytes_per_blob or max_bytes_total are not integral
      types, or gs_bucket_name is not a string.
    ValueError: If max_bytes_per_blob or max_bytes_total are not positive
      values, or max_bytes_total is less than max_bytes_per_blob.
  """
  request = blobstore_service_pb.CreateUploadURLRequest()
  response = blobstore_service_pb.CreateUploadURLResponse()
  request.set_success_path(success_path)

  if max_bytes_per_blob is not None:
    if not isinstance(max_bytes_per_blob, (int, long)):
      raise TypeError('max_bytes_per_blob must be integer.')
    if max_bytes_per_blob < 1:
      raise ValueError('max_bytes_per_blob must be positive.')
    request.set_max_upload_size_per_blob_bytes(max_bytes_per_blob)

  if max_bytes_total is not None:
    if not isinstance(max_bytes_total, (int, long)):
      raise TypeError('max_bytes_total must be integer.')
    if max_bytes_total < 1:
      raise ValueError('max_bytes_total must be positive.')
    request.set_max_upload_size_bytes(max_bytes_total)

  if (request.has_max_upload_size_bytes() and
      request.has_max_upload_size_per_blob_bytes()):
    if (request.max_upload_size_bytes() <
        request.max_upload_size_per_blob_bytes()):
      # Refer to the user-facing parameter name, not the internal
      # protobuf field name, in the error message.
      raise ValueError('max_bytes_total can not be less'
                       ' than max_bytes_per_blob')

  if gs_bucket_name is not None:
    if not isinstance(gs_bucket_name, basestring):
      raise TypeError('gs_bucket_name must be a string.')
    request.set_gs_bucket_name(gs_bucket_name)

  return _make_async_call(rpc, 'CreateUploadURL', request, response,
                          _get_result_hook, lambda rpc: rpc.response.url())
def delete(blob_keys, rpc=None, _token=None):
  """Delete a blob from Blobstore.

  Synchronous wrapper around delete_async.

  Args:
    blob_keys: Single instance or list of blob keys. A blob-key can be
      either a string or an instance of BlobKey.
    rpc: Optional UserRPC object.

  Returns:
    None.
  """
  return delete_async(blob_keys, rpc, _token).get_result()
def delete_async(blob_keys, rpc=None, _token=None):
  """Delete a blob from Blobstore -- async version.

  Args:
    blob_keys: Single instance or list of blob keys. A blob-key can be
      either a string or an instance of BlobKey.
    rpc: Optional UserRPC object.

  Returns:
    A UserRPC whose result will be None.
  """
  # Accept a single key by normalizing it into a one-element list.
  if isinstance(blob_keys, (basestring, BlobKey)):
    blob_keys = [blob_keys]

  request = blobstore_service_pb.DeleteBlobRequest()
  for key in blob_keys:
    request.add_blob_key(str(key))
  if _token:
    request.set_token(_token)

  return _make_async_call(rpc, 'DeleteBlob', request,
                          api_base_pb.VoidProto(),
                          _get_result_hook, lambda rpc: None)
def fetch_data(blob_key, start_index, end_index, rpc=None):
  """Fetch data for blob.

  See docstring for ext.blobstore.fetch_data for more details.

  Args:
    blob_key: BlobKey, str or unicode representation of BlobKey of
      blob to fetch data from.
    start_index: Start index of blob data to fetch. May not be negative.
    end_index: End index (inclusive) of blob data to fetch. Must be
      >= start_index.
    rpc: Optional UserRPC object.

  Returns:
    A str containing partial data of blob. See docstring for
    ext.blobstore.fetch_data for more details.

  Raises:
    See docstring for ext.blobstore.fetch_data for more details.
  """
  rpc = fetch_data_async(blob_key, start_index, end_index, rpc)
  return rpc.get_result()
def fetch_data_async(blob_key, start_index, end_index, rpc=None):
  """Fetch data for blob -- async version.

  See docstring for ext.blobstore.fetch_data for more details.

  Args:
    blob_key: BlobKey, str or unicode representation of BlobKey of
      blob to fetch data from.
    start_index: Start index of blob data to fetch. May not be negative.
    end_index: End index (inclusive) of blob data to fetch. Must be
      >= start_index.
    rpc: Optional UserRPC object.

  Returns:
    A UserRPC whose result will be a str as returned by fetch_data().

  Raises:
    See docstring for ext.blobstore.fetch_data for more details.
  """
  if not isinstance(start_index, (int, long)):
    raise TypeError('start_index must be integer.')

  if not isinstance(end_index, (int, long)):
    raise TypeError('end_index must be integer.')

  # Normalize the key to unicode for the service request.
  if isinstance(blob_key, BlobKey):
    blob_key = str(blob_key).decode('utf-8')
  elif isinstance(blob_key, str):
    blob_key = blob_key.decode('utf-8')
  elif not isinstance(blob_key, unicode):
    raise TypeError('Blob-key must be str, unicode or BlobKey: %s' % blob_key)

  if start_index < 0:
    raise DataIndexOutOfRangeError(
        'May not fetch blob at negative index.')

  if end_index < start_index:
    raise DataIndexOutOfRangeError(
        'Start index %d > end index %d' % (start_index, end_index))

  # end_index is inclusive, so the fetch covers one more byte than the
  # difference of the indexes.
  fetch_size = end_index - start_index + 1

  if fetch_size > MAX_BLOB_FETCH_SIZE:
    raise BlobFetchSizeTooLargeError(
        'Blob fetch size is too large: %d' % fetch_size)

  request = blobstore_service_pb.FetchDataRequest()
  response = blobstore_service_pb.FetchDataResponse()

  request.set_blob_key(blob_key)
  request.set_start_index(start_index)
  request.set_end_index(end_index)

  return _make_async_call(rpc, 'FetchData', request, response,
                          _get_result_hook, lambda rpc: rpc.response.data())
def create_gs_key(filename, rpc=None):
  """Create an encoded key for a Google Storage file.

  It is safe to persist this key for future use.

  Synchronous wrapper around create_gs_key_async.

  Args:
    filename: The filename of the google storage object to create the
      key for.
    rpc: Optional UserRPC object.

  Returns:
    An encrypted blob key string.
  """
  return create_gs_key_async(filename, rpc).get_result()
def create_gs_key_async(filename, rpc=None):
  """Create an encoded key for a google storage file - async version.

  It is safe to persist this key for future use.

  Args:
    filename: The filename of the google storage object to create the
      key for.
    rpc: Optional UserRPC object.

  Returns:
    A UserRPC whose result will be a string as returned by create_gs_key.

  Raises:
    TypeError: If filename is not a string.
    ValueError: If filename is not in the format
      '/gs/bucket_name/object_name'.
  """
  if not isinstance(filename, basestring):
    raise TypeError('filename must be str: %s' % filename)
  if not filename.startswith(GS_PREFIX):
    raise ValueError('filename must start with "/gs/": %s' % filename)
  # Require an object name after the bucket name; derive the offset from
  # GS_PREFIX rather than the magic number 4.
  if '/' not in filename[len(GS_PREFIX):]:
    raise ValueError('filename must have the format '
                     '"/gs/bucket_name/object_name": %s' % filename)

  request = blobstore_service_pb.CreateEncodedGoogleStorageKeyRequest()
  response = blobstore_service_pb.CreateEncodedGoogleStorageKeyResponse()

  request.set_filename(filename)

  return _make_async_call(rpc,
                          'CreateEncodedGoogleStorageKey',
                          request,
                          response,
                          _get_result_hook,
                          lambda rpc: rpc.response.blob_key())