# App Engine Python SDK version 1.9.12
# Source: python/google/appengine/ext/blobstore/blobstore.py
1 #!/usr/bin/env python
3 # Copyright 2007 Google Inc.
5 # Licensed under the Apache License, Version 2.0 (the "License");
6 # you may not use this file except in compliance with the License.
7 # You may obtain a copy of the License at
9 # http://www.apache.org/licenses/LICENSE-2.0
11 # Unless required by applicable law or agreed to in writing, software
12 # distributed under the License is distributed on an "AS IS" BASIS,
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
21 """A Python blobstore API used by app developers.
23 Contains methods used to interface with Blobstore API. Includes db.Model-like
24 class representing a reference to a very large BLOB. Imports db.Key-like
25 class representing a blob-key.
26 """
37 import base64
38 import email
39 import email.message
41 from google.appengine.api import datastore
42 from google.appengine.api import datastore_errors
43 from google.appengine.api import datastore_types
44 from google.appengine.api.blobstore import blobstore
45 from google.appengine.ext import db
__all__ = ['BLOB_INFO_KIND',
           'BLOB_KEY_HEADER',
           'BLOB_MIGRATION_KIND',
           'BLOB_RANGE_HEADER',
           'BlobFetchSizeTooLargeError',
           'BlobInfo',
           'BlobInfoParseError',
           'BlobKey',
           'BlobMigrationRecord',
           'BlobNotFoundError',
           'BlobReferenceProperty',
           'BlobReader',
           'FileInfo',
           'FileInfoParseError',
           'DataIndexOutOfRangeError',
           'PermissionDeniedError',
           'Error',
           'InternalError',
           'MAX_BLOB_FETCH_SIZE',
           'UPLOAD_INFO_CREATION_HEADER',
           'CLOUD_STORAGE_OBJECT_HEADER',
           'create_rpc',
           'create_upload_url',
           'create_upload_url_async',
           'delete',
           'delete_async',
           'fetch_data',
           'fetch_data_async',
           'create_gs_key',
           'create_gs_key_async',
           'GS_PREFIX',
           'get',
           'parse_blob_info',
           'parse_file_info']


# Exception types re-exported from the lower-level
# google.appengine.api.blobstore module so callers of this package can catch
# them without a second import.
Error = blobstore.Error
InternalError = blobstore.InternalError
BlobFetchSizeTooLargeError = blobstore.BlobFetchSizeTooLargeError
BlobNotFoundError = blobstore.BlobNotFoundError
_CreationFormatError = blobstore._CreationFormatError
DataIndexOutOfRangeError = blobstore.DataIndexOutOfRangeError
PermissionDeniedError = blobstore.PermissionDeniedError

# Key type and API functions re-exported from the api blobstore module.
BlobKey = blobstore.BlobKey
create_rpc = blobstore.create_rpc
create_upload_url = blobstore.create_upload_url
create_upload_url_async = blobstore.create_upload_url_async
delete = blobstore.delete
delete_async = blobstore.delete_async
create_gs_key = blobstore.create_gs_key
create_gs_key_async = blobstore.create_gs_key_async

# Constants re-exported from the api blobstore module.
BLOB_INFO_KIND = blobstore.BLOB_INFO_KIND
BLOB_MIGRATION_KIND = blobstore.BLOB_MIGRATION_KIND
BLOB_KEY_HEADER = blobstore.BLOB_KEY_HEADER
BLOB_RANGE_HEADER = blobstore.BLOB_RANGE_HEADER
MAX_BLOB_FETCH_SIZE = blobstore.MAX_BLOB_FETCH_SIZE
UPLOAD_INFO_CREATION_HEADER = blobstore.UPLOAD_INFO_CREATION_HEADER
CLOUD_STORAGE_OBJECT_HEADER = blobstore.CLOUD_STORAGE_OBJECT_HEADER
GS_PREFIX = blobstore.GS_PREFIX
class BlobInfoParseError(Error):
  """Raised when a CGI upload parameter cannot be parsed into a BlobInfo."""
class FileInfoParseError(Error):
  """Raised when a CGI upload parameter cannot be parsed into a FileInfo."""
class _GqlQuery(db.GqlQuery):
  """GqlQuery class that explicitly sets model-class.

  This does the same as the original db.GqlQuery class except that it does
  not try to find the model class based on the compiled GQL query. The
  caller instead provides the query with a model class to use for
  construction.

  This class is required for compatibility with the current db.py query
  mechanism but will be removed in the future. DO NOT USE.
  """

  def __init__(self, query_string, model_class, *args, **kwds):
    """Constructor.

    Args:
      query_string: Properly formatted GQL query string.
      model_class: Model class from which entities are constructed.
      *args: Positional arguments used to bind numeric references in the
        query.
      **kwds: Dictionary-based arguments for named references.
    """
    # Imported here to avoid a module-level circular import with db/gql.
    from google.appengine.ext import gql
    app = kwds.pop('_app', None)
    self._proto_query = gql.GQL(query_string, _app=app, namespace='')
    # Intentionally calls super with db.GqlQuery (not _GqlQuery) as the
    # first argument: this skips db.GqlQuery.__init__, which would recompile
    # the GQL string and infer the model class, and instead invokes the next
    # class in the MRO (db.Query) directly with the caller-supplied model.
    super(db.GqlQuery, self).__init__(model_class)
    self.bind(*args, **kwds)
class BlobInfo(object):
  """Information about blobs in Blobstore.

  This is a db.Model-like class that contains information about blobs stored
  by an application. Like db.Model, this class is backed by a Datastore
  entity; however, BlobInfo instances are read-only and have a much more
  limited interface.

  Each BlobInfo has a key of type BlobKey associated with it. This key is
  specific to the Blobstore API and is not compatible with db.get. The key
  can be used for quick lookup by passing it to BlobInfo.get. This
  key converts easily to a string, which is web safe and can be embedded
  in URLs.

  Properties:
    content_type: Content type of blob.
    creation: Creation date of blob, when it was uploaded.
    filename: Filename user selected from their machine.
    size: Size of uncompressed blob.
    md5_hash: The md5 hash value of the uploaded blob.

  All properties are read-only. Attempting to assign a value to a property
  will raise NotImplementedError.
  """

  # No unindexed properties; required by the db.py query machinery.
  _unindexed_properties = frozenset([])

  # Names of all entity properties exposed by this class.
  _all_properties = frozenset(['content_type', 'creation', 'filename',
                               'size', 'md5_hash'])

  @property
  def content_type(self):
    # Lazily read from the backing entity; see __get_value.
    return self.__get_value('content_type')

  @property
  def creation(self):
    return self.__get_value('creation')

  @property
  def filename(self):
    return self.__get_value('filename')

  @property
  def size(self):
    return self.__get_value('size')

  @property
  def md5_hash(self):
    return self.__get_value('md5_hash')

  def __init__(self, entity_or_blob_key, _values=None):
    """Constructor for wrapping blobstore entity.

    The constructor should not be used outside this package and tests.

    Args:
      entity_or_blob_key: Datastore entity that represents the blob
        reference, or a BlobKey. When a BlobKey is given, the backing
        entity (or the _values mapping) is loaded lazily on first property
        access.
      _values: Optional pre-parsed property mapping used together with a
        BlobKey; internal use only.

    Raises:
      TypeError: If entity_or_blob_key is neither a datastore.Entity nor a
        BlobKey.
    """
    if isinstance(entity_or_blob_key, datastore.Entity):
      self.__entity = entity_or_blob_key
      self.__key = BlobKey(entity_or_blob_key.key().name())
    elif isinstance(entity_or_blob_key, BlobKey):
      self.__entity = _values
      self.__key = entity_or_blob_key
    else:
      raise TypeError('Must provide Entity or BlobKey')

  @classmethod
  def from_entity(cls, entity):
    """Convert entity to BlobInfo.

    This method is required for compatibility with the current db.py query
    mechanism but will be removed in the future. DO NOT USE.
    """
    return BlobInfo(entity)

  @classmethod
  def properties(cls):
    """Set of properties that belong to BlobInfo.

    This method is required for compatibility with the current db.py query
    mechanism but will be removed in the future. DO NOT USE.
    """
    return set(cls._all_properties)

  def __get_value(self, name):
    """Get a BlobInfo value, loading entity if necessary.

    This method allows lazy loading of the underlying datastore entity. It
    should never be invoked directly.

    Args:
      name: Name of property to get value for.

    Returns:
      Value of BlobInfo property from entity.

    Raises:
      AttributeError: If the backing entity has no property called name.
    """
    if self.__entity is None:
      # BlobInfo entities live in the empty namespace, keyed by the string
      # form of the blob key.
      self.__entity = datastore.Get(
          datastore_types.Key.from_path(
              self.kind(), str(self.__key), namespace=''))
    try:
      return self.__entity[name]
    except KeyError:
      # Surface missing entity properties as attribute errors, matching
      # normal Python attribute semantics.
      raise AttributeError(name)

  def key(self):
    """Get key for blob.

    Returns:
      BlobKey instance that identifies this blob.
    """
    return self.__key

  def delete(self, _token=None):
    """Permanently delete blob from Blobstore."""
    delete(self.key(), _token=_token)

  def open(self, *args, **kwargs):
    """Returns a BlobReader for this blob.

    Args:
      *args, **kwargs: Passed to BlobReader constructor.

    Returns:
      A BlobReader instance.
    """
    return BlobReader(self, *args, **kwargs)

  @classmethod
  def get(cls, blob_keys):
    """Retrieve BlobInfo by key or list of keys.

    Args:
      blob_keys: A key or a list of keys. Keys may be instances of str,
        unicode and BlobKey.

    Returns:
      A BlobInfo instance associated with provided key or a list of BlobInfo
      instances if a list of keys was provided. Keys that are not found in
      Blobstore return None as their values.
    """
    blob_keys = cls.__normalize_and_convert_keys(blob_keys)
    try:
      entities = datastore.Get(blob_keys)
    except datastore_errors.EntityNotFoundError:
      # Single-key lookup that found nothing.
      return None
    if isinstance(entities, datastore.Entity):
      return BlobInfo(entities)
    else:
      # Batch lookup: preserve positions, mapping missing keys to None.
      references = []
      for entity in entities:
        if entity is not None:
          references.append(BlobInfo(entity))
        else:
          references.append(None)
      return references

  @classmethod
  def all(cls):
    """Get query for all Blobs associated with application.

    Returns:
      A db.Query object querying over BlobInfo's datastore kind.
    """
    return db.Query(model_class=cls, namespace='')

  @classmethod
  def __factory_for_kind(cls, kind):
    # Used by the db.py query machinery to map a kind name back to a model
    # class; only the BlobInfo kind is supported here.
    if kind == BLOB_INFO_KIND:
      return BlobInfo
    raise ValueError('Cannot query for kind %s' % kind)

  @classmethod
  def gql(cls, query_string, *args, **kwds):
    """Returns a query using GQL query string.

    See appengine/ext/gql for more information about GQL.

    Args:
      query_string: Properly formatted GQL query string with the
        'SELECT * FROM <entity>' part omitted
      *args: rest of the positional arguments used to bind numeric references
        in the query.
      **kwds: dictionary-based arguments (for named parameters).

    Returns:
      A gql.GqlQuery object querying over BlobInfo's datastore kind.
    """
    return _GqlQuery('SELECT * FROM %s %s'
                     % (cls.kind(), query_string),
                     cls,
                     *args,
                     **kwds)

  # NOTE(review): the first argument is named 'self' but this is a
  # classmethod, so it actually receives the class; kept as-is for
  # byte-compatibility.
  @classmethod
  def kind(self):
    """Get the entity kind for the BlobInfo.

    This method is required for compatibility with the current db.py query
    mechanism but will be removed in the future. DO NOT USE.
    """
    return BLOB_INFO_KIND

  @classmethod
  def __normalize_and_convert_keys(cls, keys):
    """Normalize and convert all keys to BlobKey type.

    This method is based on datastore.NormalizeAndTypeCheck().

    Args:
      keys: A single key or a list/tuple of keys. Keys may be a string
        or BlobKey

    Returns:
      Single key or list with all strings replaced by BlobKey instances.

    Raises:
      datastore_errors.BadArgumentError: If any key is neither a string nor
        a BlobKey.
    """
    if isinstance(keys, (list, tuple)):
      multiple = True
      # Shallow copy so the caller's list is not mutated below.
      keys = list(keys)
    else:
      multiple = False
      keys = [keys]

    for index, key in enumerate(keys):
      if not isinstance(key, (basestring, BlobKey)):
        raise datastore_errors.BadArgumentError(
            'Expected str or BlobKey; received %s (a %s)' % (
                key,
                datastore.typename(key)))
      keys[index] = datastore.Key.from_path(cls.kind(), str(key), namespace='')

    if multiple:
      return keys
    else:
      return keys[0]
def get(blob_key):
  """Module-level convenience wrapper for BlobInfo.get.

  Args:
    blob_key: A blob key (str, unicode or BlobKey), or a list of such keys.

  Returns:
    Whatever BlobInfo.get returns for the given key(s).
  """
  return BlobInfo.get(blob_key)
def _get_upload_content(field_storage):
  """Decode a file-upload field into an email.Message of upload metadata.

  The raw bytes of the field are wrapped in a carrier message tagged with
  the field's Content-Transfer-Encoding, decoded, and then re-parsed as an
  RFC 822 message whose headers describe the upload.

  Args:
    field_storage: cgi.FieldStorage that represents uploaded blob.

  Returns:
    An email.message.Message holding the upload information.
  """
  transfer_encoding = field_storage.headers.getheader(
      'Content-Transfer-Encoding', '')
  carrier = email.message.Message()
  carrier.add_header('content-transfer-encoding', transfer_encoding)
  carrier.set_payload(field_storage.file.read())
  decoded_payload = carrier.get_payload(decode=True)
  return email.message_from_string(decoded_payload)
def _parse_upload_info(field_storage, error_class):
  """Parse the upload info from file upload field_storage.

  Args:
    field_storage: cgi.FieldStorage that represents uploaded blob.
    error_class: Exception class to raise when field_storage is malformed.

  Returns:
    A dictionary containing the parsed values. None if there was no
    field_storage.

  Raises:
    error_class when provided a field_storage that does not contain enough
    information.
  """
  if field_storage is None:
    return None

  field_name = field_storage.name

  # Renamed the parameter from 'dict' to 'mapping' to stop shadowing the
  # builtin.
  def get_value(mapping, name):
    # Fetch a required value; absence means the upload info is malformed.
    value = mapping.get(name, None)
    if value is None:
      raise error_class(
          'Field %s has no %s.' % (field_name, name))
    return value

  filename = get_value(field_storage.disposition_options, 'filename')
  blob_key = field_storage.type_options.get('blob-key', None)

  upload_content = _get_upload_content(field_storage)
  # Rewind so later consumers of the field storage can read it again.
  field_storage.file.seek(0)
  content_type = get_value(upload_content, 'content-type')
  size = get_value(upload_content, 'content-length')
  creation_string = get_value(upload_content, UPLOAD_INFO_CREATION_HEADER)
  md5_hash_encoded = get_value(upload_content, 'content-md5')
  md5_hash = base64.urlsafe_b64decode(md5_hash_encoded)
  gs_object_name = upload_content.get(CLOUD_STORAGE_OBJECT_HEADER, None)

  try:
    size = int(size)
  except (TypeError, ValueError):
    raise error_class(
        '%s is not a valid value for %s size.' % (size, field_name))

  try:
    creation = blobstore._parse_creation(creation_string, field_name)
  # 'except ... as err' (supported since Python 2.6) replaces the
  # deprecated comma form, which is a syntax error under Python 3.
  except blobstore._CreationFormatError as err:
    raise error_class(str(err))

  return {'blob_key': blob_key,
          'content_type': content_type,
          'creation': creation,
          'filename': filename,
          'size': size,
          'md5_hash': md5_hash,
          'gs_object_name': gs_object_name,
          }
def parse_blob_info(field_storage):
  """Parse a BlobInfo record from file upload field_storage.

  Args:
    field_storage: cgi.FieldStorage that represents uploaded blob.

  Returns:
    BlobInfo record as parsed from the field-storage instance.
    None if there was no field_storage.

  Raises:
    BlobInfoParseError when provided field_storage does not contain enough
    information to construct a BlobInfo object.
  """
  info = _parse_upload_info(field_storage, BlobInfoParseError)
  if info is None:
    return None

  blob_key_value = info.pop('blob_key', None)
  if not blob_key_value:
    raise BlobInfoParseError('Field %s has no %s.' % (field_storage.name,
                                                      'blob_key'))

  # BlobInfo has no gs_object_name property; drop it from the value dict.
  info.pop('gs_object_name', None)

  return BlobInfo(BlobKey(blob_key_value), info)
class FileInfo(object):
  """Read-only record describing a single uploaded file.

  This class is similar to BlobInfo, however this has no key and it is not
  persisted in the datastore.

  Properties:
    content_type: Content type of uploaded file.
    creation: Creation date of uploaded file, when it was uploaded.
    filename: Filename user selected from their machine.
    size: Size of uncompressed file.
    md5_hash: The md5 hash value of the uploaded file.
    gs_object_name: Name of the file written to Google Cloud Storage or None
      if the file was not uploaded to Google Cloud Storage.

  All properties are read-only. Attempting to assign a value to a property
  will raise AttributeError.
  """

  def __init__(self, filename=None, content_type=None, creation=None,
               size=None, md5_hash=None, gs_object_name=None):
    # All values live in one private mapping; the read-only properties
    # below expose each entry individually.
    self.__values = {
        'filename': filename,
        'content_type': content_type,
        'creation': creation,
        'size': size,
        'md5_hash': md5_hash,
        'gs_object_name': gs_object_name,
    }

  @property
  def filename(self):
    return self.__values['filename']

  @property
  def content_type(self):
    return self.__values['content_type']

  @property
  def creation(self):
    return self.__values['creation']

  @property
  def size(self):
    return self.__values['size']

  @property
  def md5_hash(self):
    return self.__values['md5_hash']

  @property
  def gs_object_name(self):
    return self.__values['gs_object_name']
def parse_file_info(field_storage):
  """Parse a FileInfo record from file upload field_storage.

  Args:
    field_storage: cgi.FieldStorage that represents uploaded file.

  Returns:
    FileInfo record as parsed from the field-storage instance.
    None if there was no field_storage.

  Raises:
    FileInfoParseError when provided a field_storage that does not contain
    enough information to construct a FileInfo object.
  """
  info = _parse_upload_info(field_storage, FileInfoParseError)
  if info is None:
    return None

  # FileInfo carries no blob key; discard it before keyword expansion.
  info.pop('blob_key', None)
  return FileInfo(**info)
class BlobReferenceProperty(db.Property):
  """Property compatible with db.Model classes.

  Add references to blobs to domain models using BlobReferenceProperty:

    class Picture(db.Model):
      title = db.StringProperty()
      image = blobstore.BlobReferenceProperty()
      thumbnail = blobstore.BlobReferenceProperty()

  To find the size of a picture using this model:

    picture = Picture.get(picture_key)
    print picture.image.size

  BlobInfo objects are lazily loaded, so iterating over models holding
  BlobKeys is efficient; the following does not need to hit Datastore for
  each image key:

    list_of_untitled_blobs = []
    for picture in Picture.gql("WHERE title=''"):
      list_of_untitled_blobs.append(picture.image.key())
  """

  data_type = BlobInfo

  def get_value_for_datastore(self, model_instance):
    """Translate model property to datastore value."""
    info = super(BlobReferenceProperty,
                 self).get_value_for_datastore(model_instance)
    if info is None:
      return None
    # Only the BlobKey is persisted, not the full BlobInfo.
    return info.key()

  def make_value_from_datastore(self, value):
    """Translate datastore value to BlobInfo."""
    if value is None:
      return None
    # Wraps the stored BlobKey; entity data loads lazily on first access.
    return BlobInfo(value)

  def validate(self, value):
    """Validate that assigned value is BlobInfo.

    Automatically converts from strings and BlobKey instances.
    """
    if isinstance(value, basestring):
      value = BlobInfo(BlobKey(value))
    elif isinstance(value, BlobKey):
      value = BlobInfo(value)
    return super(BlobReferenceProperty, self).validate(value)
def fetch_data(blob, start_index, end_index, rpc=None):
  """Fetch data for blob.

  Fetches a fragment of a blob up to MAX_BLOB_FETCH_SIZE in length. Attempting
  to fetch a fragment that extends beyond the boundaries of the blob will
  return the amount of data from start_index until the end of the blob, which
  will be a smaller size than requested. Requesting a fragment which is
  entirely outside the boundaries of the blob will return empty string.
  Attempting to fetch a negative index will raise an exception.

  Args:
    blob: BlobInfo, BlobKey, str or unicode representation of BlobKey of
      blob to fetch data from.
    start_index: Start index of blob data to fetch. May not be negative.
    end_index: End index (inclusive) of blob data to fetch. Must be
      >= start_index.
    rpc: Optional UserRPC object.

  Returns:
    str containing partial data of blob. If the indexes are legal but outside
    the boundaries of the blob, will return empty string.

  Raises:
    TypeError if start_index or end_index are not indexes. Also when blob
      is not a string, BlobKey or BlobInfo.
    DataIndexOutOfRangeError when start_index < 0 or end_index < start_index.
    BlobFetchSizeTooLargeError when request blob fragment is larger than
      MAX_BLOB_FETCH_SIZE.
    BlobNotFoundError when blob does not exist.
  """
  # Synchronous facade: issue the async call and block on its result.
  return fetch_data_async(blob, start_index, end_index, rpc=rpc).get_result()
def fetch_data_async(blob, start_index, end_index, rpc=None):
  """Fetch data for blob -- async version.

  Fetches a fragment of a blob up to MAX_BLOB_FETCH_SIZE in length. Attempting
  to fetch a fragment that extends beyond the boundaries of the blob will
  return the amount of data from start_index until the end of the blob, which
  will be a smaller size than requested. Requesting a fragment which is
  entirely outside the boundaries of the blob will return empty string.
  Attempting to fetch a negative index will raise an exception.

  Args:
    blob: BlobInfo, BlobKey, str or unicode representation of BlobKey of
      blob to fetch data from.
    start_index: Start index of blob data to fetch. May not be negative.
    end_index: End index (inclusive) of blob data to fetch. Must be
      >= start_index.
    rpc: Optional UserRPC object.

  Returns:
    A UserRPC whose result will be a str as returned by fetch_data().

  Raises:
    TypeError if start_index or end_index are not indexes. Also when blob
      is not a string, BlobKey or BlobInfo.
    The following exceptions may be raised when rpc.get_result() is
    called:
      DataIndexOutOfRangeError when start_index < 0 or
        end_index < start_index.
      BlobFetchSizeTooLargeError when request blob fragment is larger than
        MAX_BLOB_FETCH_SIZE.
      BlobNotFoundError when blob does not exist.
  """
  # The lower-level API works on keys; unwrap a BlobInfo if one was given.
  blob_key = blob.key() if isinstance(blob, BlobInfo) else blob
  return blobstore.fetch_data_async(blob_key, start_index, end_index, rpc=rpc)
class BlobReader(object):
  """Provides a read-only file-like interface to a blobstore blob."""

  # Seek anchors, mirroring os.SEEK_SET / os.SEEK_CUR / os.SEEK_END.
  SEEK_SET = 0
  SEEK_CUR = 1
  SEEK_END = 2

  def __init__(self, blob, buffer_size=131072, position=0):
    """Constructor.

    Args:
      blob: The blob key, blob info, or string blob key to read from.
      buffer_size: The minimum size to fetch chunks of data from blobstore.
      position: The initial position in the file.

    Raises:
      ValueError if a blob key, blob info or string blob key is not supplied.
    """
    if not blob:
      raise ValueError('A BlobKey, BlobInfo or string is required.')
    # Duck-typed: anything with a key() method is treated as a BlobInfo.
    if hasattr(blob, 'key'):
      self.__blob_key = blob.key()
      self.__blob_info = blob
    else:
      self.__blob_key = blob
      self.__blob_info = None
    self.__buffer_size = buffer_size
    self.__buffer = ""
    # __position is the logical offset in the blob; __buffer_position is the
    # offset of the next unread byte within __buffer.
    self.__position = position
    self.__buffer_position = 0
    self.__eof = False

  def __iter__(self):
    """Returns a file iterator for this BlobReader."""
    return self

  def __getstate__(self):
    """Returns the serialized state for this BlobReader."""
    # Buffer contents are deliberately not pickled; they are refetched on
    # demand after __setstate__.
    return (self.__blob_key, self.__buffer_size, self.__position)

  def __setstate__(self, state):
    """Restores pickled state for this BlobReader."""
    self.__init__(*state)

  def close(self):
    """Close the file.

    A closed file cannot be read or written any more. Any operation which
    requires that the file be open will raise a ValueError after the file has
    been closed. Calling close() more than once is allowed.
    """
    self.__blob_key = None

  def flush(self):
    raise IOError("BlobReaders are read-only")

  def next(self):
    """Returns the next line from the file.

    Returns:
      A string, terminated by \\n. The last line may not be terminated by \\n.
      If EOF is reached, an empty string will be returned.
    """
    line = self.readline()
    if not line:
      raise StopIteration
    return line

  def __read_from_buffer(self, size):
    """Reads at most size bytes from the buffer.

    Args:
      size: Number of bytes to read, or negative to read the entire buffer.

    Returns:
      Tuple (data, size):
        data: The bytes read from the buffer.
        size: The remaining unread byte count. Negative when size
          is negative. Thus when remaining size != 0, the calling method
          may choose to fill the buffer again and keep reading.

    Raises:
      ValueError: If the reader has been closed.
    """
    if not self.__blob_key:
      raise ValueError("File is closed")

    if size < 0:
      end_pos = len(self.__buffer)
    else:
      end_pos = self.__buffer_position + size
    data = self.__buffer[self.__buffer_position:end_pos]

    # Advance both the logical blob position and the in-buffer cursor by
    # the number of bytes actually returned.
    data_length = len(data)
    size -= data_length
    self.__position += data_length
    self.__buffer_position += data_length

    # Buffer fully consumed: reset it so the next read triggers a refill.
    if self.__buffer_position == len(self.__buffer):
      self.__buffer = ""
      self.__buffer_position = 0

    return data, size

  def __fill_buffer(self, size=0):
    """Fills the internal buffer.

    Args:
      size: Number of bytes to read. Will be clamped to
        [self.__buffer_size, MAX_BLOB_FETCH_SIZE].
    """
    read_size = min(max(size, self.__buffer_size), MAX_BLOB_FETCH_SIZE)

    # fetch_data takes an inclusive end index, hence the -1.
    self.__buffer = fetch_data(self.__blob_key, self.__position,
                               self.__position + read_size - 1)
    self.__buffer_position = 0
    # A short read means the blob ended before the requested range did.
    self.__eof = len(self.__buffer) < read_size

  def read(self, size=-1):
    """Read at most size bytes from the file.

    Fewer bytes are read if the read hits EOF before obtaining size bytes.
    If the size argument is negative or omitted, read all data until EOF is
    reached. The bytes are returned as a string object. An empty string is
    returned when EOF is encountered immediately.

    Calling read() without a size specified is likely to be dangerous, as it
    may read excessive amounts of data.

    Args:
      size: Optional. The maximum number of bytes to read. When omitted,
        read() returns all remaining data in the file.

    Returns:
      The read data, as a string.
    """
    data_list = []
    while True:
      # Drain the buffer first; refill only while bytes are still owed and
      # the blob has not ended.
      data, size = self.__read_from_buffer(size)
      data_list.append(data)
      if size == 0 or self.__eof:
        return ''.join(data_list)
      self.__fill_buffer(size)

  def readline(self, size=-1):
    """Read one entire line from the file.

    A trailing newline character is kept in the string (but may be absent
    when a file ends with an incomplete line). If the size argument is
    present and non-negative, it is a maximum byte count (including the
    trailing newline) and an incomplete line may be returned. An empty
    string is returned only when EOF is encountered immediately.

    Args:
      size: Optional. The maximum number of bytes to read.

    Returns:
      The read data, as a string.
    """
    data_list = []
    while True:
      if size < 0:
        end_pos = len(self.__buffer)
      else:
        end_pos = self.__buffer_position + size
      newline_pos = self.__buffer.find('\n', self.__buffer_position, end_pos)
      if newline_pos != -1:
        # Newline found in the buffered data: consume through it and stop.
        data_list.append(
            self.__read_from_buffer(newline_pos
                                    - self.__buffer_position + 1)[0])
        break
      else:
        # No newline in view: take what the buffer has, then refill unless
        # the byte budget is spent or the blob has ended.
        data, size = self.__read_from_buffer(size)
        data_list.append(data)
        if size == 0 or self.__eof:
          break
        self.__fill_buffer()
    return ''.join(data_list)

  def readlines(self, sizehint=None):
    """Read until EOF using readline() and return a list of lines thus read.

    If the optional sizehint argument is present, instead of reading up to
    EOF, whole lines totalling approximately sizehint bytes (possibly after
    rounding up to an internal buffer size) are read.

    Args:
      sizehint: A hint as to the maximum number of bytes to read.

    Returns:
      A list of strings, each being a single line from the file.
    """
    lines = []
    while sizehint is None or sizehint > 0:
      line = self.readline()
      if sizehint:
        sizehint -= len(line)
      if not line:
        # EOF reached.
        break
      lines.append(line)
    return lines

  def seek(self, offset, whence=SEEK_SET):
    """Set the file's current position, like stdio's fseek().

    The whence argument is optional and defaults to os.SEEK_SET or 0
    (absolute file positioning); other values are os.SEEK_CUR or 1 (seek
    relative to the current position) and os.SEEK_END or 2 (seek relative to
    the file's end).

    Args:
      offset: The relative offset to seek to.
      whence: Defines what the offset is relative to. See description for
        details.
    """
    if whence == BlobReader.SEEK_CUR:
      offset = self.__position + offset
    elif whence == BlobReader.SEEK_END:
      # Note: accessing blob_info here may trigger a datastore fetch.
      offset = self.blob_info.size + offset
    # Invalidate the buffer; the next read refetches from the new position.
    self.__buffer = ""
    self.__buffer_position = 0
    self.__position = offset
    self.__eof = False

  def tell(self):
    """Return the file's current position, like stdio's ftell()."""
    return self.__position

  def truncate(self, size):
    raise IOError("BlobReaders are read-only")

  def write(self, str):
    raise IOError("BlobReaders are read-only")

  def writelines(self, sequence):
    raise IOError("BlobReaders are read-only")

  @property
  def blob_info(self):
    """Returns the BlobInfo for this file."""
    # Fetched lazily and cached; may hit the datastore on first access.
    if not self.__blob_info:
      self.__blob_info = BlobInfo.get(self.__blob_key)
    return self.__blob_info

  @property
  def closed(self):
    """Returns True if this file is closed, False otherwise."""
    return self.__blob_key is None

  def __enter__(self):
    return self

  def __exit__(self, exc_type, exc_value, traceback):
    self.close()
class BlobMigrationRecord(db.Model):
  """A model that records the result of a blob migration."""

  # Stored under the entity property name 'new_blob_key'.
  new_blob_ref = BlobReferenceProperty(indexed=False, name='new_blob_key')

  @classmethod
  def kind(cls):
    return blobstore.BLOB_MIGRATION_KIND

  @classmethod
  def get_by_blob_key(cls, old_blob_key):
    """Fetches the BlobMigrationRecord for the given blob key.

    Args:
      old_blob_key: The blob key used in the previous app.

    Returns:
      An instance of blobstore.BlobMigrationRecord or None.
    """
    return cls.get_by_key_name(str(old_blob_key))

  @classmethod
  def get_new_blob_key(cls, old_blob_key):
    """Looks up the new key for a blob.

    Args:
      old_blob_key: The original blob key.

    Returns:
      The blobstore.BlobKey of the migrated blob, or None when no migration
      record exists.
    """
    record = cls.get_by_blob_key(old_blob_key)
    if not record:
      return None
    return record.new_blob_ref.key()