3 # Copyright 2007 Google Inc.
5 # Licensed under the Apache License, Version 2.0 (the "License");
6 # you may not use this file except in compliance with the License.
7 # You may obtain a copy of the License at
9 # http://www.apache.org/licenses/LICENSE-2.0
11 # Unless required by applicable law or agreed to in writing, software
12 # distributed under the License is distributed on an "AS IS" BASIS,
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
21 """A Python blobstore API used by app developers.
Contains methods used to interface with the Blobstore API. Includes a
db.Model-like class representing a reference to a very large BLOB, and imports
a db.Key-like class representing a blob-key.
41 from google
.appengine
.api
import datastore
42 from google
.appengine
.api
import datastore_errors
43 from google
.appengine
.api
import datastore_types
44 from google
.appengine
.api
.blobstore
import blobstore
45 from google
.appengine
.ext
import db
# Public API of this module.
# NOTE(review): parts of this list were lost in the garbled source; the
# missing entries were reconstructed from the names this module defines and
# aliases -- confirm against the canonical SDK file.
__all__ = ['BLOB_INFO_KIND',
           'BLOB_KEY_HEADER',
           'BLOB_MIGRATION_KIND',
           'BLOB_RANGE_HEADER',
           'BlobFetchSizeTooLargeError',
           'BlobInfo',
           'BlobInfoParseError',
           'BlobKey',
           'BlobMigrationRecord',
           'BlobNotFoundError',
           'BlobReader',
           'BlobReferenceProperty',
           'FileInfo',
           'FileInfoParseError',
           'DataIndexOutOfRangeError',
           'PermissionDeniedError',
           'Error',
           'InternalError',
           'MAX_BLOB_FETCH_SIZE',
           'UPLOAD_INFO_CREATION_HEADER',
           'CLOUD_STORAGE_OBJECT_HEADER',
           'GS_PREFIX',
           'create_rpc',
           'create_upload_url',
           'create_upload_url_async',
           'create_gs_key',
           'create_gs_key_async',
           'delete',
           'delete_async',
           'fetch_data',
           'fetch_data_async',
           'get',
           'parse_blob_info',
           'parse_file_info']
# Aliases of names from the low-level google.appengine.api.blobstore module,
# so that users of this ext-level API have a single import point.

# Exception classes.
Error = blobstore.Error
InternalError = blobstore.InternalError
BlobFetchSizeTooLargeError = blobstore.BlobFetchSizeTooLargeError
BlobNotFoundError = blobstore.BlobNotFoundError
_CreationFormatError = blobstore._CreationFormatError
DataIndexOutOfRangeError = blobstore.DataIndexOutOfRangeError
PermissionDeniedError = blobstore.PermissionDeniedError

# Core types and free functions.
BlobKey = blobstore.BlobKey
create_rpc = blobstore.create_rpc
create_upload_url = blobstore.create_upload_url
create_upload_url_async = blobstore.create_upload_url_async
delete = blobstore.delete
delete_async = blobstore.delete_async
create_gs_key = blobstore.create_gs_key
create_gs_key_async = blobstore.create_gs_key_async

# Constants (kinds, HTTP headers, size limits, Google Storage prefix).
BLOB_INFO_KIND = blobstore.BLOB_INFO_KIND
BLOB_MIGRATION_KIND = blobstore.BLOB_MIGRATION_KIND
BLOB_KEY_HEADER = blobstore.BLOB_KEY_HEADER
BLOB_RANGE_HEADER = blobstore.BLOB_RANGE_HEADER
MAX_BLOB_FETCH_SIZE = blobstore.MAX_BLOB_FETCH_SIZE
UPLOAD_INFO_CREATION_HEADER = blobstore.UPLOAD_INFO_CREATION_HEADER
CLOUD_STORAGE_OBJECT_HEADER = blobstore.CLOUD_STORAGE_OBJECT_HEADER
GS_PREFIX = blobstore.GS_PREFIX
class BlobInfoParseError(Error):
  """Raised when a CGI parameter does not hold a valid BlobInfo record."""
class FileInfoParseError(Error):
  """Raised when a CGI parameter does not hold a valid FileInfo record."""
class _GqlQuery(db.GqlQuery):
  """GqlQuery class that explicitly sets model-class.

  This does the same as the original db.GqlQuery class except that it does
  not try to find the model class based on the compiled GQL query. The
  caller instead provides the query with a model class to use for construction.

  This class is required for compatibility with the current db.py query
  mechanism but will be removed in the future. DO NOT USE.
  """

  def __init__(self, query_string, model_class, *args, **kwds):
    """Constructor.

    Args:
      query_string: Properly formatted GQL query string.
      model_class: Model class from which entities are constructed.
      *args: Positional arguments used to bind numeric references in the query.
      **kwds: Dictionary-based arguments for named references.
    """
    # NOTE(review): gql is imported locally rather than at module level,
    # presumably to avoid an import cycle -- confirm before moving it.
    from google.appengine.ext import gql
    app = kwds.pop('_app', None)
    self._proto_query = gql.GQL(query_string, _app=app, namespace='')
    # super() is anchored at db.GqlQuery, so this calls db.Query.__init__
    # directly, bypassing db.GqlQuery.__init__ and its model-class inference;
    # the caller-supplied model_class is used instead.
    super(db.GqlQuery, self).__init__(model_class)
    self.bind(*args, **kwds)
class BlobInfo(object):
  """Information about blobs in Blobstore.

  This is a db.Model-like class that contains information about blobs stored
  by an application. Like db.Model, this class is backed by an Datastore
  entity, however, BlobInfo instances are read-only and have a much more
  limited interface.

  Each BlobInfo has a key of type BlobKey associated with it. This key is
  specific to the Blobstore API and is not compatible with db.get. The key
  can be used for quick lookup by passing it to BlobInfo.get. This
  key converts easily to a string, which is web safe and can be embedded
  in URLs.

  Properties:
    content_type: Content type of blob.
    creation: Creation date of blob, when it was uploaded.
    filename: Filename user selected from their machine.
    size: Size of uncompressed blob.
    md5_hash: The md5 hash value of the uploaded blob.

  All properties are read-only. Attempting to assign a value to a property
  will raise NotImplementedError.
  """

  # No properties are excluded from datastore indexes.
  _unindexed_properties = frozenset([])

  # NOTE(review): the continuation of this literal was missing from the
  # garbled source; 'size' and 'md5_hash' restored to match the properties
  # exposed below -- confirm against the canonical SDK file.
  _all_properties = frozenset(['content_type', 'creation', 'filename',
                               'size', 'md5_hash'])

  @property
  def content_type(self):
    return self.__get_value('content_type')

  @property
  def creation(self):
    return self.__get_value('creation')

  @property
  def filename(self):
    return self.__get_value('filename')

  @property
  def size(self):
    return self.__get_value('size')

  @property
  def md5_hash(self):
    return self.__get_value('md5_hash')

  def __init__(self, entity_or_blob_key, _values=None):
    """Constructor for wrapping blobstore entity.

    The constructor should not be used outside this package and tests.

    Args:
      entity_or_blob_key: Datastore entity that represents the blob reference,
        or the BlobKey of the blob (in which case _values may hold the
        backing entity).
      _values: Optional entity values when a BlobKey is supplied; None means
        the entity is loaded lazily on first property access.

    Raises:
      TypeError if entity_or_blob_key is neither an Entity nor a BlobKey.
    """
    if isinstance(entity_or_blob_key, datastore.Entity):
      self.__entity = entity_or_blob_key
      self.__key = BlobKey(entity_or_blob_key.key().name())
    elif isinstance(entity_or_blob_key, BlobKey):
      self.__entity = _values
      self.__key = entity_or_blob_key
    else:
      raise TypeError('Must provide Entity or BlobKey')

  @classmethod
  def from_entity(cls, entity):
    """Convert entity to BlobInfo.

    This method is required for compatibility with the current db.py query
    mechanism but will be removed in the future. DO NOT USE.
    """
    return BlobInfo(entity)

  @classmethod
  def properties(cls):
    """Set of properties that belong to BlobInfo.

    This method is required for compatibility with the current db.py query
    mechanism but will be removed in the future. DO NOT USE.
    """
    return set(cls._all_properties)

  def __get_value(self, name):
    """Get a BlobInfo value, loading entity if necessary.

    This method allows lazy loading of the underlying datastore entity. It
    should never be invoked directly.

    Args:
      name: Name of property to get value for.

    Returns:
      Value of BlobInfo property from entity.

    Raises:
      AttributeError when the backing entity has no such value.
    """
    if self.__entity is None:
      # Lazy load: the blob key name is the entity's key name.
      self.__entity = datastore.Get(
          datastore_types.Key.from_path(
              self.kind(), str(self.__key), namespace=''))
    try:
      return self.__entity[name]
    except KeyError:
      raise AttributeError(name)

  def key(self):
    """Get key for blob.

    Returns:
      BlobKey instance that identifies this blob.
    """
    return self.__key

  def delete(self, _token=None):
    """Permanently delete blob from Blobstore."""
    # Delegates to the module-level delete() alias.
    delete(self.key(), _token=_token)

  def open(self, *args, **kwargs):
    """Returns a BlobReader for this blob.

    Args:
      *args, **kwargs: Passed to BlobReader constructor.

    Returns:
      A BlobReader instance.
    """
    return BlobReader(self, *args, **kwargs)

  @classmethod
  def get(cls, blob_keys):
    """Retrieve BlobInfo by key or list of keys.

    Args:
      blob_keys: A key or a list of keys. Keys may be instances of str,
        unicode or BlobKey.

    Returns:
      A BlobInfo instance associated with provided key or a list of BlobInfo
      instances if a list of keys was provided. Keys that are not found in
      Blobstore return None as their values.
    """
    blob_keys = cls.__normalize_and_convert_keys(blob_keys)
    try:
      entities = datastore.Get(blob_keys)
    except datastore_errors.EntityNotFoundError:
      return None
    if isinstance(entities, datastore.Entity):
      # Single-key lookup: return a single BlobInfo.
      return BlobInfo(entities)
    else:
      references = []
      for entity in entities:
        if entity is not None:
          references.append(BlobInfo(entity))
        else:
          references.append(None)
      return references

  @classmethod
  def all(cls):
    """Get query for all Blobs associated with application.

    Returns:
      A db.Query object querying over BlobInfo's datastore kind.
    """
    return db.Query(model_class=cls, namespace='')

  @classmethod
  def __factory_for_kind(cls, kind):
    # db.py compatibility hook: only BlobInfo's own kind is queryable.
    if kind == BLOB_INFO_KIND:
      return BlobInfo
    raise ValueError('Cannot query for kind %s' % kind)

  @classmethod
  def gql(cls, query_string, *args, **kwds):
    """Returns a query using GQL query string.

    See appengine/ext/gql for more information about GQL.

    Args:
      query_string: Properly formatted GQL query string with the
        'SELECT * FROM <entity>' part omitted
      *args: rest of the positional arguments used to bind numeric references
        in the query.
      **kwds: dictionary-based arguments (for named parameters).

    Returns:
      A gql.GqlQuery object querying over BlobInfo's datastore kind.
    """
    return _GqlQuery('SELECT * FROM %s %s'
                     % (cls.kind(), query_string),
                     cls,
                     *args,
                     **kwds)

  @classmethod
  def kind(cls):
    """Get the entity kind for the BlobInfo.

    This method is required for compatibility with the current db.py query
    mechanism but will be removed in the future. DO NOT USE.
    """
    return BLOB_INFO_KIND

  @classmethod
  def __normalize_and_convert_keys(cls, keys):
    """Normalize and convert all keys to BlobKey type.

    This method is based on datastore.NormalizeAndTypeCheck().

    Args:
      keys: A single key or a list/tuple of keys. Keys may be a string
        or BlobKey.

    Returns:
      Single key or list with all strings replaced by BlobKey instances.

    Raises:
      datastore_errors.BadArgumentError if an item is neither a string nor
      a BlobKey.
    """
    if isinstance(keys, (list, tuple)):
      multiple = True
      # Shallow copy so the caller's sequence is not mutated in place.
      keys = list(keys)
    else:
      multiple = False
      keys = [keys]

    for index, key in enumerate(keys):
      if not isinstance(key, (basestring, BlobKey)):
        raise datastore_errors.BadArgumentError(
            'Expected str or BlobKey; received %s (a %s)' % (
                key,
                datastore.typename(key)))
      keys[index] = datastore.Key.from_path(cls.kind(), str(key), namespace='')

    if multiple:
      return keys
    else:
      return keys[0]
def get(blob_key):
  """Get a BlobInfo record from blobstore.

  Does the same as BlobInfo.get.

  Args:
    blob_key: A key or a list of keys. Keys may be str, unicode or BlobKey.

  Returns:
    A BlobInfo instance (or list of BlobInfo instances), with None for keys
    not found in Blobstore.
  """
  # NOTE(review): the def line was missing from the garbled source; the
  # function name 'get' is implied by the docstring and module API.
  return BlobInfo.get(blob_key)
def _get_upload_content(field_storage):
  """Returns an email.Message holding the values of the file transfer.

  It decodes the content of the field storage and creates a new email.Message.

  Args:
    field_storage: cgi.FieldStorage that represents uploaded blob.

  Returns:
    An email.message.Message holding the upload information.
  """
  message = email.message.Message()
  # Copy the transfer encoding so get_payload(decode=True) can decode it.
  message.add_header(
      'content-transfer-encoding',
      field_storage.headers.getheader('Content-Transfer-Encoding', ''))
  message.set_payload(field_storage.file.read())
  payload = message.get_payload(decode=True)
  # The decoded payload is itself an RFC 822 message carrying the upload
  # metadata headers.
  return email.message_from_string(payload)
def _parse_upload_info(field_storage, error_class):
  """Parse the upload info from file upload field_storage.

  Args:
    field_storage: cgi.FieldStorage that represents uploaded blob.
    error_class: error to raise.

  Returns:
    A dictionary containing the parsed values. None if there was no
    field_storage.

  Raises:
    error_class when provided a field_storage that does not contain enough
    information to populate the result.
  """
  if field_storage is None:
    return None

  field_name = field_storage.name

  def get_value(container, name):
    # Fetch a required value; a missing key means a malformed upload.
    value = container.get(name, None)
    if value is None:
      raise error_class(
          'Field %s has no %s.' % (field_name, name))
    return value

  filename = get_value(field_storage.disposition_options, 'filename')
  blob_key = field_storage.type_options.get('blob-key', None)

  upload_content = _get_upload_content(field_storage)

  # Rewind so later consumers of field_storage can re-read the content.
  field_storage.file.seek(0)
  content_type = get_value(upload_content, 'content-type')
  size = get_value(upload_content, 'content-length')
  creation_string = get_value(upload_content, UPLOAD_INFO_CREATION_HEADER)
  md5_hash_encoded = get_value(upload_content, 'content-md5')
  md5_hash = base64.urlsafe_b64decode(md5_hash_encoded)
  gs_object_name = upload_content.get(CLOUD_STORAGE_OBJECT_HEADER, None)

  try:
    size = int(size)
  except (TypeError, ValueError):
    raise error_class(
        '%s is not a valid value for %s size.' % (size, field_name))

  try:
    creation = blobstore._parse_creation(creation_string, field_name)
  except blobstore._CreationFormatError as err:
    raise error_class(str(err))

  return {'blob_key': blob_key,
          'content_type': content_type,
          'creation': creation,
          'filename': filename,
          'size': size,
          'md5_hash': md5_hash,
          'gs_object_name': gs_object_name,
          }
def parse_blob_info(field_storage):
  """Parse a BlobInfo record from file upload field_storage.

  Args:
    field_storage: cgi.FieldStorage that represents uploaded blob.

  Returns:
    BlobInfo record as parsed from the field-storage instance.
    None if there was no field_storage.

  Raises:
    BlobInfoParseError when provided field_storage does not contain enough
    information to construct a BlobInfo object.
  """
  info = _parse_upload_info(field_storage, BlobInfoParseError)

  if info is None:
    return None

  key = info.pop('blob_key', None)
  if key is None:
    raise BlobInfoParseError('Field %s has no %s.' % (field_storage.name,
                                                      'blob-key'))

  # BlobInfo does not carry the Cloud Storage object name; drop it.
  info.pop('gs_object_name', None)

  return BlobInfo(BlobKey(key), info)
class FileInfo(object):
  """Information about uploaded files.

  This is a class that contains information about blobs stored by an
  application.

  This class is similar to BlobInfo, however this has no key and it is not
  persisted in the datastore.

  Properties:
    content_type: Content type of uploaded file.
    creation: Creation date of uploaded file, when it was uploaded.
    filename: Filename user selected from their machine.
    size: Size of uncompressed file.
    md5_hash: The md5 hash value of the uploaded file.
    gs_object_name: Name of the file written to Google Cloud Storage or None if
      the file was not uploaded to Google Cloud Storage.

  All properties are read-only. Attempting to assign a value to a property
  will raise AttributeError.
  """

  def __init__(self, filename=None, content_type=None, creation=None,
               size=None, md5_hash=None, gs_object_name=None):
    self.__filename = filename
    self.__content_type = content_type
    self.__creation = creation
    self.__size = size
    self.__md5_hash = md5_hash
    self.__gs_object_name = gs_object_name

  @property
  def filename(self):
    return self.__filename

  @property
  def content_type(self):
    return self.__content_type

  @property
  def creation(self):
    return self.__creation

  @property
  def size(self):
    return self.__size

  @property
  def md5_hash(self):
    return self.__md5_hash

  @property
  def gs_object_name(self):
    return self.__gs_object_name
def parse_file_info(field_storage):
  """Parse an FileInfo record from file upload field_storage.

  Args:
    field_storage: cgi.FieldStorage that represents uploaded file.

  Returns:
    FileInfo record as parsed from the field-storage instance.
    None if there was no field_storage.

  Raises:
    FileInfoParseError when provided a field_storage that does not contain
    enough information to construct a FileInfo object.
  """
  info = _parse_upload_info(field_storage, FileInfoParseError)

  if info is None:
    return None

  # FileInfo has no key; discard the blob key parsed from the upload.
  info.pop('blob_key', None)

  return FileInfo(**info)
class BlobReferenceProperty(db.Property):
  """Property compatible with db.Model classes.

  Add references to blobs to domain models using BlobReferenceProperty:

    class Picture(db.Model):
      title = db.StringProperty()
      image = blobstore.BlobReferenceProperty()
      thumbnail = blobstore.BlobReferenceProperty()

  To find the size of a picture using this model:

    picture = Picture.get(picture_key)
    print picture.image.size

  BlobInfo objects are lazily loaded so iterating over models with
  for BlobKeys is efficient, the following does not need to hit
  Datastore for each image key:

    list_of_untitled_blobs = []
    for picture in Picture.gql("WHERE title=''"):
      list_of_untitled_blobs.append(picture.image.key())
  """

  # Python type stored by this property, used by the db machinery.
  # NOTE(review): this attribute was missing from the garbled source and was
  # reconstructed -- confirm against the canonical SDK file.
  data_type = BlobInfo

  def get_value_for_datastore(self, model_instance):
    """Translate model property to datastore value."""
    blob_info = super(BlobReferenceProperty,
                      self).get_value_for_datastore(model_instance)
    if blob_info is None:
      return None
    # Only the BlobKey is persisted, not the BlobInfo wrapper.
    return blob_info.key()

  def make_value_from_datastore(self, value):
    """Translate datastore value to BlobInfo."""
    if value is None:
      return None
    # Wrap lazily: the entity behind the BlobInfo is fetched on demand.
    return BlobInfo(value)

  def validate(self, value):
    """Validate that assigned value is BlobInfo.

    Automatically converts from strings and BlobKey instances.
    """
    if isinstance(value, (basestring)):
      value = BlobInfo(BlobKey(value))
    elif isinstance(value, BlobKey):
      value = BlobInfo(value)
    return super(BlobReferenceProperty, self).validate(value)
def fetch_data(blob, start_index, end_index, rpc=None):
  """Synchronously fetch a fragment of a blob's data.

  Reads at most MAX_BLOB_FETCH_SIZE bytes. A fragment extending past the end
  of the blob yields only the data from start_index to the blob's end (a
  shorter result than requested); a fragment lying entirely outside the blob
  yields the empty string. Negative indexes raise an exception.

  Args:
    blob: BlobInfo, BlobKey, str or unicode representation of BlobKey of
      blob to fetch data from.
    start_index: Start index of blob data to fetch. May not be negative.
    end_index: End index (inclusive) of blob data to fetch. Must not be
      smaller than start_index.
    rpc: Optional UserRPC object.

  Returns:
    str containing partial data of blob. If the indexes are legal but outside
    the boundaries of the blob, will return empty string.

  Raises:
    TypeError if start_index or end_index are not indexes, or when blob
      is not a string, BlobKey or BlobInfo.
    DataIndexOutOfRangeError when start_index < 0 or end_index < start_index.
    BlobFetchSizeTooLargeError when the requested fragment exceeds
      MAX_BLOB_FETCH_SIZE.
    BlobNotFoundError when blob does not exist.
  """
  # Delegate to the async variant and block on the result.
  return fetch_data_async(blob, start_index, end_index, rpc=rpc).get_result()
def fetch_data_async(blob, start_index, end_index, rpc=None):
  """Fetch data for blob -- async version.

  Fetches a fragment of a blob up to MAX_BLOB_FETCH_SIZE in length. Attempting
  to fetch a fragment that extends beyond the boundaries of the blob will
  return the amount of data from start_index until the end of the blob, which
  will be a smaller size than requested. Requesting a fragment which is
  entirely outside the boundaries of the blob will return empty string.
  Attempting to fetch a negative index will raise an exception.

  Args:
    blob: BlobInfo, BlobKey, str or unicode representation of BlobKey of
      blob to fetch data from.
    start_index: Start index of blob data to fetch. May not be negative.
    end_index: End index (inclusive) of blob data to fetch. Must not be
      smaller than start_index.
    rpc: Optional UserRPC object.

  Returns:
    A UserRPC whose result will be a str as returned by fetch_data().

  Raises:
    TypeError if start_index or end_index are not indexes. Also when blob
      is not a string, BlobKey or BlobInfo.
    The following exceptions may be raised when rpc.get_result() is called:
      DataIndexOutOfRangeError when start_index < 0 or
        end_index < start_index.
      BlobFetchSizeTooLargeError when the requested fragment exceeds
        MAX_BLOB_FETCH_SIZE.
      BlobNotFoundError when blob does not exist.
  """
  if isinstance(blob, BlobInfo):
    # Reduce a BlobInfo to its underlying BlobKey before delegating to the
    # api-level call, which accepts keys only.
    blob = blob.key()
  return blobstore.fetch_data_async(blob, start_index, end_index, rpc=rpc)
719 class BlobReader(object):
720 """Provides a read-only file-like interface to a blobstore blob."""
726 def __init__(self
, blob
, buffer_size
=131072, position
=0):
730 blob: The blob key, blob info, or string blob key to read from.
731 buffer_size: The minimum size to fetch chunks of data from blobstore.
732 position: The initial position in the file.
735 ValueError if a blob key, blob info or string blob key is not supplied.
738 raise ValueError('A BlobKey, BlobInfo or string is required.')
739 if hasattr(blob
, 'key'):
740 self
.__blob
_key
= blob
.key()
741 self
.__blob
_info
= blob
743 self
.__blob
_key
= blob
744 self
.__blob
_info
= None
745 self
.__buffer
_size
= buffer_size
747 self
.__position
= position
748 self
.__buffer
_position
= 0
752 """Returns a file iterator for this BlobReader."""
755 def __getstate__(self
):
756 """Returns the serialized state for this BlobReader."""
757 return (self
.__blob
_key
, self
.__buffer
_size
, self
.__position
)
759 def __setstate__(self
, state
):
760 """Restores pickled state for this BlobReader."""
761 self
.__init
__(*state
)
766 A closed file cannot be read or written any more. Any operation which
767 requires that the file be open will raise a ValueError after the file has
768 been closed. Calling close() more than once is allowed.
770 self
.__blob
_key
= None
773 raise IOError("BlobReaders are read-only")
776 """Returns the next line from the file.
779 A string, terminted by \n. The last line may not be terminated by \n.
780 If EOF is reached, an empty string will be returned.
782 line
= self
.readline()
787 def __read_from_buffer(self
, size
):
788 """Reads at most size bytes from the buffer.
791 size: Number of bytes to read, or negative to read the entire buffer.
794 data: The bytes read from the buffer.
795 size: The remaining unread byte count. Negative when size
796 is negative. Thus when remaining size != 0, the calling method
797 may choose to fill the buffer again and keep reading.
800 if not self
.__blob
_key
:
801 raise ValueError("File is closed")
804 end_pos
= len(self
.__buffer
)
806 end_pos
= self
.__buffer
_position
+ size
807 data
= self
.__buffer
[self
.__buffer
_position
:end_pos
]
810 data_length
= len(data
)
812 self
.__position
+= data_length
813 self
.__buffer
_position
+= data_length
816 if self
.__buffer
_position
== len(self
.__buffer
):
818 self
.__buffer
_position
= 0
822 def __fill_buffer(self
, size
=0):
823 """Fills the internal buffer.
826 size: Number of bytes to read. Will be clamped to
827 [self.__buffer_size, MAX_BLOB_FETCH_SIZE].
829 read_size
= min(max(size
, self
.__buffer
_size
), MAX_BLOB_FETCH_SIZE
)
831 self
.__buffer
= fetch_data(self
.__blob
_key
, self
.__position
,
832 self
.__position
+ read_size
- 1)
833 self
.__buffer
_position
= 0
834 self
.__eof
= len(self
.__buffer
) < read_size
836 def read(self
, size
=-1):
837 """Read at most size bytes from the file.
839 Fewer bytes are read if the read hits EOF before obtaining size bytes.
840 If the size argument is negative or omitted, read all data until EOF is
841 reached. The bytes are returned as a string object. An empty string is
842 returned when EOF is encountered immediately.
844 Calling read() without a size specified is likely to be dangerous, as it
845 may read excessive amounts of data.
848 size: Optional. The maximum number of bytes to read. When omitted, read()
849 returns all remaining data in the file.
852 The read data, as a string.
856 data
, size
= self
.__read
_from
_buffer
(size
)
857 data_list
.append(data
)
858 if size
== 0 or self
.__eof
:
859 return ''.join(data_list
)
860 self
.__fill
_buffer
(size
)
862 def readline(self
, size
=-1):
863 """Read one entire line from the file.
865 A trailing newline character is kept in the string (but may be absent when a
866 file ends with an incomplete line). If the size argument is present and
867 non-negative, it is a maximum byte count (including the trailing newline)
868 and an incomplete line may be returned. An empty string is returned only
869 when EOF is encountered immediately.
872 size: Optional. The maximum number of bytes to read.
875 The read data, as a string.
880 end_pos
= len(self
.__buffer
)
882 end_pos
= self
.__buffer
_position
+ size
883 newline_pos
= self
.__buffer
.find('\n', self
.__buffer
_position
, end_pos
)
884 if newline_pos
!= -1:
887 self
.__read
_from
_buffer
(newline_pos
888 - self
.__buffer
_position
+ 1)[0])
892 data
, size
= self
.__read
_from
_buffer
(size
)
893 data_list
.append(data
)
894 if size
== 0 or self
.__eof
:
897 return ''.join(data_list
)
899 def readlines(self
, sizehint
=None):
900 """Read until EOF using readline() and return a list of lines thus read.
902 If the optional sizehint argument is present, instead of reading up to EOF,
903 whole lines totalling approximately sizehint bytes (possibly after rounding
904 up to an internal buffer size) are read.
907 sizehint: A hint as to the maximum number of bytes to read.
910 A list of strings, each being a single line from the file.
913 while sizehint
is None or sizehint
> 0:
914 line
= self
.readline()
916 sizehint
-= len(line
)
923 def seek(self
, offset
, whence
=SEEK_SET
):
924 """Set the file's current position, like stdio's fseek().
926 The whence argument is optional and defaults to os.SEEK_SET or 0 (absolute
927 file positioning); other values are os.SEEK_CUR or 1 (seek relative to the
928 current position) and os.SEEK_END or 2 (seek relative to the file's end).
931 offset: The relative offset to seek to.
932 whence: Defines what the offset is relative to. See description for
935 if whence
== BlobReader
.SEEK_CUR
:
936 offset
= self
.__position
+ offset
937 elif whence
== BlobReader
.SEEK_END
:
938 offset
= self
.blob_info
.size
+ offset
940 self
.__buffer
_position
= 0
941 self
.__position
= offset
945 """Return the file's current position, like stdio's ftell()."""
946 return self
.__position
948 def truncate(self
, size
):
949 raise IOError("BlobReaders are read-only")
951 def write(self
, str):
952 raise IOError("BlobReaders are read-only")
954 def writelines(self
, sequence
):
955 raise IOError("BlobReaders are read-only")
959 """Returns the BlobInfo for this file."""
960 if not self
.__blob
_info
:
961 self
.__blob
_info
= BlobInfo
.get(self
.__blob
_key
)
962 return self
.__blob
_info
966 """Returns True if this file is closed, False otherwise."""
967 return self
.__blob
_key
is None
972 def __exit__(self
, exc_type
, exc_value
, traceback
):
class BlobMigrationRecord(db.Model):
  """A model that records the result of a blob migration."""

  # Keyed by the original blob key (see get_by_blob_key); stores a reference
  # to the blob's new key.
  new_blob_ref = BlobReferenceProperty(indexed=False, name='new_blob_key')

  @classmethod
  def kind(cls):
    return blobstore.BLOB_MIGRATION_KIND

  @classmethod
  def get_by_blob_key(cls, old_blob_key):
    """Fetches the BlobMigrationRecord for the given blob key.

    Args:
      old_blob_key: The blob key used in the previous app.

    Returns:
      A instance of blobstore.BlobMigrationRecord or None
    """
    return cls.get_by_key_name(str(old_blob_key))

  @classmethod
  def get_new_blob_key(cls, old_blob_key):
    """Looks up the new key for a blob.

    Args:
      old_blob_key: The original blob key.

    Returns:
      The blobstore.BlobKey of the migrated blob, or None when no migration
      record exists for old_blob_key.
    """
    record = cls.get_by_blob_key(old_blob_key)
    if record:
      return record.new_blob_ref.key()