# -*- coding: iso-8859-1 -*-
#-------------------------------------------------------------------
# tarfile.py
#-------------------------------------------------------------------
# Copyright (C) 2002 Lars Gustäbel <lars@gustaebel.de>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
"""Read from and write to tar format archives.
"""

__version__ = "$Revision$"
__author__  = "Lars Gustäbel (lars@gustaebel.de)"
__credits__ = "Gustavo Niemeyer, Niels Gustäbel, Richard Townsend."

# from tarfile import *
__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError"]
#---------------------------------------------------------
# tar constants
#---------------------------------------------------------
NUL = "\0"                      # the null character
BLOCKSIZE = 512                 # length of processing blocks
RECORDSIZE = BLOCKSIZE * 20     # length of records
GNU_MAGIC = "ustar  \0"         # magic gnu tar string
POSIX_MAGIC = "ustar\x0000"     # magic posix tar string

LENGTH_NAME = 100               # maximum length of a filename
LENGTH_LINK = 100               # maximum length of a linkname
LENGTH_PREFIX = 155             # maximum length of the prefix field

REGTYPE = "0"                   # regular file
AREGTYPE = "\0"                 # regular file
LNKTYPE = "1"                   # link (inside tarfile)
SYMTYPE = "2"                   # symbolic link
CHRTYPE = "3"                   # character special device
BLKTYPE = "4"                   # block special device
DIRTYPE = "5"                   # directory
FIFOTYPE = "6"                  # fifo special device
CONTTYPE = "7"                  # contiguous file

GNUTYPE_LONGNAME = "L"          # GNU tar longname
GNUTYPE_LONGLINK = "K"          # GNU tar longlink
GNUTYPE_SPARSE = "S"            # GNU tar sparse file

XHDTYPE = "x"                   # POSIX.1-2001 extended header
XGLTYPE = "g"                   # POSIX.1-2001 global header
SOLARIS_XHDTYPE = "X"           # Solaris extended header

USTAR_FORMAT = 0                # POSIX.1-1988 (ustar) format
GNU_FORMAT = 1                  # GNU tar format
PAX_FORMAT = 2                  # POSIX.1-2001 (pax) format
DEFAULT_FORMAT = GNU_FORMAT
#---------------------------------------------------------
# tarfile constants
#---------------------------------------------------------
# File types that tarfile supports:
SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE,
                   SYMTYPE, DIRTYPE, FIFOTYPE,
                   CONTTYPE, CHRTYPE, BLKTYPE,
                   GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
                   GNUTYPE_SPARSE)

# File types that will be treated as a regular file.
REGULAR_TYPES = (REGTYPE, AREGTYPE,
                 CONTTYPE, GNUTYPE_SPARSE)

# File types that are part of the GNU tar format.
GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
             GNUTYPE_SPARSE)

# Fields from a pax header that override a TarInfo attribute.
PAX_FIELDS = ("path", "linkpath", "size", "mtime",
              "uid", "gid", "uname", "gname")

# Fields in a pax header that are numbers, all other fields
# are treated as strings.
PAX_NUMBER_FIELDS = {
    "atime": float,
    "ctime": float,
    "mtime": float,
    "uid": int,
    "gid": int,
    "size": int
}
#---------------------------------------------------------
# Bits used in the mode field, values in octal.
#---------------------------------------------------------
S_IFLNK = 0120000        # symbolic link
S_IFREG = 0100000        # regular file
S_IFBLK = 0060000        # block device
S_IFDIR = 0040000        # directory
S_IFCHR = 0020000        # character device
S_IFIFO = 0010000        # fifo

TSUID   = 04000          # set UID on execution
TSGID   = 02000          # set GID on execution
TSVTX   = 01000          # reserved

TUREAD  = 0400           # read by owner
TUWRITE = 0200           # write by owner
TUEXEC  = 0100           # execute/search by owner
TGREAD  = 0040           # read by group
TGWRITE = 0020           # write by group
TGEXEC  = 0010           # execute/search by group
TOREAD  = 0004           # read by other
TOWRITE = 0002           # write by other
TOEXEC  = 0001           # execute/search by other
#---------------------------------------------------------
# initialization
#---------------------------------------------------------
ENCODING = sys.getfilesystemencoding()
if ENCODING is None:
    ENCODING = sys.getdefaultencoding()
#---------------------------------------------------------
# Some useful functions
#---------------------------------------------------------
def stn(s, length):
    """Convert a python string to a null-terminated string buffer.
    """
    return s[:length] + (length - len(s)) * NUL

def nts(s):
    """Convert a null-terminated string field to a python string.
    """
    # Use the string up to the first null char.
    p = s.find("\0")
    if p == -1:
        return s
    return s[:p]
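
# A minimal usage sketch (not part of the original module): stn() pads or
# truncates a string to the fixed-width, NUL-terminated field used in tar
# headers, and nts() undoes that.
#
#   >>> stn("foo", 8)
#   'foo\x00\x00\x00\x00\x00'
#   >>> nts(stn("foo", 8))
#   'foo'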
def nti(s):
    """Convert a number field to a python number.
    """
    # There are two possible encodings for a number field, see
    # itn() below.
    if s[0] != chr(0200):
        try:
            n = int(nts(s) or "0", 8)
        except ValueError:
            raise InvalidHeaderError("invalid header")
    else:
        n = 0L
        for i in xrange(len(s) - 1):
            n <<= 8
            n += ord(s[i + 1])
    return n

def itn(n, digits=8, format=DEFAULT_FORMAT):
    """Convert a python number to a number field.
    """
    # POSIX 1003.1-1988 requires numbers to be encoded as a string of
    # octal digits followed by a null-byte, this allows values up to
    # (8**(digits-1))-1. GNU tar allows storing numbers greater than
    # that if necessary. A leading 0200 byte indicates this particular
    # encoding, the following digits-1 bytes are a big-endian
    # representation. This allows values up to (256**(digits-1))-1.
    if 0 <= n < 8 ** (digits - 1):
        s = "%0*o" % (digits - 1, n) + NUL
    else:
        if format != GNU_FORMAT or n >= 256 ** (digits - 1):
            raise ValueError("overflow in number field")

        if n < 0:
            # XXX We mimic GNU tar's behaviour with negative numbers,
            # this could raise OverflowError.
            n = struct.unpack("L", struct.pack("l", n))[0]

        s = ""
        for i in xrange(digits - 1):
            s = chr(n & 0377) + s
            n >>= 8
        s = chr(0200) + s
    return s
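
# A minimal usage sketch (not part of the original module): small values
# round-trip through the POSIX octal form, larger ones (GNU format only)
# through the base-256 form introduced by a leading 0200 byte.
#
#   >>> itn(511)                      # seven octal digits plus NUL
#   '0000777\x00'
#   >>> nti(itn(511))
#   511
#   >>> int(nti(itn(8 ** 7, format=GNU_FORMAT)))   # too big for octal field
#   2097152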
def uts(s, encoding, errors):
    """Convert a unicode object to a string.
    """
    if errors == "utf-8":
        # An extra error handler similar to the -o invalid=UTF-8 option
        # in POSIX.1-2001. Replace untranslatable characters with their
        # UTF-8 representation.
        try:
            return s.encode(encoding, "strict")
        except UnicodeEncodeError:
            x = []
            for c in s:
                try:
                    x.append(c.encode(encoding, "strict"))
                except UnicodeEncodeError:
                    x.append(c.encode("utf8"))
            return "".join(x)
    else:
        return s.encode(encoding, errors)
def calc_chksums(buf):
    """Calculate the checksum for a member's header by summing up all
       characters except for the chksum field which is treated as if
       it was filled with spaces. According to the GNU tar sources,
       some tars (Sun and NeXT) calculate chksum with signed char,
       which will be different if there are chars in the buffer with
       the high bit set. So we calculate two checksums, unsigned and
       signed.
    """
    unsigned_chksum = 256 + sum(struct.unpack("148B", buf[:148]) + struct.unpack("356B", buf[156:512]))
    signed_chksum = 256 + sum(struct.unpack("148b", buf[:148]) + struct.unpack("356b", buf[156:512]))
    return unsigned_chksum, signed_chksum
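
# A minimal usage sketch (not part of the original module): the 256 added
# above is the chksum field itself counted as eight ASCII spaces
# (8 * 0x20 == 256).  Header verification then accepts either variant,
# where `buf' stands for a 512-byte header block:
#
#   >>> unsigned, signed = calc_chksums(buf)
#   >>> nti(buf[148:156]) in (unsigned, signed)
#   True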
def copyfileobj(src, dst, length=None):
    """Copy length bytes from fileobj src to fileobj dst.
       If length is None, copy the entire content.
    """
    if length == 0:
        return
    if length is None:
        shutil.copyfileobj(src, dst)
        return

    BUFSIZE = 16 * 1024
    blocks, remainder = divmod(length, BUFSIZE)
    for b in xrange(blocks):
        buf = src.read(BUFSIZE)
        if len(buf) < BUFSIZE:
            raise IOError("end of file reached")
        dst.write(buf)

    if remainder != 0:
        buf = src.read(remainder)
        if len(buf) < remainder:
            raise IOError("end of file reached")
        dst.write(buf)
    return
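
# A minimal usage sketch (not part of the original module), using StringIO
# objects as stand-ins for real file objects:
#
#   >>> import StringIO
#   >>> src = StringIO.StringIO("x" * 1000)
#   >>> dst = StringIO.StringIO()
#   >>> copyfileobj(src, dst, 512)
#   >>> len(dst.getvalue())
#   512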
    ((TUEXEC|TSUID, "s"),
     (TSUID,        "S"),
     (TUEXEC,       "x")),

    ((TGEXEC|TSGID, "s"),
     (TSGID,        "S"),
     (TGEXEC,       "x")),

    ((TOEXEC|TSVTX, "t"),
     (TSVTX,        "T"),
     (TOEXEC,       "x"))
)

def filemode(mode):
    """Convert a file's mode to a string of the form
       -rwxrwxrwx.
       Used by TarFile.list()
    """
    perm = []
    for table in filemode_table:
        for bit, char in table:
            if mode & bit == bit:
                perm.append(char)
                break
        else:
            perm.append("-")
    return "".join(perm)
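
# A minimal usage sketch (not part of the original module), assuming the
# complete permission table from the original source:
#
#   >>> filemode(S_IFREG | 0755)
#   '-rwxr-xr-x'
#   >>> filemode(S_IFDIR | 0700)
#   'drwx------'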
class TarError(Exception):
    """Base exception."""

class ExtractError(TarError):
    """General exception for extract errors."""

class ReadError(TarError):
    """Exception for unreadable tar archives."""

class CompressionError(TarError):
    """Exception for unavailable compression methods."""

class StreamError(TarError):
    """Exception for unsupported operations on stream-like TarFiles."""

class HeaderError(TarError):
    """Base exception for header errors."""

class EmptyHeaderError(HeaderError):
    """Exception for empty headers."""

class TruncatedHeaderError(HeaderError):
    """Exception for truncated headers."""

class EOFHeaderError(HeaderError):
    """Exception for end of file headers."""

class InvalidHeaderError(HeaderError):
    """Exception for invalid headers."""

class SubsequentHeaderError(HeaderError):
    """Exception for missing and invalid extended headers."""
#---------------------------
# internal stream interface
#---------------------------
class _LowLevelFile:
    """Low-level file object. Supports reading and writing.
       It is used instead of a regular file object for streaming
       access.
    """

    def __init__(self, name, mode):
        mode = {
            "r": os.O_RDONLY,
            "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
        }[mode]
        if hasattr(os, "O_BINARY"):
            mode |= os.O_BINARY
        self.fd = os.open(name, mode, 0666)

    def close(self):
        os.close(self.fd)

    def read(self, size):
        return os.read(self.fd, size)

    def write(self, s):
        os.write(self.fd, s)
class _Stream:
    """Class that serves as an adapter between TarFile and
       a stream-like object. The stream-like object only
       needs to have a read() or write() method and is accessed
       blockwise. Use of gzip or bzip2 compression is possible.
       A stream-like object could be for example: sys.stdin,
       sys.stdout, a socket, a tape device etc.

       _Stream is intended to be used only internally.
    """

    def __init__(self, name, mode, comptype, fileobj, bufsize):
        """Construct a _Stream object.
        """
        self._extfileobj = True
        if fileobj is None:
            fileobj = _LowLevelFile(name, mode)
            self._extfileobj = False

        if comptype == '*':
            # Enable transparent compression detection for the
            # stream interface
            fileobj = _StreamProxy(fileobj)
            comptype = fileobj.getcomptype()

        self.name     = name or ""
        self.mode     = mode
        self.comptype = comptype
        self.fileobj  = fileobj
        self.bufsize  = bufsize
        self.buf      = ""
        self.pos      = 0L
        self.closed   = False

        if comptype == "gz":
            try:
                import zlib
            except ImportError:
                raise CompressionError("zlib module is not available")
            self.zlib = zlib
            self.crc = zlib.crc32("") & 0xffffffffL
            if mode == "r":
                self._init_read_gz()
            else:
                self._init_write_gz()

        if comptype == "bz2":
            try:
                import bz2
            except ImportError:
                raise CompressionError("bz2 module is not available")
            if mode == "r":
                self.dbuf = ""
                self.cmp = bz2.BZ2Decompressor()
            else:
                self.cmp = bz2.BZ2Compressor()

    def __del__(self):
        if hasattr(self, "closed") and not self.closed:
            self.close()
    def _init_write_gz(self):
        """Initialize for writing with gzip compression.
        """
        self.cmp = self.zlib.compressobj(9, self.zlib.DEFLATED,
                                         -self.zlib.MAX_WBITS,
                                         self.zlib.DEF_MEM_LEVEL,
                                         0)
        timestamp = struct.pack("<L", long(time.time()))
        self.__write("\037\213\010\010%s\002\377" % timestamp)
        if self.name.endswith(".gz"):
            self.name = self.name[:-3]
        self.__write(self.name + NUL)
    def write(self, s):
        """Write string s to the stream.
        """
        if self.comptype == "gz":
            self.crc = self.zlib.crc32(s, self.crc) & 0xffffffffL
        self.pos += len(s)
        if self.comptype != "tar":
            s = self.cmp.compress(s)
        self.__write(s)

    def __write(self, s):
        """Write string s to the stream if a whole new block
           is ready to be written.
        """
        self.buf += s
        while len(self.buf) > self.bufsize:
            self.fileobj.write(self.buf[:self.bufsize])
            self.buf = self.buf[self.bufsize:]
    def close(self):
        """Close the _Stream object. No operation should be
           done on it afterwards.
        """
        if self.closed:
            return

        if self.mode == "w" and self.comptype != "tar":
            self.buf += self.cmp.flush()

        if self.mode == "w" and self.buf:
            self.fileobj.write(self.buf)
            self.buf = ""
            if self.comptype == "gz":
                # The native zlib crc is an unsigned 32-bit integer, but
                # the Python wrapper implicitly casts that to a signed C
                # long. So, on a 32-bit box self.crc may "look negative",
                # while the same crc on a 64-bit box may "look positive".
                # To avoid irksome warnings from the `struct` module, force
                # it to look positive on all boxes.
                self.fileobj.write(struct.pack("<L", self.crc & 0xffffffffL))
                self.fileobj.write(struct.pack("<L", self.pos & 0xffffFFFFL))

        if not self._extfileobj:
            self.fileobj.close()

        self.closed = True
    def _init_read_gz(self):
        """Initialize for reading a gzip compressed fileobj.
        """
        self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS)
        self.dbuf = ""

        # taken from gzip.GzipFile with some alterations
        if self.__read(2) != "\037\213":
            raise ReadError("not a gzip file")
        if self.__read(1) != "\010":
            raise CompressionError("unsupported compression method")

        flag = ord(self.__read(1))
        self.__read(6)

        if flag & 4:
            xlen = ord(self.__read(1)) + 256 * ord(self.__read(1))
            self.read(xlen)
        if flag & 8:
            while True:
                s = self.__read(1)
                if not s or s == NUL:
                    break
        if flag & 16:
            while True:
                s = self.__read(1)
                if not s or s == NUL:
                    break
        if flag & 2:
            self.__read(2)
    def tell(self):
        """Return the stream's file pointer position.
        """
        return self.pos

    def seek(self, pos=0):
        """Set the stream's file pointer to pos. Negative seeking
           is forbidden.
        """
        if pos - self.pos >= 0:
            blocks, remainder = divmod(pos - self.pos, self.bufsize)
            for i in xrange(blocks):
                self.read(self.bufsize)
            self.read(remainder)
        else:
            raise StreamError("seeking backwards is not allowed")
        return self.pos
    def read(self, size=None):
        """Return the next size number of bytes from the stream.
           If size is not defined, return all bytes of the stream
           up to EOF.
        """
        if size is None:
            t = []
            while True:
                buf = self._read(self.bufsize)
                if not buf:
                    break
                t.append(buf)
            buf = "".join(t)
        else:
            buf = self._read(size)
        self.pos += len(buf)
        return buf

    def _read(self, size):
        """Return size bytes from the stream.
        """
        if self.comptype == "tar":
            return self.__read(size)

        c = len(self.dbuf)
        t = [self.dbuf]
        while c < size:
            buf = self.__read(self.bufsize)
            if not buf:
                break
            try:
                buf = self.cmp.decompress(buf)
            except IOError:
                raise ReadError("invalid compressed data")
            t.append(buf)
            c += len(buf)
        t = "".join(t)
        self.dbuf = t[size:]
        return t[:size]

    def __read(self, size):
        """Return size bytes from stream. If internal buffer is empty,
           read another block from the stream.
        """
        c = len(self.buf)
        t = [self.buf]
        while c < size:
            buf = self.fileobj.read(self.bufsize)
            if not buf:
                break
            t.append(buf)
            c += len(buf)
        t = "".join(t)
        self.buf = t[size:]
        return t[:size]
class _StreamProxy(object):
    """Small proxy class that enables transparent compression
       detection for the Stream interface (mode 'r|*').
    """

    def __init__(self, fileobj):
        self.fileobj = fileobj
        self.buf = self.fileobj.read(BLOCKSIZE)

    def read(self, size):
        self.read = self.fileobj.read
        return self.buf

    def getcomptype(self):
        if self.buf.startswith("\037\213\010"):
            return "gz"
        if self.buf.startswith("BZh91"):
            return "bz2"
        return "tar"

    def close(self):
        self.fileobj.close()
class _BZ2Proxy(object):
    """Small proxy class that enables external file object
       support for "r:bz2" and "w:bz2" modes. This is actually
       a workaround for a limitation in bz2 module's BZ2File
       class which (unlike gzip.GzipFile) has no support for
       a file object argument.
    """

    blocksize = 16 * 1024

    def __init__(self, fileobj, mode):
        self.fileobj = fileobj
        self.mode = mode
        self.name = getattr(self.fileobj, "name", None)
        self.init()

    def init(self):
        import bz2
        self.pos = 0
        if self.mode == "r":
            self.bz2obj = bz2.BZ2Decompressor()
            self.fileobj.seek(0)
            self.buf = ""
        else:
            self.bz2obj = bz2.BZ2Compressor()

    def read(self, size):
        b = [self.buf]
        x = len(self.buf)
        while x < size:
            raw = self.fileobj.read(self.blocksize)
            if not raw:
                break
            data = self.bz2obj.decompress(raw)
            b.append(data)
            x += len(data)
        self.buf = "".join(b)

        buf = self.buf[:size]
        self.buf = self.buf[size:]
        self.pos += len(buf)
        return buf

    def seek(self, pos):
        if pos < self.pos:
            self.init()
        self.read(pos - self.pos)

    def tell(self):
        return self.pos

    def write(self, data):
        self.pos += len(data)
        raw = self.bz2obj.compress(data)
        self.fileobj.write(raw)

    def close(self):
        if self.mode == "w":
            raw = self.bz2obj.flush()
            self.fileobj.write(raw)
# class _BZ2Proxy
#------------------------
# Extraction file object
#------------------------
class _FileInFile(object):
    """A thin wrapper around an existing file object that
       provides a part of its data as an individual file
       object.
    """

    def __init__(self, fileobj, offset, size, sparse=None):
        self.fileobj = fileobj
        self.offset = offset
        self.size = size
        self.sparse = sparse
        self.position = 0

    def tell(self):
        """Return the current file position.
        """
        return self.position

    def seek(self, position):
        """Seek to a position in the file.
        """
        self.position = position

    def read(self, size=None):
        """Read data from the file.
        """
        if size is None:
            size = self.size - self.position
        else:
            size = min(size, self.size - self.position)

        if self.sparse is None:
            return self.readnormal(size)
        else:
            return self.readsparse(size)

    def readnormal(self, size):
        """Read operation for regular files.
        """
        self.fileobj.seek(self.offset + self.position)
        self.position += size
        return self.fileobj.read(size)

    def readsparse(self, size):
        """Read operation for sparse files.
        """
        data = []
        while size > 0:
            buf = self.readsparsesection(size)
            if not buf:
                break
            size -= len(buf)
            data.append(buf)
        return "".join(data)

    def readsparsesection(self, size):
        """Read a single section of a sparse file.
        """
        section = self.sparse.find(self.position)

        if section is None:
            return ""

        size = min(size, section.offset + section.size - self.position)

        if isinstance(section, _data):
            realpos = section.realpos + self.position - section.offset
            self.fileobj.seek(self.offset + realpos)
            self.position += size
            return self.fileobj.read(size)
        else:
            self.position += size
            return NUL * size
class ExFileObject(object):
    """File-like object for reading an archive member.
       Is returned by TarFile.extractfile().
    """
    blocksize = 1024

    def __init__(self, tarfile, tarinfo):
        self.fileobj = _FileInFile(tarfile.fileobj,
                                   tarinfo.offset_data,
                                   tarinfo.size,
                                   getattr(tarinfo, "sparse", None))
        self.name = tarinfo.name
        self.mode = "r"
        self.closed = False
        self.size = tarinfo.size
        self.position = 0
        self.buffer = ""

    def read(self, size=None):
        """Read at most size bytes from the file. If size is not
           present or None, read all data until EOF is reached.
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")

        buf = ""
        if self.buffer:
            if size is None:
                buf = self.buffer
                self.buffer = ""
            else:
                buf = self.buffer[:size]
                self.buffer = self.buffer[size:]

        if size is None:
            buf += self.fileobj.read()
        else:
            buf += self.fileobj.read(size - len(buf))

        self.position += len(buf)
        return buf

    def readline(self, size=-1):
        """Read one entire line from the file. If size is present
           and non-negative, return a string with at most that
           size, which may be an incomplete line.
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")

        if "\n" in self.buffer:
            pos = self.buffer.find("\n") + 1
        else:
            buffers = [self.buffer]
            while True:
                buf = self.fileobj.read(self.blocksize)
                buffers.append(buf)
                if not buf or "\n" in buf:
                    self.buffer = "".join(buffers)
                    pos = self.buffer.find("\n") + 1
                    if pos == 0:
                        # no newline found.
                        pos = len(self.buffer)
                    break

        if size != -1:
            pos = min(size, pos)

        buf = self.buffer[:pos]
        self.buffer = self.buffer[pos:]
        self.position += len(buf)
        return buf

    def readlines(self):
        """Return a list with all remaining lines.
        """
        result = []
        while True:
            line = self.readline()
            if not line:
                break
            result.append(line)
        return result

    def tell(self):
        """Return the current file position.
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")

        return self.position

    def seek(self, pos, whence=os.SEEK_SET):
        """Seek to a position in the file.
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")

        if whence == os.SEEK_SET:
            self.position = min(max(pos, 0), self.size)
        elif whence == os.SEEK_CUR:
            if pos < 0:
                self.position = max(self.position + pos, 0)
            else:
                self.position = min(self.position + pos, self.size)
        elif whence == os.SEEK_END:
            self.position = max(min(self.size + pos, self.size), 0)
        else:
            raise ValueError("Invalid argument")

        self.buffer = ""
        self.fileobj.seek(self.position)

    def close(self):
        """Close the file object.
        """
        self.closed = True

    def __iter__(self):
        """Get an iterator over the file's lines.
        """
        while True:
            line = self.readline()
            if not line:
                break
            yield line
# class ExFileObject
class TarInfo(object):
    """Informational class which holds the details about an
       archive member given by a tar header block.
       TarInfo objects are returned by TarFile.getmember(),
       TarFile.getmembers() and TarFile.gettarinfo() and are
       usually created internally.
    """

    def __init__(self, name=""):
        """Construct a TarInfo object. name is the optional name
           of the member.
        """
        self.name = name        # member name
        self.mode = 0644        # file permissions
        self.uid = 0            # user id
        self.gid = 0            # group id
        self.size = 0           # file size
        self.mtime = 0          # modification time
        self.chksum = 0         # header checksum
        self.type = REGTYPE     # member type
        self.linkname = ""      # link name
        self.uname = "root"     # user name
        self.gname = "root"     # group name
        self.devmajor = 0       # device major number
        self.devminor = 0       # device minor number

        self.offset = 0         # the tar header starts here
        self.offset_data = 0    # the file's data starts here

        self.pax_headers = {}   # pax header information
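
    # A minimal usage sketch (not part of the original module); the member
    # name and size below are made up:
    #
    #   >>> info = TarInfo("data/report.csv")
    #   >>> info.size = 1024
    #   >>> info.isreg()
    #   True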
    # In pax headers the "name" and "linkname" field are called
    # "path" and "linkpath".
    def _getpath(self):
        return self.name
    def _setpath(self, name):
        self.name = name
    path = property(_getpath, _setpath)

    def _getlinkpath(self):
        return self.linkname
    def _setlinkpath(self, linkname):
        self.linkname = linkname
    linkpath = property(_getlinkpath, _setlinkpath)

    def __repr__(self):
        return "<%s %r at %#x>" % (self.__class__.__name__, self.name, id(self))
    def get_info(self, encoding, errors):
        """Return the TarInfo's attributes as a dictionary.
        """
        info = {
            "name":     self.name,
            "mode":     self.mode & 07777,
            "uid":      self.uid,
            "gid":      self.gid,
            "size":     self.size,
            "mtime":    self.mtime,
            "chksum":   self.chksum,
            "type":     self.type,
            "linkname": self.linkname,
            "uname":    self.uname,
            "gname":    self.gname,
            "devmajor": self.devmajor,
            "devminor": self.devminor
        }

        if info["type"] == DIRTYPE and not info["name"].endswith("/"):
            info["name"] += "/"

        for key in ("name", "linkname", "uname", "gname"):
            if type(info[key]) is unicode:
                info[key] = info[key].encode(encoding, errors)

        return info
    def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="strict"):
        """Return a tar header as a string of 512 byte blocks.
        """
        info = self.get_info(encoding, errors)

        if format == USTAR_FORMAT:
            return self.create_ustar_header(info)
        elif format == GNU_FORMAT:
            return self.create_gnu_header(info)
        elif format == PAX_FORMAT:
            return self.create_pax_header(info, encoding, errors)
        else:
            raise ValueError("invalid format")
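
    # A minimal round-trip sketch (not part of the original module); the
    # member name is made up:
    #
    #   >>> buf = TarInfo("foo.txt").tobuf(USTAR_FORMAT)
    #   >>> len(buf)
    #   512
    #   >>> TarInfo.frombuf(buf).name
    #   'foo.txt'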
    def create_ustar_header(self, info):
        """Return the object as a ustar header block.
        """
        info["magic"] = POSIX_MAGIC

        if len(info["linkname"]) > LENGTH_LINK:
            raise ValueError("linkname is too long")

        if len(info["name"]) > LENGTH_NAME:
            info["prefix"], info["name"] = self._posix_split_name(info["name"])

        return self._create_header(info, USTAR_FORMAT)
    def create_gnu_header(self, info):
        """Return the object as a GNU header block sequence.
        """
        info["magic"] = GNU_MAGIC

        buf = ""
        if len(info["linkname"]) > LENGTH_LINK:
            buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK)

        if len(info["name"]) > LENGTH_NAME:
            buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME)

        return buf + self._create_header(info, GNU_FORMAT)
    def create_pax_header(self, info, encoding, errors):
        """Return the object as a ustar header block. If it cannot be
           represented this way, prepend a pax extended header sequence
           with supplement information.
        """
        info["magic"] = POSIX_MAGIC
        pax_headers = self.pax_headers.copy()

        # Test string fields for values that exceed the field length or cannot
        # be represented in ASCII encoding.
        for name, hname, length in (
                ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK),
                ("uname", "uname", 32), ("gname", "gname", 32)):

            if hname in pax_headers:
                # The pax header has priority.
                continue

            val = info[name].decode(encoding, errors)

            # Try to encode the string as ASCII.
            try:
                val.encode("ascii")
            except UnicodeEncodeError:
                pax_headers[hname] = val
                continue

            if len(info[name]) > length:
                pax_headers[hname] = val

        # Test number fields for values that exceed the field limit or values
        # that like to be stored as float.
        for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)):
            if name in pax_headers:
                # The pax header has priority. Avoid overflow.
                info[name] = 0
                continue

            val = info[name]
            if not 0 <= val < 8 ** (digits - 1) or isinstance(val, float):
                pax_headers[name] = unicode(val)
                info[name] = 0

        # Create a pax extended header if necessary.
        if pax_headers:
            buf = self._create_pax_generic_header(pax_headers)
        else:
            buf = ""

        return buf + self._create_header(info, USTAR_FORMAT)
    @classmethod
    def create_pax_global_header(cls, pax_headers):
        """Return the object as a pax global header block sequence.
        """
        return cls._create_pax_generic_header(pax_headers, type=XGLTYPE)
    def _posix_split_name(self, name):
        """Split a name longer than 100 chars into a prefix
           and a name part.
        """
        prefix = name[:LENGTH_PREFIX + 1]
        while prefix and prefix[-1] != "/":
            prefix = prefix[:-1]

        name = name[len(prefix):]
        prefix = prefix[:-1]

        if not prefix or len(name) > LENGTH_NAME:
            raise ValueError("name is too long")
        return prefix, name
    @staticmethod
    def _create_header(info, format):
        """Return a header block. info is a dictionary with file
           information, format must be one of the *_FORMAT constants.
        """
        parts = [
            stn(info.get("name", ""), 100),
            itn(info.get("mode", 0) & 07777, 8, format),
            itn(info.get("uid", 0), 8, format),
            itn(info.get("gid", 0), 8, format),
            itn(info.get("size", 0), 12, format),
            itn(info.get("mtime", 0), 12, format),
            "        ", # checksum field
            info.get("type", REGTYPE),
            stn(info.get("linkname", ""), 100),
            stn(info.get("magic", POSIX_MAGIC), 8),
            stn(info.get("uname", "root"), 32),
            stn(info.get("gname", "root"), 32),
            itn(info.get("devmajor", 0), 8, format),
            itn(info.get("devminor", 0), 8, format),
            stn(info.get("prefix", ""), 155)
        ]

        buf = struct.pack("%ds" % BLOCKSIZE, "".join(parts))
        chksum = calc_chksums(buf[-BLOCKSIZE:])[0]
        buf = buf[:-364] + "%06o\0" % chksum + buf[-357:]
        return buf
    @staticmethod
    def _create_payload(payload):
        """Return the string payload filled with zero bytes
           up to the next 512 byte border.
        """
        blocks, remainder = divmod(len(payload), BLOCKSIZE)
        if remainder > 0:
            payload += (BLOCKSIZE - remainder) * NUL
        return payload
    @classmethod
    def _create_gnu_long_header(cls, name, type):
        """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence
           for name.
        """
        name += NUL

        info = {}
        info["name"] = "././@LongLink"
        info["type"] = type
        info["size"] = len(name)
        info["magic"] = GNU_MAGIC

        # create extended header + name blocks.
        return cls._create_header(info, USTAR_FORMAT) + \
               cls._create_payload(name)
    @classmethod
    def _create_pax_generic_header(cls, pax_headers, type=XHDTYPE):
        """Return a POSIX.1-2001 extended or global header sequence
           that contains a list of keyword, value pairs. The values
           must be unicode objects.
        """
        records = []
        for keyword, value in pax_headers.iteritems():
            keyword = keyword.encode("utf8")
            value = value.encode("utf8")
            l = len(keyword) + len(value) + 3   # ' ' + '=' + '\n'
            n = p = 0
            while True:
                n = l + len(str(p))
                if n == p:
                    break
                p = n
            records.append("%d %s=%s\n" % (p, keyword, value))
        records = "".join(records)

        # We use a hardcoded "././@PaxHeader" name like star does
        # instead of the one that POSIX recommends.
        info = {}
        info["name"] = "././@PaxHeader"
        info["type"] = type
        info["size"] = len(records)
        info["magic"] = POSIX_MAGIC

        # Create pax header + record blocks.
        return cls._create_header(info, USTAR_FORMAT) + \
               cls._create_payload(records)
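
    # A minimal sketch of the record layout (not part of the original
    # module): the leading decimal is the total record length, counting the
    # length field itself and the trailing newline, so for keyword "path"
    # and value "foo" the loop above settles on 12:
    #
    #   "12 path=foo\n"      (2 + 1 + 4 + 1 + 3 + 1 == 12 bytes)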
    @classmethod
    def frombuf(cls, buf):
        """Construct a TarInfo object from a 512 byte string buffer.
        """
        if len(buf) == 0:
            raise EmptyHeaderError("empty header")
        if len(buf) != BLOCKSIZE:
            raise TruncatedHeaderError("truncated header")
        if buf.count(NUL) == BLOCKSIZE:
            raise EOFHeaderError("end of file header")

        chksum = nti(buf[148:156])
        if chksum not in calc_chksums(buf):
            raise InvalidHeaderError("bad checksum")

        obj = cls()
        obj.buf = buf
        obj.name = nts(buf[0:100])
        obj.mode = nti(buf[100:108])
        obj.uid = nti(buf[108:116])
        obj.gid = nti(buf[116:124])
        obj.size = nti(buf[124:136])
        obj.mtime = nti(buf[136:148])
        obj.chksum = chksum
        obj.type = buf[156:157]
        obj.linkname = nts(buf[157:257])
        obj.uname = nts(buf[265:297])
        obj.gname = nts(buf[297:329])
        obj.devmajor = nti(buf[329:337])
        obj.devminor = nti(buf[337:345])
        prefix = nts(buf[345:500])

        # Old V7 tar format represents a directory as a regular
        # file with a trailing slash.
        if obj.type == AREGTYPE and obj.name.endswith("/"):
            obj.type = DIRTYPE

        # Remove redundant slashes from directories.
        if obj.isdir():
            obj.name = obj.name.rstrip("/")

        # Reconstruct a ustar longname.
        if prefix and obj.type not in GNU_TYPES:
            obj.name = prefix + "/" + obj.name
        return obj
    @classmethod
    def fromtarfile(cls, tarfile):
        """Return the next TarInfo object from TarFile object
           tarfile.
        """
        buf = tarfile.fileobj.read(BLOCKSIZE)
        obj = cls.frombuf(buf)
        obj.offset = tarfile.fileobj.tell() - BLOCKSIZE
        return obj._proc_member(tarfile)
    #--------------------------------------------------------------------------
    # The following are methods that are called depending on the type of a
    # member. The entry point is _proc_member() which can be overridden in a
    # subclass to add custom _proc_*() methods. A _proc_*() method MUST
    # implement the following
    # operations:
    #
    # 1. Set self.offset_data to the position where the data blocks begin,
    #    if there is data that follows.
    # 2. Set tarfile.offset to the position where the next member's header will
    #    begin.
    # 3. Return self or another valid TarInfo object.
    def _proc_member(self, tarfile):
        """Choose the right processing method depending on
           the type and call it.
        """
        if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK):
            return self._proc_gnulong(tarfile)
        elif self.type == GNUTYPE_SPARSE:
            return self._proc_sparse(tarfile)
        elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE):
            return self._proc_pax(tarfile)
        else:
            return self._proc_builtin(tarfile)
    def _proc_builtin(self, tarfile):
        """Process a builtin type or an unknown type which
           will be treated as a regular file.
        """
        self.offset_data = tarfile.fileobj.tell()
        offset = self.offset_data
        if self.isreg() or self.type not in SUPPORTED_TYPES:
            # Skip the following data blocks.
            offset += self._block(self.size)
        tarfile.offset = offset

        # Patch the TarInfo object with saved global
        # header information.
        self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors)

        return self
    def _proc_gnulong(self, tarfile):
        """Process the blocks that hold a GNU longname
           or longlink member.
        """
        buf = tarfile.fileobj.read(self._block(self.size))

        # Fetch the next header and process it.
        try:
            next = self.fromtarfile(tarfile)
        except HeaderError:
            raise SubsequentHeaderError("missing or bad subsequent header")

        # Patch the TarInfo object from the next header with
        # the longname information.
        next.offset = self.offset
        if self.type == GNUTYPE_LONGNAME:
            next.name = nts(buf)
        elif self.type == GNUTYPE_LONGLINK:
            next.linkname = nts(buf)

        return next
    def _proc_sparse(self, tarfile):
        """Process a GNU sparse header plus extra headers.
        """
        buf = self.buf
        sp = _ringbuffer()
        pos = 386
        lastpos = 0L
        realpos = 0L
        # There are 4 possible sparse structs in the
        # first header.
        for i in xrange(4):
            try:
                offset = nti(buf[pos:pos + 12])
                numbytes = nti(buf[pos + 12:pos + 24])
            except ValueError:
                break
            if offset > lastpos:
                sp.append(_hole(lastpos, offset - lastpos))
            sp.append(_data(offset, numbytes, realpos))
            realpos += numbytes
            lastpos = offset + numbytes
            pos += 24

        isextended = ord(buf[482])
        origsize = nti(buf[483:495])

        # If the isextended flag is given,
        # there are extra headers to process.
        while isextended == 1:
            buf = tarfile.fileobj.read(BLOCKSIZE)
            pos = 0
            for i in xrange(21):
                try:
                    offset = nti(buf[pos:pos + 12])
                    numbytes = nti(buf[pos + 12:pos + 24])
                except ValueError:
                    break
                if offset > lastpos:
                    sp.append(_hole(lastpos, offset - lastpos))
                sp.append(_data(offset, numbytes, realpos))
                realpos += numbytes
                lastpos = offset + numbytes
                pos += 24
            isextended = ord(buf[504])

        if lastpos < origsize:
            sp.append(_hole(lastpos, origsize - lastpos))

        self.sparse = sp

        self.offset_data = tarfile.fileobj.tell()
        tarfile.offset = self.offset_data + self._block(self.size)
        self.size = origsize

        return self
    def _proc_pax(self, tarfile):
        """Process an extended or global header as described in
           POSIX.1-2001.
        """
        # Read the header information.
        buf = tarfile.fileobj.read(self._block(self.size))

        # A pax header stores supplemental information for either
        # the following file (extended) or all following files
        # (global).
        if self.type == XGLTYPE:
            pax_headers = tarfile.pax_headers
        else:
            pax_headers = tarfile.pax_headers.copy()

        # Parse pax header information. A record looks like that:
        # "%d %s=%s\n" % (length, keyword, value). length is the size
        # of the complete record including the length field itself and
        # the newline. keyword and value are both UTF-8 encoded strings.
        regex = re.compile(r"(\d+) ([^=]+)=", re.U)
        pos = 0
        while True:
            match = regex.match(buf, pos)
            if not match:
                break

            length, keyword = match.groups()
            length = int(length)
            value = buf[match.end(2) + 1:match.start(1) + length - 1]

            keyword = keyword.decode("utf8")
            value = value.decode("utf8")

            pax_headers[keyword] = value
            pos += length

        # Fetch the next header.
        try:
            next = self.fromtarfile(tarfile)
        except HeaderError:
            raise SubsequentHeaderError("missing or bad subsequent header")

        if self.type in (XHDTYPE, SOLARIS_XHDTYPE):
            # Patch the TarInfo object with the extended header info.
            next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors)
            next.offset = self.offset

            if "size" in pax_headers:
                # If the extended header replaces the size field,
                # we need to recalculate the offset where the next
                # header starts.
                offset = next.offset_data
                if next.isreg() or next.type not in SUPPORTED_TYPES:
                    offset += next._block(next.size)
                tarfile.offset = offset

        return next
    def _apply_pax_info(self, pax_headers, encoding, errors):
        """Replace fields with supplemental information from a previous
           pax extended or global header.
        """
        for keyword, value in pax_headers.iteritems():
            if keyword not in PAX_FIELDS:
                continue

            if keyword == "path":
                value = value.rstrip("/")

            if keyword in PAX_NUMBER_FIELDS:
                try:
                    value = PAX_NUMBER_FIELDS[keyword](value)
                except ValueError:
                    value = 0
            else:
                value = uts(value, encoding, errors)

            setattr(self, keyword, value)

        self.pax_headers = pax_headers.copy()
    def _block(self, count):
        """Round up a byte count by BLOCKSIZE and return it,
           e.g. _block(834) => 1024.
        """
        blocks, remainder = divmod(count, BLOCKSIZE)
        if remainder:
            blocks += 1
        return blocks * BLOCKSIZE

    def isreg(self):
        return self.type in REGULAR_TYPES
    def isfile(self):
        return self.isreg()
    def isdir(self):
        return self.type == DIRTYPE
    def issym(self):
        return self.type == SYMTYPE
    def islnk(self):
        return self.type == LNKTYPE
    def ischr(self):
        return self.type == CHRTYPE
    def isblk(self):
        return self.type == BLKTYPE
    def isfifo(self):
        return self.type == FIFOTYPE
    def issparse(self):
        return self.type == GNUTYPE_SPARSE
    def isdev(self):
        return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE)
# class TarInfo
class TarFile(object):
    """The TarFile Class provides an interface to tar archives.
    """

    debug = 0                   # May be set from 0 (no msgs) to 3 (all msgs)

    dereference = False         # If true, add content of linked file to the
                                # tar file, else the link.

    ignore_zeros = False        # If true, skips empty or invalid blocks and
                                # continues processing.

    errorlevel = 1              # If 0, fatal errors only appear in debug
                                # messages (if debug >= 0). If > 0, errors
                                # are passed to the caller as exceptions.

    format = DEFAULT_FORMAT     # The format to use when creating an archive.

    encoding = ENCODING         # Encoding for 8-bit character strings.

    errors = None               # Error handler for unicode conversion.

    tarinfo = TarInfo           # The default TarInfo class to use.

    fileobject = ExFileObject   # The default ExFileObject class to use.
    def __init__(self, name=None, mode="r", fileobj=None, format=None,
            tarinfo=None, dereference=None, ignore_zeros=None, encoding=None,
            errors=None, pax_headers=None, debug=None, errorlevel=None):
        """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to
           read from an existing archive, 'a' to append data to an existing
           file or 'w' to create a new file overwriting an existing one. `mode'
           defaults to 'r'.
           If `fileobj' is given, it is used for reading or writing data. If it
           can be determined, `mode' is overridden by `fileobj's mode.
           `fileobj' is not closed, when TarFile is closed.
        """
        if len(mode) > 1 or mode not in "raw":
            raise ValueError("mode must be 'r', 'a' or 'w'")
        self.mode = mode
        self._mode = {"r": "rb", "a": "r+b", "w": "wb"}[mode]

        if not fileobj:
            if self.mode == "a" and not os.path.exists(name):
                # Create nonexistent files in append mode.
                self.mode = "w"
                self._mode = "wb"
            fileobj = bltn_open(name, self._mode)
            self._extfileobj = False
        else:
            if name is None and hasattr(fileobj, "name"):
                name = fileobj.name
            if hasattr(fileobj, "mode"):
                self._mode = fileobj.mode
            self._extfileobj = True
        self.name = os.path.abspath(name) if name else None
        self.fileobj = fileobj

        # Init attributes.
        if format is not None:
            self.format = format
        if tarinfo is not None:
            self.tarinfo = tarinfo
        if dereference is not None:
            self.dereference = dereference
        if ignore_zeros is not None:
            self.ignore_zeros = ignore_zeros
        if encoding is not None:
            self.encoding = encoding

        if errors is not None:
            self.errors = errors
        elif mode == "r":
            self.errors = "utf-8"
        else:
            self.errors = "strict"

        if pax_headers is not None and self.format == PAX_FORMAT:
            self.pax_headers = pax_headers
        else:
            self.pax_headers = {}

        if debug is not None:
            self.debug = debug
        if errorlevel is not None:
            self.errorlevel = errorlevel

        # Init datastructures.
        self.closed = False
        self.members = []       # list of members as TarInfo objects
        self._loaded = False    # flag if all members have been read
        self.offset = self.fileobj.tell()
                                # current position in the archive file
        self.inodes = {}        # dictionary caching the inodes of
                                # archive members already added

        try:
            if self.mode == "r":
                self.firstmember = None
                self.firstmember = self.next()

            if self.mode == "a":
                # Move to the end of the archive,
                # before the first empty block.
                while True:
                    self.fileobj.seek(self.offset)
                    try:
                        tarinfo = self.tarinfo.fromtarfile(self)
                        self.members.append(tarinfo)
                    except EOFHeaderError:
                        self.fileobj.seek(self.offset)
                        break
                    except HeaderError, e:
                        raise ReadError(str(e))

            if self.mode in "aw":
                self._loaded = True

                if self.pax_headers:
                    buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy())
                    self.fileobj.write(buf)
                    self.offset += len(buf)
        except:
            if not self._extfileobj:
                self.fileobj.close()
            self.closed = True
            raise
    def _getposix(self):
        return self.format == USTAR_FORMAT
    def _setposix(self, value):
        import warnings
        warnings.warn("use the format attribute instead", DeprecationWarning,
                      2)
        if value:
            self.format = USTAR_FORMAT
        else:
            self.format = GNU_FORMAT
    posix = property(_getposix, _setposix)

    #--------------------------------------------------------------------------
    # Below are the classmethods which act as alternate constructors to the
    # TarFile class. The open() method is the only one that is needed for
    # public use; it is the "super"-constructor and is able to select an
    # adequate "sub"-constructor for a particular compression using the mapping
    # from OPEN_METH.
    #
    # This concept allows one to subclass TarFile without losing the comfort of
    # the super-constructor. A sub-constructor is registered and made available
    # by adding it to the mapping in OPEN_METH.
    @classmethod
    def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs):
        """Open a tar archive for reading, writing or appending. Return
           an appropriate TarFile class.

           mode:
           'r' or 'r:*' open for reading with transparent compression
           'r:'         open for reading exclusively uncompressed
           'r:gz'       open for reading with gzip compression
           'r:bz2'      open for reading with bzip2 compression
           'a' or 'a:'  open for appending, creating the file if necessary
           'w' or 'w:'  open for writing without compression
           'w:gz'       open for writing with gzip compression
           'w:bz2'      open for writing with bzip2 compression

           'r|*'        open a stream of tar blocks with transparent compression
           'r|'         open an uncompressed stream of tar blocks for reading
           'r|gz'       open a gzip compressed stream of tar blocks
           'r|bz2'      open a bzip2 compressed stream of tar blocks
           'w|'         open an uncompressed stream for writing
           'w|gz'       open a gzip compressed stream for writing
           'w|bz2'      open a bzip2 compressed stream for writing
        """

        if not name and not fileobj:
            raise ValueError("nothing to open")

        if mode in ("r", "r:*"):
            # Find out which *open() is appropriate for opening the file.
            for comptype in cls.OPEN_METH:
                func = getattr(cls, cls.OPEN_METH[comptype])
                if fileobj is not None:
                    saved_pos = fileobj.tell()
                try:
                    return func(name, "r", fileobj, **kwargs)
                except (ReadError, CompressionError), e:
                    if fileobj is not None:
                        fileobj.seek(saved_pos)
                    continue
            raise ReadError("file could not be opened successfully")

        elif ":" in mode:
            filemode, comptype = mode.split(":", 1)
            filemode = filemode or "r"
            comptype = comptype or "tar"

            # Select the *open() function according to
            # given compression.
            if comptype in cls.OPEN_METH:
                func = getattr(cls, cls.OPEN_METH[comptype])
            else:
                raise CompressionError("unknown compression type %r" % comptype)
            return func(name, filemode, fileobj, **kwargs)

        elif "|" in mode:
            filemode, comptype = mode.split("|", 1)
            filemode = filemode or "r"
            comptype = comptype or "tar"

            if filemode not in "rw":
                raise ValueError("mode must be 'r' or 'w'")

            t = cls(name, filemode,
                    _Stream(name, filemode, comptype, fileobj, bufsize),
                    **kwargs)
            t._extfileobj = False
            return t

        elif mode in "aw":
            return cls.taropen(name, mode, fileobj, **kwargs)

        raise ValueError("undiscernible mode")
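
    # A minimal usage sketch (not part of the original module); file names
    # here are made up:
    #
    #   >>> tar = TarFile.open("sample.tar.gz", "r:gz")   # random access
    #   >>> names = tar.getnames()
    #   >>> tar.close()
    #
    #   # Streaming read with transparent compression detection; members
    #   # can only be visited once and in order:
    #   >>> for member in TarFile.open(fileobj=sys.stdin, mode="r|*"):
    #   ...     pass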
    @classmethod
    def taropen(cls, name, mode="r", fileobj=None, **kwargs):
        """Open uncompressed tar archive name for reading or writing.
        """
        if len(mode) > 1 or mode not in "raw":
            raise ValueError("mode must be 'r', 'a' or 'w'")
        return cls(name, mode, fileobj, **kwargs)
    @classmethod
    def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
        """Open gzip compressed tar archive name for reading or writing.
           Appending is not allowed.
        """
        if len(mode) > 1 or mode not in "rw":
            raise ValueError("mode must be 'r' or 'w'")

        try:
            import gzip
            gzip.GzipFile
        except (ImportError, AttributeError):
            raise CompressionError("gzip module is not available")

        if fileobj is None:
            fileobj = bltn_open(name, mode + "b")

        try:
            t = cls.taropen(name, mode,
                gzip.GzipFile(name, mode, compresslevel, fileobj),
                **kwargs)
        except IOError:
            raise ReadError("not a gzip file")
        t._extfileobj = False
        return t
    @classmethod
    def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
        """Open bzip2 compressed tar archive name for reading or writing.
           Appending is not allowed.
        """
        if len(mode) > 1 or mode not in "rw":
            raise ValueError("mode must be 'r' or 'w'.")

        try:
            import bz2
        except ImportError:
            raise CompressionError("bz2 module is not available")

        if fileobj is not None:
            fileobj = _BZ2Proxy(fileobj, mode)
        else:
            fileobj = bz2.BZ2File(name, mode, compresslevel=compresslevel)

        try:
            t = cls.taropen(name, mode, fileobj, **kwargs)
        except (IOError, EOFError):
            raise ReadError("not a bzip2 file")
        t._extfileobj = False
        return t

    # All *open() methods are registered here.
    OPEN_METH = {
        "tar": "taropen",   # uncompressed tar
        "gz":  "gzopen",    # gzip compressed tar
        "bz2": "bz2open"    # bzip2 compressed tar
    }
    #--------------------------------------------------------------------------
    # The public methods which TarFile provides:

    def close(self):
        """Close the TarFile. In write-mode, two finishing zero blocks are
           appended to the archive.
        """
        if self.closed:
            return

        if self.mode in "aw":
            self.fileobj.write(NUL * (BLOCKSIZE * 2))
            self.offset += (BLOCKSIZE * 2)
            # fill up the end with zero-blocks
            # (like option -b20 for tar does)
            blocks, remainder = divmod(self.offset, RECORDSIZE)
            if remainder > 0:
                self.fileobj.write(NUL * (RECORDSIZE - remainder))

        if not self._extfileobj:
            self.fileobj.close()
        self.closed = True
    def getmember(self, name):
        """Return a TarInfo object for member `name'. If `name' can not be
           found in the archive, KeyError is raised. If a member occurs more
           than once in the archive, its last occurrence is assumed to be the
           most up-to-date version.
        """
        tarinfo = self._getmember(name)
        if tarinfo is None:
            raise KeyError("filename %r not found" % name)
        return tarinfo

    def getmembers(self):
        """Return the members of the archive as a list of TarInfo objects. The
           list has the same order as the members in the archive.
        """
        self._check()
        if not self._loaded:    # if we want to obtain a list of
            self._load()        # all members, we first have to
                                # scan the whole archive.
        return self.members

    def getnames(self):
        """Return the members of the archive as a list of their names. It has
           the same order as the list returned by getmembers().
        """
        return [tarinfo.name for tarinfo in self.getmembers()]
    def gettarinfo(self, name=None, arcname=None, fileobj=None):
        """Create a TarInfo object for either the file `name' or the file
           object `fileobj' (using os.fstat on its file descriptor). You can
           modify some of the TarInfo's attributes before you add it using
           addfile(). If given, `arcname' specifies an alternative name for the
           file in the archive.
        """
        self._check("aw")

        # When fileobj is given, replace name by
        # fileobj's real name.
        if fileobj is not None:
            name = fileobj.name

        # Building the name of the member in the archive.
        # Backward slashes are converted to forward slashes,
        # Absolute paths are turned to relative paths.
        if arcname is None:
            arcname = name
        drv, arcname = os.path.splitdrive(arcname)
        arcname = arcname.replace(os.sep, "/")
        arcname = arcname.lstrip("/")

        # Now, fill the TarInfo object with
        # information specific for the file.
        tarinfo = self.tarinfo()
        tarinfo.tarfile = self

        # Use os.stat or os.lstat, depending on platform
        # and if symlinks shall be resolved.
        if fileobj is None:
            if hasattr(os, "lstat") and not self.dereference:
                statres = os.lstat(name)
            else:
                statres = os.stat(name)
        else:
            statres = os.fstat(fileobj.fileno())
        linkname = ""

        stmd = statres.st_mode
        if stat.S_ISREG(stmd):
            inode = (statres.st_ino, statres.st_dev)
            if not self.dereference and statres.st_nlink > 1 and \
                    inode in self.inodes and arcname != self.inodes[inode]:
                # Is it a hardlink to an already
                # archived file?
                type = LNKTYPE
                linkname = self.inodes[inode]
            else:
                # The inode is added only if its valid.
                # For win32 it is always 0.
                type = REGTYPE
                if inode[0]:
                    self.inodes[inode] = arcname
        elif stat.S_ISDIR(stmd):
            type = DIRTYPE
        elif stat.S_ISFIFO(stmd):
            type = FIFOTYPE
        elif stat.S_ISLNK(stmd):
            type = SYMTYPE
            linkname = os.readlink(name)
        elif stat.S_ISCHR(stmd):
            type = CHRTYPE
        elif stat.S_ISBLK(stmd):
            type = BLKTYPE
        else:
            return None

        # Fill the TarInfo object with all
        # information we can get.
        tarinfo.name = arcname
        tarinfo.mode = stmd
        tarinfo.uid = statres.st_uid
        tarinfo.gid = statres.st_gid
        if type == REGTYPE:
            tarinfo.size = statres.st_size
        else:
            tarinfo.size = 0L
        tarinfo.mtime = statres.st_mtime
        tarinfo.type = type
        tarinfo.linkname = linkname
        if pwd:
            try:
                tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0]
            except KeyError:
                pass
        if grp:
            try:
                tarinfo.gname = grp.getgrgid(tarinfo.gid)[0]
            except KeyError:
                pass

        if type in (CHRTYPE, BLKTYPE):
            if hasattr(os, "major") and hasattr(os, "minor"):
                tarinfo.devmajor = os.major(statres.st_rdev)
                tarinfo.devminor = os.minor(statres.st_rdev)
        return tarinfo

    def list(self, verbose=True):
        """Print a table of contents to sys.stdout. If `verbose' is False, only
           the names of the members are printed. If it is True, an `ls -l'-like
           output is produced.
        """
        self._check()

        for tarinfo in self:
            if verbose:
                print filemode(tarinfo.mode),
                print "%s/%s" % (tarinfo.uname or tarinfo.uid,
                                 tarinfo.gname or tarinfo.gid),
                if tarinfo.ischr() or tarinfo.isblk():
                    print "%10s" % ("%d,%d" \
                                    % (tarinfo.devmajor, tarinfo.devminor)),
                else:
                    print "%10d" % tarinfo.size,
                print "%d-%02d-%02d %02d:%02d:%02d" \
                      % time.localtime(tarinfo.mtime)[:6],

            print tarinfo.name + ("/" if tarinfo.isdir() else ""),

            if verbose:
                if tarinfo.issym():
                    print "->", tarinfo.linkname,
                if tarinfo.islnk():
                    print "link to", tarinfo.linkname,
            print
    def add(self, name, arcname=None, recursive=True, exclude=None, filter=None):
        """Add the file `name' to the archive. `name' may be any type of file
           (directory, fifo, symbolic link, etc.). If given, `arcname'
           specifies an alternative name for the file in the archive.
           Directories are added recursively by default. This can be avoided by
           setting `recursive' to False. `exclude' is a function that should
           return True for each filename to be excluded. `filter' is a function
           that expects a TarInfo object argument and returns the changed
           TarInfo object, if it returns None the TarInfo object will be
           excluded from the archive.
        """
        self._check("aw")

        if arcname is None:
            arcname = name

        # Exclude pathnames.
        if exclude is not None:
            import warnings
            warnings.warn("use the filter argument instead",
                          DeprecationWarning, 2)
            if exclude(name):
                self._dbg(2, "tarfile: Excluded %r" % name)
                return

        # Skip if somebody tries to archive the archive...
        if self.name is not None and os.path.abspath(name) == self.name:
            self._dbg(2, "tarfile: Skipped %r" % name)
            return

        self._dbg(1, name)

        # Create a TarInfo object from the file.
        tarinfo = self.gettarinfo(name, arcname)

        if tarinfo is None:
            self._dbg(1, "tarfile: Unsupported type %r" % name)
            return

        # Change or exclude the TarInfo object.
        if filter is not None:
            tarinfo = filter(tarinfo)
            if tarinfo is None:
                self._dbg(2, "tarfile: Excluded %r" % name)
                return

        # Append the tar header and data to the archive.
        if tarinfo.isreg():
            f = bltn_open(name, "rb")
            self.addfile(tarinfo, f)
            f.close()

        elif tarinfo.isdir():
            self.addfile(tarinfo)
            if recursive:
                for f in os.listdir(name):
                    self.add(os.path.join(name, f), os.path.join(arcname, f),
                             recursive, exclude, filter)

        else:
            self.addfile(tarinfo)
    def addfile(self, tarinfo, fileobj=None):
        """Add the TarInfo object `tarinfo' to the archive. If `fileobj' is
           given, tarinfo.size bytes are read from it and added to the archive.
           You can create TarInfo objects using gettarinfo().
           On Windows platforms, `fileobj' should always be opened with mode
           'rb' to avoid irritation about the file size.
        """
        self._check("aw")

        tarinfo = copy.copy(tarinfo)

        buf = tarinfo.tobuf(self.format, self.encoding, self.errors)
        self.fileobj.write(buf)
        self.offset += len(buf)

        # If there's data to follow, append it.
        if fileobj is not None:
            copyfileobj(fileobj, self.fileobj, tarinfo.size)
            blocks, remainder = divmod(tarinfo.size, BLOCKSIZE)
            if remainder > 0:
                self.fileobj.write(NUL * (BLOCKSIZE - remainder))
                blocks += 1
            self.offset += blocks * BLOCKSIZE

        self.members.append(tarinfo)
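
    # A minimal usage sketch (not part of the original module): adding an
    # in-memory payload; the member name is made up and `tar' is assumed to
    # be a TarFile opened for writing:
    #
    #   >>> import StringIO
    #   >>> data = "hello world\n"
    #   >>> info = TarInfo("greeting.txt")
    #   >>> info.size = len(data)
    #   >>> tar.addfile(info, StringIO.StringIO(data))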
    def extractall(self, path=".", members=None):
        """Extract all members from the archive to the current working
           directory and set owner, modification time and permissions on
           directories afterwards. `path' specifies a different directory
           to extract to. `members' is optional and must be a subset of the
           list returned by getmembers().
        """
        directories = []

        if members is None:
            members = self

        for tarinfo in members:
            if tarinfo.isdir():
                # Extract directories with a safe mode.
                directories.append(tarinfo)
                tarinfo = copy.copy(tarinfo)
                tarinfo.mode = 0700
            self.extract(tarinfo, path)

        # Reverse sort directories.
        directories.sort(key=operator.attrgetter('name'))
        directories.reverse()

        # Set correct owner, mtime and filemode on directories.
        for tarinfo in directories:
            dirpath = os.path.join(path, tarinfo.name)
            try:
                self.chown(tarinfo, dirpath)
                self.utime(tarinfo, dirpath)
                self.chmod(tarinfo, dirpath)
            except ExtractError, e:
                if self.errorlevel > 1:
                    raise
                else:
                    self._dbg(1, "tarfile: %s" % e)
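
    # A minimal usage sketch (not part of the original module): extracting a
    # subset of members; the paths are made up.  Member names are not
    # sanitized here, so this should only be used on archives you trust.
    #
    #   >>> tar = TarFile.open("sample.tar")
    #   >>> wanted = [m for m in tar.getmembers() if m.name.endswith(".txt")]
    #   >>> tar.extractall(path="out", members=wanted)
    #   >>> tar.close()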
    def extract(self, member, path=""):
        """Extract a member from the archive to the current working directory,
           using its full name. Its file information is extracted as accurately
           as possible. `member' may be a filename or a TarInfo object. You can
           specify a different directory using `path'.
        """
        self._check("r")

        if isinstance(member, basestring):
            tarinfo = self.getmember(member)
        else:
            tarinfo = member

        # Prepare the link target for makelink().
        if tarinfo.islnk():
            tarinfo._link_target = os.path.join(path, tarinfo.linkname)

        try:
            self._extract_member(tarinfo, os.path.join(path, tarinfo.name))
        except EnvironmentError, e:
            if self.errorlevel > 0:
                raise
            else:
                if e.filename is None:
                    self._dbg(1, "tarfile: %s" % e.strerror)
                else:
                    self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename))
        except ExtractError, e:
            if self.errorlevel > 1:
                raise
            else:
                self._dbg(1, "tarfile: %s" % e)
    def extractfile(self, member):
        """Extract a member from the archive as a file object. `member' may be
           a filename or a TarInfo object. If `member' is a regular file, a
           file-like object is returned. If `member' is a link, a file-like
           object is constructed from the link's target. If `member' is none of
           the above, None is returned.
           The file-like object is read-only and provides the following
           methods: read(), readline(), readlines(), seek() and tell()
        """
        self._check("r")

        if isinstance(member, basestring):
            tarinfo = self.getmember(member)
        else:
            tarinfo = member

        if tarinfo.isreg():
            return self.fileobject(self, tarinfo)

        elif tarinfo.type not in SUPPORTED_TYPES:
            # If a member's type is unknown, it is treated as a
            # regular file.
            return self.fileobject(self, tarinfo)

        elif tarinfo.islnk() or tarinfo.issym():
            if isinstance(self.fileobj, _Stream):
                # A small but ugly workaround for the case that someone tries
                # to extract a (sym)link as a file-object from a non-seekable
                # stream of tar blocks.
                raise StreamError("cannot extract (sym)link as file object")
            else:
                # A (sym)link's file object is its target's file object.
                return self.extractfile(self._find_link_target(tarinfo))
        else:
            # If there's no data associated with the member (directory, chrdev,
            # blkdev, etc.), return None instead of a file object.
            return None
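
    # A minimal usage sketch (not part of the original module): reading one
    # member without writing it to disk; the member name is made up and
    # `tar' is assumed to be a TarFile opened for reading:
    #
    #   >>> f = tar.extractfile("docs/readme.txt")
    #   >>> data = f.read() if f is not None else None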
    def _extract_member(self, tarinfo, targetpath):
        """Extract the TarInfo object tarinfo to a physical
           file called targetpath.
        """
        # Fetch the TarInfo object for the given name
        # and build the destination pathname, replacing
        # forward slashes to platform specific separators.
        targetpath = targetpath.rstrip("/")
        targetpath = targetpath.replace("/", os.sep)

        # Create all upper directories.
        upperdirs = os.path.dirname(targetpath)
        if upperdirs and not os.path.exists(upperdirs):
            # Create directories that are not part of the archive with
            # default permissions.
            os.makedirs(upperdirs)

        if tarinfo.islnk() or tarinfo.issym():
            self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname))
        else:
            self._dbg(1, tarinfo.name)

        if tarinfo.isreg():
            self.makefile(tarinfo, targetpath)
        elif tarinfo.isdir():
            self.makedir(tarinfo, targetpath)
        elif tarinfo.isfifo():
            self.makefifo(tarinfo, targetpath)
        elif tarinfo.ischr() or tarinfo.isblk():
            self.makedev(tarinfo, targetpath)
        elif tarinfo.islnk() or tarinfo.issym():
            self.makelink(tarinfo, targetpath)
        elif tarinfo.type not in SUPPORTED_TYPES:
            self.makeunknown(tarinfo, targetpath)
        else:
            self.makefile(tarinfo, targetpath)

        self.chown(tarinfo, targetpath)
        if not tarinfo.issym():
            self.chmod(tarinfo, targetpath)
            self.utime(tarinfo, targetpath)
2178 #--------------------------------------------------------------------------
2179 # Below are the different file methods. They are called via
2180 # _extract_member() when extract() is called. They can be replaced in a
2181 # subclass to implement other functionality.
2183 def makedir(self
, tarinfo
, targetpath
):
2184 """Make a directory called targetpath.
2187 # Use a safe mode for the directory, the real mode is set
2188 # later in _extract_member().
2189 os
.mkdir(targetpath
, 0700)
2190 except EnvironmentError, e
:
2191 if e
.errno
!= errno
.EEXIST
:
2194 def makefile(self
, tarinfo
, targetpath
):
2195 """Make a file called targetpath.
2197 source
= self
.extractfile(tarinfo
)
2198 target
= bltn_open(targetpath
, "wb")
2199 copyfileobj(source
, target
)
2203 def makeunknown(self
, tarinfo
, targetpath
):
2204 """Make a file from a TarInfo object with an unknown type
2207 self
.makefile(tarinfo
, targetpath
)
2208 self
._dbg
(1, "tarfile: Unknown file type %r, " \
2209 "extracted as regular file." % tarinfo
.type)
2211 def makefifo(self
, tarinfo
, targetpath
):
2212 """Make a fifo called targetpath.
2214 if hasattr(os
, "mkfifo"):
2215 os
.mkfifo(targetpath
)
2217 raise ExtractError("fifo not supported by system")
2219 def makedev(self
, tarinfo
, targetpath
):
2220 """Make a character or block device called targetpath.
2222 if not hasattr(os
, "mknod") or not hasattr(os
, "makedev"):
2223 raise ExtractError("special devices not supported by system")
2227 mode |
= stat
.S_IFBLK
2229 mode |
= stat
.S_IFCHR
2231 os
.mknod(targetpath
, mode
,
2232 os
.makedev(tarinfo
.devmajor
, tarinfo
.devminor
))
    def makelink(self, tarinfo, targetpath):
        """Make a (symbolic) link called targetpath. If it cannot be created
           (platform limitation), we try to make a copy of the referenced file
           instead of a link.
        """
        if hasattr(os, "symlink") and hasattr(os, "link"):
            # For systems that support symbolic and hard links.
            if tarinfo.issym():
                os.symlink(tarinfo.linkname, targetpath)
            else:
                if os.path.exists(tarinfo._link_target):
                    os.link(tarinfo._link_target, targetpath)
                else:
                    self._extract_member(self._find_link_target(tarinfo),
                                         targetpath)
        else:
            try:
                self._extract_member(self._find_link_target(tarinfo),
                                     targetpath)
            except KeyError:
                raise ExtractError("unable to resolve link inside archive")
    def chown(self, tarinfo, targetpath):
        """Set owner of targetpath according to tarinfo.
        """
        if pwd and hasattr(os, "geteuid") and os.geteuid() == 0:
            # We have to be root to do so.
            try:
                g = grp.getgrnam(tarinfo.gname)[2]
            except KeyError:
                try:
                    g = grp.getgrgid(tarinfo.gid)[2]
                except KeyError:
                    g = os.getgid()
            try:
                u = pwd.getpwnam(tarinfo.uname)[2]
            except KeyError:
                try:
                    u = pwd.getpwuid(tarinfo.uid)[2]
                except KeyError:
                    u = os.getuid()
            try:
                if tarinfo.issym() and hasattr(os, "lchown"):
                    os.lchown(targetpath, u, g)
                else:
                    if sys.platform != "os2emx":
                        os.chown(targetpath, u, g)
            except EnvironmentError, e:
                raise ExtractError("could not change owner")
    def chmod(self, tarinfo, targetpath):
        """Set file permissions of targetpath according to tarinfo.
        """
        if hasattr(os, 'chmod'):
            try:
                os.chmod(targetpath, tarinfo.mode)
            except EnvironmentError, e:
                raise ExtractError("could not change mode")
    def utime(self, tarinfo, targetpath):
        """Set modification time of targetpath according to tarinfo.
        """
        if not hasattr(os, 'utime'):
            return
        try:
            os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime))
        except EnvironmentError, e:
            raise ExtractError("could not change modification time")
    #--------------------------------------------------------------------------
    def next(self):
        """Return the next member of the archive as a TarInfo object, when
           TarFile is opened for reading. Return None if there is no more
           available.
        """
        self._check("ra")
        if self.firstmember is not None:
            m = self.firstmember
            self.firstmember = None
            return m

        # Read the next block.
        self.fileobj.seek(self.offset)
        tarinfo = None
        while True:
            try:
                tarinfo = self.tarinfo.fromtarfile(self)
            except EOFHeaderError, e:
                if self.ignore_zeros:
                    self._dbg(2, "0x%X: %s" % (self.offset, e))
                    self.offset += BLOCKSIZE
                    continue
            except InvalidHeaderError, e:
                if self.ignore_zeros:
                    self._dbg(2, "0x%X: %s" % (self.offset, e))
                    self.offset += BLOCKSIZE
                    continue
                elif self.offset == 0:
                    raise ReadError(str(e))
            except EmptyHeaderError:
                if self.offset == 0:
                    raise ReadError("empty file")
            except TruncatedHeaderError, e:
                if self.offset == 0:
                    raise ReadError(str(e))
            except SubsequentHeaderError, e:
                raise ReadError(str(e))
            break

        if tarinfo is not None:
            self.members.append(tarinfo)
        else:
            self._loaded = True

        return tarinfo
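    # Illustrative sketch (comment only, not part of the module): next() can be
    # driven directly; with ignore_zeros=True, damaged or zeroed blocks are
    # skipped instead of being treated as end-of-archive. The archive name is
    # hypothetical:
    #
    #     tar = TarFile.open("damaged.tar", ignore_zeros=True)
    #     tarinfo = tar.next()
    #     while tarinfo is not None:
    #         print tarinfo.name
    #         tarinfo = tar.next()
    #     tar.close()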
    #--------------------------------------------------------------------------
    # Little helper methods:

    def _getmember(self, name, tarinfo=None, normalize=False):
        """Find an archive member by name from bottom to top.
           If tarinfo is given, it is used as the starting point.
        """
        # Ensure that all members have been loaded.
        members = self.getmembers()

        # Limit the member search list up to tarinfo.
        if tarinfo is not None:
            members = members[:members.index(tarinfo)]

        if normalize:
            name = os.path.normpath(name)

        for member in reversed(members):
            if normalize:
                member_name = os.path.normpath(member.name)
            else:
                member_name = member.name

            if name == member_name:
                return member

    def _load(self):
        """Read through the entire archive file and look for readable
           members.
        """
        while True:
            tarinfo = self.next()
            if tarinfo is None:
                break
        self._loaded = True
    def _check(self, mode=None):
        """Check if TarFile is still open, and if the operation's mode
           corresponds to TarFile's mode.
        """
        if self.closed:
            raise IOError("%s is closed" % self.__class__.__name__)
        if mode is not None and self.mode not in mode:
            raise IOError("bad operation for mode %r" % self.mode)
    def _find_link_target(self, tarinfo):
        """Find the target member of a symlink or hardlink member in the
           archive.
        """
        if tarinfo.issym():
            # Always search the entire archive.
            linkname = os.path.dirname(tarinfo.name) + "/" + tarinfo.linkname
            limit = None
        else:
            # Search the archive before the link, because a hard link is
            # just a reference to an already archived file.
            linkname = tarinfo.linkname
            limit = tarinfo

        member = self._getmember(linkname, tarinfo=limit, normalize=True)
        if member is None:
            raise KeyError("linkname %r not found" % linkname)
        return member
    def __iter__(self):
        """Provide an iterator object.
        """
        if self._loaded:
            return iter(self.members)
        else:
            return TarIter(self)
    def _dbg(self, level, msg):
        """Write debugging output to sys.stderr.
        """
        if level <= self.debug:
            print >> sys.stderr, msg
    def __enter__(self):
        self._check()
        return self

    def __exit__(self, type, value, traceback):
        if type is None:
            self.close()
        else:
            # An exception occurred. We must not call close() because
            # it would try to write end-of-archive blocks and padding.
            if not self._extfileobj:
                self.fileobj.close()
            self.closed = True
# class TarFile
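# Illustrative sketch (not part of the original module): __enter__/__exit__
# above make TarFile usable in a with-statement, so the archive is closed
# (or its file object released after an error) automatically. The helper
# name and archive path are hypothetical.
def _example_with_statement(path="example.tar"):
    # Assumes a readable tar archive exists at `path`.
    with TarFile.open(path) as tar:
        return tar.getnames()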
class TarIter:
    """Iterator Class.

       for tarinfo in TarFile(...):
           suite...
    """

    def __init__(self, tarfile):
        """Construct a TarIter object.
        """
        self.tarfile = tarfile
        self.index = 0
    def __iter__(self):
        """Return iterator object.
        """
        return self
    def next(self):
        """Return the next item using TarFile's next() method.
           When all members have been read, set TarFile as _loaded.
        """
        # Fix for SF #1100429: Under rare circumstances it can
        # happen that getmembers() is called during iteration,
        # which will cause TarIter to stop prematurely.
        if not self.tarfile._loaded:
            tarinfo = self.tarfile.next()
            if not tarinfo:
                self.tarfile._loaded = True
                raise StopIteration
        else:
            try:
                tarinfo = self.tarfile.members[self.index]
            except IndexError:
                raise StopIteration
        self.index += 1
        return tarinfo
# class TarIter
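# Illustrative sketch (not part of the original module): iterating a TarFile
# goes through TarIter, so member headers are read lazily, one at a time.
# The helper name and archive path are hypothetical.
def _example_iterate_members(path="example.tar"):
    tar = TarFile.open(path)        # assumes a readable tar archive
    try:
        total = 0
        for tarinfo in tar:
            if tarinfo.isreg():
                total += tarinfo.size
        return total
    finally:
        tar.close()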
# Helper classes for sparse file support
class _section:
    """Base class for _data and _hole.
    """
    def __init__(self, offset, size):
        self.offset = offset
        self.size = size
    def __contains__(self, offset):
        return self.offset <= offset < self.offset + self.size

class _data(_section):
    """Represent a data section in a sparse file.
    """
    def __init__(self, offset, size, realpos):
        _section.__init__(self, offset, size)
        self.realpos = realpos

class _hole(_section):
    """Represent a hole section in a sparse file.
    """
    pass

class _ringbuffer(list):
    """Ringbuffer class which increases performance
       over a regular list.
    """
    def __init__(self):
        self.idx = 0
    def find(self, offset):
        idx = self.idx
        while True:
            item = self[idx]
            if offset in item:
                break
            idx += 1
            if idx == len(self):
                idx = 0
            if idx == self.idx:
                # End of File
                return None
        self.idx = idx
        return item
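# Illustrative sketch (not part of the original module): how the helpers above
# describe a sparse member as alternating data and hole sections. The helper
# name and the layout are hypothetical.
def _example_sparse_map():
    sparse = _ringbuffer()
    sparse.append(_data(0, 512, realpos=0))       # 512 bytes of real data
    sparse.append(_hole(512, 1024))               # a 1024-byte hole
    sparse.append(_data(1536, 512, realpos=512))  # more data after the hole
    section = sparse.find(600)                    # offset 600 falls in the hole
    return isinstance(section, _hole)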
#---------------------------------------------
# zipfile compatible TarFile class
#---------------------------------------------
TAR_PLAIN = 0           # zipfile.ZIP_STORED
TAR_GZIPPED = 8         # zipfile.ZIP_DEFLATED
class TarFileCompat:
    """TarFile class compatible with standard module zipfile's
       ZipFile class.
    """
    def __init__(self, file, mode="r", compression=TAR_PLAIN):
        from warnings import warnpy3k
        warnpy3k("the TarFileCompat class has been removed in Python 3.0",
                 stacklevel=2)
        if compression == TAR_PLAIN:
            self.tarfile = TarFile.taropen(file, mode)
        elif compression == TAR_GZIPPED:
            self.tarfile = TarFile.gzopen(file, mode)
        else:
            raise ValueError("unknown compression constant")
        if mode[0:1] == "r":
            members = self.tarfile.getmembers()
            for m in members:
                m.filename = m.name
                m.file_size = m.size
                m.date_time = time.gmtime(m.mtime)[:6]
    def namelist(self):
        return map(lambda m: m.name, self.infolist())
    def infolist(self):
        return filter(lambda m: m.type in REGULAR_TYPES,
                      self.tarfile.getmembers())
    def printdir(self):
        self.tarfile.list()
    def testzip(self):
        return
    def getinfo(self, name):
        return self.tarfile.getmember(name)
    def read(self, name):
        return self.tarfile.extractfile(self.tarfile.getmember(name)).read()
    def write(self, filename, arcname=None, compress_type=None):
        self.tarfile.add(filename, arcname)
    def writestr(self, zinfo, bytes):
        try:
            from cStringIO import StringIO
        except ImportError:
            from StringIO import StringIO
        import calendar
        tinfo = TarInfo(zinfo.filename)
        tinfo.size = len(bytes)
        tinfo.mtime = calendar.timegm(zinfo.date_time)
        self.tarfile.addfile(tinfo, StringIO(bytes))
    def close(self):
        self.tarfile.close()
#class TarFileCompat
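# Illustrative sketch (not part of the original module): using the deprecated
# TarFileCompat wrapper through its zipfile-like interface. The helper name
# and archive path are hypothetical.
def _example_zipfile_style(path="example.tar"):
    archive = TarFileCompat(path, "r", TAR_PLAIN)   # assumes a plain tar file
    try:
        names = archive.namelist()                  # like ZipFile.namelist()
        data = archive.read(names[0]) if names else ""
        return names, data
    finally:
        archive.close()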
#--------------------
# exported functions
#--------------------
def is_tarfile(name):
    """Return True if name points to a tar archive that we
       are able to handle, else return False.