PageRenderTime 59ms CodeModel.GetById 18ms RepoModel.GetById 1ms app.codeStats 0ms

/lib-python/2.7/tarfile.py

https://bitbucket.org/kkris/pypy
Python | 2587 lines | 2481 code | 28 blank | 78 comment | 23 complexity | 28e2e56fb878b91e9f999f8f7a36afe6 MD5 | raw file

Large files are truncated, but you can click here to view the full file

  1. #!/usr/bin/env python
  2. # -*- coding: iso-8859-1 -*-
  3. #-------------------------------------------------------------------
  4. # tarfile.py
  5. #-------------------------------------------------------------------
  6. # Copyright (C) 2002 Lars Gustäbel <lars@gustaebel.de>
  7. # All rights reserved.
  8. #
  9. # Permission is hereby granted, free of charge, to any person
  10. # obtaining a copy of this software and associated documentation
  11. # files (the "Software"), to deal in the Software without
  12. # restriction, including without limitation the rights to use,
  13. # copy, modify, merge, publish, distribute, sublicense, and/or sell
  14. # copies of the Software, and to permit persons to whom the
  15. # Software is furnished to do so, subject to the following
  16. # conditions:
  17. #
  18. # The above copyright notice and this permission notice shall be
  19. # included in all copies or substantial portions of the Software.
  20. #
  21. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
  22. # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
  23. # OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
  24. # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
  25. # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
  26. # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
  27. # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
  28. # OTHER DEALINGS IN THE SOFTWARE.
  29. #
  30. """Read from and write to tar format archives.
  31. """
  32. __version__ = "$Revision: 85213 $"
  33. # $Source$
  34. version = "0.9.0"
  35. __author__ = "Lars Gustäbel (lars@gustaebel.de)"
  36. __date__ = "$Date$"
  37. __cvsid__ = "$Id$"
  38. __credits__ = "Gustavo Niemeyer, Niels Gustäbel, Richard Townsend."
  39. #---------
  40. # Imports
  41. #---------
  42. import sys
  43. import os
  44. import shutil
  45. import stat
  46. import errno
  47. import time
  48. import struct
  49. import copy
  50. import re
  51. import operator
  52. try:
  53. import grp, pwd
  54. except ImportError:
  55. grp = pwd = None
# from tarfile import *
__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError"]

#---------------------------------------------------------
# tar constants
#---------------------------------------------------------
NUL = "\0"                      # the null character
BLOCKSIZE = 512                 # length of processing blocks
RECORDSIZE = BLOCKSIZE * 20     # length of records
GNU_MAGIC = "ustar \0"          # magic gnu tar string
POSIX_MAGIC = "ustar\x0000"     # magic posix tar string

LENGTH_NAME = 100               # maximum length of a filename
LENGTH_LINK = 100               # maximum length of a linkname
LENGTH_PREFIX = 155             # maximum length of the prefix field

# Member type flags as stored in the header's typeflag byte.
REGTYPE = "0"                   # regular file
AREGTYPE = "\0"                 # regular file
LNKTYPE = "1"                   # link (inside tarfile)
SYMTYPE = "2"                   # symbolic link
CHRTYPE = "3"                   # character special device
BLKTYPE = "4"                   # block special device
DIRTYPE = "5"                   # directory
FIFOTYPE = "6"                  # fifo special device
CONTTYPE = "7"                  # contiguous file

GNUTYPE_LONGNAME = "L"          # GNU tar longname
GNUTYPE_LONGLINK = "K"          # GNU tar longlink
GNUTYPE_SPARSE = "S"            # GNU tar sparse file

XHDTYPE = "x"                   # POSIX.1-2001 extended header
XGLTYPE = "g"                   # POSIX.1-2001 global header
SOLARIS_XHDTYPE = "X"           # Solaris extended header

USTAR_FORMAT = 0                # POSIX.1-1988 (ustar) format
GNU_FORMAT = 1                  # GNU tar format
PAX_FORMAT = 2                  # POSIX.1-2001 (pax) format
DEFAULT_FORMAT = GNU_FORMAT

#---------------------------------------------------------
# tarfile constants
#---------------------------------------------------------
# File types that tarfile supports:
SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE,
                   SYMTYPE, DIRTYPE, FIFOTYPE,
                   CONTTYPE, CHRTYPE, BLKTYPE,
                   GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
                   GNUTYPE_SPARSE)

# File types that will be treated as a regular file.
REGULAR_TYPES = (REGTYPE, AREGTYPE,
                 CONTTYPE, GNUTYPE_SPARSE)

# File types that are part of the GNU tar format.
GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
             GNUTYPE_SPARSE)

# Fields from a pax header that override a TarInfo attribute.
PAX_FIELDS = ("path", "linkpath", "size", "mtime",
              "uid", "gid", "uname", "gname")

# Fields in a pax header that are numbers, all other fields
# are treated as strings.
PAX_NUMBER_FIELDS = {
    "atime": float,
    "ctime": float,
    "mtime": float,
    "uid": int,
    "gid": int,
    "size": int
}

#---------------------------------------------------------
# Bits used in the mode field, values in octal.
#---------------------------------------------------------
S_IFLNK = 0120000        # symbolic link
S_IFREG = 0100000        # regular file
S_IFBLK = 0060000        # block device
S_IFDIR = 0040000        # directory
S_IFCHR = 0020000        # character device
S_IFIFO = 0010000        # fifo

TSUID = 04000            # set UID on execution
TSGID = 02000            # set GID on execution
TSVTX = 01000            # reserved

TUREAD = 0400            # read by owner
TUWRITE = 0200           # write by owner
TUEXEC = 0100            # execute/search by owner
TGREAD = 0040            # read by group
TGWRITE = 0020           # write by group
TGEXEC = 0010            # execute/search by group
TOREAD = 0004            # read by other
TOWRITE = 0002           # write by other
TOEXEC = 0001            # execute/search by other

#---------------------------------------------------------
# initialization
#---------------------------------------------------------
# Encoding used for member names; fall back to the interpreter default
# when the platform does not report a filesystem encoding.
ENCODING = sys.getfilesystemencoding()
if ENCODING is None:
    ENCODING = sys.getdefaultencoding()
  146. def stn(s, length):
  147. """Convert a python string to a null-terminated string buffer.
  148. """
  149. return s[:length] + (length - len(s)) * NUL
  150. def nts(s):
  151. """Convert a null-terminated string field to a python string.
  152. """
  153. # Use the string up to the first null char.
  154. p = s.find("\0")
  155. if p == -1:
  156. return s
  157. return s[:p]
def nti(s):
    """Convert a number field to a python number.
    """
    # There are two possible encodings for a number field, see
    # itn() below.
    if s[0] != chr(0200):
        # Octal encoding: ASCII octal digits, NUL/space terminated.
        # An empty field counts as zero.
        try:
            n = int(nts(s) or "0", 8)
        except ValueError:
            raise InvalidHeaderError("invalid header")
    else:
        # GNU base-256 encoding: a leading 0200 marker byte followed by
        # the value as a big-endian binary number.
        n = 0L
        for i in xrange(len(s) - 1):
            n <<= 8
            n += ord(s[i + 1])
    return n
def itn(n, digits=8, format=DEFAULT_FORMAT):
    """Convert a python number to a number field.
    """
    # POSIX 1003.1-1988 requires numbers to be encoded as a string of
    # octal digits followed by a null-byte, this allows values up to
    # (8**(digits-1))-1. GNU tar allows storing numbers greater than
    # that if necessary. A leading 0200 byte indicates this particular
    # encoding, the following digits-1 bytes are a big-endian
    # representation. This allows values up to (256**(digits-1))-1.
    if 0 <= n < 8 ** (digits - 1):
        # Fits in the octal representation.
        s = "%0*o" % (digits - 1, n) + NUL
    else:
        # Base-256 is only valid for the GNU format and for values
        # that fit in digits-1 bytes.
        if format != GNU_FORMAT or n >= 256 ** (digits - 1):
            raise ValueError("overflow in number field")

        if n < 0:
            # XXX We mimic GNU tar's behaviour with negative numbers,
            # this could raise OverflowError.
            n = struct.unpack("L", struct.pack("l", n))[0]

        # Emit digits-1 big-endian bytes, least significant byte last.
        s = ""
        for i in xrange(digits - 1):
            s = chr(n & 0377) + s
            n >>= 8
        s = chr(0200) + s
    return s
  198. def uts(s, encoding, errors):
  199. """Convert a unicode object to a string.
  200. """
  201. if errors == "utf-8":
  202. # An extra error handler similar to the -o invalid=UTF-8 option
  203. # in POSIX.1-2001. Replace untranslatable characters with their
  204. # UTF-8 representation.
  205. try:
  206. return s.encode(encoding, "strict")
  207. except UnicodeEncodeError:
  208. x = []
  209. for c in s:
  210. try:
  211. x.append(c.encode(encoding, "strict"))
  212. except UnicodeEncodeError:
  213. x.append(c.encode("utf8"))
  214. return "".join(x)
  215. else:
  216. return s.encode(encoding, errors)
  217. def calc_chksums(buf):
  218. """Calculate the checksum for a member's header by summing up all
  219. characters except for the chksum field which is treated as if
  220. it was filled with spaces. According to the GNU tar sources,
  221. some tars (Sun and NeXT) calculate chksum with signed char,
  222. which will be different if there are chars in the buffer with
  223. the high bit set. So we calculate two checksums, unsigned and
  224. signed.
  225. """
  226. unsigned_chksum = 256 + sum(struct.unpack("148B", buf[:148]) + struct.unpack("356B", buf[156:512]))
  227. signed_chksum = 256 + sum(struct.unpack("148b", buf[:148]) + struct.unpack("356b", buf[156:512]))
  228. return unsigned_chksum, signed_chksum
  229. def copyfileobj(src, dst, length=None):
  230. """Copy length bytes from fileobj src to fileobj dst.
  231. If length is None, copy the entire content.
  232. """
  233. if length == 0:
  234. return
  235. if length is None:
  236. shutil.copyfileobj(src, dst)
  237. return
  238. BUFSIZE = 16 * 1024
  239. blocks, remainder = divmod(length, BUFSIZE)
  240. for b in xrange(blocks):
  241. buf = src.read(BUFSIZE)
  242. if len(buf) < BUFSIZE:
  243. raise IOError("end of file reached")
  244. dst.write(buf)
  245. if remainder != 0:
  246. buf = src.read(remainder)
  247. if len(buf) < remainder:
  248. raise IOError("end of file reached")
  249. dst.write(buf)
  250. return
# Lookup table used by filemode(): one inner tuple per output column of
# an ls-style permission string. Each inner tuple lists (bit-pattern,
# character) alternatives; the first pattern fully contained in the mode
# wins, otherwise the column shows "-".
filemode_table = (
    ((S_IFLNK, "l"),
     (S_IFREG, "-"),
     (S_IFBLK, "b"),
     (S_IFDIR, "d"),
     (S_IFCHR, "c"),
     (S_IFIFO, "p")),

    ((TUREAD, "r"),),
    ((TUWRITE, "w"),),
    ((TUEXEC|TSUID, "s"),
     (TSUID, "S"),
     (TUEXEC, "x")),

    ((TGREAD, "r"),),
    ((TGWRITE, "w"),),
    ((TGEXEC|TSGID, "s"),
     (TSGID, "S"),
     (TGEXEC, "x")),

    ((TOREAD, "r"),),
    ((TOWRITE, "w"),),
    ((TOEXEC|TSVTX, "t"),
     (TSVTX, "T"),
     (TOEXEC, "x"))
)
  274. def filemode(mode):
  275. """Convert a file's mode to a string of the form
  276. -rwxrwxrwx.
  277. Used by TarFile.list()
  278. """
  279. perm = []
  280. for table in filemode_table:
  281. for bit, char in table:
  282. if mode & bit == bit:
  283. perm.append(char)
  284. break
  285. else:
  286. perm.append("-")
  287. return "".join(perm)
  288. class TarError(Exception):
  289. """Base exception."""
  290. pass
  291. class ExtractError(TarError):
  292. """General exception for extract errors."""
  293. pass
  294. class ReadError(TarError):
  295. """Exception for unreadble tar archives."""
  296. pass
  297. class CompressionError(TarError):
  298. """Exception for unavailable compression methods."""
  299. pass
  300. class StreamError(TarError):
  301. """Exception for unsupported operations on stream-like TarFiles."""
  302. pass
  303. class HeaderError(TarError):
  304. """Base exception for header errors."""
  305. pass
  306. class EmptyHeaderError(HeaderError):
  307. """Exception for empty headers."""
  308. pass
  309. class TruncatedHeaderError(HeaderError):
  310. """Exception for truncated headers."""
  311. pass
  312. class EOFHeaderError(HeaderError):
  313. """Exception for end of file headers."""
  314. pass
  315. class InvalidHeaderError(HeaderError):
  316. """Exception for invalid headers."""
  317. pass
  318. class SubsequentHeaderError(HeaderError):
  319. """Exception for missing and invalid extended headers."""
  320. pass
  321. #---------------------------
  322. # internal stream interface
  323. #---------------------------
class _LowLevelFile:
    """Low-level file object. Supports reading and writing.
    It is used instead of a regular file object for streaming
    access.
    """

    def __init__(self, name, mode):
        # Translate the high-level mode character into os.open() flags.
        mode = {
            "r": os.O_RDONLY,
            "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
        }[mode]
        if hasattr(os, "O_BINARY"):
            # On platforms that distinguish text/binary (Windows),
            # suppress newline translation.
            mode |= os.O_BINARY
        self.fd = os.open(name, mode, 0666)

    def close(self):
        os.close(self.fd)

    def read(self, size):
        return os.read(self.fd, size)

    def write(self, s):
        os.write(self.fd, s)
class _Stream:
    """Class that serves as an adapter between TarFile and
    a stream-like object. The stream-like object only
    needs to have a read() or write() method and is accessed
    blockwise. Use of gzip or bzip2 compression is possible.
    A stream-like object could be for example: sys.stdin,
    sys.stdout, a socket, a tape device etc.

    _Stream is intended to be used only internally.
    """

    def __init__(self, name, mode, comptype, fileobj, bufsize):
        """Construct a _Stream object.
        """
        self._extfileobj = True
        if fileobj is None:
            # No external file object given: open the file ourselves and
            # remember that close() must also close it.
            fileobj = _LowLevelFile(name, mode)
            self._extfileobj = False

        if comptype == '*':
            # Enable transparent compression detection for the
            # stream interface
            fileobj = _StreamProxy(fileobj)
            comptype = fileobj.getcomptype()

        self.name = name or ""
        self.mode = mode
        self.comptype = comptype
        self.fileobj = fileobj
        self.bufsize = bufsize
        self.buf = ""            # raw (compressed) data buffer
        self.pos = 0L            # logical position in the uncompressed stream
        self.closed = False

        if comptype == "gz":
            try:
                import zlib
            except ImportError:
                raise CompressionError("zlib module is not available")
            self.zlib = zlib
            self.crc = zlib.crc32("") & 0xffffffffL
            if mode == "r":
                self._init_read_gz()
            else:
                self._init_write_gz()

        if comptype == "bz2":
            try:
                import bz2
            except ImportError:
                raise CompressionError("bz2 module is not available")
            if mode == "r":
                self.dbuf = ""   # decompressed data buffer
                self.cmp = bz2.BZ2Decompressor()
            else:
                self.cmp = bz2.BZ2Compressor()

    def __del__(self):
        # Guard with hasattr(): __init__ may have raised before
        # self.closed was assigned.
        if hasattr(self, "closed") and not self.closed:
            self.close()

    def _init_write_gz(self):
        """Initialize for writing with gzip compression.
        """
        self.cmp = self.zlib.compressobj(9, self.zlib.DEFLATED,
                                         -self.zlib.MAX_WBITS,
                                         self.zlib.DEF_MEM_LEVEL,
                                         0)
        timestamp = struct.pack("<L", long(time.time()))
        # Hand-written gzip header: magic, deflate method, FNAME flag,
        # mtime, extra flags, OS byte.
        self.__write("\037\213\010\010%s\002\377" % timestamp)
        if type(self.name) is unicode:
            self.name = self.name.encode("iso-8859-1", "replace")
        if self.name.endswith(".gz"):
            self.name = self.name[:-3]
        self.__write(self.name + NUL)

    def write(self, s):
        """Write string s to the stream.
        """
        if self.comptype == "gz":
            # CRC is computed over the uncompressed data.
            self.crc = self.zlib.crc32(s, self.crc) & 0xffffffffL
        self.pos += len(s)
        if self.comptype != "tar":
            s = self.cmp.compress(s)
        self.__write(s)

    def __write(self, s):
        """Write string s to the stream if a whole new block
        is ready to be written.
        """
        self.buf += s
        while len(self.buf) > self.bufsize:
            self.fileobj.write(self.buf[:self.bufsize])
            self.buf = self.buf[self.bufsize:]

    def close(self):
        """Close the _Stream object. No operation should be
        done on it afterwards.
        """
        if self.closed:
            return

        if self.mode == "w" and self.comptype != "tar":
            self.buf += self.cmp.flush()

        if self.mode == "w" and self.buf:
            self.fileobj.write(self.buf)
            self.buf = ""
            if self.comptype == "gz":
                # The native zlib crc is an unsigned 32-bit integer, but
                # the Python wrapper implicitly casts that to a signed C
                # long. So, on a 32-bit box self.crc may "look negative",
                # while the same crc on a 64-bit box may "look positive".
                # To avoid irksome warnings from the `struct` module, force
                # it to look positive on all boxes.
                self.fileobj.write(struct.pack("<L", self.crc & 0xffffffffL))
                self.fileobj.write(struct.pack("<L", self.pos & 0xffffFFFFL))

        if not self._extfileobj:
            self.fileobj.close()

        self.closed = True

    def _init_read_gz(self):
        """Initialize for reading a gzip compressed fileobj.
        """
        self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS)
        self.dbuf = ""

        # taken from gzip.GzipFile with some alterations
        if self.__read(2) != "\037\213":
            raise ReadError("not a gzip file")
        if self.__read(1) != "\010":
            raise CompressionError("unsupported compression method")

        flag = ord(self.__read(1))
        self.__read(6)   # skip mtime, XFL and OS bytes

        if flag & 4:
            # FEXTRA field: skip xlen bytes of extra data.
            xlen = ord(self.__read(1)) + 256 * ord(self.__read(1))
            self.read(xlen)
        if flag & 8:
            # FNAME: skip the zero-terminated original file name.
            while True:
                s = self.__read(1)
                if not s or s == NUL:
                    break
        if flag & 16:
            # FCOMMENT: skip the zero-terminated comment.
            while True:
                s = self.__read(1)
                if not s or s == NUL:
                    break
        if flag & 2:
            # FHCRC: skip the 16-bit header CRC.
            self.__read(2)

    def tell(self):
        """Return the stream's file pointer position.
        """
        return self.pos

    def seek(self, pos=0):
        """Set the stream's file pointer to pos. Negative seeking
        is forbidden.
        """
        if pos - self.pos >= 0:
            # Forward seek is emulated by reading and discarding.
            blocks, remainder = divmod(pos - self.pos, self.bufsize)
            for i in xrange(blocks):
                self.read(self.bufsize)
            self.read(remainder)
        else:
            raise StreamError("seeking backwards is not allowed")
        return self.pos

    def read(self, size=None):
        """Return the next size number of bytes from the stream.
        If size is not defined, return all bytes of the stream
        up to EOF.
        """
        if size is None:
            t = []
            while True:
                buf = self._read(self.bufsize)
                if not buf:
                    break
                t.append(buf)
            buf = "".join(t)
        else:
            buf = self._read(size)
        self.pos += len(buf)
        return buf

    def _read(self, size):
        """Return size bytes from the stream.
        """
        if self.comptype == "tar":
            # Uncompressed stream: read straight from the raw buffer.
            return self.__read(size)

        # Decompress raw blocks until enough uncompressed data is buffered.
        c = len(self.dbuf)
        t = [self.dbuf]
        while c < size:
            buf = self.__read(self.bufsize)
            if not buf:
                break
            try:
                buf = self.cmp.decompress(buf)
            except IOError:
                raise ReadError("invalid compressed data")
            t.append(buf)
            c += len(buf)
        t = "".join(t)
        self.dbuf = t[size:]
        return t[:size]

    def __read(self, size):
        """Return size bytes from stream. If internal buffer is empty,
        read another block from the stream.
        """
        c = len(self.buf)
        t = [self.buf]
        while c < size:
            buf = self.fileobj.read(self.bufsize)
            if not buf:
                break
            t.append(buf)
            c += len(buf)
        t = "".join(t)
        self.buf = t[size:]
        return t[:size]
# class _Stream
  546. class _StreamProxy(object):
  547. """Small proxy class that enables transparent compression
  548. detection for the Stream interface (mode 'r|*').
  549. """
  550. def __init__(self, fileobj):
  551. self.fileobj = fileobj
  552. self.buf = self.fileobj.read(BLOCKSIZE)
  553. def read(self, size):
  554. self.read = self.fileobj.read
  555. return self.buf
  556. def getcomptype(self):
  557. if self.buf.startswith("\037\213\010"):
  558. return "gz"
  559. if self.buf[0:3] == "BZh" and self.buf[4:10] == "1AY&SY":
  560. return "bz2"
  561. return "tar"
  562. def close(self):
  563. self.fileobj.close()
  564. # class StreamProxy
  565. class _BZ2Proxy(object):
  566. """Small proxy class that enables external file object
  567. support for "r:bz2" and "w:bz2" modes. This is actually
  568. a workaround for a limitation in bz2 module's BZ2File
  569. class which (unlike gzip.GzipFile) has no support for
  570. a file object argument.
  571. """
  572. blocksize = 16 * 1024
  573. def __init__(self, fileobj, mode):
  574. self.fileobj = fileobj
  575. self.mode = mode
  576. self.name = getattr(self.fileobj, "name", None)
  577. self.init()
  578. def init(self):
  579. import bz2
  580. self.pos = 0
  581. if self.mode == "r":
  582. self.bz2obj = bz2.BZ2Decompressor()
  583. self.fileobj.seek(0)
  584. self.buf = ""
  585. else:
  586. self.bz2obj = bz2.BZ2Compressor()
  587. def read(self, size):
  588. b = [self.buf]
  589. x = len(self.buf)
  590. while x < size:
  591. raw = self.fileobj.read(self.blocksize)
  592. if not raw:
  593. break
  594. data = self.bz2obj.decompress(raw)
  595. b.append(data)
  596. x += len(data)
  597. self.buf = "".join(b)
  598. buf = self.buf[:size]
  599. self.buf = self.buf[size:]
  600. self.pos += len(buf)
  601. return buf
  602. def seek(self, pos):
  603. if pos < self.pos:
  604. self.init()
  605. self.read(pos - self.pos)
  606. def tell(self):
  607. return self.pos
  608. def write(self, data):
  609. self.pos += len(data)
  610. raw = self.bz2obj.compress(data)
  611. self.fileobj.write(raw)
  612. def close(self):
  613. if self.mode == "w":
  614. raw = self.bz2obj.flush()
  615. self.fileobj.write(raw)
  616. # class _BZ2Proxy
  617. #------------------------
  618. # Extraction file object
  619. #------------------------
  620. class _FileInFile(object):
  621. """A thin wrapper around an existing file object that
  622. provides a part of its data as an individual file
  623. object.
  624. """
  625. def __init__(self, fileobj, offset, size, sparse=None):
  626. self.fileobj = fileobj
  627. self.offset = offset
  628. self.size = size
  629. self.sparse = sparse
  630. self.position = 0
  631. def tell(self):
  632. """Return the current file position.
  633. """
  634. return self.position
  635. def seek(self, position):
  636. """Seek to a position in the file.
  637. """
  638. self.position = position
  639. def read(self, size=None):
  640. """Read data from the file.
  641. """
  642. if size is None:
  643. size = self.size - self.position
  644. else:
  645. size = min(size, self.size - self.position)
  646. if self.sparse is None:
  647. return self.readnormal(size)
  648. else:
  649. return self.readsparse(size)
  650. def readnormal(self, size):
  651. """Read operation for regular files.
  652. """
  653. self.fileobj.seek(self.offset + self.position)
  654. self.position += size
  655. return self.fileobj.read(size)
  656. def readsparse(self, size):
  657. """Read operation for sparse files.
  658. """
  659. data = []
  660. while size > 0:
  661. buf = self.readsparsesection(size)
  662. if not buf:
  663. break
  664. size -= len(buf)
  665. data.append(buf)
  666. return "".join(data)
  667. def readsparsesection(self, size):
  668. """Read a single section of a sparse file.
  669. """
  670. section = self.sparse.find(self.position)
  671. if section is None:
  672. return ""
  673. size = min(size, section.offset + section.size - self.position)
  674. if isinstance(section, _data):
  675. realpos = section.realpos + self.position - section.offset
  676. self.fileobj.seek(self.offset + realpos)
  677. self.position += size
  678. return self.fileobj.read(size)
  679. else:
  680. self.position += size
  681. return NUL * size
  682. #class _FileInFile
class ExFileObject(object):
    """File-like object for reading an archive member.
    Is returned by TarFile.extractfile().
    """
    # Chunk size used when scanning for newlines in readline().
    blocksize = 1024

    def __init__(self, tarfile, tarinfo):
        self.fileobj = _FileInFile(tarfile.fileobj,
                                   tarinfo.offset_data,
                                   tarinfo.size,
                                   getattr(tarinfo, "sparse", None))
        self.name = tarinfo.name
        self.mode = "r"
        self.closed = False
        self.size = tarinfo.size
        self.position = 0
        self.buffer = ""        # data read ahead by readline()

    def read(self, size=None):
        """Read at most size bytes from the file. If size is not
        present or None, read all data until EOF is reached.
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")

        buf = ""
        # Serve buffered read-ahead data first.
        if self.buffer:
            if size is None:
                buf = self.buffer
                self.buffer = ""
            else:
                buf = self.buffer[:size]
                self.buffer = self.buffer[size:]

        if size is None:
            buf += self.fileobj.read()
        else:
            buf += self.fileobj.read(size - len(buf))

        self.position += len(buf)
        return buf

    def readline(self, size=-1):
        """Read one entire line from the file. If size is present
        and non-negative, return a string with at most that
        size, which may be an incomplete line.
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")

        if "\n" in self.buffer:
            pos = self.buffer.find("\n") + 1
        else:
            # Fill the buffer blockwise until a newline or EOF shows up.
            buffers = [self.buffer]
            while True:
                buf = self.fileobj.read(self.blocksize)
                buffers.append(buf)
                if not buf or "\n" in buf:
                    self.buffer = "".join(buffers)
                    pos = self.buffer.find("\n") + 1
                    if pos == 0:
                        # no newline found.
                        pos = len(self.buffer)
                    break

        if size != -1:
            pos = min(size, pos)

        buf = self.buffer[:pos]
        self.buffer = self.buffer[pos:]
        self.position += len(buf)
        return buf

    def readlines(self):
        """Return a list with all remaining lines.
        """
        result = []
        while True:
            line = self.readline()
            if not line: break
            result.append(line)
        return result

    def tell(self):
        """Return the current file position.
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")

        return self.position

    def seek(self, pos, whence=os.SEEK_SET):
        """Seek to a position in the file.
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")

        if whence == os.SEEK_SET:
            self.position = min(max(pos, 0), self.size)
        elif whence == os.SEEK_CUR:
            if pos < 0:
                self.position = max(self.position + pos, 0)
            else:
                self.position = min(self.position + pos, self.size)
        elif whence == os.SEEK_END:
            self.position = max(min(self.size + pos, self.size), 0)
        else:
            raise ValueError("Invalid argument")

        # Seeking invalidates the readline() buffer.
        self.buffer = ""
        self.fileobj.seek(self.position)

    def close(self):
        """Close the file object.
        """
        self.closed = True

    def __iter__(self):
        """Get an iterator over the file's lines.
        """
        while True:
            line = self.readline()
            if not line:
                break
            yield line
#class ExFileObject
  792. #------------------
  793. # Exported Classes
  794. #------------------
  795. class TarInfo(object):
  796. """Informational class which holds the details about an
  797. archive member given by a tar header block.
  798. TarInfo objects are returned by TarFile.getmember(),
  799. TarFile.getmembers() and TarFile.gettarinfo() and are
  800. usually created internally.
  801. """
  802. def __init__(self, name=""):
  803. """Construct a TarInfo object. name is the optional name
  804. of the member.
  805. """
  806. self.name = name # member name
  807. self.mode = 0644 # file permissions
  808. self.uid = 0 # user id
  809. self.gid = 0 # group id
  810. self.size = 0 # file size
  811. self.mtime = 0 # modification time
  812. self.chksum = 0 # header checksum
  813. self.type = REGTYPE # member type
  814. self.linkname = "" # link name
  815. self.uname = "" # user name
  816. self.gname = "" # group name
  817. self.devmajor = 0 # device major number
  818. self.devminor = 0 # device minor number
  819. self.offset = 0 # the tar header starts here
  820. self.offset_data = 0 # the file's data starts here
  821. self.pax_headers = {} # pax header information
  822. # In pax headers the "name" and "linkname" field are called
  823. # "path" and "linkpath".
  824. def _getpath(self):
  825. return self.name
  826. def _setpath(self, name):
  827. self.name = name
  828. path = property(_getpath, _setpath)
  829. def _getlinkpath(self):
  830. return self.linkname
  831. def _setlinkpath(self, linkname):
  832. self.linkname = linkname
  833. linkpath = property(_getlinkpath, _setlinkpath)
  834. def __repr__(self):
  835. return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self))
  836. def get_info(self, encoding, errors):
  837. """Return the TarInfo's attributes as a dictionary.
  838. """
  839. info = {
  840. "name": self.name,
  841. "mode": self.mode & 07777,
  842. "uid": self.uid,
  843. "gid": self.gid,
  844. "size": self.size,
  845. "mtime": self.mtime,
  846. "chksum": self.chksum,
  847. "type": self.type,
  848. "linkname": self.linkname,
  849. "uname": self.uname,
  850. "gname": self.gname,
  851. "devmajor": self.devmajor,
  852. "devminor": self.devminor
  853. }
  854. if info["type"] == DIRTYPE and not info["name"].endswith("/"):
  855. info["name"] += "/"
  856. for key in ("name", "linkname", "uname", "gname"):
  857. if type(info[key]) is unicode:
  858. info[key] = info[key].encode(encoding, errors)
  859. return info
  860. def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="strict"):
  861. """Return a tar header as a string of 512 byte blocks.
  862. """
  863. info = self.get_info(encoding, errors)
  864. if format == USTAR_FORMAT:
  865. return self.create_ustar_header(info)
  866. elif format == GNU_FORMAT:
  867. return self.create_gnu_header(info)
  868. elif format == PAX_FORMAT:
  869. return self.create_pax_header(info, encoding, errors)
  870. else:
  871. raise ValueError("invalid format")
  872. def create_ustar_header(self, info):
  873. """Return the object as a ustar header block.
  874. """
  875. info["magic"] = POSIX_MAGIC
  876. if len(info["linkname"]) > LENGTH_LINK:
  877. raise ValueError("linkname is too long")
  878. if len(info["name"]) > LENGTH_NAME:
  879. info["prefix"], info["name"] = self._posix_split_name(info["name"])
  880. return self._create_header(info, USTAR_FORMAT)
  881. def create_gnu_header(self, info):
  882. """Return the object as a GNU header block sequence.
  883. """
  884. info["magic"] = GNU_MAGIC
  885. buf = ""
  886. if len(info["linkname"]) > LENGTH_LINK:
  887. buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK)
  888. if len(info["name"]) > LENGTH_NAME:
  889. buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME)
  890. return buf + self._create_header(info, GNU_FORMAT)
  891. def create_pax_header(self, info, encoding, errors):
  892. """Return the object as a ustar header block. If it cannot be
  893. represented this way, prepend a pax extended header sequence
  894. with supplement information.
  895. """
  896. info["magic"] = POSIX_MAGIC
  897. pax_headers = self.pax_headers.copy()
  898. # Test string fields for values that exceed the field length or cannot
  899. # be represented in ASCII encoding.
  900. for name, hname, length in (
  901. ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK),
  902. ("uname", "uname", 32), ("gname", "gname", 32)):
  903. if hname in pax_headers:
  904. # The pax header has priority.
  905. continue
  906. val = info[name].decode(encoding, errors)
  907. # Try to encode the string as ASCII.
  908. try:
  909. val.encode("ascii")
  910. except UnicodeEncodeError:
  911. pax_headers[hname] = val
  912. continue
  913. if len(info[name]) > length:
  914. pax_headers[hname] = val
  915. # Test number fields for values that exceed the field limit or values
  916. # that like to be stored as float.
  917. for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)):
  918. if name in pax_headers:
  919. # The pax header has priority. Avoid overflow.
  920. info[name] = 0
  921. continue
  922. val = info[name]
  923. if not 0 <= val < 8 ** (digits - 1) or isinstance(val, float):
  924. pax_headers[name] = unicode(val)
  925. info[name] = 0
  926. # Create a pax extended header if necessary.
  927. if pax_headers:
  928. buf = self._create_pax_generic_header(pax_headers)
  929. else:
  930. buf = ""
  931. return buf + self._create_header(info, USTAR_FORMAT)
  932. @classmethod
  933. def create_pax_global_header(cls, pax_headers):
  934. """Return the object as a pax global header block sequence.
  935. """
  936. return cls._create_pax_generic_header(pax_headers, type=XGLTYPE)
  937. def _posix_split_name(self, name):
  938. """Split a name longer than 100 chars into a prefix
  939. and a name part.
  940. """
  941. prefix = name[:LENGTH_PREFIX + 1]
  942. while prefix and prefix[-1] != "/":
  943. prefix = prefix[:-1]
  944. name = name[len(prefix):]
  945. prefix = prefix[:-1]
  946. if not prefix or len(name) > LENGTH_NAME:
  947. raise ValueError("name is too long")
  948. return prefix, name
  949. @staticmethod
  950. def _create_header(info, format):
  951. """Return a header block. info is a dictionary with file
  952. information, format must be one of the *_FORMAT constants.
  953. """
  954. parts = [
  955. stn(info.get("name", ""), 100),
  956. itn(info.get("mode", 0) & 07777, 8, format),
  957. itn(info.get("uid", 0), 8, format),
  958. itn(info.get("gid", 0), 8, format),
  959. itn(info.get("size", 0), 12, format),
  960. itn(info.get("mtime", 0), 12, format),
  961. " ", # checksum field
  962. info.get("type", REGTYPE),
  963. stn(info.get("linkname", ""), 100),
  964. stn(info.get("magic", POSIX_MAGIC), 8),
  965. stn(info.get("uname", ""), 32),
  966. stn(info.get("gname", ""), 32),
  967. itn(info.get("devmajor", 0), 8, format),
  968. itn(info.get("devminor", 0), 8, format),
  969. stn(info.get("prefix", ""), 155)
  970. ]
  971. buf = struct.pack("%ds" % BLOCKSIZE, "".join(parts))
  972. chksum = calc_chksums(buf[-BLOCKSIZE:])[0]
  973. buf = buf[:-364] + "%06o\0" % chksum + buf[-357:]
  974. return buf
  975. @staticmethod
  976. def _create_payload(payload):
  977. """Return the string payload filled with zero bytes
  978. up to the next 512 byte border.
  979. """
  980. blocks, remainder = divmod(len(payload), BLOCKSIZE)
  981. if remainder > 0:
  982. payload += (BLOCKSIZE - remainder) * NUL
  983. return payload
  984. @classmethod
  985. def _create_gnu_long_header(cls, name, type):
  986. """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence
  987. for name.
  988. """
  989. name += NUL
  990. info = {}
  991. info["name"] = "././@LongLink"
  992. info["type"] = type
  993. info["size"] = len(name)
  994. info["magic"] = GNU_MAGIC
  995. # create extended header + name blocks.
  996. return cls._create_header(info, USTAR_FORMAT) + \
  997. cls._create_payload(name)
  998. @classmethod
  999. def _create_pax_generic_header(cls, pax_headers, type=XHDTYPE):
  1000. """Return a POSIX.1-2001 extended or global header sequence
  1001. that contains a list of keyword, value pairs. The values
  1002. must be unicode objects.
  1003. """
  1004. records = []
  1005. for keyword, value in pax_headers.iteritems():
  1006. keyword = keyword.encode("utf8")
  1007. value = value.encode("utf8")
  1008. l = len(keyword) + len(value) + 3 # ' ' + '=' + '\n'
  1009. n = p = 0
  1010. while True:
  1011. n = l + len(str(p))
  1012. if n == p:
  1013. break
  1014. p = n
  1015. records.append("%d %s=%s\n" % (p, keyword, value))
  1016. records = "".join(records)
  1017. # We use a hardcoded "././@PaxHeader" name like star does
  1018. # instead of the one that POSIX recommends.
  1019. info = {}
  1020. info["name"] = "././@PaxHeader"
  1021. info["type"] = type
  1022. info["size"] = len(records)
  1023. info["magic"] = POSIX_MAGIC
  1024. # Create pax header + record blocks.
  1025. return cls._create_header(info, USTAR_FORMAT) + \
  1026. cls._create_payload(records)
  1027. @classmethod
  1028. def frombuf(cls, buf):
  1029. """Construct a TarInfo object from a 512 byte string buffer.
  1030. """
  1031. if len(buf) == 0:
  1032. raise EmptyHeaderError("empty header")
  1033. if len(buf) != BLOCKSIZE:
  1034. raise TruncatedHeaderError("truncated header")
  1035. if buf.count(NUL) == BLOCKSIZE:
  1036. raise EOFHeaderError("end of file header")
  1037. chksum = nti(buf[148:156])
  1038. if chksum not in calc_chksums(buf):
  1039. raise InvalidHeaderError("bad checksum")
  1040. obj = cls()
  1041. obj.buf = buf
  1042. obj.name = nts(buf[0:100])
  1043. obj.mode = nti(buf[100:108])
  1044. obj.uid = nti(buf[108:116])
  1045. obj.gid = nti(buf[116:124])
  1046. obj.size = nti(buf[124:136])
  1047. obj.mtime = nti(buf[136:148])
  1048. obj.chksum = chksum
  1049. obj.type = buf[156:157]
  1050. obj.linkname = nts(buf[157:257])
  1051. obj.uname = nts(buf[265:297])
  1052. obj.gname = nts(buf[297:329])
  1053. obj.devmajor = nti(buf[329:337])
  1054. obj.devminor = nti(buf[337:345])
  1055. prefix = nts(buf[345:500])
  1056. # Old V7 tar format represents a directory as a regular
  1057. # file with a trailing slash.
  1058. if obj.type == AREGTYPE and obj.name.endswith("/"):
  1059. obj.type = DIRTYPE
  1060. # Remove redundant slashes from directories.
  1061. if obj.isdir():
  1062. obj.name = obj.name.rstrip("/")
  1063. # Reconstruct a ustar longname.
  1064. if prefix and obj.type not in GNU_TYPES:
  1065. obj.name = prefix + "/" + obj.name
  1066. return obj
  1067. @classmethod
  1068. def fromtarfile(cls, tarfile):
  1069. """Return the next TarInfo object from TarFile object
  1070. tarfile.
  1071. """
  1072. buf = tarfile.fileobj.read(BLOCKSIZE)
  1073. obj = cls.frombuf(buf)
  1074. obj.offset = tarfile.fileobj.tell() - BLOCKSIZE
  1075. return obj._proc_member(tarfile)
  1076. #--------------------------------------------------------------------------
  1077. # The following are methods that are called depending on the type of a
  1078. # member. The entry point is _proc_member() which can be overridden in a
  1079. # subclass to add custom _proc_*() methods. A _proc_*() method MUST
  1080. # implement the following
  1081. # operations:
  1082. # 1. Set self.offset_data to the position where the data blocks begin,
  1083. # if there is data that follows.
  1084. # 2. Set tarfile.offset to the position where the next member's header will
  1085. # begin.
  1086. # 3. Return self or another valid TarInfo object.
  1087. def _proc_member(self, tarfile):
  1088. """Choose the right processing method depending on
  1089. the type and call it.
  1090. """
  1091. if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK):
  1092. return self._proc_gnulong(tarfile)
  1093. elif self.type == GNUTYPE_SPARSE:
  1094. return self._proc_sparse(tarfile)
  1095. elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE):
  1096. return self._proc_pax(tarfile)
  1097. else:
  1098. return self._proc_builtin(tarfile)
  1099. def _proc_builtin(self, tarfile):
  1100. """Process a builtin type or an unknown type which
  1101. will be treated as a regular file.
  1102. """
  1103. self.offset_data = tarfile.fileobj.tell()
  1104. offset = self.offset_data
  1105. if self.isreg() or self.type not in SUPPORTED_TYPES:
  1106. # Skip the following data blocks.
  1107. offset += self._block(self.size)
  1108. tarfile.offset = offset
  1109. # Patch the TarInfo object with saved global
  1110. # header information.
  1111. self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors)
  1112. return self
  1113. def _proc_gnulong(self, tarfile):
  1114. """Process the blocks that hold a GNU longname
  1115. or longlink member.
  1116. """
  1117. buf = tarfile.fileobj.read(self._block(self.size))
  1118. # Fetch the next header and process it.
  1119. try:
  1120. next = self.fromtarfile(tarfile)
  1121. except HeaderError:
  1122. raise SubsequentHeaderError("missing or bad subsequent header")
  1123. # Patch the TarInfo object from the next header with
  1124. # the longname information.
  1125. next.offset = self.offset
  1126. if self.type == GNUTYPE_LONGNAME:
  1127. next.name = nts(buf)
  1128. elif self.type == GNUTYPE_LONGLINK:
  1129. next.linkname = nts(buf)
  1130. return next
  1131. def _proc_sparse(self, tarfile):
  1132. """Process a GNU sparse header plus extra headers.
  1133. """
  1134. buf = self.buf
  1135. sp = _ringbuffer()
  1136. pos = 386
  1137. lastpos = 0L
  1138. realpos = 0L
  1139. # There are 4 possible sparse structs in the
  1140. # first header.
  1141. for i in xrange(4):
  1142. try:
  1143. offset = nti(buf[pos:pos + 12])
  1144. numbytes = nti(buf[pos + 12:pos + 24])
  1145. except ValueError:
  1146. break
  1147. if offset > lastpos:
  1148. sp.append(_hole(lastpos, offset - lastpos))
  1149. sp.append(_data(offset, numbytes, realpos))
  1150. realpos += numbytes
  1151. lastpos = offset + numbytes
  1152. pos += 24
  1153. isextended = ord(buf[482])
  1154. origsize = nti(buf[483:495])
  1155. # If the isextended flag is given,
  1156. # there are extra headers to process.
  1157. while isextended == 1:
  1158. buf = tarfile.fileobj.read(BLOCKSIZE)
  1159. pos = 0
  1160. for i in xrange(21):
  1161. try:
  1162. offset = nti(buf[pos:pos + 12])
  1163. numbytes = nti(buf[pos + 12:pos + 24])
  1164. except ValueError:
  1165. break
  1166. if offset > lastpos:
  1167. sp.append(_hole(lastpos, offset - lastpos))
  1168. sp.append(_data(offset, numbytes, realpos))
  1169. realpos += numbytes
  1170. lastpos = offset + numbytes
  1171. pos += 24
  1172. isextended = ord(buf[504])
  1173. if lastpos < origsize:
  1174. sp.append(_hole(lastpos, origsize - lastpos))
  1175. self.sparse = sp
  1176. self.offset_data = tarfile.fileobj.tell()
  1177. tarfile.offset = self.offset_data + self._block(self.size)
  1178. self.size = origsize
  1179. return self
  1180. def _proc_pax(self, tarfile):
  1181. """Process an extended or global header as described in
  1182. POSIX.1-2001.
  1183. """
  1184. # Read the header information.
  1185. buf = tarfile.fileobj.read(self._block(self.size))
  1186. # A pax header stores supplemental information for either
  1187. # the following file (extended) or all following files
  1188. # (global).
  1189. if self.type == XGLTYPE:
  1190. pax_headers = tarfile.pax_headers
  1191. else:
  1192. pax_headers = tarfile.pax_headers.copy()
  1193. # Parse pax header information. A record looks like that:
  1194. # "%d %s=%s\n" % (length, keyword, value). length is the size
  1195. # of the complete record including the length field itself and
  1196. # the newline. keyword and value are both UTF-8 encoded strings.
  1197. regex = re.compile(r"(\d+) ([^=]+)=", re.U)
  1198. pos = 0
  1199. while True:
  1200. match = regex.match(buf, pos)
  1201. if not match:
  1202. break
  1203. length, keyword = match.groups()
  1204. length = int(length)
  1205. value = buf[match.end(2) + 1:match.start(1) + length - 1]
  1206. keyword = keyword.decode("utf8")
  1207. value = value.decode("utf8")
  1208. pax_headers[keyword] = value
  1209. pos += length
  1210. # Fetch the next header.
  1211. try:
  1212. next = self.fromtarfile(tarfile)
  1213. except HeaderError:
  1214. raise SubsequentHeaderError("missing or bad subsequent header")
  1215. if self.type in (XHDTYPE, SOLARIS_XHDTYPE):
  1216. # Patch the TarInfo object with the extended header info.
  1217. next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors)
  1218. next.offset = self.offset
  1219. if "size" in pax_headers:
  1220. # If the extended header replaces the size field,
  1221. # we need to recalculate the offset where the next
  1222. # header starts.
  1223. offset = next.offset_data
  1224. if next.isreg() or next.type not in SUPPORTED_TYPES:
  1225. offset += next._block(next.size)
  1226. tarfile.offset = offset
  1227. return next
  1228. def _apply_pax_info(self, pax_headers, encoding, errors):
  1229. """Replace fields with supplemental information from a previous
  1230. pax extended or global header.
  1231. """
  1232. for keyword, value in pax_headers.iteritems():
  1233. if keyword not in PAX_FIELDS:
  1234. continue
  1235. if keyword == "path":
  1236. value = value.rstrip("/")
  1237. if keyword in PAX_NUMBER_FIELDS:
  1238. try:
  1239. value = PAX_NUMBER_FIELDS[keyword](value)
  1240. except ValueError:
  1241. value = 0
  1242. else:
  1243. value = uts(value, encoding, errors)
  1244. setattr(self, keyword, value)
  1245. self.pax_headers = pax_headers.copy()
  1246. def _block(self, count):
  1247. """Round up a byte count by BLOCKSIZE and return it,
  1248. e.g. _block(834) => 1024.
  1249. """
  1250. blocks, remainder = divmod(count, BLOCKSIZE)
  1251. if remainder:
  1252. blocks += 1
  1253. return blocks * BLOCKSIZE
  1254. def isreg(self):
  1255. return self.type in REGULAR_TYPES
  1256. def isfile(self):
  1257. return self.isreg()
  1258. def isdir(self):
  1259. return self.type == DIRTYPE
  1260. def issym(self):
  1261. return self.type == SYMTYPE
  1262. def islnk(self):
  1263. return self.type == LNKTYPE
  1264. def ischr(self):
  1265. return self.type == CHRTYPE
  1266. def isblk(self):
  1267. return self.type == BLKTYPE
  1268. def isfifo(self):
  1269. return self.type == FIFOTYPE
  1270. def issparse(self):
  1271. return self.type == GNUTYPE_SPARSE
  1272. def isdev(self):
  1273. return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE)
  1274. # class TarInfo
  1275. class TarFile(object):
  1276. """The TarFile Class provides an interface to tar archives.
  1277. """
  1278. debug = 0 # May be set from 0 (no msgs) to 3 (all msgs)
  1279. dereference = False # If true, add content of linked file to the
  1280. # tar file, else the link.
  1281. ignore_zeros = False # If true, skips empty or invalid blocks and
  1282. # continues processing.
  1283. errorlevel = 1 # If 0, fatal errors only appear in debug
  1284. # messages (if debug >= 0). If > 0, errors
  1285. # are passed to the caller as exceptions.
  1286. format = DEFAULT_FORMAT # The format to use when creating an archive.
  1287. encoding = ENCODING # Encoding for 8-bit character strings.
  1288. errors = None # Error handler for unicode conversion.
  1289. tarinfo = TarInfo # The default TarInfo class to use.
  1290. fileobject = ExFileObject # The default ExFileObject class to use.
  1291. def __init__(self, name=None, mode="r", fileobj=None, format=None,
  1292. tarinfo=None, dereference=None, ignore_zeros=None, encoding=None,
  1293. errors=None, pax_headers=None, debug=None, errorlevel=None):
  1294. """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to
  1295. read from an existing archive, 'a' to append data to an existing
  1296. file or 'w' to create a new file overwriting an existing one. `mode'
  1297. defaults to 'r'.
  1298. If `fileobj' is given, it is used for reading or writing data. If it
  1299. can be determined, `mode' is overridden by `fileobj's mode.
  1300. `fileobj' is not closed, when TarFile is closed.
  1301. """
  1302. if len(mode) > 1 or mode not in "raw":
  1303. raise ValueError("mode must be 'r', 'a' or 'w'")
  1304. self.mode = mode
  1305. self._mode = {"r": "rb", "a": "r+b", "w": "wb"…

Large files are truncated, but you can click here to view the full file