PageRenderTime 63ms CodeModel.GetById 22ms RepoModel.GetById 0ms app.codeStats 0ms

/lib-python/2.7/tarfile.py

https://bitbucket.org/evelyn559/pypy
Python | 2594 lines | 2488 code | 28 blank | 78 comment | 23 complexity | 4cbb66665d9944279608be91a1e23f41 MD5 | raw file

Large files are truncated, but you can click here to view the full file

  1. #!/usr/bin/env python
  2. # -*- coding: iso-8859-1 -*-
  3. #-------------------------------------------------------------------
  4. # tarfile.py
  5. #-------------------------------------------------------------------
  6. # Copyright (C) 2002 Lars Gustäbel <lars@gustaebel.de>
  7. # All rights reserved.
  8. #
  9. # Permission is hereby granted, free of charge, to any person
  10. # obtaining a copy of this software and associated documentation
  11. # files (the "Software"), to deal in the Software without
  12. # restriction, including without limitation the rights to use,
  13. # copy, modify, merge, publish, distribute, sublicense, and/or sell
  14. # copies of the Software, and to permit persons to whom the
  15. # Software is furnished to do so, subject to the following
  16. # conditions:
  17. #
  18. # The above copyright notice and this permission notice shall be
  19. # included in all copies or substantial portions of the Software.
  20. #
  21. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
  22. # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
  23. # OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
  24. # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
  25. # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
  26. # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
  27. # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
  28. # OTHER DEALINGS IN THE SOFTWARE.
  29. #
  30. """Read from and write to tar format archives.
  31. """
  32. __version__ = "$Revision$"
  33. # $Source$
  34. version = "0.9.0"
  35. __author__ = "Lars Gustäbel (lars@gustaebel.de)"
  36. __date__ = "$Date$"
  37. __cvsid__ = "$Id$"
  38. __credits__ = "Gustavo Niemeyer, Niels Gustäbel, Richard Townsend."
  39. #---------
  40. # Imports
  41. #---------
  42. import sys
  43. import os
  44. import shutil
  45. import stat
  46. import errno
  47. import time
  48. import struct
  49. import copy
  50. import re
  51. import operator
  52. try:
  53. import grp, pwd
  54. except ImportError:
  55. grp = pwd = None
  56. # from tarfile import *
  57. __all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError"]
#---------------------------------------------------------
# tar constants
#---------------------------------------------------------
NUL = "\0"                      # the null character
BLOCKSIZE = 512                 # length of processing blocks
RECORDSIZE = BLOCKSIZE * 20     # length of records
# NOTE(review): upstream tarfile uses "ustar  \0" (two spaces) for the GNU
# magic; this copy shows a single space, possibly collapsed by whitespace
# mangling -- verify against the original before relying on it.
GNU_MAGIC = "ustar \0"          # magic gnu tar string
POSIX_MAGIC = "ustar\x0000"     # magic posix tar string ("ustar\0" + version "00")

LENGTH_NAME = 100               # maximum length of a filename
LENGTH_LINK = 100               # maximum length of a linkname
LENGTH_PREFIX = 155             # maximum length of the prefix field

# One-character type flags stored in the header's typeflag field.
REGTYPE = "0"                   # regular file
AREGTYPE = "\0"                 # regular file
LNKTYPE = "1"                   # link (inside tarfile)
SYMTYPE = "2"                   # symbolic link
CHRTYPE = "3"                   # character special device
BLKTYPE = "4"                   # block special device
DIRTYPE = "5"                   # directory
FIFOTYPE = "6"                  # fifo special device
CONTTYPE = "7"                  # contiguous file
GNUTYPE_LONGNAME = "L"          # GNU tar longname
GNUTYPE_LONGLINK = "K"          # GNU tar longlink
GNUTYPE_SPARSE = "S"            # GNU tar sparse file
XHDTYPE = "x"                   # POSIX.1-2001 extended header
XGLTYPE = "g"                   # POSIX.1-2001 global header
SOLARIS_XHDTYPE = "X"           # Solaris extended header

# Archive format selectors used throughout the module.
USTAR_FORMAT = 0                # POSIX.1-1988 (ustar) format
GNU_FORMAT = 1                  # GNU tar format
PAX_FORMAT = 2                  # POSIX.1-2001 (pax) format
DEFAULT_FORMAT = GNU_FORMAT

#---------------------------------------------------------
# tarfile constants
#---------------------------------------------------------
# File types that tarfile supports:
SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE,
                   SYMTYPE, DIRTYPE, FIFOTYPE,
                   CONTTYPE, CHRTYPE, BLKTYPE,
                   GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
                   GNUTYPE_SPARSE)

# File types that will be treated as a regular file.
REGULAR_TYPES = (REGTYPE, AREGTYPE,
                 CONTTYPE, GNUTYPE_SPARSE)

# File types that are part of the GNU tar format.
GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
             GNUTYPE_SPARSE)

# Fields from a pax header that override a TarInfo attribute.
PAX_FIELDS = ("path", "linkpath", "size", "mtime",
              "uid", "gid", "uname", "gname")

# Fields in a pax header that are numbers, all other fields
# are treated as strings.
PAX_NUMBER_FIELDS = {
    "atime": float,
    "ctime": float,
    "mtime": float,
    "uid": int,
    "gid": int,
    "size": int
}

#---------------------------------------------------------
# Bits used in the mode field, values in octal.
#---------------------------------------------------------
S_IFLNK = 0120000        # symbolic link
S_IFREG = 0100000        # regular file
S_IFBLK = 0060000        # block device
S_IFDIR = 0040000        # directory
S_IFCHR = 0020000        # character device
S_IFIFO = 0010000        # fifo

TSUID = 04000            # set UID on execution
TSGID = 02000            # set GID on execution
TSVTX = 01000            # reserved

TUREAD = 0400            # read by owner
TUWRITE = 0200           # write by owner
TUEXEC = 0100            # execute/search by owner
TGREAD = 0040            # read by group
TGWRITE = 0020           # write by group
TGEXEC = 0010            # execute/search by group
TOREAD = 0004            # read by other
TOWRITE = 0002           # write by other
TOEXEC = 0001            # execute/search by other

#---------------------------------------------------------
# initialization
#---------------------------------------------------------
# Encoding used for member names; fall back to the interpreter default
# when the filesystem encoding cannot be determined.
ENCODING = sys.getfilesystemencoding()
if ENCODING is None:
    ENCODING = sys.getdefaultencoding()

#---------------------------------------------------------
# Some useful functions
#---------------------------------------------------------
  146. def stn(s, length):
  147. """Convert a python string to a null-terminated string buffer.
  148. """
  149. return s[:length] + (length - len(s)) * NUL
  150. def nts(s):
  151. """Convert a null-terminated string field to a python string.
  152. """
  153. # Use the string up to the first null char.
  154. p = s.find("\0")
  155. if p == -1:
  156. return s
  157. return s[:p]
def nti(s):
    """Convert a number field to a python number.

    Raises InvalidHeaderError if the octal representation is malformed.
    """
    # There are two possible encodings for a number field, see
    # itn() below.
    if s[0] != chr(0200):
        # Plain POSIX encoding: NUL-terminated octal digit string.
        try:
            n = int(nts(s) or "0", 8)
        except ValueError:
            raise InvalidHeaderError("invalid header")
    else:
        # GNU base-256 encoding: a leading 0200 byte followed by a
        # big-endian binary number in the remaining bytes.
        n = 0L
        for i in xrange(len(s) - 1):
            n <<= 8
            n += ord(s[i + 1])
    return n
def itn(n, digits=8, format=DEFAULT_FORMAT):
    """Convert a python number to a number field of `digits` bytes.

    Raises ValueError if n cannot be represented in the given format.
    """
    # POSIX 1003.1-1988 requires numbers to be encoded as a string of
    # octal digits followed by a null-byte, this allows values up to
    # (8**(digits-1))-1. GNU tar allows storing numbers greater than
    # that if necessary. A leading 0200 byte indicates this particular
    # encoding, the following digits-1 bytes are a big-endian
    # representation. This allows values up to (256**(digits-1))-1.
    if 0 <= n < 8 ** (digits - 1):
        s = "%0*o" % (digits - 1, n) + NUL
    else:
        if format != GNU_FORMAT or n >= 256 ** (digits - 1):
            raise ValueError("overflow in number field")

        if n < 0:
            # XXX We mimic GNU tar's behaviour with negative numbers,
            # this could raise OverflowError.
            # Reinterpret the negative value as an unsigned C long
            # (two's complement, platform-dependent width).
            n = struct.unpack("L", struct.pack("l", n))[0]

        # Emit the base-256 bytes from least to most significant,
        # prepending each one, then the 0200 marker byte.
        s = ""
        for i in xrange(digits - 1):
            s = chr(n & 0377) + s
            n >>= 8
        s = chr(0200) + s
    return s
  198. def uts(s, encoding, errors):
  199. """Convert a unicode object to a string.
  200. """
  201. if errors == "utf-8":
  202. # An extra error handler similar to the -o invalid=UTF-8 option
  203. # in POSIX.1-2001. Replace untranslatable characters with their
  204. # UTF-8 representation.
  205. try:
  206. return s.encode(encoding, "strict")
  207. except UnicodeEncodeError:
  208. x = []
  209. for c in s:
  210. try:
  211. x.append(c.encode(encoding, "strict"))
  212. except UnicodeEncodeError:
  213. x.append(c.encode("utf8"))
  214. return "".join(x)
  215. else:
  216. return s.encode(encoding, errors)
  217. def calc_chksums(buf):
  218. """Calculate the checksum for a member's header by summing up all
  219. characters except for the chksum field which is treated as if
  220. it was filled with spaces. According to the GNU tar sources,
  221. some tars (Sun and NeXT) calculate chksum with signed char,
  222. which will be different if there are chars in the buffer with
  223. the high bit set. So we calculate two checksums, unsigned and
  224. signed.
  225. """
  226. unsigned_chksum = 256 + sum(struct.unpack("148B", buf[:148]) + struct.unpack("356B", buf[156:512]))
  227. signed_chksum = 256 + sum(struct.unpack("148b", buf[:148]) + struct.unpack("356b", buf[156:512]))
  228. return unsigned_chksum, signed_chksum
  229. def copyfileobj(src, dst, length=None):
  230. """Copy length bytes from fileobj src to fileobj dst.
  231. If length is None, copy the entire content.
  232. """
  233. if length == 0:
  234. return
  235. if length is None:
  236. shutil.copyfileobj(src, dst)
  237. return
  238. BUFSIZE = 16 * 1024
  239. blocks, remainder = divmod(length, BUFSIZE)
  240. for b in xrange(blocks):
  241. buf = src.read(BUFSIZE)
  242. if len(buf) < BUFSIZE:
  243. raise IOError("end of file reached")
  244. dst.write(buf)
  245. if remainder != 0:
  246. buf = src.read(remainder)
  247. if len(buf) < remainder:
  248. raise IOError("end of file reached")
  249. dst.write(buf)
  250. return
# Lookup table used by filemode() below: one inner tuple per output
# character position of an ls-style "-rwxrwxrwx" string. The first
# (bit, char) pair whose bits are all set in the mode wins.
filemode_table = (
    ((S_IFLNK,      "l"),
     (S_IFREG,      "-"),
     (S_IFBLK,      "b"),
     (S_IFDIR,      "d"),
     (S_IFCHR,      "c"),
     (S_IFIFO,      "p")),

    ((TUREAD,       "r"),),
    ((TUWRITE,      "w"),),
    ((TUEXEC|TSUID, "s"),
     (TSUID,        "S"),
     (TUEXEC,       "x")),

    ((TGREAD,       "r"),),
    ((TGWRITE,      "w"),),
    ((TGEXEC|TSGID, "s"),
     (TSGID,        "S"),
     (TGEXEC,       "x")),

    ((TOREAD,       "r"),),
    ((TOWRITE,      "w"),),
    ((TOEXEC|TSVTX, "t"),
     (TSVTX,        "T"),
     (TOEXEC,       "x"))
)
  274. def filemode(mode):
  275. """Convert a file's mode to a string of the form
  276. -rwxrwxrwx.
  277. Used by TarFile.list()
  278. """
  279. perm = []
  280. for table in filemode_table:
  281. for bit, char in table:
  282. if mode & bit == bit:
  283. perm.append(char)
  284. break
  285. else:
  286. perm.append("-")
  287. return "".join(perm)
class TarError(Exception):
    """Base exception."""
    pass
class ExtractError(TarError):
    """General exception for extract errors."""
    pass
class ReadError(TarError):
    """Exception for unreadable tar archives."""
    pass
class CompressionError(TarError):
    """Exception for unavailable compression methods."""
    pass
class StreamError(TarError):
    """Exception for unsupported operations on stream-like TarFiles."""
    pass
class HeaderError(TarError):
    """Base exception for header errors."""
    pass
class EmptyHeaderError(HeaderError):
    """Exception for empty headers."""
    pass
class TruncatedHeaderError(HeaderError):
    """Exception for truncated headers."""
    pass
class EOFHeaderError(HeaderError):
    """Exception for end of file headers."""
    pass
class InvalidHeaderError(HeaderError):
    """Exception for invalid headers."""
    pass
class SubsequentHeaderError(HeaderError):
    """Exception for missing and invalid extended headers."""
    pass
  321. #---------------------------
  322. # internal stream interface
  323. #---------------------------
  324. class _LowLevelFile:
  325. """Low-level file object. Supports reading and writing.
  326. It is used instead of a regular file object for streaming
  327. access.
  328. """
  329. def __init__(self, name, mode):
  330. mode = {
  331. "r": os.O_RDONLY,
  332. "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
  333. }[mode]
  334. if hasattr(os, "O_BINARY"):
  335. mode |= os.O_BINARY
  336. self.fd = os.open(name, mode, 0666)
  337. def close(self):
  338. os.close(self.fd)
  339. def read(self, size):
  340. return os.read(self.fd, size)
  341. def write(self, s):
  342. os.write(self.fd, s)
class _Stream:
    """Class that serves as an adapter between TarFile and
    a stream-like object. The stream-like object only
    needs to have a read() or write() method and is accessed
    blockwise. Use of gzip or bzip2 compression is possible.
    A stream-like object could be for example: sys.stdin,
    sys.stdout, a socket, a tape device etc.

    _Stream is intended to be used only internally.
    """

    def __init__(self, name, mode, comptype, fileobj, bufsize):
        """Construct a _Stream object.

        mode is "r" or "w"; comptype is "tar", "gz", "bz2" or "*"
        (autodetect). fileobj may be None, in which case a file named
        `name` is opened via _LowLevelFile.
        """
        self._extfileobj = True
        if fileobj is None:
            fileobj = _LowLevelFile(name, mode)
            self._extfileobj = False

        if comptype == '*':
            # Enable transparent compression detection for the
            # stream interface
            fileobj = _StreamProxy(fileobj)
            comptype = fileobj.getcomptype()

        self.name = name or ""
        self.mode = mode
        self.comptype = comptype
        self.fileobj = fileobj
        self.bufsize = bufsize
        self.buf = ""           # raw (compressed) byte buffer
        self.pos = 0L           # position in uncompressed data
        self.closed = False

        if comptype == "gz":
            try:
                import zlib
            except ImportError:
                raise CompressionError("zlib module is not available")
            self.zlib = zlib
            self.crc = zlib.crc32("") & 0xffffffffL
            if mode == "r":
                self._init_read_gz()
            else:
                self._init_write_gz()

        if comptype == "bz2":
            try:
                import bz2
            except ImportError:
                raise CompressionError("bz2 module is not available")
            if mode == "r":
                self.dbuf = ""      # decompressed byte buffer
                self.cmp = bz2.BZ2Decompressor()
            else:
                self.cmp = bz2.BZ2Compressor()

    def __del__(self):
        # hasattr() guards against __init__ having failed early.
        if hasattr(self, "closed") and not self.closed:
            self.close()

    def _init_write_gz(self):
        """Initialize for writing with gzip compression.
        """
        # Raw deflate stream (negative wbits); the gzip wrapper
        # (header and trailer) is written by hand below and in close().
        self.cmp = self.zlib.compressobj(9, self.zlib.DEFLATED,
                                         -self.zlib.MAX_WBITS,
                                         self.zlib.DEF_MEM_LEVEL,
                                         0)
        timestamp = struct.pack("<L", long(time.time()))
        # gzip header: magic 1f 8b, method 8 (deflate), FLG=FNAME,
        # 4-byte mtime, XFL=2, OS=255 (unknown).
        self.__write("\037\213\010\010%s\002\377" % timestamp)
        if self.name.endswith(".gz"):
            self.name = self.name[:-3]
        # FNAME field: original file name, NUL-terminated.
        self.__write(self.name + NUL)

    def write(self, s):
        """Write string s to the stream.
        """
        if self.comptype == "gz":
            self.crc = self.zlib.crc32(s, self.crc) & 0xffffffffL
        self.pos += len(s)      # pos tracks uncompressed bytes
        if self.comptype != "tar":
            s = self.cmp.compress(s)
        self.__write(s)

    def __write(self, s):
        """Write string s to the stream if a whole new block
        is ready to be written.
        """
        self.buf += s
        while len(self.buf) > self.bufsize:
            self.fileobj.write(self.buf[:self.bufsize])
            self.buf = self.buf[self.bufsize:]

    def close(self):
        """Close the _Stream object. No operation should be
        done on it afterwards.
        """
        if self.closed:
            return

        if self.mode == "w" and self.comptype != "tar":
            self.buf += self.cmp.flush()

        if self.mode == "w" and self.buf:
            self.fileobj.write(self.buf)
            self.buf = ""
            if self.comptype == "gz":
                # The native zlib crc is an unsigned 32-bit integer, but
                # the Python wrapper implicitly casts that to a signed C
                # long. So, on a 32-bit box self.crc may "look negative",
                # while the same crc on a 64-bit box may "look positive".
                # To avoid irksome warnings from the `struct` module, force
                # it to look positive on all boxes.
                self.fileobj.write(struct.pack("<L", self.crc & 0xffffffffL))
                self.fileobj.write(struct.pack("<L", self.pos & 0xffffFFFFL))

        if not self._extfileobj:
            self.fileobj.close()

        self.closed = True

    def _init_read_gz(self):
        """Initialize for reading a gzip compressed fileobj.
        """
        self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS)
        self.dbuf = ""

        # taken from gzip.GzipFile with some alterations
        if self.__read(2) != "\037\213":
            raise ReadError("not a gzip file")
        if self.__read(1) != "\010":
            raise CompressionError("unsupported compression method")

        flag = ord(self.__read(1))
        self.__read(6)          # skip mtime, XFL and OS fields

        if flag & 4:
            # FEXTRA: skip the extra field (2-byte little-endian length).
            xlen = ord(self.__read(1)) + 256 * ord(self.__read(1))
            self.read(xlen)
        if flag & 8:
            # FNAME: skip the NUL-terminated original file name.
            while True:
                s = self.__read(1)
                if not s or s == NUL:
                    break
        if flag & 16:
            # FCOMMENT: skip the NUL-terminated comment.
            while True:
                s = self.__read(1)
                if not s or s == NUL:
                    break
        if flag & 2:
            # FHCRC: skip the header CRC16.
            self.__read(2)

    def tell(self):
        """Return the stream's file pointer position.
        """
        return self.pos

    def seek(self, pos=0):
        """Set the stream's file pointer to pos. Negative seeking
        is forbidden.
        """
        if pos - self.pos >= 0:
            # Forward seek is implemented as a read-and-discard.
            blocks, remainder = divmod(pos - self.pos, self.bufsize)
            for i in xrange(blocks):
                self.read(self.bufsize)
            self.read(remainder)
        else:
            raise StreamError("seeking backwards is not allowed")
        return self.pos

    def read(self, size=None):
        """Return the next size number of bytes from the stream.
        If size is not defined, return all bytes of the stream
        up to EOF.
        """
        if size is None:
            t = []
            while True:
                buf = self._read(self.bufsize)
                if not buf:
                    break
                t.append(buf)
            buf = "".join(t)
        else:
            buf = self._read(size)
        self.pos += len(buf)
        return buf

    def _read(self, size):
        """Return size bytes from the stream (after decompression).
        """
        if self.comptype == "tar":
            return self.__read(size)

        # Decompress raw blocks into self.dbuf until enough
        # uncompressed data is available.
        c = len(self.dbuf)
        t = [self.dbuf]
        while c < size:
            buf = self.__read(self.bufsize)
            if not buf:
                break
            try:
                buf = self.cmp.decompress(buf)
            except IOError:
                raise ReadError("invalid compressed data")
            t.append(buf)
            c += len(buf)
        t = "".join(t)
        self.dbuf = t[size:]
        return t[:size]

    def __read(self, size):
        """Return size bytes from stream. If internal buffer is empty,
        read another block from the stream.
        """
        c = len(self.buf)
        t = [self.buf]
        while c < size:
            buf = self.fileobj.read(self.bufsize)
            if not buf:
                break
            t.append(buf)
            c += len(buf)
        t = "".join(t)
        self.buf = t[size:]
        return t[:size]
# class _Stream
  543. # class _Stream
  544. class _StreamProxy(object):
  545. """Small proxy class that enables transparent compression
  546. detection for the Stream interface (mode 'r|*').
  547. """
  548. def __init__(self, fileobj):
  549. self.fileobj = fileobj
  550. self.buf = self.fileobj.read(BLOCKSIZE)
  551. def read(self, size):
  552. self.read = self.fileobj.read
  553. return self.buf
  554. def getcomptype(self):
  555. if self.buf.startswith("\037\213\010"):
  556. return "gz"
  557. if self.buf.startswith("BZh91"):
  558. return "bz2"
  559. return "tar"
  560. def close(self):
  561. self.fileobj.close()
  562. # class StreamProxy
  563. class _BZ2Proxy(object):
  564. """Small proxy class that enables external file object
  565. support for "r:bz2" and "w:bz2" modes. This is actually
  566. a workaround for a limitation in bz2 module's BZ2File
  567. class which (unlike gzip.GzipFile) has no support for
  568. a file object argument.
  569. """
  570. blocksize = 16 * 1024
  571. def __init__(self, fileobj, mode):
  572. self.fileobj = fileobj
  573. self.mode = mode
  574. self.name = getattr(self.fileobj, "name", None)
  575. self.init()
  576. def init(self):
  577. import bz2
  578. self.pos = 0
  579. if self.mode == "r":
  580. self.bz2obj = bz2.BZ2Decompressor()
  581. self.fileobj.seek(0)
  582. self.buf = ""
  583. else:
  584. self.bz2obj = bz2.BZ2Compressor()
  585. def read(self, size):
  586. b = [self.buf]
  587. x = len(self.buf)
  588. while x < size:
  589. raw = self.fileobj.read(self.blocksize)
  590. if not raw:
  591. break
  592. data = self.bz2obj.decompress(raw)
  593. b.append(data)
  594. x += len(data)
  595. self.buf = "".join(b)
  596. buf = self.buf[:size]
  597. self.buf = self.buf[size:]
  598. self.pos += len(buf)
  599. return buf
  600. def seek(self, pos):
  601. if pos < self.pos:
  602. self.init()
  603. self.read(pos - self.pos)
  604. def tell(self):
  605. return self.pos
  606. def write(self, data):
  607. self.pos += len(data)
  608. raw = self.bz2obj.compress(data)
  609. self.fileobj.write(raw)
  610. def close(self):
  611. if self.mode == "w":
  612. raw = self.bz2obj.flush()
  613. self.fileobj.write(raw)
  614. # class _BZ2Proxy
  615. #------------------------
  616. # Extraction file object
  617. #------------------------
class _FileInFile(object):
    """A thin wrapper around an existing file object that
    provides a part of its data as an individual file
    object.
    """

    def __init__(self, fileobj, offset, size, sparse=None):
        self.fileobj = fileobj      # underlying (seekable) archive file
        self.offset = offset        # where this member's data starts
        self.size = size            # logical size of the member
        self.sparse = sparse        # sparse-section map, or None
        self.position = 0           # logical read position, 0..size

    def tell(self):
        """Return the current file position.
        """
        return self.position

    def seek(self, position):
        """Seek to a position in the file.
        """
        self.position = position

    def read(self, size=None):
        """Read data from the file.
        """
        # Clamp the request to the remaining data of the member.
        if size is None:
            size = self.size - self.position
        else:
            size = min(size, self.size - self.position)

        if self.sparse is None:
            return self.readnormal(size)
        else:
            return self.readsparse(size)

    def readnormal(self, size):
        """Read operation for regular files.
        """
        self.fileobj.seek(self.offset + self.position)
        self.position += size
        return self.fileobj.read(size)

    def readsparse(self, size):
        """Read operation for sparse files.
        """
        # Assemble the result section by section until the request is
        # satisfied or the end of the sparse map is reached.
        data = []
        while size > 0:
            buf = self.readsparsesection(size)
            if not buf:
                break
            size -= len(buf)
            data.append(buf)
        return "".join(data)

    def readsparsesection(self, size):
        """Read a single section of a sparse file.
        """
        # NOTE(review): self.sparse appears to expose find(pos) and
        # sections carrying offset/size/realpos; `_data` (declared
        # elsewhere in this file) marks sections backed by real bytes,
        # anything else is a hole filled with NUL bytes -- confirm
        # against the sparse helper classes.
        section = self.sparse.find(self.position)

        if section is None:
            return ""

        size = min(size, section.offset + section.size - self.position)

        if isinstance(section, _data):
            realpos = section.realpos + self.position - section.offset
            self.fileobj.seek(self.offset + realpos)
            self.position += size
            return self.fileobj.read(size)
        else:
            self.position += size
            return NUL * size
#class _FileInFile
  680. #class _FileInFile
class ExFileObject(object):
    """File-like object for reading an archive member.
    Is returned by TarFile.extractfile().
    """
    # Chunk size used by readline()'s refill loop.
    blocksize = 1024

    def __init__(self, tarfile, tarinfo):
        # A window onto the archive covering just this member's data
        # (sparse-aware when the member carries a sparse map).
        self.fileobj = _FileInFile(tarfile.fileobj,
                                   tarinfo.offset_data,
                                   tarinfo.size,
                                   getattr(tarinfo, "sparse", None))
        self.name = tarinfo.name
        self.mode = "r"
        self.closed = False
        self.size = tarinfo.size
        self.position = 0
        self.buffer = ""        # read-ahead buffer shared with readline()

    def read(self, size=None):
        """Read at most size bytes from the file. If size is not
        present or None, read all data until EOF is reached.
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")

        buf = ""
        if self.buffer:
            # Serve previously buffered data first so that read() and
            # readline() calls can be freely mixed.
            if size is None:
                buf = self.buffer
                self.buffer = ""
            else:
                buf = self.buffer[:size]
                self.buffer = self.buffer[size:]

        if size is None:
            buf += self.fileobj.read()
        else:
            buf += self.fileobj.read(size - len(buf))

        self.position += len(buf)
        return buf

    def readline(self, size=-1):
        """Read one entire line from the file. If size is present
        and non-negative, return a string with at most that
        size, which may be an incomplete line.
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")

        if "\n" in self.buffer:
            pos = self.buffer.find("\n") + 1
        else:
            # Refill the buffer blockwise until a newline appears
            # or EOF is reached.
            buffers = [self.buffer]
            while True:
                buf = self.fileobj.read(self.blocksize)
                buffers.append(buf)
                if not buf or "\n" in buf:
                    self.buffer = "".join(buffers)
                    pos = self.buffer.find("\n") + 1
                    if pos == 0:
                        # no newline found.
                        pos = len(self.buffer)
                    break

        if size != -1:
            pos = min(size, pos)

        buf = self.buffer[:pos]
        self.buffer = self.buffer[pos:]
        self.position += len(buf)
        return buf

    def readlines(self):
        """Return a list with all remaining lines.
        """
        result = []
        while True:
            line = self.readline()
            if not line: break
            result.append(line)
        return result

    def tell(self):
        """Return the current file position.
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")

        return self.position

    def seek(self, pos, whence=os.SEEK_SET):
        """Seek to a position in the file.

        The position is clamped to the range 0..size.
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")

        if whence == os.SEEK_SET:
            self.position = min(max(pos, 0), self.size)
        elif whence == os.SEEK_CUR:
            if pos < 0:
                self.position = max(self.position + pos, 0)
            else:
                self.position = min(self.position + pos, self.size)
        elif whence == os.SEEK_END:
            self.position = max(min(self.size + pos, self.size), 0)
        else:
            raise ValueError("Invalid argument")

        # Any seek invalidates the readline() read-ahead buffer.
        self.buffer = ""
        self.fileobj.seek(self.position)

    def close(self):
        """Close the file object.
        """
        self.closed = True

    def __iter__(self):
        """Get an iterator over the file's lines.
        """
        while True:
            line = self.readline()
            if not line:
                break
            yield line
#class ExFileObject
  789. #class ExFileObject
  790. #------------------
  791. # Exported Classes
  792. #------------------
  793. class TarInfo(object):
  794. """Informational class which holds the details about an
  795. archive member given by a tar header block.
  796. TarInfo objects are returned by TarFile.getmember(),
  797. TarFile.getmembers() and TarFile.gettarinfo() and are
  798. usually created internally.
  799. """
    def __init__(self, name=""):
        """Construct a TarInfo object. name is the optional name
        of the member.
        """
        self.name = name            # member name
        self.mode = 0644            # file permissions
        self.uid = 0                # user id
        self.gid = 0                # group id
        self.size = 0               # file size
        self.mtime = 0              # modification time
        self.chksum = 0             # header checksum
        self.type = REGTYPE         # member type
        self.linkname = ""          # link name
        self.uname = ""             # user name
        self.gname = ""             # group name
        self.devmajor = 0           # device major number
        self.devminor = 0           # device minor number

        self.offset = 0             # the tar header starts here
        self.offset_data = 0        # the file's data starts here

        self.pax_headers = {}       # pax header information
    # In pax headers the "name" and "linkname" field are called
    # "path" and "linkpath". The properties below expose the pax
    # spelling as aliases for the ustar attribute names.
    def _getpath(self):
        return self.name
    def _setpath(self, name):
        self.name = name
    path = property(_getpath, _setpath)

    def _getlinkpath(self):
        return self.linkname
    def _setlinkpath(self, linkname):
        self.linkname = linkname
    linkpath = property(_getlinkpath, _setlinkpath)
  832. def __repr__(self):
  833. return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self))
    def get_info(self, encoding, errors):
        """Return the TarInfo's attributes as a dictionary.

        String-valued fields that are unicode objects are encoded
        with the given encoding and error handler.
        """
        info = {
            "name":     self.name,
            "mode":     self.mode & 07777,      # permission bits only
            "uid":      self.uid,
            "gid":      self.gid,
            "size":     self.size,
            "mtime":    self.mtime,
            "chksum":   self.chksum,
            "type":     self.type,
            "linkname": self.linkname,
            "uname":    self.uname,
            "gname":    self.gname,
            "devmajor": self.devmajor,
            "devminor": self.devminor
        }

        # Directory names are stored with a trailing slash by convention.
        if info["type"] == DIRTYPE and not info["name"].endswith("/"):
            info["name"] += "/"

        for key in ("name", "linkname", "uname", "gname"):
            if type(info[key]) is unicode:
                info[key] = info[key].encode(encoding, errors)

        return info
  858. def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="strict"):
  859. """Return a tar header as a string of 512 byte blocks.
  860. """
  861. info = self.get_info(encoding, errors)
  862. if format == USTAR_FORMAT:
  863. return self.create_ustar_header(info)
  864. elif format == GNU_FORMAT:
  865. return self.create_gnu_header(info)
  866. elif format == PAX_FORMAT:
  867. return self.create_pax_header(info, encoding, errors)
  868. else:
  869. raise ValueError("invalid format")
  870. def create_ustar_header(self, info):
  871. """Return the object as a ustar header block.
  872. """
  873. info["magic"] = POSIX_MAGIC
  874. if len(info["linkname"]) > LENGTH_LINK:
  875. raise ValueError("linkname is too long")
  876. if len(info["name"]) > LENGTH_NAME:
  877. info["prefix"], info["name"] = self._posix_split_name(info["name"])
  878. return self._create_header(info, USTAR_FORMAT)
  879. def create_gnu_header(self, info):
  880. """Return the object as a GNU header block sequence.
  881. """
  882. info["magic"] = GNU_MAGIC
  883. buf = ""
  884. if len(info["linkname"]) > LENGTH_LINK:
  885. buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK)
  886. if len(info["name"]) > LENGTH_NAME:
  887. buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME)
  888. return buf + self._create_header(info, GNU_FORMAT)
    def create_pax_header(self, info, encoding, errors):
        """Return the object as a ustar header block. If it cannot be
        represented this way, prepend a pax extended header sequence
        with supplement information.
        """
        info["magic"] = POSIX_MAGIC
        pax_headers = self.pax_headers.copy()

        # Test string fields for values that exceed the field length or cannot
        # be represented in ASCII encoding.
        for name, hname, length in (
                ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK),
                ("uname", "uname", 32), ("gname", "gname", 32)):

            if hname in pax_headers:
                # The pax header has priority.
                continue

            val = info[name].decode(encoding, errors)

            # Try to encode the string as ASCII.
            try:
                val.encode("ascii")
            except UnicodeEncodeError:
                # Non-ASCII values must go into the pax header.
                pax_headers[hname] = val
                continue

            if len(info[name]) > length:
                # Overlong values go into the pax header too.
                pax_headers[hname] = val

        # Test number fields for values that exceed the field limit or values
        # that like to be stored as float.
        for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)):
            if name in pax_headers:
                # The pax header has priority. Avoid overflow.
                info[name] = 0
                continue

            val = info[name]
            if not 0 <= val < 8 ** (digits - 1) or isinstance(val, float):
                # Zero the ustar field so the numeric encoder cannot
                # overflow; the real value lives in the pax header.
                pax_headers[name] = unicode(val)
                info[name] = 0

        # Create a pax extended header if necessary.
        if pax_headers:
            buf = self._create_pax_generic_header(pax_headers)
        else:
            buf = ""

        return buf + self._create_header(info, USTAR_FORMAT)
  930. @classmethod
  931. def create_pax_global_header(cls, pax_headers):
  932. """Return the object as a pax global header block sequence.
  933. """
  934. return cls._create_pax_generic_header(pax_headers, type=XGLTYPE)
  935. def _posix_split_name(self, name):
  936. """Split a name longer than 100 chars into a prefix
  937. and a name part.
  938. """
  939. prefix = name[:LENGTH_PREFIX + 1]
  940. while prefix and prefix[-1] != "/":
  941. prefix = prefix[:-1]
  942. name = name[len(prefix):]
  943. prefix = prefix[:-1]
  944. if not prefix or len(name) > LENGTH_NAME:
  945. raise ValueError("name is too long")
  946. return prefix, name
  947. @staticmethod
  948. def _create_header(info, format):
  949. """Return a header block. info is a dictionary with file
  950. information, format must be one of the *_FORMAT constants.
  951. """
  952. parts = [
  953. stn(info.get("name", ""), 100),
  954. itn(info.get("mode", 0) & 07777, 8, format),
  955. itn(info.get("uid", 0), 8, format),
  956. itn(info.get("gid", 0), 8, format),
  957. itn(info.get("size", 0), 12, format),
  958. itn(info.get("mtime", 0), 12, format),
  959. " ", # checksum field
  960. info.get("type", REGTYPE),
  961. stn(info.get("linkname", ""), 100),
  962. stn(info.get("magic", POSIX_MAGIC), 8),
  963. stn(info.get("uname", ""), 32),
  964. stn(info.get("gname", ""), 32),
  965. itn(info.get("devmajor", 0), 8, format),
  966. itn(info.get("devminor", 0), 8, format),
  967. stn(info.get("prefix", ""), 155)
  968. ]
  969. buf = struct.pack("%ds" % BLOCKSIZE, "".join(parts))
  970. chksum = calc_chksums(buf[-BLOCKSIZE:])[0]
  971. buf = buf[:-364] + "%06o\0" % chksum + buf[-357:]
  972. return buf
  973. @staticmethod
  974. def _create_payload(payload):
  975. """Return the string payload filled with zero bytes
  976. up to the next 512 byte border.
  977. """
  978. blocks, remainder = divmod(len(payload), BLOCKSIZE)
  979. if remainder > 0:
  980. payload += (BLOCKSIZE - remainder) * NUL
  981. return payload
  982. @classmethod
  983. def _create_gnu_long_header(cls, name, type):
  984. """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence
  985. for name.
  986. """
  987. name += NUL
  988. info = {}
  989. info["name"] = "././@LongLink"
  990. info["type"] = type
  991. info["size"] = len(name)
  992. info["magic"] = GNU_MAGIC
  993. # create extended header + name blocks.
  994. return cls._create_header(info, USTAR_FORMAT) + \
  995. cls._create_payload(name)
  996. @classmethod
  997. def _create_pax_generic_header(cls, pax_headers, type=XHDTYPE):
  998. """Return a POSIX.1-2001 extended or global header sequence
  999. that contains a list of keyword, value pairs. The values
  1000. must be unicode objects.
  1001. """
  1002. records = []
  1003. for keyword, value in pax_headers.iteritems():
  1004. keyword = keyword.encode("utf8")
  1005. value = value.encode("utf8")
  1006. l = len(keyword) + len(value) + 3 # ' ' + '=' + '\n'
  1007. n = p = 0
  1008. while True:
  1009. n = l + len(str(p))
  1010. if n == p:
  1011. break
  1012. p = n
  1013. records.append("%d %s=%s\n" % (p, keyword, value))
  1014. records = "".join(records)
  1015. # We use a hardcoded "././@PaxHeader" name like star does
  1016. # instead of the one that POSIX recommends.
  1017. info = {}
  1018. info["name"] = "././@PaxHeader"
  1019. info["type"] = type
  1020. info["size"] = len(records)
  1021. info["magic"] = POSIX_MAGIC
  1022. # Create pax header + record blocks.
  1023. return cls._create_header(info, USTAR_FORMAT) + \
  1024. cls._create_payload(records)
    @classmethod
    def frombuf(cls, buf):
        """Construct a TarInfo object from a 512 byte string buffer.

        Raises EmptyHeaderError, TruncatedHeaderError, EOFHeaderError
        or InvalidHeaderError if buf is not a valid header block.
        """
        if len(buf) == 0:
            raise EmptyHeaderError("empty header")
        if len(buf) != BLOCKSIZE:
            raise TruncatedHeaderError("truncated header")
        if buf.count(NUL) == BLOCKSIZE:
            # An all-zero block marks the end of the archive.
            raise EOFHeaderError("end of file header")

        chksum = nti(buf[148:156])
        if chksum not in calc_chksums(buf):
            # calc_chksums() yields more than one candidate sum (hence
            # the membership test) -- presumably signed and unsigned
            # variants; see its definition earlier in the file.
            raise InvalidHeaderError("bad checksum")

        obj = cls()
        obj.buf = buf
        # Decode the fixed-offset ustar fields.  Bytes 257:265 (the
        # magic/version area) are deliberately not stored on the object.
        obj.name = nts(buf[0:100])
        obj.mode = nti(buf[100:108])
        obj.uid = nti(buf[108:116])
        obj.gid = nti(buf[116:124])
        obj.size = nti(buf[124:136])
        obj.mtime = nti(buf[136:148])
        obj.chksum = chksum
        obj.type = buf[156:157]
        obj.linkname = nts(buf[157:257])
        obj.uname = nts(buf[265:297])
        obj.gname = nts(buf[297:329])
        obj.devmajor = nti(buf[329:337])
        obj.devminor = nti(buf[337:345])
        prefix = nts(buf[345:500])

        # Old V7 tar format represents a directory as a regular
        # file with a trailing slash.
        if obj.type == AREGTYPE and obj.name.endswith("/"):
            obj.type = DIRTYPE

        # Remove redundant slashes from directories.
        if obj.isdir():
            obj.name = obj.name.rstrip("/")

        # Reconstruct a ustar longname from prefix + name.  Skipped for
        # GNU-specific types, which reuse this header area for other
        # data (e.g. the sparse map read by _proc_sparse()).
        if prefix and obj.type not in GNU_TYPES:
            obj.name = prefix + "/" + obj.name
        return obj
  1065. @classmethod
  1066. def fromtarfile(cls, tarfile):
  1067. """Return the next TarInfo object from TarFile object
  1068. tarfile.
  1069. """
  1070. buf = tarfile.fileobj.read(BLOCKSIZE)
  1071. obj = cls.frombuf(buf)
  1072. obj.offset = tarfile.fileobj.tell() - BLOCKSIZE
  1073. return obj._proc_member(tarfile)
  1074. #--------------------------------------------------------------------------
  1075. # The following are methods that are called depending on the type of a
  1076. # member. The entry point is _proc_member() which can be overridden in a
  1077. # subclass to add custom _proc_*() methods. A _proc_*() method MUST
  1078. # implement the following
  1079. # operations:
  1080. # 1. Set self.offset_data to the position where the data blocks begin,
  1081. # if there is data that follows.
  1082. # 2. Set tarfile.offset to the position where the next member's header will
  1083. # begin.
  1084. # 3. Return self or another valid TarInfo object.
  1085. def _proc_member(self, tarfile):
  1086. """Choose the right processing method depending on
  1087. the type and call it.
  1088. """
  1089. if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK):
  1090. return self._proc_gnulong(tarfile)
  1091. elif self.type == GNUTYPE_SPARSE:
  1092. return self._proc_sparse(tarfile)
  1093. elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE):
  1094. return self._proc_pax(tarfile)
  1095. else:
  1096. return self._proc_builtin(tarfile)
  1097. def _proc_builtin(self, tarfile):
  1098. """Process a builtin type or an unknown type which
  1099. will be treated as a regular file.
  1100. """
  1101. self.offset_data = tarfile.fileobj.tell()
  1102. offset = self.offset_data
  1103. if self.isreg() or self.type not in SUPPORTED_TYPES:
  1104. # Skip the following data blocks.
  1105. offset += self._block(self.size)
  1106. tarfile.offset = offset
  1107. # Patch the TarInfo object with saved global
  1108. # header information.
  1109. self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors)
  1110. return self
  1111. def _proc_gnulong(self, tarfile):
  1112. """Process the blocks that hold a GNU longname
  1113. or longlink member.
  1114. """
  1115. buf = tarfile.fileobj.read(self._block(self.size))
  1116. # Fetch the next header and process it.
  1117. try:
  1118. next = self.fromtarfile(tarfile)
  1119. except HeaderError:
  1120. raise SubsequentHeaderError("missing or bad subsequent header")
  1121. # Patch the TarInfo object from the next header with
  1122. # the longname information.
  1123. next.offset = self.offset
  1124. if self.type == GNUTYPE_LONGNAME:
  1125. next.name = nts(buf)
  1126. elif self.type == GNUTYPE_LONGLINK:
  1127. next.linkname = nts(buf)
  1128. return next
    def _proc_sparse(self, tarfile):
        """Process a GNU sparse header plus extra headers.

        Builds self.sparse, a _ringbuffer of _hole/_data sections that
        maps logical file offsets to positions in the stored
        (compacted) data stream.
        """
        buf = self.buf
        sp = _ringbuffer()
        pos = 386       # offset of the first sparse struct in the header
        lastpos = 0L    # logical end of the last section seen so far
        realpos = 0L    # position within the stored data stream

        # There are 4 possible sparse structs in the
        # first header.  Each struct is two 12-byte numeric fields:
        # (offset, numbytes).
        for i in xrange(4):
            try:
                offset = nti(buf[pos:pos + 12])
                numbytes = nti(buf[pos + 12:pos + 24])
            except ValueError:
                # Unparsable field: no more structs in this block.
                break
            if offset > lastpos:
                # Gap before this section -> zero-filled hole.
                sp.append(_hole(lastpos, offset - lastpos))
            sp.append(_data(offset, numbytes, realpos))
            realpos += numbytes
            lastpos = offset + numbytes
            pos += 24

        isextended = ord(buf[482])
        origsize = nti(buf[483:495])    # real (expanded) file size

        # If the isextended flag is given,
        # there are extra headers to process.
        while isextended == 1:
            buf = tarfile.fileobj.read(BLOCKSIZE)
            pos = 0
            # Each extension block holds up to 21 sparse structs plus
            # its own isextended flag at byte 504.
            for i in xrange(21):
                try:
                    offset = nti(buf[pos:pos + 12])
                    numbytes = nti(buf[pos + 12:pos + 24])
                except ValueError:
                    break
                if offset > lastpos:
                    sp.append(_hole(lastpos, offset - lastpos))
                sp.append(_data(offset, numbytes, realpos))
                realpos += numbytes
                lastpos = offset + numbytes
                pos += 24
            isextended = ord(buf[504])

        if lastpos < origsize:
            # Trailing hole up to the real end of the file.
            sp.append(_hole(lastpos, origsize - lastpos))

        self.sparse = sp

        self.offset_data = tarfile.fileobj.tell()
        # self.size is still the stored (compacted) size at this point,
        # which is what determines where the next header begins.
        tarfile.offset = self.offset_data + self._block(self.size)
        # Expose the expanded size to callers.
        self.size = origsize
        return self
    def _proc_pax(self, tarfile):
        """Process an extended or global header as described in
        POSIX.1-2001.

        Returns the member the header applies to (for extended
        headers), patched with the pax records.
        """
        # Read the header information.
        buf = tarfile.fileobj.read(self._block(self.size))

        # A pax header stores supplemental information for either
        # the following file (extended) or all following files
        # (global).
        if self.type == XGLTYPE:
            # Global: mutate the shared dict so the records persist.
            pax_headers = tarfile.pax_headers
        else:
            # Extended: work on a copy, applied only to the next member.
            pax_headers = tarfile.pax_headers.copy()

        # Parse pax header information. A record looks like that:
        # "%d %s=%s\n" % (length, keyword, value). length is the size
        # of the complete record including the length field itself and
        # the newline. keyword and value are both UTF-8 encoded strings.
        regex = re.compile(r"(\d+) ([^=]+)=", re.U)
        pos = 0
        while True:
            match = regex.match(buf, pos)
            if not match:
                break
            length, keyword = match.groups()
            length = int(length)
            # Value spans from just after the "=" to just before the
            # record's trailing newline.
            value = buf[match.end(2) + 1:match.start(1) + length - 1]

            keyword = keyword.decode("utf8")
            value = value.decode("utf8")

            pax_headers[keyword] = value
            pos += length

        # Fetch the next header.
        try:
            next = self.fromtarfile(tarfile)
        except HeaderError:
            raise SubsequentHeaderError("missing or bad subsequent header")

        if self.type in (XHDTYPE, SOLARIS_XHDTYPE):
            # Patch the TarInfo object with the extended header info.
            next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors)
            next.offset = self.offset

            if "size" in pax_headers:
                # If the extended header replaces the size field,
                # we need to recalculate the offset where the next
                # header starts.
                offset = next.offset_data
                if next.isreg() or next.type not in SUPPORTED_TYPES:
                    offset += next._block(next.size)
                tarfile.offset = offset

        return next
  1226. def _apply_pax_info(self, pax_headers, encoding, errors):
  1227. """Replace fields with supplemental information from a previous
  1228. pax extended or global header.
  1229. """
  1230. for keyword, value in pax_headers.iteritems():
  1231. if keyword not in PAX_FIELDS:
  1232. continue
  1233. if keyword == "path":
  1234. value = value.rstrip("/")
  1235. if keyword in PAX_NUMBER_FIELDS:
  1236. try:
  1237. value = PAX_NUMBER_FIELDS[keyword](value)
  1238. except ValueError:
  1239. value = 0
  1240. else:
  1241. value = uts(value, encoding, errors)
  1242. setattr(self, keyword, value)
  1243. self.pax_headers = pax_headers.copy()
  1244. def _block(self, count):
  1245. """Round up a byte count by BLOCKSIZE and return it,
  1246. e.g. _block(834) => 1024.
  1247. """
  1248. blocks, remainder = divmod(count, BLOCKSIZE)
  1249. if remainder:
  1250. blocks += 1
  1251. return blocks * BLOCKSIZE
    def isreg(self):
        """Return True if the member is a regular file."""
        return self.type in REGULAR_TYPES

    def isfile(self):
        """Return True if the member is a regular file (alias of isreg())."""
        return self.isreg()

    def isdir(self):
        """Return True if the member is a directory."""
        return self.type == DIRTYPE

    def issym(self):
        """Return True if the member is a symbolic link."""
        return self.type == SYMTYPE

    def islnk(self):
        """Return True if the member is a hard link."""
        return self.type == LNKTYPE

    def ischr(self):
        """Return True if the member is a character device."""
        return self.type == CHRTYPE

    def isblk(self):
        """Return True if the member is a block device."""
        return self.type == BLKTYPE

    def isfifo(self):
        """Return True if the member is a FIFO."""
        return self.type == FIFOTYPE

    def issparse(self):
        """Return True if the member is a GNU sparse file."""
        return self.type == GNUTYPE_SPARSE

    def isdev(self):
        """Return True if the member is a device of any kind
        (character, block or FIFO)."""
        return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE)
  1272. # class TarInfo
  1273. class TarFile(object):
  1274. """The TarFile Class provides an interface to tar archives.
  1275. """
  1276. debug = 0 # May be set from 0 (no msgs) to 3 (all msgs)
  1277. dereference = False # If true, add content of linked file to the
  1278. # tar file, else the link.
  1279. ignore_zeros = False # If true, skips empty or invalid blocks and
  1280. # continues processing.
  1281. errorlevel = 1 # If 0, fatal errors only appear in debug
  1282. # messages (if debug >= 0). If > 0, errors
  1283. # are passed to the caller as exceptions.
  1284. format = DEFAULT_FORMAT # The format to use when creating an archive.
  1285. encoding = ENCODING # Encoding for 8-bit character strings.
  1286. errors = None # Error handler for unicode conversion.
  1287. tarinfo = TarInfo # The default TarInfo class to use.
  1288. fileobject = ExFileObject # The default ExFileObject class to use.
  1289. def __init__(self, name=None, mode="r", fileobj=None, format=None,
  1290. tarinfo=None, dereference=None, ignore_zeros=None, encoding=None,
  1291. errors=None, pax_headers=None, debug=None, errorlevel=None):
  1292. """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to
  1293. read from an existing archive, 'a' to append data to an existing
  1294. file or 'w' to create a new file overwriting an existing one. `mode'
  1295. defaults to 'r'.
  1296. If `fileobj' is given, it is used for reading or writing data. If it
  1297. can be determined, `mode' is overridden by `fileobj's mode.
  1298. `fileobj' is not closed, when TarFile is closed.
  1299. """
  1300. if len(mode) > 1 or mode not in "raw":
  1301. raise ValueError("mode must be 'r', 'a' or 'w'")
  1302. self.mode = mode
  1303. self._mode = {"r": "rb", "a": "r+b", "w": "wb"}[mode]
  1304. if not fileobj:
  1305. if self.mode == "a" and not os.path.exists(name):
  1306. # Create nonexistent files …

Large files files are truncated, but you can click here to view the full file