
/lib-python/2.7/tarfile.py

https://bitbucket.org/kkris/pypy
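
A quick usage sketch (illustrative only, not part of the file below; the archive
name "example.tar.gz" and the directory "some_directory" are made up):

    import tarfile

    # Create a gzip-compressed archive from a directory.
    with tarfile.open("example.tar.gz", "w:gz") as tar:
        tar.add("some_directory")

    # Reopen it and list the member names.
    with tarfile.open("example.tar.gz", "r:gz") as tar:
        for member in tar.getmembers():
            print member.name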
#!/usr/bin/env python
# -*- coding: iso-8859-1 -*-
#-------------------------------------------------------------------
# tarfile.py
#-------------------------------------------------------------------
# Copyright (C) 2002 Lars Gustäbel <lars@gustaebel.de>
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
"""Read from and write to tar format archives.
"""

__version__ = "$Revision: 85213 $"
# $Source$

version = "0.9.0"
__author__ = "Lars Gustäbel (lars@gustaebel.de)"
__date__ = "$Date$"
__cvsid__ = "$Id$"
__credits__ = "Gustavo Niemeyer, Niels Gustäbel, Richard Townsend."

#---------
# Imports
#---------
import sys
import os
import shutil
import stat
import errno
import time
import struct
import copy
import re
import operator

try:
    import grp, pwd
except ImportError:
    grp = pwd = None

# from tarfile import *
__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError"]

#---------------------------------------------------------
# tar constants
#---------------------------------------------------------
NUL = "\0" # the null character
BLOCKSIZE = 512 # length of processing blocks
RECORDSIZE = BLOCKSIZE * 20 # length of records
GNU_MAGIC = "ustar  \0" # magic gnu tar string
POSIX_MAGIC = "ustar\x0000" # magic posix tar string

LENGTH_NAME = 100 # maximum length of a filename
LENGTH_LINK = 100 # maximum length of a linkname
LENGTH_PREFIX = 155 # maximum length of the prefix field

REGTYPE = "0" # regular file
AREGTYPE = "\0" # regular file
LNKTYPE = "1" # link (inside tarfile)
SYMTYPE = "2" # symbolic link
CHRTYPE = "3" # character special device
BLKTYPE = "4" # block special device
DIRTYPE = "5" # directory
FIFOTYPE = "6" # fifo special device
CONTTYPE = "7" # contiguous file

GNUTYPE_LONGNAME = "L" # GNU tar longname
GNUTYPE_LONGLINK = "K" # GNU tar longlink
GNUTYPE_SPARSE = "S" # GNU tar sparse file

XHDTYPE = "x" # POSIX.1-2001 extended header
XGLTYPE = "g" # POSIX.1-2001 global header
SOLARIS_XHDTYPE = "X" # Solaris extended header

USTAR_FORMAT = 0 # POSIX.1-1988 (ustar) format
GNU_FORMAT = 1 # GNU tar format
PAX_FORMAT = 2 # POSIX.1-2001 (pax) format
DEFAULT_FORMAT = GNU_FORMAT

#---------------------------------------------------------
# tarfile constants
#---------------------------------------------------------
# File types that tarfile supports:
SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE,
                   SYMTYPE, DIRTYPE, FIFOTYPE,
                   CONTTYPE, CHRTYPE, BLKTYPE,
                   GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
                   GNUTYPE_SPARSE)

# File types that will be treated as a regular file.
REGULAR_TYPES = (REGTYPE, AREGTYPE,
                 CONTTYPE, GNUTYPE_SPARSE)

# File types that are part of the GNU tar format.
GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
             GNUTYPE_SPARSE)

# Fields from a pax header that override a TarInfo attribute.
PAX_FIELDS = ("path", "linkpath", "size", "mtime",
              "uid", "gid", "uname", "gname")

# Fields in a pax header that are numbers, all other fields
# are treated as strings.
PAX_NUMBER_FIELDS = {
    "atime": float,
    "ctime": float,
    "mtime": float,
    "uid": int,
    "gid": int,
    "size": int
}

#---------------------------------------------------------
# Bits used in the mode field, values in octal.
#---------------------------------------------------------
S_IFLNK = 0120000 # symbolic link
S_IFREG = 0100000 # regular file
S_IFBLK = 0060000 # block device
S_IFDIR = 0040000 # directory
S_IFCHR = 0020000 # character device
S_IFIFO = 0010000 # fifo

TSUID = 04000 # set UID on execution
TSGID = 02000 # set GID on execution
TSVTX = 01000 # reserved

TUREAD = 0400 # read by owner
TUWRITE = 0200 # write by owner
TUEXEC = 0100 # execute/search by owner
TGREAD = 0040 # read by group
TGWRITE = 0020 # write by group
TGEXEC = 0010 # execute/search by group
TOREAD = 0004 # read by other
TOWRITE = 0002 # write by other
TOEXEC = 0001 # execute/search by other

#---------------------------------------------------------
# initialization
#---------------------------------------------------------
ENCODING = sys.getfilesystemencoding()
if ENCODING is None:
    ENCODING = sys.getdefaultencoding()

#---------------------------------------------------------
# Some useful functions
#---------------------------------------------------------

def stn(s, length):
    """Convert a python string to a null-terminated string buffer.
    """
    return s[:length] + (length - len(s)) * NUL

def nts(s):
    """Convert a null-terminated string field to a python string.
    """
    # Use the string up to the first null char.
    p = s.find("\0")
    if p == -1:
        return s
    return s[:p]

def nti(s):
    """Convert a number field to a python number.
    """
    # There are two possible encodings for a number field, see
    # itn() below.
    if s[0] != chr(0200):
        try:
            n = int(nts(s) or "0", 8)
        except ValueError:
            raise InvalidHeaderError("invalid header")
    else:
        n = 0L
        for i in xrange(len(s) - 1):
            n <<= 8
            n += ord(s[i + 1])
    return n

def itn(n, digits=8, format=DEFAULT_FORMAT):
    """Convert a python number to a number field.
    """
    # POSIX 1003.1-1988 requires numbers to be encoded as a string of
    # octal digits followed by a null-byte, this allows values up to
    # (8**(digits-1))-1. GNU tar allows storing numbers greater than
    # that if necessary. A leading 0200 byte indicates this particular
    # encoding, the following digits-1 bytes are a big-endian
    # representation. This allows values up to (256**(digits-1))-1.
    if 0 <= n < 8 ** (digits - 1):
        s = "%0*o" % (digits - 1, n) + NUL
    else:
        if format != GNU_FORMAT or n >= 256 ** (digits - 1):
            raise ValueError("overflow in number field")

        if n < 0:
            # XXX We mimic GNU tar's behaviour with negative numbers,
            # this could raise OverflowError.
            n = struct.unpack("L", struct.pack("l", n))[0]

        s = ""
        for i in xrange(digits - 1):
            s = chr(n & 0377) + s
            n >>= 8
        s = chr(0200) + s
    return s
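
# Illustrative examples of the two encodings handled by nti()/itn() above
# (the sample values are made up, not taken from the original module):
#
#     itn(1000)                        == "0001750\x00"   # 7 octal digits + NUL
#     itn(2097152, 8, GNU_FORMAT)      == "\x80\x00\x00\x00\x00\x20\x00\x00"
#                                         # 0200 marker + 7-byte big-endian value
#     nti(itn(2097152, 8, GNU_FORMAT)) == 2097152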

def uts(s, encoding, errors):
    """Convert a unicode object to a string.
    """
    if errors == "utf-8":
        # An extra error handler similar to the -o invalid=UTF-8 option
        # in POSIX.1-2001. Replace untranslatable characters with their
        # UTF-8 representation.
        try:
            return s.encode(encoding, "strict")
        except UnicodeEncodeError:
            x = []
            for c in s:
                try:
                    x.append(c.encode(encoding, "strict"))
                except UnicodeEncodeError:
                    x.append(c.encode("utf8"))
            return "".join(x)
    else:
        return s.encode(encoding, errors)

def calc_chksums(buf):
    """Calculate the checksum for a member's header by summing up all
    characters except for the chksum field which is treated as if
    it was filled with spaces. According to the GNU tar sources,
    some tars (Sun and NeXT) calculate chksum with signed char,
    which will be different if there are chars in the buffer with
    the high bit set. So we calculate two checksums, unsigned and
    signed.
    """
    unsigned_chksum = 256 + sum(struct.unpack("148B", buf[:148]) + struct.unpack("356B", buf[156:512]))
    signed_chksum = 256 + sum(struct.unpack("148b", buf[:148]) + struct.unpack("356b", buf[156:512]))
    return unsigned_chksum, signed_chksum

def copyfileobj(src, dst, length=None):
    """Copy length bytes from fileobj src to fileobj dst.
    If length is None, copy the entire content.
    """
    if length == 0:
        return
    if length is None:
        shutil.copyfileobj(src, dst)
        return

    BUFSIZE = 16 * 1024
    blocks, remainder = divmod(length, BUFSIZE)
    for b in xrange(blocks):
        buf = src.read(BUFSIZE)
        if len(buf) < BUFSIZE:
            raise IOError("end of file reached")
        dst.write(buf)

    if remainder != 0:
        buf = src.read(remainder)
        if len(buf) < remainder:
            raise IOError("end of file reached")
        dst.write(buf)
    return

filemode_table = (
    ((S_IFLNK, "l"),
     (S_IFREG, "-"),
     (S_IFBLK, "b"),
     (S_IFDIR, "d"),
     (S_IFCHR, "c"),
     (S_IFIFO, "p")),

    ((TUREAD, "r"),),
    ((TUWRITE, "w"),),
    ((TUEXEC|TSUID, "s"),
     (TSUID, "S"),
     (TUEXEC, "x")),

    ((TGREAD, "r"),),
    ((TGWRITE, "w"),),
    ((TGEXEC|TSGID, "s"),
     (TSGID, "S"),
     (TGEXEC, "x")),

    ((TOREAD, "r"),),
    ((TOWRITE, "w"),),
    ((TOEXEC|TSVTX, "t"),
     (TSVTX, "T"),
     (TOEXEC, "x"))
)

def filemode(mode):
    """Convert a file's mode to a string of the form
    -rwxrwxrwx.
    Used by TarFile.list()
    """
    perm = []
    for table in filemode_table:
        for bit, char in table:
            if mode & bit == bit:
                perm.append(char)
                break
        else:
            perm.append("-")
    return "".join(perm)
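
# Illustrative example (not part of the original module): for a regular file
# with permission bits 0755, filemode(0100755) returns "-rwxr-xr-x".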

class TarError(Exception):
    """Base exception."""
    pass
class ExtractError(TarError):
    """General exception for extract errors."""
    pass
class ReadError(TarError):
    """Exception for unreadable tar archives."""
    pass
class CompressionError(TarError):
    """Exception for unavailable compression methods."""
    pass
class StreamError(TarError):
    """Exception for unsupported operations on stream-like TarFiles."""
    pass
class HeaderError(TarError):
    """Base exception for header errors."""
    pass
class EmptyHeaderError(HeaderError):
    """Exception for empty headers."""
    pass
class TruncatedHeaderError(HeaderError):
    """Exception for truncated headers."""
    pass
class EOFHeaderError(HeaderError):
    """Exception for end of file headers."""
    pass
class InvalidHeaderError(HeaderError):
    """Exception for invalid headers."""
    pass
class SubsequentHeaderError(HeaderError):
    """Exception for missing and invalid extended headers."""
    pass

#---------------------------
# internal stream interface
#---------------------------
class _LowLevelFile:
    """Low-level file object. Supports reading and writing.
    It is used instead of a regular file object for streaming
    access.
    """

    def __init__(self, name, mode):
        mode = {
            "r": os.O_RDONLY,
            "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
        }[mode]
        if hasattr(os, "O_BINARY"):
            mode |= os.O_BINARY
        self.fd = os.open(name, mode, 0666)

    def close(self):
        os.close(self.fd)

    def read(self, size):
        return os.read(self.fd, size)

    def write(self, s):
        os.write(self.fd, s)

class _Stream:
    """Class that serves as an adapter between TarFile and
    a stream-like object. The stream-like object only
    needs to have a read() or write() method and is accessed
    blockwise. Use of gzip or bzip2 compression is possible.
    A stream-like object could be for example: sys.stdin,
    sys.stdout, a socket, a tape device etc.

    _Stream is intended to be used only internally.
    """

    def __init__(self, name, mode, comptype, fileobj, bufsize):
        """Construct a _Stream object.
        """
        self._extfileobj = True
        if fileobj is None:
            fileobj = _LowLevelFile(name, mode)
            self._extfileobj = False

        if comptype == '*':
            # Enable transparent compression detection for the
            # stream interface
            fileobj = _StreamProxy(fileobj)
            comptype = fileobj.getcomptype()

        self.name = name or ""
        self.mode = mode
        self.comptype = comptype
        self.fileobj = fileobj
        self.bufsize = bufsize
        self.buf = ""
        self.pos = 0L
        self.closed = False

        if comptype == "gz":
            try:
                import zlib
            except ImportError:
                raise CompressionError("zlib module is not available")
            self.zlib = zlib
            self.crc = zlib.crc32("") & 0xffffffffL
            if mode == "r":
                self._init_read_gz()
            else:
                self._init_write_gz()

        if comptype == "bz2":
            try:
                import bz2
            except ImportError:
                raise CompressionError("bz2 module is not available")
            if mode == "r":
                self.dbuf = ""
                self.cmp = bz2.BZ2Decompressor()
            else:
                self.cmp = bz2.BZ2Compressor()

    def __del__(self):
        if hasattr(self, "closed") and not self.closed:
            self.close()

    def _init_write_gz(self):
        """Initialize for writing with gzip compression.
        """
        self.cmp = self.zlib.compressobj(9, self.zlib.DEFLATED,
                                         -self.zlib.MAX_WBITS,
                                         self.zlib.DEF_MEM_LEVEL,
                                         0)
        timestamp = struct.pack("<L", long(time.time()))
        self.__write("\037\213\010\010%s\002\377" % timestamp)
        if type(self.name) is unicode:
            self.name = self.name.encode("iso-8859-1", "replace")
        if self.name.endswith(".gz"):
            self.name = self.name[:-3]
        self.__write(self.name + NUL)

    def write(self, s):
        """Write string s to the stream.
        """
        if self.comptype == "gz":
            self.crc = self.zlib.crc32(s, self.crc) & 0xffffffffL
        self.pos += len(s)
        if self.comptype != "tar":
            s = self.cmp.compress(s)
        self.__write(s)

    def __write(self, s):
        """Write string s to the stream if a whole new block
        is ready to be written.
        """
        self.buf += s
        while len(self.buf) > self.bufsize:
            self.fileobj.write(self.buf[:self.bufsize])
            self.buf = self.buf[self.bufsize:]

    def close(self):
        """Close the _Stream object. No operation should be
        done on it afterwards.
        """
        if self.closed:
            return

        if self.mode == "w" and self.comptype != "tar":
            self.buf += self.cmp.flush()

        if self.mode == "w" and self.buf:
            self.fileobj.write(self.buf)
            self.buf = ""
            if self.comptype == "gz":
                # The native zlib crc is an unsigned 32-bit integer, but
                # the Python wrapper implicitly casts that to a signed C
                # long. So, on a 32-bit box self.crc may "look negative",
                # while the same crc on a 64-bit box may "look positive".
                # To avoid irksome warnings from the `struct` module, force
                # it to look positive on all boxes.
                self.fileobj.write(struct.pack("<L", self.crc & 0xffffffffL))
                self.fileobj.write(struct.pack("<L", self.pos & 0xffffFFFFL))

        if not self._extfileobj:
            self.fileobj.close()

        self.closed = True

    def _init_read_gz(self):
        """Initialize for reading a gzip compressed fileobj.
        """
        self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS)
        self.dbuf = ""

        # taken from gzip.GzipFile with some alterations
        if self.__read(2) != "\037\213":
            raise ReadError("not a gzip file")
        if self.__read(1) != "\010":
            raise CompressionError("unsupported compression method")

        flag = ord(self.__read(1))
        self.__read(6)

        if flag & 4:
            xlen = ord(self.__read(1)) + 256 * ord(self.__read(1))
            self.read(xlen)
        if flag & 8:
            while True:
                s = self.__read(1)
                if not s or s == NUL:
                    break
        if flag & 16:
            while True:
                s = self.__read(1)
                if not s or s == NUL:
                    break
        if flag & 2:
            self.__read(2)

    def tell(self):
        """Return the stream's file pointer position.
        """
        return self.pos

    def seek(self, pos=0):
        """Set the stream's file pointer to pos. Negative seeking
        is forbidden.
        """
        if pos - self.pos >= 0:
            blocks, remainder = divmod(pos - self.pos, self.bufsize)
            for i in xrange(blocks):
                self.read(self.bufsize)
            self.read(remainder)
        else:
            raise StreamError("seeking backwards is not allowed")
        return self.pos

    def read(self, size=None):
        """Return the next size number of bytes from the stream.
        If size is not defined, return all bytes of the stream
        up to EOF.
        """
        if size is None:
            t = []
            while True:
                buf = self._read(self.bufsize)
                if not buf:
                    break
                t.append(buf)
            buf = "".join(t)
        else:
            buf = self._read(size)
        self.pos += len(buf)
        return buf

    def _read(self, size):
        """Return size bytes from the stream.
        """
        if self.comptype == "tar":
            return self.__read(size)

        c = len(self.dbuf)
        t = [self.dbuf]
        while c < size:
            buf = self.__read(self.bufsize)
            if not buf:
                break
            try:
                buf = self.cmp.decompress(buf)
            except IOError:
                raise ReadError("invalid compressed data")
            t.append(buf)
            c += len(buf)
        t = "".join(t)
        self.dbuf = t[size:]
        return t[:size]

    def __read(self, size):
        """Return size bytes from stream. If internal buffer is empty,
        read another block from the stream.
        """
        c = len(self.buf)
        t = [self.buf]
        while c < size:
            buf = self.fileobj.read(self.bufsize)
            if not buf:
                break
            t.append(buf)
            c += len(buf)
        t = "".join(t)
        self.buf = t[size:]
        return t[:size]
# class _Stream

class _StreamProxy(object):
    """Small proxy class that enables transparent compression
    detection for the Stream interface (mode 'r|*').
    """

    def __init__(self, fileobj):
        self.fileobj = fileobj
        self.buf = self.fileobj.read(BLOCKSIZE)

    def read(self, size):
        self.read = self.fileobj.read
        return self.buf

    def getcomptype(self):
        if self.buf.startswith("\037\213\010"):
            return "gz"
        if self.buf[0:3] == "BZh" and self.buf[4:10] == "1AY&SY":
            return "bz2"
        return "tar"

    def close(self):
        self.fileobj.close()
# class StreamProxy

class _BZ2Proxy(object):
    """Small proxy class that enables external file object
    support for "r:bz2" and "w:bz2" modes. This is actually
    a workaround for a limitation in bz2 module's BZ2File
    class which (unlike gzip.GzipFile) has no support for
    a file object argument.
    """

    blocksize = 16 * 1024

    def __init__(self, fileobj, mode):
        self.fileobj = fileobj
        self.mode = mode
        self.name = getattr(self.fileobj, "name", None)
        self.init()

    def init(self):
        import bz2
        self.pos = 0
        if self.mode == "r":
            self.bz2obj = bz2.BZ2Decompressor()
            self.fileobj.seek(0)
            self.buf = ""
        else:
            self.bz2obj = bz2.BZ2Compressor()

    def read(self, size):
        b = [self.buf]
        x = len(self.buf)
        while x < size:
            raw = self.fileobj.read(self.blocksize)
            if not raw:
                break
            data = self.bz2obj.decompress(raw)
            b.append(data)
            x += len(data)
        self.buf = "".join(b)

        buf = self.buf[:size]
        self.buf = self.buf[size:]
        self.pos += len(buf)
        return buf

    def seek(self, pos):
        if pos < self.pos:
            self.init()
        self.read(pos - self.pos)

    def tell(self):
        return self.pos

    def write(self, data):
        self.pos += len(data)
        raw = self.bz2obj.compress(data)
        self.fileobj.write(raw)

    def close(self):
        if self.mode == "w":
            raw = self.bz2obj.flush()
            self.fileobj.write(raw)
# class _BZ2Proxy

#------------------------
# Extraction file object
#------------------------
class _FileInFile(object):
    """A thin wrapper around an existing file object that
    provides a part of its data as an individual file
    object.
    """

    def __init__(self, fileobj, offset, size, sparse=None):
        self.fileobj = fileobj
        self.offset = offset
        self.size = size
        self.sparse = sparse
        self.position = 0

    def tell(self):
        """Return the current file position.
        """
        return self.position

    def seek(self, position):
        """Seek to a position in the file.
        """
        self.position = position

    def read(self, size=None):
        """Read data from the file.
        """
        if size is None:
            size = self.size - self.position
        else:
            size = min(size, self.size - self.position)

        if self.sparse is None:
            return self.readnormal(size)
        else:
            return self.readsparse(size)

    def readnormal(self, size):
        """Read operation for regular files.
        """
        self.fileobj.seek(self.offset + self.position)
        self.position += size
        return self.fileobj.read(size)

    def readsparse(self, size):
        """Read operation for sparse files.
        """
        data = []
        while size > 0:
            buf = self.readsparsesection(size)
            if not buf:
                break
            size -= len(buf)
            data.append(buf)
        return "".join(data)

    def readsparsesection(self, size):
        """Read a single section of a sparse file.
        """
        section = self.sparse.find(self.position)

        if section is None:
            return ""

        size = min(size, section.offset + section.size - self.position)

        if isinstance(section, _data):
            realpos = section.realpos + self.position - section.offset
            self.fileobj.seek(self.offset + realpos)
            self.position += size
            return self.fileobj.read(size)
        else:
            self.position += size
            return NUL * size
#class _FileInFile

class ExFileObject(object):
    """File-like object for reading an archive member.
    Is returned by TarFile.extractfile().
    """
    blocksize = 1024

    def __init__(self, tarfile, tarinfo):
        self.fileobj = _FileInFile(tarfile.fileobj,
                                   tarinfo.offset_data,
                                   tarinfo.size,
                                   getattr(tarinfo, "sparse", None))
        self.name = tarinfo.name
        self.mode = "r"
        self.closed = False
        self.size = tarinfo.size

        self.position = 0
        self.buffer = ""

    def read(self, size=None):
        """Read at most size bytes from the file. If size is not
        present or None, read all data until EOF is reached.
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")

        buf = ""
        if self.buffer:
            if size is None:
                buf = self.buffer
                self.buffer = ""
            else:
                buf = self.buffer[:size]
                self.buffer = self.buffer[size:]

        if size is None:
            buf += self.fileobj.read()
        else:
            buf += self.fileobj.read(size - len(buf))

        self.position += len(buf)
        return buf

    def readline(self, size=-1):
        """Read one entire line from the file. If size is present
        and non-negative, return a string with at most that
        size, which may be an incomplete line.
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")

        if "\n" in self.buffer:
            pos = self.buffer.find("\n") + 1
        else:
            buffers = [self.buffer]
            while True:
                buf = self.fileobj.read(self.blocksize)
                buffers.append(buf)
                if not buf or "\n" in buf:
                    self.buffer = "".join(buffers)
                    pos = self.buffer.find("\n") + 1
                    if pos == 0:
                        # no newline found.
                        pos = len(self.buffer)
                    break

        if size != -1:
            pos = min(size, pos)

        buf = self.buffer[:pos]
        self.buffer = self.buffer[pos:]
        self.position += len(buf)
        return buf

    def readlines(self):
        """Return a list with all remaining lines.
        """
        result = []
        while True:
            line = self.readline()
            if not line: break
            result.append(line)
        return result

    def tell(self):
        """Return the current file position.
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")

        return self.position

    def seek(self, pos, whence=os.SEEK_SET):
        """Seek to a position in the file.
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")

        if whence == os.SEEK_SET:
            self.position = min(max(pos, 0), self.size)
        elif whence == os.SEEK_CUR:
            if pos < 0:
                self.position = max(self.position + pos, 0)
            else:
                self.position = min(self.position + pos, self.size)
        elif whence == os.SEEK_END:
            self.position = max(min(self.size + pos, self.size), 0)
        else:
            raise ValueError("Invalid argument")

        self.buffer = ""
        self.fileobj.seek(self.position)

    def close(self):
        """Close the file object.
        """
        self.closed = True

    def __iter__(self):
        """Get an iterator over the file's lines.
        """
        while True:
            line = self.readline()
            if not line:
                break
            yield line
#class ExFileObject

#------------------
# Exported Classes
#------------------
class TarInfo(object):
    """Informational class which holds the details about an
    archive member given by a tar header block.
    TarInfo objects are returned by TarFile.getmember(),
    TarFile.getmembers() and TarFile.gettarinfo() and are
    usually created internally.
    """

    def __init__(self, name=""):
        """Construct a TarInfo object. name is the optional name
        of the member.
        """
        self.name = name # member name
        self.mode = 0644 # file permissions
        self.uid = 0 # user id
        self.gid = 0 # group id
        self.size = 0 # file size
        self.mtime = 0 # modification time
        self.chksum = 0 # header checksum
        self.type = REGTYPE # member type
        self.linkname = "" # link name
        self.uname = "" # user name
        self.gname = "" # group name
        self.devmajor = 0 # device major number
        self.devminor = 0 # device minor number

        self.offset = 0 # the tar header starts here
        self.offset_data = 0 # the file's data starts here

        self.pax_headers = {} # pax header information

    # In pax headers the "name" and "linkname" field are called
    # "path" and "linkpath".
    def _getpath(self):
        return self.name
    def _setpath(self, name):
        self.name = name
    path = property(_getpath, _setpath)

    def _getlinkpath(self):
        return self.linkname
    def _setlinkpath(self, linkname):
        self.linkname = linkname
    linkpath = property(_getlinkpath, _setlinkpath)

    def __repr__(self):
        return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self))

    def get_info(self, encoding, errors):
        """Return the TarInfo's attributes as a dictionary.
        """
        info = {
            "name": self.name,
            "mode": self.mode & 07777,
            "uid": self.uid,
            "gid": self.gid,
            "size": self.size,
            "mtime": self.mtime,
            "chksum": self.chksum,
            "type": self.type,
            "linkname": self.linkname,
            "uname": self.uname,
            "gname": self.gname,
            "devmajor": self.devmajor,
            "devminor": self.devminor
        }

        if info["type"] == DIRTYPE and not info["name"].endswith("/"):
            info["name"] += "/"

        for key in ("name", "linkname", "uname", "gname"):
            if type(info[key]) is unicode:
                info[key] = info[key].encode(encoding, errors)

        return info

    def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="strict"):
        """Return a tar header as a string of 512 byte blocks.
        """
        info = self.get_info(encoding, errors)

        if format == USTAR_FORMAT:
            return self.create_ustar_header(info)
        elif format == GNU_FORMAT:
            return self.create_gnu_header(info)
        elif format == PAX_FORMAT:
            return self.create_pax_header(info, encoding, errors)
        else:
            raise ValueError("invalid format")

    def create_ustar_header(self, info):
        """Return the object as a ustar header block.
        """
        info["magic"] = POSIX_MAGIC

        if len(info["linkname"]) > LENGTH_LINK:
            raise ValueError("linkname is too long")

        if len(info["name"]) > LENGTH_NAME:
            info["prefix"], info["name"] = self._posix_split_name(info["name"])

        return self._create_header(info, USTAR_FORMAT)

    def create_gnu_header(self, info):
        """Return the object as a GNU header block sequence.
        """
        info["magic"] = GNU_MAGIC

        buf = ""
        if len(info["linkname"]) > LENGTH_LINK:
            buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK)

        if len(info["name"]) > LENGTH_NAME:
            buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME)

        return buf + self._create_header(info, GNU_FORMAT)

    def create_pax_header(self, info, encoding, errors):
        """Return the object as a ustar header block. If it cannot be
        represented this way, prepend a pax extended header sequence
        with supplement information.
        """
        info["magic"] = POSIX_MAGIC
        pax_headers = self.pax_headers.copy()

        # Test string fields for values that exceed the field length or cannot
        # be represented in ASCII encoding.
        for name, hname, length in (
                ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK),
                ("uname", "uname", 32), ("gname", "gname", 32)):

            if hname in pax_headers:
                # The pax header has priority.
                continue

            val = info[name].decode(encoding, errors)

            # Try to encode the string as ASCII.
            try:
                val.encode("ascii")
            except UnicodeEncodeError:
                pax_headers[hname] = val
                continue

            if len(info[name]) > length:
                pax_headers[hname] = val

        # Test number fields for values that exceed the field limit or values
        # that like to be stored as float.
        for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)):
            if name in pax_headers:
                # The pax header has priority. Avoid overflow.
                info[name] = 0
                continue

            val = info[name]
            if not 0 <= val < 8 ** (digits - 1) or isinstance(val, float):
                pax_headers[name] = unicode(val)
                info[name] = 0

        # Create a pax extended header if necessary.
        if pax_headers:
            buf = self._create_pax_generic_header(pax_headers)
        else:
            buf = ""

        return buf + self._create_header(info, USTAR_FORMAT)

    @classmethod
    def create_pax_global_header(cls, pax_headers):
        """Return the object as a pax global header block sequence.
        """
        return cls._create_pax_generic_header(pax_headers, type=XGLTYPE)

    def _posix_split_name(self, name):
        """Split a name longer than 100 chars into a prefix
        and a name part.
        """
        prefix = name[:LENGTH_PREFIX + 1]
        while prefix and prefix[-1] != "/":
            prefix = prefix[:-1]

        name = name[len(prefix):]
        prefix = prefix[:-1]

        if not prefix or len(name) > LENGTH_NAME:
            raise ValueError("name is too long")
        return prefix, name

    @staticmethod
    def _create_header(info, format):
        """Return a header block. info is a dictionary with file
        information, format must be one of the *_FORMAT constants.
        """
        parts = [
            stn(info.get("name", ""), 100),
            itn(info.get("mode", 0) & 07777, 8, format),
            itn(info.get("uid", 0), 8, format),
            itn(info.get("gid", 0), 8, format),
            itn(info.get("size", 0), 12, format),
            itn(info.get("mtime", 0), 12, format),
  961. " ", # checksum field
            info.get("type", REGTYPE),
            stn(info.get("linkname", ""), 100),
            stn(info.get("magic", POSIX_MAGIC), 8),
            stn(info.get("uname", ""), 32),
            stn(info.get("gname", ""), 32),
            itn(info.get("devmajor", 0), 8, format),
            itn(info.get("devminor", 0), 8, format),
            stn(info.get("prefix", ""), 155)
        ]

        buf = struct.pack("%ds" % BLOCKSIZE, "".join(parts))
        chksum = calc_chksums(buf[-BLOCKSIZE:])[0]
        buf = buf[:-364] + "%06o\0" % chksum + buf[-357:]
        return buf

    @staticmethod
    def _create_payload(payload):
        """Return the string payload filled with zero bytes
        up to the next 512 byte border.
        """
        blocks, remainder = divmod(len(payload), BLOCKSIZE)
        if remainder > 0:
            payload += (BLOCKSIZE - remainder) * NUL
        return payload

    @classmethod
    def _create_gnu_long_header(cls, name, type):
        """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence
        for name.
        """
        name += NUL

        info = {}
        info["name"] = "././@LongLink"
        info["type"] = type
        info["size"] = len(name)
        info["magic"] = GNU_MAGIC

        # create extended header + name blocks.
        return cls._create_header(info, USTAR_FORMAT) + \
               cls._create_payload(name)

    @classmethod
    def _create_pax_generic_header(cls, pax_headers, type=XHDTYPE):
        """Return a POSIX.1-2001 extended or global header sequence
        that contains a list of keyword, value pairs. The values
        must be unicode objects.
        """
        records = []
        for keyword, value in pax_headers.iteritems():
            keyword = keyword.encode("utf8")
            value = value.encode("utf8")
            l = len(keyword) + len(value) + 3 # ' ' + '=' + '\n'
            n = p = 0
            while True:
                n = l + len(str(p))
                if n == p:
                    break
                p = n
            records.append("%d %s=%s\n" % (p, keyword, value))
        records = "".join(records)

        # We use a hardcoded "././@PaxHeader" name like star does
        # instead of the one that POSIX recommends.
        info = {}
        info["name"] = "././@PaxHeader"
        info["type"] = type
        info["size"] = len(records)
        info["magic"] = POSIX_MAGIC

        # Create pax header + record blocks.
        return cls._create_header(info, USTAR_FORMAT) + \
               cls._create_payload(records)
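
    # Illustrative example of the record format built above (the keyword/value
    # pair is made up): for u"path" -> u"foo/bar" the emitted record is
    # "16 path=foo/bar\n", where 16 is the length of the complete record
    # including the length field itself and the trailing newline; the while
    # loop above searches for that self-referential length.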

    @classmethod
    def frombuf(cls, buf):
        """Construct a TarInfo object from a 512 byte string buffer.
        """
        if len(buf) == 0:
            raise EmptyHeaderError("empty header")
        if len(buf) != BLOCKSIZE:
            raise TruncatedHeaderError("truncated header")
        if buf.count(NUL) == BLOCKSIZE:
            raise EOFHeaderError("end of file header")

        chksum = nti(buf[148:156])
        if chksum not in calc_chksums(buf):
            raise InvalidHeaderError("bad checksum")

        obj = cls()
        obj.buf = buf
        obj.name = nts(buf[0:100])
        obj.mode = nti(buf[100:108])
        obj.uid = nti(buf[108:116])
        obj.gid = nti(buf[116:124])
        obj.size = nti(buf[124:136])
        obj.mtime = nti(buf[136:148])
        obj.chksum = chksum
        obj.type = buf[156:157]
        obj.linkname = nts(buf[157:257])
        obj.uname = nts(buf[265:297])
        obj.gname = nts(buf[297:329])
        obj.devmajor = nti(buf[329:337])
        obj.devminor = nti(buf[337:345])
        prefix = nts(buf[345:500])

        # Old V7 tar format represents a directory as a regular
        # file with a trailing slash.
        if obj.type == AREGTYPE and obj.name.endswith("/"):
            obj.type = DIRTYPE

        # Remove redundant slashes from directories.
        if obj.isdir():
            obj.name = obj.name.rstrip("/")

        # Reconstruct a ustar longname.
        if prefix and obj.type not in GNU_TYPES:
            obj.name = prefix + "/" + obj.name
        return obj

    @classmethod
    def fromtarfile(cls, tarfile):
        """Return the next TarInfo object from TarFile object
        tarfile.
        """
        buf = tarfile.fileobj.read(BLOCKSIZE)
        obj = cls.frombuf(buf)
        obj.offset = tarfile.fileobj.tell() - BLOCKSIZE
        return obj._proc_member(tarfile)

    #--------------------------------------------------------------------------
    # The following are methods that are called depending on the type of a
    # member. The entry point is _proc_member() which can be overridden in a
    # subclass to add custom _proc_*() methods. A _proc_*() method MUST
    # implement the following
    # operations:
    # 1. Set self.offset_data to the position where the data blocks begin,
    #    if there is data that follows.
    # 2. Set tarfile.offset to the position where the next member's header will
    #    begin.
    # 3. Return self or another valid TarInfo object.
    def _proc_member(self, tarfile):
        """Choose the right processing method depending on
        the type and call it.
        """
        if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK):
            return self._proc_gnulong(tarfile)
        elif self.type == GNUTYPE_SPARSE:
            return self._proc_sparse(tarfile)
        elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE):
            return self._proc_pax(tarfile)
        else:
            return self._proc_builtin(tarfile)

    def _proc_builtin(self, tarfile):
        """Process a builtin type or an unknown type which
        will be treated as a regular file.
        """
        self.offset_data = tarfile.fileobj.tell()
        offset = self.offset_data
        if self.isreg() or self.type not in SUPPORTED_TYPES:
            # Skip the following data blocks.
            offset += self._block(self.size)
        tarfile.offset = offset

        # Patch the TarInfo object with saved global
        # header information.
        self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors)

        return self

    def _proc_gnulong(self, tarfile):
        """Process the blocks that hold a GNU longname
        or longlink member.
        """
        buf = tarfile.fileobj.read(self._block(self.size))

        # Fetch the next header and process it.
        try:
            next = self.fromtarfile(tarfile)
        except HeaderError:
            raise SubsequentHeaderError("missing or bad subsequent header")

        # Patch the TarInfo object from the next header with
        # the longname information.
        next.offset = self.offset
        if self.type == GNUTYPE_LONGNAME:
            next.name = nts(buf)
        elif self.type == GNUTYPE_LONGLINK:
            next.linkname = nts(buf)

        return next

    def _proc_sparse(self, tarfile):
        """Process a GNU sparse header plus extra headers.
        """
        buf = self.buf
        sp = _ringbuffer()
        pos = 386
        lastpos = 0L
        realpos = 0L
        # There are 4 possible sparse structs in the
        # first header.
        for i in xrange(4):
            try:
                offset = nti(buf[pos:pos + 12])
                numbytes = nti(buf[pos + 12:pos + 24])
            except ValueError:
                break
            if offset > lastpos:
                sp.append(_hole(lastpos, offset - lastpos))
            sp.append(_data(offset, numbytes, realpos))
            realpos += numbytes
            lastpos = offset + numbytes
            pos += 24

        isextended = ord(buf[482])
        origsize = nti(buf[483:495])

        # If the isextended flag is given,
        # there are extra headers to process.
        while isextended == 1:
            buf = tarfile.fileobj.read(BLOCKSIZE)
            pos = 0
            for i in xrange(21):
                try:
                    offset = nti(buf[pos:pos + 12])
                    numbytes = nti(buf[pos + 12:pos + 24])
                except ValueError:
                    break
                if offset > lastpos:
                    sp.append(_hole(lastpos, offset - lastpos))
                sp.append(_data(offset, numbytes, realpos))
                realpos += numbytes
                lastpos = offset + numbytes
                pos += 24
            isextended = ord(buf[504])

        if lastpos < origsize:
            sp.append(_hole(lastpos, origsize - lastpos))

        self.sparse = sp

        self.offset_data = tarfile.fileobj.tell()
        tarfile.offset = self.offset_data + self._block(self.size)
        self.size = origsize

        return self

    def _proc_pax(self, tarfile):
        """Process an extended or global header as described in
        POSIX.1-2001.
        """
        # Read the header information.
        buf = tarfile.fileobj.read(self._block(self.size))

        # A pax header stores supplemental information for either
        # the following file (extended) or all following files
        # (global).
        if self.type == XGLTYPE:
            pax_headers = tarfile.pax_headers
        else:
            pax_headers = tarfile.pax_headers.copy()

        # Parse pax header information. A record looks like that:
        # "%d %s=%s\n" % (length, keyword, value). length is the size
        # of the complete record including the length field itself and
        # the newline. keyword and value are both UTF-8 encoded strings.
        regex = re.compile(r"(\d+) ([^=]+)=", re.U)
        pos = 0
        while True:
            match = regex.match(buf, pos)
            if not match:
                break

            length, keyword = match.groups()
            length = int(length)
            value = buf[match.end(2) + 1:match.start(1) + length - 1]

            keyword = keyword.decode("utf8")
            value = value.decode("utf8")

            pax_headers[keyword] = value
            pos += length

        # Fetch the next header.
        try:
            next = self.fromtarfile(tarfile)
        except HeaderError:
            raise SubsequentHeaderError("missing or bad subsequent header")

        if self.type in (XHDTYPE, SOLARIS_XHDTYPE):
            # Patch the TarInfo object with the extended header info.
            next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors)
            next.offset = self.offset

            if "size" in pax_headers:
                # If the extended header replaces the size field,
                # we need to recalculate the offset where the next
                # header starts.
                offset = next.offset_data
                if next.isreg() or next.type not in SUPPORTED_TYPES:
                    offset += next._block(next.size)
                tarfile.offset = offset

        return next

    def _apply_pax_info(self, pax_headers, encoding, errors):
        """Replace fields with supplemental information from a previous
        pax extended or global header.
        """
        for keyword, value in pax_headers.iteritems():
            if keyword not in PAX_FIELDS:
                continue

            if keyword == "path":
                value = value.rstrip("/")

            if keyword in PAX_NUMBER_FIELDS:
                try:
                    value = PAX_NUMBER_FIELDS[keyword](value)
                except ValueError:
                    value = 0
            else:
                value = uts(value, encoding, errors)

            setattr(self, keyword, value)

        self.pax_headers = pax_headers.copy()

    def _block(self, count):
        """Round up a byte count by BLOCKSIZE and return it,
        e.g. _block(834) => 1024.
        """
        blocks, remainder = divmod(count, BLOCKSIZE)
        if remainder:
            blocks += 1
        return blocks * BLOCKSIZE

    def isreg(self):
        return self.type in REGULAR_TYPES
    def isfile(self):
        return self.isreg()
    def isdir(self):
        return self.type == DIRTYPE
    def issym(self):
        return self.type == SYMTYPE
    def islnk(self):
        return self.type == LNKTYPE
    def ischr(self):
        return self.type == CHRTYPE
    def isblk(self):
        return self.type == BLKTYPE
    def isfifo(self):
        return self.type == FIFOTYPE
    def issparse(self):
        return self.type == GNUTYPE_SPARSE
    def isdev(self):
        return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE)
# class TarInfo

class TarFile(object):
    """The TarFile Class provides an interface to tar archives.
    """

    debug = 0 # May be set from 0 (no msgs) to 3 (all msgs)

    dereference = False # If true, add content of linked file to the
                        # tar file, else the link.

    ignore_zeros = False # If true, skips empty or invalid blocks and
                         # continues processing.

    errorlevel = 1 # If 0, fatal errors only appear in debug
                   # messages (if debug >= 0). If > 0, errors
                   # are passed to the caller as exceptions.

    format = DEFAULT_FORMAT # The format to use when creating an archive.

    encoding = ENCODING # Encoding for 8-bit character strings.

    errors = None # Error handler for unicode conversion.

    tarinfo = TarInfo # The default TarInfo class to use.

    fileobject = ExFileObject # The default ExFileObject class to use.

    def __init__(self, name=None, mode="r", fileobj=None, format=None,
            tarinfo=None, dereference=None, ignore_zeros=None, encoding=None,
            errors=None, pax_headers=None, debug=None, errorlevel=None):
        """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to
        read from an existing archive, 'a' to append data to an existing
        file or 'w' to create a new file overwriting an existing one. `mode'
        defaults to 'r'.
        If `fileobj' is given, it is used for reading or writing data. If it
        can be determined, `mode' is overridden by `fileobj's mode.
        `fileobj' is not closed, when TarFile is closed.
        """
        if len(mode) > 1 or mode not in "raw":
            raise ValueError("mode must be 'r', 'a' or 'w'")
        self.mode = mode
        self._mode = {"r": "rb", "a": "r+b", "w": "wb"}[mode]

        if not fileobj:
            if self.mode == "a" and not os.path.exists(name):
                # Create nonexistent files in append mode.
                self.mode = "w"
                self._mode = "wb"
            fileobj = bltn_open(name, self._mode)
            self._extfileobj = False
        else:
            if name is None and hasattr(fileobj, "name"):
                name = fileobj.name
            if hasattr(fileobj, "mode"):
                self._mode = fileobj.mode
            self._extfileobj = True
        self.name = os.path.abspath(name) if name else None
        self.fileobj = fileobj

        # Init attributes.
        if format is not None:
            self.format = format
        if tarinfo is not None:
            self.tarinfo = tarinfo
        if dereference is not None:
            self.dereference = dereference
        if ignore_zeros is not None:
            self.ignore_zeros = ignore_zeros
        if encoding is not None:
            self.encoding = encoding

        if errors is not None:
            self.errors = errors
        elif mode == "r":
            self.errors = "utf-8"
        else:
            self.errors = "strict"

        if pax_headers is not None and self.format == PAX_FORMAT:
            self.pax_headers = pax_headers
        else:
            self.pax_headers = {}

        if debug is not None:
            self.debug = debug
        if errorlevel is not None:
            self.errorlevel = errorlevel

        # Init datastructures.
        self.closed = False
        self.members = [] # list of members as TarInfo objects
        self._loaded = False # flag if all members have been read
        self.offset = self.fileobj.tell()
                             # current position in the archive file
        self.inodes = {} # dictionary caching the inodes of
                         # archive members already added

        try:
            if self.mode == "r":
                self.firstmember = None
                self.firstmember = self.next()

            if self.mode == "a":
                # Move to the end of the archive,
                # before the first empty block.
                while True:
                    self.fileobj.seek(self.offset)
                    try:
                        tarinfo = self.tarinfo.fromtarfile(self)
                        self.members.append(tarinfo)
                    except EOFHeaderError:
                        self.fileobj.seek(self.offset)
                        break
                    except HeaderError, e:
                        raise ReadError(str(e))

            if self.mode in "aw":
                self._loaded = True

                if self.pax_headers:
                    buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy())
                    self.fileobj.write(buf)
                    self.offset += len(buf)
        except:
            if not self._extfileobj:
                self.fileobj.close()
            self.closed = True
            raise

    def _getposix(self):
        return self.format == USTAR_FORMAT
    def _setposix(self, value):
        import warnings
        warnings.warn("use the format attribute instead", DeprecationWarning,
                      2)
        if value:
            self.format = USTAR_FORMAT
        else:
            self.format = GNU_FORMAT
    posix = property(_getposix, _setposix)

    #--------------------------------------------------------------------------
    # Below are the classmethods which act as alternate constructors to the
    # TarFile class. The open() method is the only one that is needed for
    # public use; it is the "super"-constructor and is able to select an
    # adequate "sub"-constructor for a particular compression using the mapping
    # from OPEN_METH.
    #
    # This concept allows one to subclass TarFile without losing the comfort of
    # the super-constructor. A sub-constructor is registered and made available
    # by adding it to the mapping in OPEN_METH.

    @classmethod
    def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs):
        """Open a tar archive for reading, writing or appending. Return
        an appropriate TarFile class.

        mode:
        'r' or 'r:*' open for reading with transparent compression
        'r:'         open for reading exclusively uncompressed
        'r:gz'       open for reading with gzip compression
        'r:bz2'      open for reading with bzip2 compression
        'a' or 'a:'  open for appending, creating the file if necessary
        'w' or 'w:'  open for writing without compression
        'w:gz'       open for writing with gzip compression
        'w:bz2'      open for writing with bzip2 compression

        'r|*'        open a stream of tar blocks with transparent compression
        'r|'         open an uncompressed stream of tar blocks for reading
        'r|gz'       open a gzip compressed stream of tar blocks
        'r|bz2'      open a bzip2 compressed stream of tar blocks
        'w|'         open an uncompressed stream for writing
        'w|gz'       open a gzip compressed stream for writing
        'w|bz2'      open a bzip2 compressed stream for writing
        """

        if not name and not fileobj:
            raise ValueError("nothing to open")

        if mode in ("r", "r:*"):
            # Find out which *open() is appropriate for opening the file.
            for comptype in cls.OPEN_METH:
                func = getattr(cls, cls.OPEN_METH[comptype])
                if fileobj is not None:
                    saved_pos = fileobj.tell()
                try:
                    return func(name, "r", fileobj, **kwargs)
                except (ReadError, CompressionError), e:
                    if fileobj is not None:
                        fileobj.seek(saved_pos)
                    continue
            raise ReadError("file could not be opened successfully")

        elif ":" in mode:
            filemode, comptype = mode.split(":", 1)
            filemode = filemode or "r"
            comptype = comptype or "tar"

            # Select the *open() function according to
            # given compression.
            if comptype in cls.OPEN_METH:
                func = getattr(cls, cls.OPEN_METH[comptype])
            else:
                raise CompressionError("unknown compression type %r" % comptype)
            return func(name, filemode, fileobj, **kwargs)

        elif "|" in mode:
            filemode, comptype = mode.split("|", 1)
            filemode = filemode or "r"
            comptype = comptype or "tar"

            if filemode not in "rw":
                raise ValueError("mode must be 'r' or 'w'")

            t = cls(name, filemode,
                    _Stream(name, filemode, comptype, fileobj, bufsize),
                    **kwargs)
            t._extfileobj = False
            return t

        elif mode in "aw":
            return cls.taropen(name, mode, fileobj, **kwargs)

        raise ValueError("undiscernible mode")

    @classmethod
    def taropen(cls, name, mode="r", fileobj=None, **kwargs):
        """Open uncompressed tar archive name for reading or writing.
        """
        if len(mode) > 1 or mode not in "raw":
            raise ValueError("mode must be 'r', 'a' or 'w'")
        return cls(name, mode, fileobj, **kwargs)

    @classmethod
    def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
        """Open gzip compressed tar archive name for reading or writing.
        Appending is not allowed.
        """
        if len(mode) > 1 or mode not in "rw":
            raise ValueError("mode must be 'r' or 'w'")

        try:
            import gzip
            gzip.GzipFile
        except (ImportError, AttributeError):
            raise CompressionError("gzip module is not available")

        try:
            t = cls.taropen(name, mode,
                gzip.GzipFile(name, mode, compresslevel, fileobj),
                **kwargs)
        except IOError:
            raise ReadError("not a gzip file")
        t._extfileobj = False
        return t

    @classmethod
    def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
        """Open bzip2 compressed tar archive name for reading or writing.
        Appending is not allowed.
        """
        if len(mode) > 1 or mode not in "rw":
            raise ValueError("mode must be 'r' or 'w'.")

        try:
            import bz2
        except ImportError:
            raise CompressionError("bz2 module is not available")

        if fileobj is not None:
            fileobj = _BZ2Proxy(fileobj, mode)
        else:
            fileobj = bz2.BZ2File(name, mode, compresslevel=compresslevel)

        try:
            t = cls.taropen(name, mode, fileobj, **kwargs)
        except (IOError, EOFError):
            raise ReadError("not a bzip2 file")
        t._extfileobj = False
        return t

    # All *open() methods are registered here.
    OPEN_METH = {
        "tar": "taropen", # uncompressed tar
        "gz": "gzopen", # gzip compressed tar
        "bz2": "bz2open" # bzip2 compressed tar
    }
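
    # Illustrative sketch only (not part of the original module): a subclass
    # could register an additional sub-constructor by extending this mapping,
    # assuming it also defines a matching xzopen() classmethod:
    #
    #     class XZTarFile(TarFile):
    #         OPEN_METH = dict(TarFile.OPEN_METH, xz="xzopen")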

    #--------------------------------------------------------------------------
    # The public methods which TarFile provides:

    def close(self):
        """Close the TarFile. In write-mode, two finishing zero blocks are
           appended to the archive.
        """
        if self.closed:
            return

        if self.mode in "aw":
            self.fileobj.write(NUL * (BLOCKSIZE * 2))
            self.offset += (BLOCKSIZE * 2)
            # fill up the end with zero-blocks
            # (like option -b20 for tar does)
            blocks, remainder = divmod(self.offset, RECORDSIZE)
            if remainder > 0:
                self.fileobj.write(NUL * (RECORDSIZE - remainder))

        if not self._extfileobj:
            self.fileobj.close()
        self.closed = True

    def getmember(self, name):
        """Return a TarInfo object for member `name'. If `name' can not be
           found in the archive, KeyError is raised. If a member occurs more
           than once in the archive, its last occurrence is assumed to be the
           most up-to-date version.
        """
        tarinfo = self._getmember(name)
        if tarinfo is None:
            raise KeyError("filename %r not found" % name)
        return tarinfo

    def getmembers(self):
        """Return the members of the archive as a list of TarInfo objects. The
           list has the same order as the members in the archive.
        """
        self._check()
        if not self._loaded:    # if we want to obtain a list of
            self._load()        # all members, we first have to
                                # scan the whole archive.
        return self.members

    def getnames(self):
        """Return the members of the archive as a list of their names. It has
           the same order as the list returned by getmembers().
        """
        return [tarinfo.name for tarinfo in self.getmembers()]
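
    # Editor's usage sketch (hypothetical archive name, not in the original):
    #
    #     tf = TarFile.open("sample.tar")
    #     names = tf.getnames()           # member names in archive order
    #     info = tf.getmember(names[0])   # TarInfo; KeyError if not present
    #
    # Both calls scan the whole archive on first use (see _load() below).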

    def gettarinfo(self, name=None, arcname=None, fileobj=None):
        """Create a TarInfo object for either the file `name' or the file
           object `fileobj' (using os.fstat on its file descriptor). You can
           modify some of the TarInfo's attributes before you add it using
           addfile(). If given, `arcname' specifies an alternative name for the
           file in the archive.
        """
        self._check("aw")

        # When fileobj is given, replace name by
        # fileobj's real name.
        if fileobj is not None:
            name = fileobj.name

        # Building the name of the member in the archive.
        # Backward slashes are converted to forward slashes,
        # absolute paths are turned into relative paths.
        if arcname is None:
            arcname = name
        drv, arcname = os.path.splitdrive(arcname)
        arcname = arcname.replace(os.sep, "/")
        arcname = arcname.lstrip("/")

        # Now, fill the TarInfo object with
        # information specific for the file.
        tarinfo = self.tarinfo()
        tarinfo.tarfile = self

        # Use os.stat or os.lstat, depending on platform
        # and if symlinks shall be resolved.
        if fileobj is None:
            if hasattr(os, "lstat") and not self.dereference:
                statres = os.lstat(name)
            else:
                statres = os.stat(name)
        else:
            statres = os.fstat(fileobj.fileno())
        linkname = ""

        stmd = statres.st_mode
        if stat.S_ISREG(stmd):
            inode = (statres.st_ino, statres.st_dev)
            if not self.dereference and statres.st_nlink > 1 and \
                    inode in self.inodes and arcname != self.inodes[inode]:
                # Is it a hardlink to an already
                # archived file?
                type = LNKTYPE
                linkname = self.inodes[inode]
            else:
                # The inode is added only if it is valid.
                # For win32 it is always 0.
                type = REGTYPE
                if inode[0]:
                    self.inodes[inode] = arcname
        elif stat.S_ISDIR(stmd):
            type = DIRTYPE
        elif stat.S_ISFIFO(stmd):
            type = FIFOTYPE
        elif stat.S_ISLNK(stmd):
            type = SYMTYPE
            linkname = os.readlink(name)
        elif stat.S_ISCHR(stmd):
            type = CHRTYPE
        elif stat.S_ISBLK(stmd):
            type = BLKTYPE
        else:
            return None

        # Fill the TarInfo object with all
        # information we can get.
        tarinfo.name = arcname
        tarinfo.mode = stmd
        tarinfo.uid = statres.st_uid
        tarinfo.gid = statres.st_gid
        if type == REGTYPE:
            tarinfo.size = statres.st_size
        else:
            tarinfo.size = 0L
        tarinfo.mtime = statres.st_mtime
        tarinfo.type = type
        tarinfo.linkname = linkname
        if pwd:
            try:
                tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0]
            except KeyError:
                pass
        if grp:
            try:
                tarinfo.gname = grp.getgrgid(tarinfo.gid)[0]
            except KeyError:
                pass

        if type in (CHRTYPE, BLKTYPE):
            if hasattr(os, "major") and hasattr(os, "minor"):
                tarinfo.devmajor = os.major(statres.st_rdev)
                tarinfo.devminor = os.minor(statres.st_rdev)
        return tarinfo
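
    # Editor's usage sketch (file names are hypothetical, not in the original):
    # gettarinfo() plus addfile() lets header fields be adjusted before writing.
    #
    #     ti = tf.gettarinfo("data.bin", arcname="pkg/data.bin")
    #     ti.uname = ti.gname = "root"
    #     with bltn_open("data.bin", "rb") as f:
    #         tf.addfile(ti, f)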

    def list(self, verbose=True):
        """Print a table of contents to sys.stdout. If `verbose' is False, only
           the names of the members are printed. If it is True, an `ls -l'-like
           output is produced.
        """
        self._check()

        for tarinfo in self:
            if verbose:
                print filemode(tarinfo.mode),
                print "%s/%s" % (tarinfo.uname or tarinfo.uid,
                                 tarinfo.gname or tarinfo.gid),
                if tarinfo.ischr() or tarinfo.isblk():
                    print "%10s" % ("%d,%d" \
                                    % (tarinfo.devmajor, tarinfo.devminor)),
                else:
                    print "%10d" % tarinfo.size,
                print "%d-%02d-%02d %02d:%02d:%02d" \
                      % time.localtime(tarinfo.mtime)[:6],

            print tarinfo.name + ("/" if tarinfo.isdir() else ""),

            if verbose:
                if tarinfo.issym():
                    print "->", tarinfo.linkname,
                if tarinfo.islnk():
                    print "link to", tarinfo.linkname,
            print

    def add(self, name, arcname=None, recursive=True, exclude=None, filter=None):
        """Add the file `name' to the archive. `name' may be any type of file
           (directory, fifo, symbolic link, etc.). If given, `arcname'
           specifies an alternative name for the file in the archive.
           Directories are added recursively by default. This can be avoided by
           setting `recursive' to False. `exclude' is a function that should
           return True for each filename to be excluded. `filter' is a function
           that expects a TarInfo object argument and returns the changed
           TarInfo object; if it returns None, the TarInfo object will be
           excluded from the archive.
        """
        self._check("aw")

        if arcname is None:
            arcname = name

        # Exclude pathnames.
        if exclude is not None:
            import warnings
            warnings.warn("use the filter argument instead",
                    DeprecationWarning, 2)
            if exclude(name):
                self._dbg(2, "tarfile: Excluded %r" % name)
                return

        # Skip if somebody tries to archive the archive...
        if self.name is not None and os.path.abspath(name) == self.name:
            self._dbg(2, "tarfile: Skipped %r" % name)
            return

        self._dbg(1, name)

        # Create a TarInfo object from the file.
        tarinfo = self.gettarinfo(name, arcname)

        if tarinfo is None:
            self._dbg(1, "tarfile: Unsupported type %r" % name)
            return

        # Change or exclude the TarInfo object.
        if filter is not None:
            tarinfo = filter(tarinfo)
            if tarinfo is None:
                self._dbg(2, "tarfile: Excluded %r" % name)
                return

        # Append the tar header and data to the archive.
        if tarinfo.isreg():
            f = bltn_open(name, "rb")
            self.addfile(tarinfo, f)
            f.close()

        elif tarinfo.isdir():
            self.addfile(tarinfo)
            if recursive:
                for f in os.listdir(name):
                    self.add(os.path.join(name, f), os.path.join(arcname, f),
                            recursive, exclude, filter)

        else:
            self.addfile(tarinfo)
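
    # Editor's usage sketch for the `filter' argument (names are hypothetical):
    #
    #     def reset_owner(tarinfo):
    #         tarinfo.uid = tarinfo.gid = 0
    #         tarinfo.uname = tarinfo.gname = "root"
    #         return tarinfo          # returning None would drop the member
    #
    #     tf.add("build", arcname="build", filter=reset_owner)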

    def addfile(self, tarinfo, fileobj=None):
        """Add the TarInfo object `tarinfo' to the archive. If `fileobj' is
           given, tarinfo.size bytes are read from it and added to the archive.
           You can create TarInfo objects using gettarinfo().
           On Windows platforms, `fileobj' should always be opened with mode
           'rb' to avoid irritation about the file size.
        """
        self._check("aw")

        tarinfo = copy.copy(tarinfo)

        buf = tarinfo.tobuf(self.format, self.encoding, self.errors)
        self.fileobj.write(buf)
        self.offset += len(buf)

        # If there's data to follow, append it.
        if fileobj is not None:
            copyfileobj(fileobj, self.fileobj, tarinfo.size)
            blocks, remainder = divmod(tarinfo.size, BLOCKSIZE)
            if remainder > 0:
                self.fileobj.write(NUL * (BLOCKSIZE - remainder))
                blocks += 1
            self.offset += blocks * BLOCKSIZE

        self.members.append(tarinfo)
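
    # Editor's usage sketch: addfile() with an in-memory payload (assumes
    # cStringIO and the TarInfo class defined earlier; names are illustrative):
    #
    #     from cStringIO import StringIO
    #     data = "hello world\n"
    #     ti = TarInfo("notes/hello.txt")
    #     ti.size = len(data)
    #     tf.addfile(ti, StringIO(data))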

    def extractall(self, path=".", members=None):
        """Extract all members from the archive to the current working
           directory and set owner, modification time and permissions on
           directories afterwards. `path' specifies a different directory
           to extract to. `members' is optional and must be a subset of the
           list returned by getmembers().
        """
        directories = []

        if members is None:
            members = self

        for tarinfo in members:
            if tarinfo.isdir():
                # Extract directories with a safe mode.
                directories.append(tarinfo)
                tarinfo = copy.copy(tarinfo)
                tarinfo.mode = 0700
            self.extract(tarinfo, path)

        # Reverse sort directories.
        directories.sort(key=operator.attrgetter('name'))
        directories.reverse()

        # Set correct owner, mtime and filemode on directories.
        for tarinfo in directories:
            dirpath = os.path.join(path, tarinfo.name)
            try:
                self.chown(tarinfo, dirpath)
                self.utime(tarinfo, dirpath)
                self.chmod(tarinfo, dirpath)
            except ExtractError, e:
                if self.errorlevel > 1:
                    raise
                else:
                    self._dbg(1, "tarfile: %s" % e)
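
    # Editor's usage sketch (paths are hypothetical). Note that member names
    # are trusted as-is, so extracting archives from untrusted sources may
    # create files outside `path' (e.g. names starting with "/" or with "..").
    #
    #     tf = TarFile.open("sample.tar.gz", "r:gz")
    #     tf.extractall(path="/tmp/unpacked")
    #     tf.close()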

    def extract(self, member, path=""):
        """Extract a member from the archive to the current working directory,
           using its full name. Its file information is extracted as accurately
           as possible. `member' may be a filename or a TarInfo object. You can
           specify a different directory using `path'.
        """
        self._check("r")

        if isinstance(member, basestring):
            tarinfo = self.getmember(member)
        else:
            tarinfo = member

        # Prepare the link target for makelink().
        if tarinfo.islnk():
            tarinfo._link_target = os.path.join(path, tarinfo.linkname)

        try:
            self._extract_member(tarinfo, os.path.join(path, tarinfo.name))
        except EnvironmentError, e:
            if self.errorlevel > 0:
                raise
            else:
                if e.filename is None:
                    self._dbg(1, "tarfile: %s" % e.strerror)
                else:
                    self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename))
        except ExtractError, e:
            if self.errorlevel > 1:
                raise
            else:
                self._dbg(1, "tarfile: %s" % e)

    def extractfile(self, member):
        """Extract a member from the archive as a file object. `member' may be
           a filename or a TarInfo object. If `member' is a regular file, a
           file-like object is returned. If `member' is a link, a file-like
           object is constructed from the link's target. If `member' is none of
           the above, None is returned.
           The file-like object is read-only and provides the following
           methods: read(), readline(), readlines(), seek() and tell()
        """
        self._check("r")

        if isinstance(member, basestring):
            tarinfo = self.getmember(member)
        else:
            tarinfo = member

        if tarinfo.isreg():
            return self.fileobject(self, tarinfo)

        elif tarinfo.type not in SUPPORTED_TYPES:
            # If a member's type is unknown, it is treated as a
            # regular file.
            return self.fileobject(self, tarinfo)

        elif tarinfo.islnk() or tarinfo.issym():
            if isinstance(self.fileobj, _Stream):
                # A small but ugly workaround for the case that someone tries
                # to extract a (sym)link as a file-object from a non-seekable
                # stream of tar blocks.
                raise StreamError("cannot extract (sym)link as file object")
            else:
                # A (sym)link's file object is its target's file object.
                return self.extractfile(self._find_link_target(tarinfo))
        else:
            # If there's no data associated with the member (directory, chrdev,
            # blkdev, etc.), return None instead of a file object.
            return None
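
    # Editor's usage sketch: read a single member without writing it to disk
    # (the member name is hypothetical):
    #
    #     f = tf.extractfile("pkg/data.bin")
    #     if f is not None:            # None for directories, devices, etc.
    #         payload = f.read()
    #         f.close()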

    def _extract_member(self, tarinfo, targetpath):
        """Extract the TarInfo object tarinfo to a physical
           file called targetpath.
        """
        # Fetch the TarInfo object for the given name
        # and build the destination pathname, replacing
        # forward slashes with platform-specific separators.
        targetpath = targetpath.rstrip("/")
        targetpath = targetpath.replace("/", os.sep)

        # Create all upper directories.
        upperdirs = os.path.dirname(targetpath)
        if upperdirs and not os.path.exists(upperdirs):
            # Create directories that are not part of the archive with
            # default permissions.
            os.makedirs(upperdirs)

        if tarinfo.islnk() or tarinfo.issym():
            self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname))
        else:
            self._dbg(1, tarinfo.name)

        if tarinfo.isreg():
            self.makefile(tarinfo, targetpath)
        elif tarinfo.isdir():
            self.makedir(tarinfo, targetpath)
        elif tarinfo.isfifo():
            self.makefifo(tarinfo, targetpath)
        elif tarinfo.ischr() or tarinfo.isblk():
            self.makedev(tarinfo, targetpath)
        elif tarinfo.islnk() or tarinfo.issym():
            self.makelink(tarinfo, targetpath)
        elif tarinfo.type not in SUPPORTED_TYPES:
            self.makeunknown(tarinfo, targetpath)
        else:
            self.makefile(tarinfo, targetpath)

        self.chown(tarinfo, targetpath)
        if not tarinfo.issym():
            self.chmod(tarinfo, targetpath)
            self.utime(tarinfo, targetpath)

    #--------------------------------------------------------------------------
    # Below are the different file methods. They are called via
    # _extract_member() when extract() is called. They can be replaced in a
    # subclass to implement other functionality.

    def makedir(self, tarinfo, targetpath):
        """Make a directory called targetpath.
        """
        try:
            # Use a safe mode for the directory, the real mode is set
            # later in _extract_member().
            os.mkdir(targetpath, 0700)
        except EnvironmentError, e:
            if e.errno != errno.EEXIST:
                raise

    def makefile(self, tarinfo, targetpath):
        """Make a file called targetpath.
        """
        source = self.extractfile(tarinfo)
        target = bltn_open(targetpath, "wb")
        copyfileobj(source, target)
        source.close()
        target.close()

    def makeunknown(self, tarinfo, targetpath):
        """Make a file from a TarInfo object with an unknown type
           at targetpath.
        """
        self.makefile(tarinfo, targetpath)
        self._dbg(1, "tarfile: Unknown file type %r, " \
                     "extracted as regular file." % tarinfo.type)

    def makefifo(self, tarinfo, targetpath):
        """Make a fifo called targetpath.
        """
        if hasattr(os, "mkfifo"):
            os.mkfifo(targetpath)
        else:
            raise ExtractError("fifo not supported by system")

    def makedev(self, tarinfo, targetpath):
        """Make a character or block device called targetpath.
        """
        if not hasattr(os, "mknod") or not hasattr(os, "makedev"):
            raise ExtractError("special devices not supported by system")

        mode = tarinfo.mode
        if tarinfo.isblk():
            mode |= stat.S_IFBLK
        else:
            mode |= stat.S_IFCHR

        os.mknod(targetpath, mode,
                 os.makedev(tarinfo.devmajor, tarinfo.devminor))

    def makelink(self, tarinfo, targetpath):
        """Make a (symbolic) link called targetpath. If it cannot be created
           (platform limitation), we try to make a copy of the referenced file
           instead of a link.
        """
        if hasattr(os, "symlink") and hasattr(os, "link"):
            # For systems that support symbolic and hard links.
            if tarinfo.issym():
                if os.path.lexists(targetpath):
                    os.unlink(targetpath)
                os.symlink(tarinfo.linkname, targetpath)
            else:
                # See extract().
                if os.path.exists(tarinfo._link_target):
                    if os.path.lexists(targetpath):
                        os.unlink(targetpath)
                    os.link(tarinfo._link_target, targetpath)
                else:
                    self._extract_member(self._find_link_target(tarinfo), targetpath)
        else:
            try:
                self._extract_member(self._find_link_target(tarinfo), targetpath)
            except KeyError:
                raise ExtractError("unable to resolve link inside archive")

    def chown(self, tarinfo, targetpath):
        """Set owner of targetpath according to tarinfo.
        """
        if pwd and hasattr(os, "geteuid") and os.geteuid() == 0:
            # We have to be root to do so.
            try:
                g = grp.getgrnam(tarinfo.gname)[2]
            except KeyError:
                g = tarinfo.gid
            try:
                u = pwd.getpwnam(tarinfo.uname)[2]
            except KeyError:
                u = tarinfo.uid
            try:
                if tarinfo.issym() and hasattr(os, "lchown"):
                    os.lchown(targetpath, u, g)
                else:
                    if sys.platform != "os2emx":
                        os.chown(targetpath, u, g)
            except EnvironmentError, e:
                raise ExtractError("could not change owner")

    def chmod(self, tarinfo, targetpath):
        """Set file permissions of targetpath according to tarinfo.
        """
        if hasattr(os, 'chmod'):
            try:
                os.chmod(targetpath, tarinfo.mode)
            except EnvironmentError, e:
                raise ExtractError("could not change mode")

    def utime(self, tarinfo, targetpath):
        """Set modification time of targetpath according to tarinfo.
        """
        if not hasattr(os, 'utime'):
            return
        try:
            os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime))
        except EnvironmentError, e:
            raise ExtractError("could not change modification time")

    #--------------------------------------------------------------------------
    def next(self):
        """Return the next member of the archive as a TarInfo object, when
           TarFile is opened for reading. Return None if there is no more
           available.
        """
        self._check("ra")
        if self.firstmember is not None:
            m = self.firstmember
            self.firstmember = None
            return m

        # Read the next block.
        self.fileobj.seek(self.offset)
        tarinfo = None
        while True:
            try:
                tarinfo = self.tarinfo.fromtarfile(self)
            except EOFHeaderError, e:
                if self.ignore_zeros:
                    self._dbg(2, "0x%X: %s" % (self.offset, e))
                    self.offset += BLOCKSIZE
                    continue
            except InvalidHeaderError, e:
                if self.ignore_zeros:
                    self._dbg(2, "0x%X: %s" % (self.offset, e))
                    self.offset += BLOCKSIZE
                    continue
                elif self.offset == 0:
                    raise ReadError(str(e))
            except EmptyHeaderError:
                if self.offset == 0:
                    raise ReadError("empty file")
            except TruncatedHeaderError, e:
                if self.offset == 0:
                    raise ReadError(str(e))
            except SubsequentHeaderError, e:
                raise ReadError(str(e))
            break

        if tarinfo is not None:
            self.members.append(tarinfo)
        else:
            self._loaded = True

        return tarinfo

    #--------------------------------------------------------------------------
    # Little helper methods:

    def _getmember(self, name, tarinfo=None, normalize=False):
        """Find an archive member by name from bottom to top.
           If tarinfo is given, it is used as the starting point.
        """
        # Ensure that all members have been loaded.
        members = self.getmembers()

        # Limit the member search list up to tarinfo.
        if tarinfo is not None:
            members = members[:members.index(tarinfo)]

        if normalize:
            name = os.path.normpath(name)

        for member in reversed(members):
            if normalize:
                member_name = os.path.normpath(member.name)
            else:
                member_name = member.name

            if name == member_name:
                return member

    def _load(self):
        """Read through the entire archive file and look for readable
           members.
        """
        while True:
            tarinfo = self.next()
            if tarinfo is None:
                break
        self._loaded = True

    def _check(self, mode=None):
        """Check if TarFile is still open, and if the operation's mode
           corresponds to TarFile's mode.
        """
        if self.closed:
            raise IOError("%s is closed" % self.__class__.__name__)
        if mode is not None and self.mode not in mode:
            raise IOError("bad operation for mode %r" % self.mode)

    def _find_link_target(self, tarinfo):
        """Find the target member of a symlink or hardlink member in the
           archive.
        """
        if tarinfo.issym():
            # Always search the entire archive.
            linkname = os.path.dirname(tarinfo.name) + "/" + tarinfo.linkname
            limit = None
        else:
            # Search the archive before the link, because a hard link is
            # just a reference to an already archived file.
            linkname = tarinfo.linkname
            limit = tarinfo

        member = self._getmember(linkname, tarinfo=limit, normalize=True)
        if member is None:
            raise KeyError("linkname %r not found" % linkname)
        return member

    def __iter__(self):
        """Provide an iterator object.
        """
        if self._loaded:
            return iter(self.members)
        else:
            return TarIter(self)

    def _dbg(self, level, msg):
        """Write debugging output to sys.stderr.
        """
        if level <= self.debug:
            print >> sys.stderr, msg

    def __enter__(self):
        self._check()
        return self

    def __exit__(self, type, value, traceback):
        if type is None:
            self.close()
        else:
            # An exception occurred. We must not call close() because
            # it would try to write end-of-archive blocks and padding.
            if not self._extfileobj:
                self.fileobj.close()
            self.closed = True
# class TarFile
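
# Editor's usage sketch (archive name is hypothetical): TarFile supports the
# with-statement via __enter__/__exit__, and iterating over it yields TarInfo
# objects lazily through the TarIter class below.
#
#     with TarFile.open("sample.tar") as tf:
#         for tarinfo in tf:
#             print tarinfo.name, tarinfo.size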

class TarIter:
    """Iterator Class.

       for tarinfo in TarFile(...):
           suite...
    """

    def __init__(self, tarfile):
        """Construct a TarIter object.
        """
        self.tarfile = tarfile
        self.index = 0
    def __iter__(self):
        """Return iterator object.
        """
        return self
    def next(self):
        """Return the next item using TarFile's next() method.
           When all members have been read, set TarFile as _loaded.
        """
        # Fix for SF #1100429: Under rare circumstances it can
        # happen that getmembers() is called during iteration,
        # which will cause TarIter to stop prematurely.
        if not self.tarfile._loaded:
            tarinfo = self.tarfile.next()
            if not tarinfo:
                self.tarfile._loaded = True
                raise StopIteration
        else:
            try:
                tarinfo = self.tarfile.members[self.index]
            except IndexError:
                raise StopIteration
        self.index += 1
        return tarinfo

# Helper classes for sparse file support
class _section:
    """Base class for _data and _hole.
    """
    def __init__(self, offset, size):
        self.offset = offset
        self.size = size
    def __contains__(self, offset):
        return self.offset <= offset < self.offset + self.size

class _data(_section):
    """Represent a data section in a sparse file.
    """
    def __init__(self, offset, size, realpos):
        _section.__init__(self, offset, size)
        self.realpos = realpos

class _hole(_section):
    """Represent a hole section in a sparse file.
    """
    pass

class _ringbuffer(list):
    """Ringbuffer class which increases performance
       over a regular list.
    """
    def __init__(self):
        self.idx = 0
    def find(self, offset):
        idx = self.idx
        while True:
            item = self[idx]
            if offset in item:
                break
            idx += 1
            if idx == len(self):
                idx = 0
            if idx == self.idx:
                # End of File
                return None
        self.idx = idx
        return item

#---------------------------------------------
# zipfile compatible TarFile class
#---------------------------------------------
TAR_PLAIN = 0           # zipfile.ZIP_STORED
TAR_GZIPPED = 8         # zipfile.ZIP_DEFLATED
class TarFileCompat:
    """TarFile class compatible with standard module zipfile's
       ZipFile class.
    """
    def __init__(self, file, mode="r", compression=TAR_PLAIN):
        from warnings import warnpy3k
        warnpy3k("the TarFileCompat class has been removed in Python 3.0",
                 stacklevel=2)
        if compression == TAR_PLAIN:
            self.tarfile = TarFile.taropen(file, mode)
        elif compression == TAR_GZIPPED:
            self.tarfile = TarFile.gzopen(file, mode)
        else:
            raise ValueError("unknown compression constant")
        if mode[0:1] == "r":
            members = self.tarfile.getmembers()
            for m in members:
                m.filename = m.name
                m.file_size = m.size
                m.date_time = time.gmtime(m.mtime)[:6]
    def namelist(self):
        return map(lambda m: m.name, self.infolist())
    def infolist(self):
        return filter(lambda m: m.type in REGULAR_TYPES,
                      self.tarfile.getmembers())
    def printdir(self):
        self.tarfile.list()
    def testzip(self):
        return
    def getinfo(self, name):
        return self.tarfile.getmember(name)
    def read(self, name):
        return self.tarfile.extractfile(self.tarfile.getmember(name)).read()
    def write(self, filename, arcname=None, compress_type=None):
        self.tarfile.add(filename, arcname)
    def writestr(self, zinfo, bytes):
        try:
            from cStringIO import StringIO
        except ImportError:
            from StringIO import StringIO
        import calendar
        tinfo = TarInfo(zinfo.filename)
        tinfo.size = len(bytes)
        tinfo.mtime = calendar.timegm(zinfo.date_time)
        self.tarfile.addfile(tinfo, StringIO(bytes))
    def close(self):
        self.tarfile.close()
# class TarFileCompat

#--------------------
# exported functions
#--------------------
def is_tarfile(name):
    """Return True if name points to a tar archive that we
       are able to handle, else return False.
    """
    try:
        t = open(name)
        t.close()
        return True
    except TarError:
        return False

bltn_open = open
open = TarFile.open
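
# Editor's usage sketch for the module-level helpers above (path hypothetical).
# Note that `open' is rebound to TarFile.open, while the builtin remains
# available as `bltn_open':
#
#     if is_tarfile("sample.tar.bz2"):
#         tf = open("sample.tar.bz2", "r:bz2")
#         print tf.getnames()
#         tf.close()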