PageRenderTime 51ms CodeModel.GetById 23ms RepoModel.GetById 0ms app.codeStats 0ms

/lib-python/2.7/test/test_zlib.py

https://bitbucket.org/dac_io/pypy
Python | 572 lines | 567 code | 4 blank | 1 comment | 4 complexity | 9169d799afd6a410d8637ac0a2610db9 MD5 | raw file
import unittest
from test.test_support import TESTFN, run_unittest, import_module, unlink, requires
import binascii
import random
from test.test_support import precisionbigmemtest, _1G, _4G
import sys

# mmap is optional: platforms without it get mmap = None, and any test
# that needs it must check for that.
try:
    import mmap
except ImportError:
    mmap = None

# Skip the whole module cleanly when the interpreter was built without
# the zlib extension (import_module raises SkipTest in that case).
zlib = import_module('zlib')
  12. class ChecksumTestCase(unittest.TestCase):
  13. # checksum test cases
  14. def test_crc32start(self):
  15. self.assertEqual(zlib.crc32(""), zlib.crc32("", 0))
  16. self.assertTrue(zlib.crc32("abc", 0xffffffff))
  17. def test_crc32empty(self):
  18. self.assertEqual(zlib.crc32("", 0), 0)
  19. self.assertEqual(zlib.crc32("", 1), 1)
  20. self.assertEqual(zlib.crc32("", 432), 432)
  21. def test_adler32start(self):
  22. self.assertEqual(zlib.adler32(""), zlib.adler32("", 1))
  23. self.assertTrue(zlib.adler32("abc", 0xffffffff))
  24. def test_adler32empty(self):
  25. self.assertEqual(zlib.adler32("", 0), 0)
  26. self.assertEqual(zlib.adler32("", 1), 1)
  27. self.assertEqual(zlib.adler32("", 432), 432)
  28. def assertEqual32(self, seen, expected):
  29. # 32-bit values masked -- checksums on 32- vs 64- bit machines
  30. # This is important if bit 31 (0x08000000L) is set.
  31. self.assertEqual(seen & 0x0FFFFFFFFL, expected & 0x0FFFFFFFFL)
  32. def test_penguins(self):
  33. self.assertEqual32(zlib.crc32("penguin", 0), 0x0e5c1a120L)
  34. self.assertEqual32(zlib.crc32("penguin", 1), 0x43b6aa94)
  35. self.assertEqual32(zlib.adler32("penguin", 0), 0x0bcf02f6)
  36. self.assertEqual32(zlib.adler32("penguin", 1), 0x0bd602f7)
  37. self.assertEqual(zlib.crc32("penguin"), zlib.crc32("penguin", 0))
  38. self.assertEqual(zlib.adler32("penguin"),zlib.adler32("penguin",1))
  39. def test_abcdefghijklmnop(self):
  40. """test issue1202 compliance: signed crc32, adler32 in 2.x"""
  41. foo = 'abcdefghijklmnop'
  42. # explicitly test signed behavior
  43. self.assertEqual(zlib.crc32(foo), -1808088941)
  44. self.assertEqual(zlib.crc32('spam'), 1138425661)
  45. self.assertEqual(zlib.adler32(foo+foo), -721416943)
  46. self.assertEqual(zlib.adler32('spam'), 72286642)
  47. def test_same_as_binascii_crc32(self):
  48. foo = 'abcdefghijklmnop'
  49. self.assertEqual(binascii.crc32(foo), zlib.crc32(foo))
  50. self.assertEqual(binascii.crc32('spam'), zlib.crc32('spam'))
  51. def test_negative_crc_iv_input(self):
  52. # The range of valid input values for the crc state should be
  53. # -2**31 through 2**32-1 to allow inputs artifically constrained
  54. # to a signed 32-bit integer.
  55. self.assertEqual(zlib.crc32('ham', -1), zlib.crc32('ham', 0xffffffffL))
  56. self.assertEqual(zlib.crc32('spam', -3141593),
  57. zlib.crc32('spam', 0xffd01027L))
  58. self.assertEqual(zlib.crc32('spam', -(2**31)),
  59. zlib.crc32('spam', (2**31)))
  60. class ExceptionTestCase(unittest.TestCase):
  61. # make sure we generate some expected errors
  62. def test_badlevel(self):
  63. # specifying compression level out of range causes an error
  64. # (but -1 is Z_DEFAULT_COMPRESSION and apparently the zlib
  65. # accepts 0 too)
  66. self.assertRaises(zlib.error, zlib.compress, 'ERROR', 10)
  67. def test_badcompressobj(self):
  68. # verify failure on building compress object with bad params
  69. self.assertRaises(ValueError, zlib.compressobj, 1, zlib.DEFLATED, 0)
  70. # specifying total bits too large causes an error
  71. self.assertRaises(ValueError,
  72. zlib.compressobj, 1, zlib.DEFLATED, zlib.MAX_WBITS + 1)
  73. def test_baddecompressobj(self):
  74. # verify failure on building decompress object with bad params
  75. self.assertRaises(ValueError, zlib.decompressobj, -1)
  76. def test_decompressobj_badflush(self):
  77. # verify failure on calling decompressobj.flush with bad params
  78. self.assertRaises(ValueError, zlib.decompressobj().flush, 0)
  79. self.assertRaises(ValueError, zlib.decompressobj().flush, -1)
  80. class BaseCompressTestCase(object):
  81. def check_big_compress_buffer(self, size, compress_func):
  82. _1M = 1024 * 1024
  83. fmt = "%%0%dx" % (2 * _1M)
  84. # Generate 10MB worth of random, and expand it by repeating it.
  85. # The assumption is that zlib's memory is not big enough to exploit
  86. # such spread out redundancy.
  87. data = ''.join([binascii.a2b_hex(fmt % random.getrandbits(8 * _1M))
  88. for i in range(10)])
  89. data = data * (size // len(data) + 1)
  90. try:
  91. compress_func(data)
  92. finally:
  93. # Release memory
  94. data = None
  95. def check_big_decompress_buffer(self, size, decompress_func):
  96. data = 'x' * size
  97. try:
  98. compressed = zlib.compress(data, 1)
  99. finally:
  100. # Release memory
  101. data = None
  102. data = decompress_func(compressed)
  103. # Sanity check
  104. try:
  105. self.assertEqual(len(data), size)
  106. self.assertEqual(len(data.strip('x')), 0)
  107. finally:
  108. data = None
  109. class CompressTestCase(BaseCompressTestCase, unittest.TestCase):
  110. # Test compression in one go (whole message compression)
  111. def test_speech(self):
  112. x = zlib.compress(HAMLET_SCENE)
  113. self.assertEqual(zlib.decompress(x), HAMLET_SCENE)
  114. def test_speech128(self):
  115. # compress more data
  116. data = HAMLET_SCENE * 128
  117. x = zlib.compress(data)
  118. self.assertEqual(zlib.decompress(x), data)
  119. def test_incomplete_stream(self):
  120. # An useful error message is given
  121. x = zlib.compress(HAMLET_SCENE)
  122. self.assertRaisesRegexp(zlib.error,
  123. "Error -5 while decompressing data: incomplete or truncated stream",
  124. zlib.decompress, x[:-1])
  125. # Memory use of the following functions takes into account overallocation
  126. @precisionbigmemtest(size=_1G + 1024 * 1024, memuse=3)
  127. def test_big_compress_buffer(self, size):
  128. compress = lambda s: zlib.compress(s, 1)
  129. self.check_big_compress_buffer(size, compress)
  130. @precisionbigmemtest(size=_1G + 1024 * 1024, memuse=2)
  131. def test_big_decompress_buffer(self, size):
  132. self.check_big_decompress_buffer(size, zlib.decompress)
class CompressObjectTestCase(BaseCompressTestCase, unittest.TestCase):
    # Test compression object
    # These tests exercise the incremental compressobj()/decompressobj()
    # API: chunked input, max_length limits, flush modes, and copy().
    def test_pair(self):
        # straightforward compress/decompress objects
        data = HAMLET_SCENE * 128
        co = zlib.compressobj()
        x1 = co.compress(data)
        x2 = co.flush()
        self.assertRaises(zlib.error, co.flush) # second flush should not work
        dco = zlib.decompressobj()
        y1 = dco.decompress(x1 + x2)
        y2 = dco.flush()
        self.assertEqual(data, y1 + y2)

    def test_compressoptions(self):
        # specify lots of options to compressobj()
        level = 2
        method = zlib.DEFLATED
        wbits = -12          # negative wbits selects a raw (headerless) stream
        memlevel = 9
        strategy = zlib.Z_FILTERED
        co = zlib.compressobj(level, method, wbits, memlevel, strategy)
        x1 = co.compress(HAMLET_SCENE)
        x2 = co.flush()
        dco = zlib.decompressobj(wbits)
        y1 = dco.decompress(x1 + x2)
        y2 = dco.flush()
        self.assertEqual(HAMLET_SCENE, y1 + y2)

    def test_compressincremental(self):
        # compress object in steps, decompress object as one-shot
        data = HAMLET_SCENE * 128
        co = zlib.compressobj()
        bufs = []
        for i in range(0, len(data), 256):
            bufs.append(co.compress(data[i:i+256]))
        bufs.append(co.flush())
        combuf = ''.join(bufs)

        dco = zlib.decompressobj()
        y1 = dco.decompress(''.join(bufs))
        y2 = dco.flush()
        self.assertEqual(data, y1 + y2)

    def test_decompinc(self, flush=False, source=None, cx=256, dcx=64):
        # compress object in steps, decompress object in steps
        # cx/dcx are the compress/decompress chunk sizes.
        source = source or HAMLET_SCENE
        data = source * 128
        co = zlib.compressobj()
        bufs = []
        for i in range(0, len(data), cx):
            bufs.append(co.compress(data[i:i+cx]))
        bufs.append(co.flush())
        combuf = ''.join(bufs)

        self.assertEqual(data, zlib.decompress(combuf))

        dco = zlib.decompressobj()
        bufs = []
        for i in range(0, len(combuf), dcx):
            bufs.append(dco.decompress(combuf[i:i+dcx]))
            # No max_length was given, so unconsumed_tail must stay empty.
            self.assertEqual('', dco.unconsumed_tail, ########
                             "(A) uct should be '': not %d long" %
                             len(dco.unconsumed_tail))
        if flush:
            bufs.append(dco.flush())
        else:
            # Drain any remaining output by feeding empty input.
            while True:
                chunk = dco.decompress('')
                if chunk:
                    bufs.append(chunk)
                else:
                    break
        self.assertEqual('', dco.unconsumed_tail, ########
                         "(B) uct should be '': not %d long" %
                         len(dco.unconsumed_tail))
        self.assertEqual(data, ''.join(bufs))
        # Failure means: "decompressobj with init options failed"

    def test_decompincflush(self):
        self.test_decompinc(flush=True)

    def test_decompimax(self, source=None, cx=256, dcx=64):
        # compress in steps, decompress in length-restricted steps
        source = source or HAMLET_SCENE
        # Check a decompression object with max_length specified
        data = source * 128
        co = zlib.compressobj()
        bufs = []
        for i in range(0, len(data), cx):
            bufs.append(co.compress(data[i:i+cx]))
        bufs.append(co.flush())
        combuf = ''.join(bufs)
        self.assertEqual(data, zlib.decompress(combuf),
                         'compressed data failure')

        dco = zlib.decompressobj()
        bufs = []
        cb = combuf
        while cb:
            #max_length = 1 + len(cb)//10
            chunk = dco.decompress(cb, dcx)
            self.assertFalse(len(chunk) > dcx,
                             'chunk too big (%d>%d)' % (len(chunk), dcx))
            bufs.append(chunk)
            cb = dco.unconsumed_tail
        bufs.append(dco.flush())
        self.assertEqual(data, ''.join(bufs), 'Wrong data retrieved')

    def test_decompressmaxlen(self, flush=False):
        # Check a decompression object with max_length specified
        data = HAMLET_SCENE * 128
        co = zlib.compressobj()
        bufs = []
        for i in range(0, len(data), 256):
            bufs.append(co.compress(data[i:i+256]))
        bufs.append(co.flush())
        combuf = ''.join(bufs)
        self.assertEqual(data, zlib.decompress(combuf),
                         'compressed data failure')

        dco = zlib.decompressobj()
        bufs = []
        cb = combuf
        while cb:
            max_length = 1 + len(cb)//10
            chunk = dco.decompress(cb, max_length)
            self.assertFalse(len(chunk) > max_length,
                             'chunk too big (%d>%d)' % (len(chunk), max_length))
            bufs.append(chunk)
            cb = dco.unconsumed_tail
        if flush:
            bufs.append(dco.flush())
        else:
            # Drain remaining buffered output in max_length-sized pieces.
            while chunk:
                chunk = dco.decompress('', max_length)
                self.assertFalse(len(chunk) > max_length,
                                 'chunk too big (%d>%d)' % (len(chunk), max_length))
                bufs.append(chunk)
        self.assertEqual(data, ''.join(bufs), 'Wrong data retrieved')

    def test_decompressmaxlenflush(self):
        self.test_decompressmaxlen(flush=True)

    def test_maxlenmisc(self):
        # Misc tests of max_length
        dco = zlib.decompressobj()
        self.assertRaises(ValueError, dco.decompress, "", -1)
        self.assertEqual('', dco.unconsumed_tail)

    def test_clear_unconsumed_tail(self):
        # Issue #12050: calling decompress() without providing max_length
        # should clear the unconsumed_tail attribute.
        cdata = "x\x9cKLJ\x06\x00\x02M\x01"    # "abc"
        dco = zlib.decompressobj()
        ddata = dco.decompress(cdata, 1)
        ddata += dco.decompress(dco.unconsumed_tail)
        self.assertEqual(dco.unconsumed_tail, "")

    def test_flushes(self):
        # Test flush() with the various options, using all the
        # different levels in order to provide more variations.
        sync_opt = ['Z_NO_FLUSH', 'Z_SYNC_FLUSH', 'Z_FULL_FLUSH']
        sync_opt = [getattr(zlib, opt) for opt in sync_opt
                    if hasattr(zlib, opt)]
        data = HAMLET_SCENE * 8

        for sync in sync_opt:
            for level in range(10):
                obj = zlib.compressobj( level )
                a = obj.compress( data[:3000] )
                b = obj.flush( sync )
                c = obj.compress( data[3000:] )
                d = obj.flush()
                self.assertEqual(zlib.decompress(''.join([a,b,c,d])),
                                 data, ("Decompress failed: flush "
                                        "mode=%i, level=%i") % (sync, level))
                del obj

    def test_odd_flush(self):
        # Test for odd flushing bugs noted in 2.0, and hopefully fixed in 2.1
        import random

        if hasattr(zlib, 'Z_SYNC_FLUSH'):
            # Testing on 17K of "random" data

            # Create compressor and decompressor objects
            co = zlib.compressobj(zlib.Z_BEST_COMPRESSION)
            dco = zlib.decompressobj()

            # Try 17K of data
            # generate random data stream
            try:
                # In 2.3 and later, WichmannHill is the RNG of the bug report
                gen = random.WichmannHill()
            except AttributeError:
                try:
                    # 2.2 called it Random
                    gen = random.Random()
                except AttributeError:
                    # others might simply have a single RNG
                    gen = random
            gen.seed(1)
            data = genblock(1, 17 * 1024, generator=gen)

            # compress, sync-flush, and decompress
            first = co.compress(data)
            second = co.flush(zlib.Z_SYNC_FLUSH)
            expanded = dco.decompress(first + second)

            # if decompressed data is different from the input data, choke.
            self.assertEqual(expanded, data, "17K random source doesn't match")

    def test_empty_flush(self):
        # Test that calling .flush() on unused objects works.
        # (Bug #1083110 -- calling .flush() on decompress objects
        # caused a core dump.)
        co = zlib.compressobj(zlib.Z_BEST_COMPRESSION)
        self.assertTrue(co.flush())  # Returns a zlib header
        dco = zlib.decompressobj()
        self.assertEqual(dco.flush(), "")  # Returns nothing

    def test_decompress_incomplete_stream(self):
        # This is 'foo', deflated
        x = 'x\x9cK\xcb\xcf\x07\x00\x02\x82\x01E'
        # For the record
        self.assertEqual(zlib.decompress(x), 'foo')
        self.assertRaises(zlib.error, zlib.decompress, x[:-5])
        # Omitting the stream end works with decompressor objects
        # (see issue #8672).
        dco = zlib.decompressobj()
        y = dco.decompress(x[:-5])
        y += dco.flush()
        self.assertEqual(y, 'foo')

    # copy() is optional in some zlib builds, so these tests are only
    # defined when the running implementation provides it.
    if hasattr(zlib.compressobj(), "copy"):
        def test_compresscopy(self):
            # Test copying a compression object
            data0 = HAMLET_SCENE
            data1 = HAMLET_SCENE.swapcase()
            c0 = zlib.compressobj(zlib.Z_BEST_COMPRESSION)
            bufs0 = []
            bufs0.append(c0.compress(data0))

            c1 = c0.copy()
            bufs1 = bufs0[:]

            bufs0.append(c0.compress(data0))
            bufs0.append(c0.flush())
            s0 = ''.join(bufs0)

            bufs1.append(c1.compress(data1))
            bufs1.append(c1.flush())
            s1 = ''.join(bufs1)

            self.assertEqual(zlib.decompress(s0), data0+data0)
            self.assertEqual(zlib.decompress(s1), data0+data1)

        def test_badcompresscopy(self):
            # Test copying a compression object in an inconsistent state
            c = zlib.compressobj()
            c.compress(HAMLET_SCENE)
            c.flush()
            self.assertRaises(ValueError, c.copy)

    if hasattr(zlib.decompressobj(), "copy"):
        def test_decompresscopy(self):
            # Test copying a decompression object
            data = HAMLET_SCENE
            comp = zlib.compress(data)

            d0 = zlib.decompressobj()
            bufs0 = []
            bufs0.append(d0.decompress(comp[:32]))

            d1 = d0.copy()
            bufs1 = bufs0[:]

            bufs0.append(d0.decompress(comp[32:]))
            s0 = ''.join(bufs0)

            bufs1.append(d1.decompress(comp[32:]))
            s1 = ''.join(bufs1)

            self.assertEqual(s0, s1)
            self.assertEqual(s0, data)

        def test_baddecompresscopy(self):
            # Test copying a compression object in an inconsistent state
            data = zlib.compress(HAMLET_SCENE)
            d = zlib.decompressobj()
            d.decompress(data)
            d.flush()
            self.assertRaises(ValueError, d.copy)

    # Memory use of the following functions takes into account overallocation

    @precisionbigmemtest(size=_1G + 1024 * 1024, memuse=3)
    def test_big_compress_buffer(self, size):
        c = zlib.compressobj(1)
        compress = lambda s: c.compress(s) + c.flush()
        self.check_big_compress_buffer(size, compress)

    @precisionbigmemtest(size=_1G + 1024 * 1024, memuse=2)
    def test_big_decompress_buffer(self, size):
        d = zlib.decompressobj()
        decompress = lambda s: d.decompress(s) + d.flush()
        self.check_big_decompress_buffer(size, decompress)
  401. def genblock(seed, length, step=1024, generator=random):
  402. """length-byte stream of random data from a seed (in step-byte blocks)."""
  403. if seed is not None:
  404. generator.seed(seed)
  405. randint = generator.randint
  406. if length < step or step < 2:
  407. step = length
  408. blocks = []
  409. for i in range(0, length, step):
  410. blocks.append(''.join([chr(randint(0,255))
  411. for x in range(step)]))
  412. return ''.join(blocks)[:length]
  413. def choose_lines(source, number, seed=None, generator=random):
  414. """Return a list of number lines randomly chosen from the source"""
  415. if seed is not None:
  416. generator.seed(seed)
  417. sources = source.split('\n')
  418. return [generator.choice(sources) for n in range(number)]
# Sample text (Hamlet, Act I Scene III) used as the compressible test
# payload throughout this module.
HAMLET_SCENE = """
LAERTES
O, fear me not.
I stay too long: but here my father comes.
Enter POLONIUS
A double blessing is a double grace,
Occasion smiles upon a second leave.
LORD POLONIUS
Yet here, Laertes! aboard, aboard, for shame!
The wind sits in the shoulder of your sail,
And you are stay'd for. There; my blessing with thee!
And these few precepts in thy memory
See thou character. Give thy thoughts no tongue,
Nor any unproportioned thought his act.
Be thou familiar, but by no means vulgar.
Those friends thou hast, and their adoption tried,
Grapple them to thy soul with hoops of steel;
But do not dull thy palm with entertainment
Of each new-hatch'd, unfledged comrade. Beware
Of entrance to a quarrel, but being in,
Bear't that the opposed may beware of thee.
Give every man thy ear, but few thy voice;
Take each man's censure, but reserve thy judgment.
Costly thy habit as thy purse can buy,
But not express'd in fancy; rich, not gaudy;
For the apparel oft proclaims the man,
And they in France of the best rank and station
Are of a most select and generous chief in that.
Neither a borrower nor a lender be;
For loan oft loses both itself and friend,
And borrowing dulls the edge of husbandry.
This above all: to thine ownself be true,
And it must follow, as the night the day,
Thou canst not then be false to any man.
Farewell: my blessing season this in thee!
LAERTES
Most humbly do I take my leave, my lord.
LORD POLONIUS
The time invites you; go; your servants tend.
LAERTES
Farewell, Ophelia; and remember well
What I have said to you.
OPHELIA
'Tis in my memory lock'd,
And you yourself shall keep the key of it.
LAERTES
Farewell.
"""
  467. def test_main():
  468. run_unittest(
  469. ChecksumTestCase,
  470. ExceptionTestCase,
  471. CompressTestCase,
  472. CompressObjectTestCase
  473. )
  474. if __name__ == "__main__":
  475. test_main()