
/External.LCA_RESTRICTED/Languages/IronPython/27/Lib/pickle.py

http://github.com/IronLanguages/main

"""Create portable serialized representations of Python objects.

See module cPickle for a (much) faster implementation.
See module copy_reg for a mechanism for registering custom picklers.
See module pickletools source for extensive comments.

Classes:

    Pickler
    Unpickler

Functions:

    dump(object, file)
    dumps(object) -> string
    load(file) -> object
    loads(string) -> object

Misc variables:

    __version__
    format_version
    compatible_formats

"""

__version__ = "$Revision: 72223 $" # Code version

from types import *
from copy_reg import dispatch_table
from copy_reg import _extension_registry, _inverted_registry, _extension_cache
import marshal
import sys
import struct
import re

__all__ = ["PickleError", "PicklingError", "UnpicklingError", "Pickler",
           "Unpickler", "dump", "dumps", "load", "loads"]

# These are purely informational; no code uses these.
format_version = "2.0"               # File format version we write
compatible_formats = ["1.0",         # Original protocol 0
                      "1.1",         # Protocol 0 with INST added
                      "1.2",         # Original protocol 1
                      "1.3",         # Protocol 1 with BINFLOAT added
                      "2.0",         # Protocol 2
                      ]              # Old format versions we can read

# Keep in synch with cPickle. This is the highest protocol number we
# know how to read.
HIGHEST_PROTOCOL = 2

# Why use struct.pack() for pickling but marshal.loads() for
# unpickling? struct.pack() is 40% faster than marshal.dumps(), but
# marshal.loads() is twice as fast as struct.unpack()!
mloads = marshal.loads

class PickleError(Exception):
    """A common base class for the other pickling exceptions."""
    pass

class PicklingError(PickleError):
    """This exception is raised when an unpicklable object is passed to the
    dump() method.

    """
    pass

class UnpicklingError(PickleError):
    """This exception is raised when there is a problem unpickling an object,
    such as a security violation.

    Note that other exceptions may also be raised during unpickling, including
    (but not necessarily limited to) AttributeError, EOFError, ImportError,
    and IndexError.

    """
    pass

# An instance of _Stop is raised by Unpickler.load_stop() in response to
# the STOP opcode, passing the object that is the result of unpickling.
class _Stop(Exception):
    def __init__(self, value):
        self.value = value

# Jython has PyStringMap; it's a dict subclass with string keys
try:
    from org.python.core import PyStringMap
except ImportError:
    PyStringMap = None

# UnicodeType may or may not be exported (normally imported from types)
try:
    UnicodeType
except NameError:
    UnicodeType = None

# Pickle opcodes. See pickletools.py for extensive docs. The listing
# here is in kind-of alphabetical order of 1-character pickle code.
# pickletools groups them by purpose.

MARK = '('              # push special markobject on stack
STOP = '.'              # every pickle ends with STOP
POP = '0'               # discard topmost stack item
POP_MARK = '1'          # discard stack top through topmost markobject
DUP = '2'               # duplicate top stack item
FLOAT = 'F'             # push float object; decimal string argument
INT = 'I'               # push integer or bool; decimal string argument
BININT = 'J'            # push four-byte signed int
BININT1 = 'K'           # push 1-byte unsigned int
LONG = 'L'              # push long; decimal string argument
BININT2 = 'M'           # push 2-byte unsigned int
NONE = 'N'              # push None
PERSID = 'P'            # push persistent object; id is taken from string arg
BINPERSID = 'Q'         #  "       "         "  ;  "  "   "     "  stack
REDUCE = 'R'            # apply callable to argtuple, both on stack
STRING = 'S'            # push string; NL-terminated string argument
BINSTRING = 'T'         # push string; counted binary string argument
SHORT_BINSTRING = 'U'   #  "     "   ;    "      "       "      " < 256 bytes
UNICODE = 'V'           # push Unicode string; raw-unicode-escaped'd argument
BINUNICODE = 'X'        #   "     "       "  ; counted UTF-8 string argument
APPEND = 'a'            # append stack top to list below it
BUILD = 'b'             # call __setstate__ or __dict__.update()
GLOBAL = 'c'            # push self.find_class(modname, name); 2 string args
DICT = 'd'              # build a dict from stack items
EMPTY_DICT = '}'        # push empty dict
APPENDS = 'e'           # extend list on stack by topmost stack slice
GET = 'g'               # push item from memo on stack; index is string arg
BINGET = 'h'            #   "    "    "    "   "   "  ;   "    " 1-byte arg
INST = 'i'              # build & push class instance
LONG_BINGET = 'j'       # push item from memo on stack; index is 4-byte arg
LIST = 'l'              # build list from topmost stack items
EMPTY_LIST = ']'        # push empty list
OBJ = 'o'               # build & push class instance
PUT = 'p'               # store stack top in memo; index is string arg
BINPUT = 'q'            #   "     "    "   "   " ;   "    " 1-byte arg
LONG_BINPUT = 'r'       #   "     "    "   "   " ;   "    " 4-byte arg
SETITEM = 's'           # add key+value pair to dict
TUPLE = 't'             # build tuple from topmost stack items
EMPTY_TUPLE = ')'       # push empty tuple
SETITEMS = 'u'          # modify dict by adding topmost key+value pairs
BINFLOAT = 'G'          # push float; arg is 8-byte float encoding

TRUE = 'I01\n'          # not an opcode; see INT docs in pickletools.py
FALSE = 'I00\n'         # not an opcode; see INT docs in pickletools.py

# Protocol 2

PROTO = '\x80'          # identify pickle protocol
NEWOBJ = '\x81'         # build object by applying cls.__new__ to argtuple
EXT1 = '\x82'           # push object from extension registry; 1-byte index
EXT2 = '\x83'           # ditto, but 2-byte index
EXT4 = '\x84'           # ditto, but 4-byte index
TUPLE1 = '\x85'         # build 1-tuple from stack top
TUPLE2 = '\x86'         # build 2-tuple from two topmost stack items
TUPLE3 = '\x87'         # build 3-tuple from three topmost stack items
NEWTRUE = '\x88'        # push True
NEWFALSE = '\x89'       # push False
LONG1 = '\x8a'          # push long from < 256 bytes
LONG4 = '\x8b'          # push really big long

_tuplesize2code = [EMPTY_TUPLE, TUPLE1, TUPLE2, TUPLE3]

__all__.extend([x for x in dir() if re.match("[A-Z][A-Z0-9_]+$",x)])
del x
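
# Illustrative note (editor's sketch, not part of the original module): a tiny
# protocol-0 stream shows how these opcodes compose. dumps({'a': 1}) produces
#
#     "(dp0\nS'a'\np1\nI1\ns."
#
# i.e. MARK, DICT, PUT 0, STRING 'a', PUT 1, INT 1, SETITEM, STOP; the
# pickletools.dis() function prints this kind of disassembly for any pickle.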

# Pickling machinery

class Pickler:

    def __init__(self, file, protocol=None):
        """This takes a file-like object for writing a pickle data stream.

        The optional protocol argument tells the pickler to use the
        given protocol; supported protocols are 0, 1, 2. The default
        protocol is 0, to be backwards compatible. (Protocol 0 is the
        only protocol that can be written to a file opened in text
        mode and read back successfully. When using a protocol higher
        than 0, make sure the file is opened in binary mode, both when
        pickling and unpickling.)

        Protocol 1 is more efficient than protocol 0; protocol 2 is
        more efficient than protocol 1.

        Specifying a negative protocol version selects the highest
        protocol version supported. The higher the protocol used, the
        more recent the version of Python needed to read the pickle
        produced.

        The file parameter must have a write() method that accepts a single
        string argument. It can thus be an open file object, a StringIO
        object, or any other custom object that meets this interface.

        """
        if protocol is None:
            protocol = 0
        if protocol < 0:
            protocol = HIGHEST_PROTOCOL
        elif not 0 <= protocol <= HIGHEST_PROTOCOL:
            raise ValueError("pickle protocol must be <= %d" % HIGHEST_PROTOCOL)
        self.write = file.write
        self.memo = {}
        self.proto = int(protocol)
        self.bin = protocol >= 1
        self.fast = 0
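
    # Illustrative note (editor's sketch, not part of the original module):
    # choosing a protocol in practice. A negative protocol selects the
    # highest one this module can write (2 here), per the check above:
    #
    #     dumps(obj)        # protocol 0, printable ASCII, most portable
    #     dumps(obj, 2)     # protocol 2, compact binary
    #     dumps(obj, -1)    # same as dumps(obj, HIGHEST_PROTOCOL)
    #
    # Files holding protocol 1 or 2 pickles must be opened in binary mode.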

    def clear_memo(self):
        """Clears the pickler's "memo".

        The memo is the data structure that remembers which objects the
        pickler has already seen, so that shared or recursive objects are
        pickled by reference and not by value. This method is useful when
        re-using picklers.

        """
        self.memo.clear()

    def dump(self, obj):
        """Write a pickled representation of obj to the open file."""
        if self.proto >= 2:
            self.write(PROTO + chr(self.proto))
        self.save(obj)
        self.write(STOP)

    def memoize(self, obj):
        """Store an object in the memo."""

        # The Pickler memo is a dictionary mapping object ids to 2-tuples
        # that contain the Unpickler memo key and the object being memoized.
        # The memo key is written to the pickle and will become
        # the key in the Unpickler's memo. The object is stored in the
        # Pickler memo so that transient objects are kept alive during
        # pickling.

        # The use of the Unpickler memo length as the memo key is just a
        # convention. The only requirement is that the memo values be unique.
        # But there appears no advantage to any other scheme, and this
        # scheme allows the Unpickler memo to be implemented as a plain (but
        # growable) array, indexed by memo key.
        if self.fast:
            return
        assert id(obj) not in self.memo
        memo_len = len(self.memo)
        self.write(self.put(memo_len))
        self.memo[id(obj)] = memo_len, obj
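
    # Illustrative note (editor's sketch, not part of the original module):
    # the memo is what makes shared and recursive references cheap and
    # identity-preserving. Pickling the same object twice in one stream
    # writes it once and emits a GET for the second reference:
    #
    #     x = [1, 2]
    #     y = loads(dumps([x, x]))
    #     assert y[0] is y[1]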

    # Return a PUT (BINPUT, LONG_BINPUT) opcode string, with argument i.
    def put(self, i, pack=struct.pack):
        if self.bin:
            if i < 256:
                return BINPUT + chr(i)
            else:
                return LONG_BINPUT + pack("<i", i)
        return PUT + repr(i) + '\n'

    # Return a GET (BINGET, LONG_BINGET) opcode string, with argument i.
    def get(self, i, pack=struct.pack):
        if self.bin:
            if i < 256:
                return BINGET + chr(i)
            else:
                return LONG_BINGET + pack("<i", i)
        return GET + repr(i) + '\n'

    def save(self, obj):
        # Check for persistent id (defined by a subclass)
        pid = self.persistent_id(obj)
        if pid is not None:
            self.save_pers(pid)
            return

        # Check the memo
        x = self.memo.get(id(obj))
        if x:
            self.write(self.get(x[0]))
            return

        # Check the type dispatch table
        t = type(obj)
        f = self.dispatch.get(t)
        if f:
            f(self, obj) # Call unbound method with explicit self
            return

        # Check copy_reg.dispatch_table
        reduce = dispatch_table.get(t)
        if reduce:
            rv = reduce(obj)
        else:
            # Check for a class with a custom metaclass; treat as regular class
            try:
                issc = issubclass(t, TypeType)
            except TypeError: # t is not a class (old Boost; see SF #502085)
                issc = 0
            if issc:
                self.save_global(obj)
                return

            # Check for a __reduce_ex__ method, fall back to __reduce__
            reduce = getattr(obj, "__reduce_ex__", None)
            if reduce:
                rv = reduce(self.proto)
            else:
                reduce = getattr(obj, "__reduce__", None)
                if reduce:
                    rv = reduce()
                else:
                    raise PicklingError("Can't pickle %r object: %r" %
                                        (t.__name__, obj))

        # Check for string returned by reduce(), meaning "save as global"
        if type(rv) is StringType:
            self.save_global(obj, rv)
            return

        # Assert that reduce() returned a tuple
        if type(rv) is not TupleType:
            raise PicklingError("%s must return string or tuple" % reduce)

        # Assert that it returned an appropriately sized tuple
        l = len(rv)
        if not (2 <= l <= 5):
            raise PicklingError("Tuple returned by %s must have "
                                "two to five elements" % reduce)

        # Save the reduce() output and finally memoize the object
        self.save_reduce(obj=obj, *rv)

    def persistent_id(self, obj):
        # This exists so a subclass can override it
        return None

    def save_pers(self, pid):
        # Save a persistent id reference
        if self.bin:
            self.save(pid)
            self.write(BINPERSID)
        else:
            self.write(PERSID + str(pid) + '\n')

    def save_reduce(self, func, args, state=None,
                    listitems=None, dictitems=None, obj=None):
        # This API is called by some subclasses

        # Assert that args is a tuple or None
        if not isinstance(args, TupleType):
            raise PicklingError("args from reduce() should be a tuple")

        # Assert that func is callable
        if not hasattr(func, '__call__'):
            raise PicklingError("func from reduce should be callable")

        save = self.save
        write = self.write

        # Protocol 2 special case: if func's name is __newobj__, use NEWOBJ
        if self.proto >= 2 and getattr(func, "__name__", "") == "__newobj__":
            # A __reduce__ implementation can direct protocol 2 to
            # use the more efficient NEWOBJ opcode, while still
            # allowing protocol 0 and 1 to work normally. For this to
            # work, the function returned by __reduce__ should be
            # called __newobj__, and its first argument should be a
            # new-style class. The implementation for __newobj__
            # should be as follows, although pickle has no way to
            # verify this:
            #
            # def __newobj__(cls, *args):
            #     return cls.__new__(cls, *args)
            #
            # Protocols 0 and 1 will pickle a reference to __newobj__,
            # while protocol 2 (and above) will pickle a reference to
            # cls, the remaining args tuple, and the NEWOBJ code,
            # which calls cls.__new__(cls, *args) at unpickling time
            # (see load_newobj below). If __reduce__ returns a
            # three-tuple, the state from the third tuple item will be
            # pickled regardless of the protocol, calling __setstate__
            # at unpickling time (see load_build below).
            #
            # Note that no standard __newobj__ implementation exists;
            # you have to provide your own. This is to enforce
            # compatibility with Python 2.2 (pickles written using
            # protocol 0 or 1 in Python 2.3 should be unpicklable by
            # Python 2.2).
            cls = args[0]
            if not hasattr(cls, "__new__"):
                raise PicklingError(
                    "args[0] from __newobj__ args has no __new__")
            if obj is not None and cls is not obj.__class__:
                raise PicklingError(
                    "args[0] from __newobj__ args has the wrong class")
            args = args[1:]
            save(cls)
            save(args)
            write(NEWOBJ)
        else:
            save(func)
            save(args)
            write(REDUCE)

        if obj is not None:
            # If the object is already in the memo, this means it is
            # recursive. In this case, throw away everything we put on the
            # stack, and fetch the object back from the memo.
            if id(obj) in self.memo:
                write(POP + self.get(self.memo[id(obj)][0]))
            else:
                self.memoize(obj)

        # More new special cases (that work with older protocols as
        # well): when __reduce__ returns a tuple with 4 or 5 items,
        # the 4th and 5th item should be iterators that provide list
        # items and dict items (as (key, value) tuples), or None.

        if listitems is not None:
            self._batch_appends(listitems)

        if dictitems is not None:
            self._batch_setitems(dictitems)

        if state is not None:
            save(state)
            write(BUILD)

    # Methods below this point are dispatched through the dispatch table

    dispatch = {}

    def save_none(self, obj):
        self.write(NONE)
    dispatch[NoneType] = save_none

    def save_bool(self, obj):
        if self.proto >= 2:
            self.write(obj and NEWTRUE or NEWFALSE)
        else:
            self.write(obj and TRUE or FALSE)
    dispatch[bool] = save_bool

    def save_int(self, obj, pack=struct.pack):
        if self.bin:
            # If the int is small enough to fit in a signed 4-byte 2's-comp
            # format, we can store it more efficiently than the general
            # case.
            # First one- and two-byte unsigned ints:
            if obj >= 0:
                if obj <= 0xff:
                    self.write(BININT1 + chr(obj))
                    return
                if obj <= 0xffff:
                    self.write("%c%c%c" % (BININT2, obj&0xff, obj>>8))
                    return
            # Next check for 4-byte signed ints:
            high_bits = obj >> 31 # note that Python shift sign-extends
            if high_bits == 0 or high_bits == -1:
                # All high bits are copies of bit 2**31, so the value
                # fits in a 4-byte signed int.
                self.write(BININT + pack("<i", obj))
                return
        # Text pickle, or int too big to fit in signed 4-byte format.
        self.write(INT + repr(obj) + '\n')
    dispatch[IntType] = save_int

    def save_long(self, obj, pack=struct.pack):
        if self.proto >= 2:
            bytes = encode_long(obj)
            n = len(bytes)
            if n < 256:
                self.write(LONG1 + chr(n) + bytes)
            else:
                self.write(LONG4 + pack("<i", n) + bytes)
            return
        self.write(LONG + repr(obj) + '\n')
    dispatch[LongType] = save_long

    def save_float(self, obj, pack=struct.pack):
        if self.bin:
            self.write(BINFLOAT + pack('>d', obj))
        else:
            self.write(FLOAT + repr(obj) + '\n')
    dispatch[FloatType] = save_float

    def save_string(self, obj, pack=struct.pack):
        if self.bin:
            n = len(obj)
            if n < 256:
                self.write(SHORT_BINSTRING + chr(n) + obj)
            else:
                self.write(BINSTRING + pack("<i", n) + obj)
        else:
            self.write(STRING + repr(obj) + '\n')
        self.memoize(obj)
    dispatch[StringType] = save_string

    def save_unicode(self, obj, pack=struct.pack):
        if self.bin:
            encoding = obj.encode('utf-8')
            n = len(encoding)
            self.write(BINUNICODE + pack("<i", n) + encoding)
        else:
            obj = obj.replace("\\", "\\u005c")
            obj = obj.replace("\n", "\\u000a")
            self.write(UNICODE + obj.encode('raw-unicode-escape') + '\n')
        self.memoize(obj)
    dispatch[UnicodeType] = save_unicode

    if StringType is UnicodeType:
        # This is true for Jython
        def save_string(self, obj, pack=struct.pack):
            unicode = obj.isunicode()

            if self.bin:
                if unicode:
                    obj = obj.encode("utf-8")
                l = len(obj)
                if l < 256 and not unicode:
                    self.write(SHORT_BINSTRING + chr(l) + obj)
                else:
                    s = pack("<i", l)
                    if unicode:
                        self.write(BINUNICODE + s + obj)
                    else:
                        self.write(BINSTRING + s + obj)
            else:
                if unicode:
                    obj = obj.replace("\\", "\\u005c")
                    obj = obj.replace("\n", "\\u000a")
                    obj = obj.encode('raw-unicode-escape')
                    self.write(UNICODE + obj + '\n')
                else:
                    self.write(STRING + repr(obj) + '\n')
            self.memoize(obj)
        dispatch[StringType] = save_string

    def save_tuple(self, obj):
        write = self.write
        proto = self.proto

        n = len(obj)
        if n == 0:
            if proto:
                write(EMPTY_TUPLE)
            else:
                write(MARK + TUPLE)
            return

        save = self.save
        memo = self.memo
        if n <= 3 and proto >= 2:
            for element in obj:
                save(element)
            # Subtle. Same as in the big comment below.
            if id(obj) in memo:
                get = self.get(memo[id(obj)][0])
                write(POP * n + get)
            else:
                write(_tuplesize2code[n])
                self.memoize(obj)
            return

        # proto 0 or proto 1 and tuple isn't empty, or proto > 1 and tuple
        # has more than 3 elements.
        write(MARK)
        for element in obj:
            save(element)

        if id(obj) in memo:
            # Subtle. d was not in memo when we entered save_tuple(), so
            # the process of saving the tuple's elements must have saved
            # the tuple itself: the tuple is recursive. The proper action
            # now is to throw away everything we put on the stack, and
            # simply GET the tuple (it's already constructed). This check
            # could have been done in the "for element" loop instead, but
            # recursive tuples are a rare thing.
            get = self.get(memo[id(obj)][0])
            if proto:
                write(POP_MARK + get)
            else:   # proto 0 -- POP_MARK not available
                write(POP * (n+1) + get)
            return

        # No recursion.
        self.write(TUPLE)
        self.memoize(obj)

    dispatch[TupleType] = save_tuple

    # save_empty_tuple() isn't used by anything in Python 2.3. However, I
    # found a Pickler subclass in Zope3 that calls it, so it's not harmless
    # to remove it.
    def save_empty_tuple(self, obj):
        self.write(EMPTY_TUPLE)

    def save_list(self, obj):
        write = self.write

        if self.bin:
            write(EMPTY_LIST)
        else:   # proto 0 -- can't use EMPTY_LIST
            write(MARK + LIST)

        self.memoize(obj)
        self._batch_appends(iter(obj))

    dispatch[ListType] = save_list

    # Keep in synch with cPickle's BATCHSIZE. Nothing will break if it gets
    # out of synch, though.
    _BATCHSIZE = 1000

    def _batch_appends(self, items):
        # Helper to batch up APPENDS sequences
        save = self.save
        write = self.write

        if not self.bin:
            for x in items:
                save(x)
                write(APPEND)
            return

        r = xrange(self._BATCHSIZE)
        while items is not None:
            tmp = []
            for i in r:
                try:
                    x = items.next()
                    tmp.append(x)
                except StopIteration:
                    items = None
                    break
            n = len(tmp)
            if n > 1:
                write(MARK)
                for x in tmp:
                    save(x)
                write(APPENDS)
            elif n:
                save(tmp[0])
                write(APPEND)
            # else tmp is empty, and we're done

    def save_dict(self, obj):
        write = self.write

        if self.bin:
            write(EMPTY_DICT)
        else:   # proto 0 -- can't use EMPTY_DICT
            write(MARK + DICT)

        self.memoize(obj)
        self._batch_setitems(obj.iteritems())

    dispatch[DictionaryType] = save_dict
    if not PyStringMap is None:
        dispatch[PyStringMap] = save_dict

    def _batch_setitems(self, items):
        # Helper to batch up SETITEMS sequences; proto >= 1 only
        save = self.save
        write = self.write

        if not self.bin:
            for k, v in items:
                save(k)
                save(v)
                write(SETITEM)
            return

        r = xrange(self._BATCHSIZE)
        while items is not None:
            tmp = []
            for i in r:
                try:
                    tmp.append(items.next())
                except StopIteration:
                    items = None
                    break
            n = len(tmp)
            if n > 1:
                write(MARK)
                for k, v in tmp:
                    save(k)
                    save(v)
                write(SETITEMS)
            elif n:
                k, v = tmp[0]
                save(k)
                save(v)
                write(SETITEM)
            # else tmp is empty, and we're done

    def save_inst(self, obj):
        cls = obj.__class__

        memo = self.memo
        write = self.write
        save = self.save

        if hasattr(obj, '__getinitargs__'):
            args = obj.__getinitargs__()
            len(args) # XXX Assert it's a sequence
            _keep_alive(args, memo)
        else:
            args = ()

        write(MARK)

        if self.bin:
            save(cls)
            for arg in args:
                save(arg)
            write(OBJ)
        else:
            for arg in args:
                save(arg)
            write(INST + cls.__module__ + '\n' + cls.__name__ + '\n')

        self.memoize(obj)

        try:
            getstate = obj.__getstate__
        except AttributeError:
            stuff = obj.__dict__
        else:
            stuff = getstate()
            _keep_alive(stuff, memo)
        save(stuff)
        write(BUILD)

    dispatch[InstanceType] = save_inst

    def save_global(self, obj, name=None, pack=struct.pack):
        write = self.write
        memo = self.memo

        if name is None:
            name = obj.__name__

        module = getattr(obj, "__module__", None)
        if module is None:
            module = whichmodule(obj, name)

        try:
            __import__(module)
            mod = sys.modules[module]
            klass = getattr(mod, name)
        except (ImportError, KeyError, AttributeError):
            raise PicklingError(
                "Can't pickle %r: it's not found as %s.%s" %
                (obj, module, name))
        else:
            if klass is not obj:
                raise PicklingError(
                    "Can't pickle %r: it's not the same object as %s.%s" %
                    (obj, module, name))

        if self.proto >= 2:
            code = _extension_registry.get((module, name))
            if code:
                assert code > 0
                if code <= 0xff:
                    write(EXT1 + chr(code))
                elif code <= 0xffff:
                    write("%c%c%c" % (EXT2, code&0xff, code>>8))
                else:
                    write(EXT4 + pack("<i", code))
                return

        write(GLOBAL + module + '\n' + name + '\n')
        self.memoize(obj)

    dispatch[ClassType] = save_global
    dispatch[FunctionType] = save_global
    dispatch[BuiltinFunctionType] = save_global
    dispatch[TypeType] = save_global

# Pickling helpers

def _keep_alive(x, memo):
    """Keeps a reference to the object x in the memo.

    Because we remember objects by their id, we have
    to assure that possibly temporary objects are kept
    alive by referencing them.
    We store a reference at the id of the memo, which should
    normally not be used unless someone tries to deepcopy
    the memo itself...
    """
    try:
        memo[id(memo)].append(x)
    except KeyError:
        # aha, this is the first one :-)
        memo[id(memo)]=[x]

# A cache for whichmodule(), mapping a function object to the name of
# the module in which the function was found.

classmap = {} # called classmap for backwards compatibility

def whichmodule(func, funcname):
    """Figure out the module in which a function occurs.

    Search sys.modules for the module.
    Cache in classmap.
    Return a module name.
    If the function cannot be found, return "__main__".
    """
    # Python functions should always get an __module__ from their globals.
    mod = getattr(func, "__module__", None)
    if mod is not None:
        return mod
    if func in classmap:
        return classmap[func]

    for name, module in sys.modules.items():
        if module is None:
            continue # skip dummy package entries
        if name != '__main__' and getattr(module, funcname, None) is func:
            break
    else:
        name = '__main__'
    classmap[func] = name
    return name

# Unpickling machinery

class Unpickler:

    def __init__(self, file):
        """This takes a file-like object for reading a pickle data stream.

        The protocol version of the pickle is detected automatically, so no
        proto argument is needed.

        The file-like object must have two methods, a read() method that
        takes an integer argument, and a readline() method that requires no
        arguments. Both methods should return a string. Thus file-like
        object can be a file object opened for reading, a StringIO object,
        or any other custom object that meets this interface.
        """
        self.readline = file.readline
        self.read = file.read
        self.memo = {}

    def load(self):
        """Read a pickled object representation from the open file.

        Return the reconstituted object hierarchy specified in the file.
        """
        self.mark = object() # any new unique object
        self.stack = []
        self.append = self.stack.append
        read = self.read
        dispatch = self.dispatch
        try:
            while 1:
                key = read(1)
                dispatch[key](self)
        except _Stop, stopinst:
            return stopinst.value

    # Return largest index k such that self.stack[k] is self.mark.
    # If the stack doesn't contain a mark, eventually raises IndexError.
    # This could be sped by maintaining another stack, of indices at which
    # the mark appears. For that matter, the latter stack would suffice,
    # and we wouldn't need to push mark objects on self.stack at all.
    # Doing so is probably a good thing, though, since if the pickle is
    # corrupt (or hostile) we may get a clue from finding self.mark embedded
    # in unpickled objects.
    def marker(self):
        stack = self.stack
        mark = self.mark
        k = len(stack)-1
        while stack[k] is not mark: k = k-1
        return k

    dispatch = {}

    def load_eof(self):
        raise EOFError
    dispatch[''] = load_eof

    def load_proto(self):
        proto = ord(self.read(1))
        if not 0 <= proto <= 2:
            raise ValueError, "unsupported pickle protocol: %d" % proto
    dispatch[PROTO] = load_proto

    def load_persid(self):
        pid = self.readline()[:-1]
        self.append(self.persistent_load(pid))
    dispatch[PERSID] = load_persid

    def load_binpersid(self):
        pid = self.stack.pop()
        self.append(self.persistent_load(pid))
    dispatch[BINPERSID] = load_binpersid

    def load_none(self):
        self.append(None)
    dispatch[NONE] = load_none

    def load_false(self):
        self.append(False)
    dispatch[NEWFALSE] = load_false

    def load_true(self):
        self.append(True)
    dispatch[NEWTRUE] = load_true

    def load_int(self):
        data = self.readline()
        if data == FALSE[1:]:
            val = False
        elif data == TRUE[1:]:
            val = True
        else:
            try:
                val = int(data)
            except ValueError:
                val = long(data)
        self.append(val)
    dispatch[INT] = load_int

    def load_binint(self):
        self.append(mloads('i' + self.read(4)))
    dispatch[BININT] = load_binint

    def load_binint1(self):
        self.append(ord(self.read(1)))
    dispatch[BININT1] = load_binint1

    def load_binint2(self):
        self.append(mloads('i' + self.read(2) + '\000\000'))
    dispatch[BININT2] = load_binint2

    def load_long(self):
        self.append(long(self.readline()[:-1], 0))
    dispatch[LONG] = load_long

    def load_long1(self):
        n = ord(self.read(1))
        bytes = self.read(n)
        self.append(decode_long(bytes))
    dispatch[LONG1] = load_long1

    def load_long4(self):
        n = mloads('i' + self.read(4))
        bytes = self.read(n)
        self.append(decode_long(bytes))
    dispatch[LONG4] = load_long4

    def load_float(self):
        self.append(float(self.readline()[:-1]))
    dispatch[FLOAT] = load_float

    def load_binfloat(self, unpack=struct.unpack):
        self.append(unpack('>d', self.read(8))[0])
    dispatch[BINFLOAT] = load_binfloat

    def load_string(self):
        rep = self.readline()[:-1]
        for q in "\"'": # double or single quote
            if rep.startswith(q):
                if len(rep) < 2 or not rep.endswith(q):
                    raise ValueError, "insecure string pickle"
                rep = rep[len(q):-len(q)]
                break
        else:
            raise ValueError, "insecure string pickle"
        self.append(rep.decode("string-escape"))
    dispatch[STRING] = load_string

    def load_binstring(self):
        len = mloads('i' + self.read(4))
        self.append(self.read(len))
    dispatch[BINSTRING] = load_binstring

    def load_unicode(self):
        self.append(unicode(self.readline()[:-1],'raw-unicode-escape'))
    dispatch[UNICODE] = load_unicode

    def load_binunicode(self):
        len = mloads('i' + self.read(4))
        self.append(unicode(self.read(len),'utf-8'))
    dispatch[BINUNICODE] = load_binunicode

    def load_short_binstring(self):
        len = ord(self.read(1))
        self.append(self.read(len))
    dispatch[SHORT_BINSTRING] = load_short_binstring

    def load_tuple(self):
        k = self.marker()
        self.stack[k:] = [tuple(self.stack[k+1:])]
    dispatch[TUPLE] = load_tuple

    def load_empty_tuple(self):
        self.stack.append(())
    dispatch[EMPTY_TUPLE] = load_empty_tuple

    def load_tuple1(self):
        self.stack[-1] = (self.stack[-1],)
    dispatch[TUPLE1] = load_tuple1

    def load_tuple2(self):
        self.stack[-2:] = [(self.stack[-2], self.stack[-1])]
    dispatch[TUPLE2] = load_tuple2

    def load_tuple3(self):
        self.stack[-3:] = [(self.stack[-3], self.stack[-2], self.stack[-1])]
    dispatch[TUPLE3] = load_tuple3

    def load_empty_list(self):
        self.stack.append([])
    dispatch[EMPTY_LIST] = load_empty_list

    def load_empty_dictionary(self):
        self.stack.append({})
    dispatch[EMPTY_DICT] = load_empty_dictionary

    def load_list(self):
        k = self.marker()
        self.stack[k:] = [self.stack[k+1:]]
    dispatch[LIST] = load_list

    def load_dict(self):
        k = self.marker()
        d = {}
        items = self.stack[k+1:]
        for i in range(0, len(items), 2):
            key = items[i]
            value = items[i+1]
            d[key] = value
        self.stack[k:] = [d]
    dispatch[DICT] = load_dict

    # INST and OBJ differ only in how they get a class object. It's not
    # only sensible to do the rest in a common routine, the two routines
    # previously diverged and grew different bugs.
    # klass is the class to instantiate, and k points to the topmost mark
    # object, following which are the arguments for klass.__init__.
    def _instantiate(self, klass, k):
        args = tuple(self.stack[k+1:])
        del self.stack[k:]
        instantiated = 0
        if (not args and
                type(klass) is ClassType and
                not hasattr(klass, "__getinitargs__")):
            try:
                value = _EmptyClass()
                value.__class__ = klass
                instantiated = 1
            except RuntimeError:
                # In restricted execution, assignment to inst.__class__ is
                # prohibited
                pass
        if not instantiated:
            try:
                value = klass(*args)
            except TypeError, err:
                raise TypeError, "in constructor for %s: %s" % (
                    klass.__name__, str(err)), sys.exc_info()[2]
        self.append(value)

    def load_inst(self):
        module = self.readline()[:-1]
        name = self.readline()[:-1]
        klass = self.find_class(module, name)
        self._instantiate(klass, self.marker())
    dispatch[INST] = load_inst

    def load_obj(self):
        # Stack is ... markobject classobject arg1 arg2 ...
        k = self.marker()
        klass = self.stack.pop(k+1)
        self._instantiate(klass, k)
    dispatch[OBJ] = load_obj

    def load_newobj(self):
        args = self.stack.pop()
        cls = self.stack[-1]
        obj = cls.__new__(cls, *args)
        self.stack[-1] = obj
    dispatch[NEWOBJ] = load_newobj

    def load_global(self):
        module = self.readline()[:-1]
        name = self.readline()[:-1]
        klass = self.find_class(module, name)
        self.append(klass)
    dispatch[GLOBAL] = load_global

    def load_ext1(self):
        code = ord(self.read(1))
        self.get_extension(code)
    dispatch[EXT1] = load_ext1

    def load_ext2(self):
        code = mloads('i' + self.read(2) + '\000\000')
        self.get_extension(code)
    dispatch[EXT2] = load_ext2

    def load_ext4(self):
        code = mloads('i' + self.read(4))
        self.get_extension(code)
    dispatch[EXT4] = load_ext4

    def get_extension(self, code):
        nil = []
        obj = _extension_cache.get(code, nil)
        if obj is not nil:
            self.append(obj)
            return
        key = _inverted_registry.get(code)
        if not key:
            raise ValueError("unregistered extension code %d" % code)
        obj = self.find_class(*key)
        _extension_cache[code] = obj
        self.append(obj)

    def find_class(self, module, name):
        # Subclasses may override this
        __import__(module)
        mod = sys.modules[module]
        klass = getattr(mod, name)
        return klass

    def load_reduce(self):
        stack = self.stack
        args = stack.pop()
        func = stack[-1]
        value = func(*args)
        stack[-1] = value
    dispatch[REDUCE] = load_reduce

    def load_pop(self):
        del self.stack[-1]
    dispatch[POP] = load_pop

    def load_pop_mark(self):
        k = self.marker()
        del self.stack[k:]
    dispatch[POP_MARK] = load_pop_mark

    def load_dup(self):
        self.append(self.stack[-1])
    dispatch[DUP] = load_dup

    def load_get(self):
        self.append(self.memo[self.readline()[:-1]])
    dispatch[GET] = load_get

    def load_binget(self):
        i = ord(self.read(1))
        self.append(self.memo[repr(i)])
    dispatch[BINGET] = load_binget

    def load_long_binget(self):
        i = mloads('i' + self.read(4))
        self.append(self.memo[repr(i)])
    dispatch[LONG_BINGET] = load_long_binget

    def load_put(self):
        self.memo[self.readline()[:-1]] = self.stack[-1]
    dispatch[PUT] = load_put

    def load_binput(self):
        i = ord(self.read(1))
        self.memo[repr(i)] = self.stack[-1]
    dispatch[BINPUT] = load_binput

    def load_long_binput(self):
        i = mloads('i' + self.read(4))
        self.memo[repr(i)] = self.stack[-1]
    dispatch[LONG_BINPUT] = load_long_binput

    def load_append(self):
        stack = self.stack
        value = stack.pop()
        list = stack[-1]
        list.append(value)
    dispatch[APPEND] = load_append

    def load_appends(self):
        stack = self.stack
        mark = self.marker()
        list = stack[mark - 1]
        list.extend(stack[mark + 1:])
        del stack[mark:]
    dispatch[APPENDS] = load_appends

    def load_setitem(self):
        stack = self.stack
        value = stack.pop()
        key = stack.pop()
        dict = stack[-1]
        dict[key] = value
    dispatch[SETITEM] = load_setitem

    def load_setitems(self):
        stack = self.stack
        mark = self.marker()
        dict = stack[mark - 1]
        for i in range(mark + 1, len(stack), 2):
            dict[stack[i]] = stack[i + 1]
        del stack[mark:]
    dispatch[SETITEMS] = load_setitems

    def load_build(self):
        stack = self.stack
        state = stack.pop()
        inst = stack[-1]
        setstate = getattr(inst, "__setstate__", None)
        if setstate:
            setstate(state)
            return
        slotstate = None
        if isinstance(state, tuple) and len(state) == 2:
            state, slotstate = state
        if state:
            try:
                d = inst.__dict__
                try:
                    for k, v in state.iteritems():
                        d[intern(k)] = v
                # keys in state don't have to be strings
                # don't blow up, but don't go out of our way
                except TypeError:
                    d.update(state)
            except RuntimeError:
                # XXX In restricted execution, the instance's __dict__
                # is not accessible. Use the old way of unpickling
                # the instance variables. This is a semantic
                # difference when unpickling in restricted
                # vs. unrestricted modes.
                # Note, however, that cPickle has never tried to do the
                # .update() business, and always uses
                #     PyObject_SetItem(inst.__dict__, key, value) in a
                # loop over state.items().
                for k, v in state.items():
                    setattr(inst, k, v)
        if slotstate:
            for k, v in slotstate.items():
                setattr(inst, k, v)
    dispatch[BUILD] = load_build

    def load_mark(self):
        self.append(self.mark)
    dispatch[MARK] = load_mark

    def load_stop(self):
        value = self.stack.pop()
        raise _Stop(value)
    dispatch[STOP] = load_stop

# Helper class for load_inst/load_obj

class _EmptyClass:
    pass

# Encode/decode longs in linear time.

import binascii as _binascii

def encode_long(x):
    r"""Encode a long to a two's complement little-endian binary string.

    Note that 0L is a special case, returning an empty string, to save a
    byte in the LONG1 pickling context.

    >>> encode_long(0L)
    ''
    >>> encode_long(255L)
    '\xff\x00'
    >>> encode_long(32767L)
    '\xff\x7f'
    >>> encode_long(-256L)
    '\x00\xff'
    >>> encode_long(-32768L)
    '\x00\x80'
    >>> encode_long(-128L)
    '\x80'
    >>> encode_long(127L)
    '\x7f'
    >>>
    """

    if x == 0:
        return ''
    if x > 0:
        ashex = hex(x)
        assert ashex.startswith("0x")
        njunkchars = 2 + ashex.endswith('L')
        nibbles = len(ashex) - njunkchars
        if nibbles & 1:
            # need an even # of nibbles for unhexlify
            ashex = "0x0" + ashex[2:]
        elif int(ashex[2], 16) >= 8:
            # "looks negative", so need a byte of sign bits
            ashex = "0x00" + ashex[2:]
    else:
        # Build the 256's-complement: (1L << nbytes) + x. The trick is
        # to find the number of bytes in linear time (although that should
        # really be a constant-time task).
        ashex = hex(-x)
        assert ashex.startswith("0x")
        njunkchars = 2 + ashex.endswith('L')
        nibbles = len(ashex) - njunkchars
        if nibbles & 1:
            # Extend to a full byte.
            nibbles += 1
        nbits = nibbles * 4
        x += 1L << nbits
        assert x > 0
        ashex = hex(x)
        njunkchars = 2 + ashex.endswith('L')
        newnibbles = len(ashex) - njunkchars
        if newnibbles < nibbles:
            ashex = "0x" + "0" * (nibbles - newnibbles) + ashex[2:]
        if int(ashex[2], 16) < 8:
            # "looks positive", so need a byte of sign bits
            ashex = "0xff" + ashex[2:]

    if ashex.endswith('L'):
        ashex = ashex[2:-1]
    else:
        ashex = ashex[2:]
    assert len(ashex) & 1 == 0, (x, ashex)
    binary = _binascii.unhexlify(ashex)
    return binary[::-1]

def decode_long(data):
    r"""Decode a long from a two's complement little-endian binary string.

    >>> decode_long('')
    0L
    >>> decode_long("\xff\x00")
    255L
    >>> decode_long("\xff\x7f")
    32767L
    >>> decode_long("\x00\xff")
    -256L
    >>> decode_long("\x00\x80")
    -32768L
    >>> decode_long("\x80")
    -128L
    >>> decode_long("\x7f")
    127L
    """

    nbytes = len(data)
    if nbytes == 0:
        return 0L
    ashex = _binascii.hexlify(data[::-1])
    n = long(ashex, 16) # quadratic time before Python 2.3; linear now
    if data[-1] >= '\x80':
        n -= 1L << (nbytes * 8)
    return n

# Shorthands

try:
    from cStringIO import StringIO
except ImportError:
    from StringIO import StringIO

def dump(obj, file, protocol=None):
    Pickler(file, protocol).dump(obj)

def dumps(obj, protocol=None):
    file = StringIO()
    Pickler(file, protocol).dump(obj)
    return file.getvalue()

def load(file):
    return Unpickler(file).load()

def loads(str):
    file = StringIO(str)
    return Unpickler(file).load()
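
# Illustrative note (editor's sketch, not part of the original module): a
# round trip through the shorthands above. The file name is arbitrary;
# binary mode matters for protocols >= 1.
#
#     import pickle
#
#     data = {'spam': [1, 2, 3], 'eggs': (u'caf\xe9', None)}
#     blob = pickle.dumps(data, pickle.HIGHEST_PROTOCOL)
#     assert pickle.loads(blob) == data
#
#     with open('data.pkl', 'wb') as f:
#         pickle.dump(data, f, 2)
#     with open('data.pkl', 'rb') as f:
#         assert pickle.load(f) == data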

# Doctest

def _test():
    import doctest
    return doctest.testmod()

if __name__ == "__main__":
    _test()