
/rpython/rlib/streamio.py

https://bitbucket.org/pypy/pypy/
  1. """New standard I/O library.
  2. Based on sio.py from Guido van Rossum.
  3. - This module contains various stream classes which provide a subset of the
  4. classic Python I/O API: read(n), write(s), tell(), seek(offset, whence=0),
  5. readall(), readline(), truncate(size), flush(), close(), peek(),
  6. flushable(), try_to_find_file_descriptor().
  7. - This is not for general usage:
  8. * read(n) may return less than n bytes, just like os.read().
  9. * some other methods also have no default parameters.
  10. * close() should be called exactly once and no further operations performed;
  11. there is no __del__() closing the stream for you.
  12. * some methods may raise MyNotImplementedError.
  13. * peek() returns some (or no) characters that have already been read ahead.
  14. * flushable() returns True if flushing that stream is useful, False if it is pointless.
  15. - A 'basis stream' provides I/O using a low-level API, like the os, mmap or
  16. socket modules.
  17. - A 'filtering stream' builds on top of another stream. There are filtering
  18. streams for universal newline translation, for unicode translation, and
  19. for buffering.
  20. You typically take a basis stream, place zero or more filtering
  21. streams on top of it, and then top it off with an input-buffering and/or
  22. an output-buffering stream.
  23. """
  24. # File offsets are all 'r_longlong', but a single read or write cannot
  25. # transfer more data than fits in an RPython 'int' (because that would not
  26. # fit in a single string anyway). This module needs to be careful about
  27. # where r_longlong values end up: as argument to seek() and truncate() and
  28. # return value of tell(), but not as argument to read().
  29. import os, sys, errno
  30. from rpython.rlib.objectmodel import specialize, we_are_translated
  31. from rpython.rlib.rarithmetic import r_longlong, intmask
  32. from rpython.rlib import rposix, nonconst, _rsocket_rffi as _c
  33. from rpython.rlib.rstring import StringBuilder
  34. from os import O_RDONLY, O_WRONLY, O_RDWR, O_CREAT, O_TRUNC, O_APPEND
  35. O_BINARY = getattr(os, "O_BINARY", 0)
  36. # (basemode, plus)
  37. OS_MODE = {('r', False): O_RDONLY,
  38. ('r', True): O_RDWR,
  39. ('w', False): O_WRONLY | O_CREAT | O_TRUNC,
  40. ('w', True): O_RDWR | O_CREAT | O_TRUNC,
  41. ('a', False): O_WRONLY | O_CREAT | O_APPEND,
  42. ('a', True): O_RDWR | O_CREAT | O_APPEND,
  43. }
  44. class MyNotImplementedError(Exception):
  45. """Catching NotImplementedError is not RPython, so we use this custom class
  46. instead of it
  47. """
  48. # ____________________________________________________________
  49. def replace_crlf_with_lf(s):
  50. substrings = s.split("\r")
  51. result = [substrings[0]]
  52. for substring in substrings[1:]:
  53. if not substring:
  54. result.append("")
  55. elif substring[0] == "\n":
  56. result.append(substring[1:])
  57. else:
  58. result.append(substring)
  59. return "\n".join(result)
  60. def replace_char_with_str(string, c, s):
  61. return s.join(string.split(c))
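# For illustration, the two helpers above behave like this:
#
#     replace_crlf_with_lf("a\r\nb\rc\n")          == "a\nb\nc\n"
#     replace_char_with_str("a\nb", "\n", "\r\n")  == "a\r\nb"
#
# i.e. replace_crlf_with_lf() turns both "\r\n" and a lone "\r" into "\n".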
  62. @specialize.argtype(0)
  63. def open_file_as_stream(path, mode="r", buffering=-1, signal_checker=None):
  64. os_flags, universal, reading, writing, basemode, binary = decode_mode(mode)
  65. stream = open_path_helper(path, os_flags, basemode == "a", signal_checker)
  66. return construct_stream_tower(stream, buffering, universal, reading,
  67. writing, binary)
  68. def _setfd_binary(fd):
  69. pass
  70. if hasattr(_c, 'fcntl'):
  71. def _check_fd_mode(fd, reading, writing):
  72. flags = intmask(_c.fcntl(fd, _c.F_GETFL, 0))
  73. if flags & _c.O_RDWR:
  74. return
  75. elif flags & _c.O_WRONLY:
  76. if not reading:
  77. return
  78. else: # O_RDONLY
  79. if not writing:
  80. return
  81. raise OSError(22, "Invalid argument")
  82. else:
  83. def _check_fd_mode(fd, reading, writing):
  84. # XXX
  85. pass
  86. def fdopen_as_stream(fd, mode, buffering=-1, signal_checker=None):
  87. os_flags, universal, reading, writing, basemode, binary = decode_mode(mode)
  88. _check_fd_mode(fd, reading, writing)
  89. _setfd_binary(fd)
  90. stream = DiskFile(fd, signal_checker)
  91. return construct_stream_tower(stream, buffering, universal, reading,
  92. writing, binary)
  93. @specialize.argtype(0)
  94. def open_path_helper(path, os_flags, append, signal_checker=None):
  95. # XXX for now always return DiskFile
  96. fd = rposix.open(path, os_flags, 0666)
  97. if append:
  98. try:
  99. os.lseek(fd, 0, 2)
  100. except OSError:
  101. # XXX does this pass make sense?
  102. pass
  103. return DiskFile(fd, signal_checker)
  104. def decode_mode(mode):
  105. if mode[0] == 'U':
  106. mode = 'r' + mode
  107. basemode = mode[0] # 'r', 'w' or 'a'
  108. plus = False
  109. universal = False
  110. binary = False
  111. for c in mode[1:]:
  112. if c == '+':
  113. plus = True
  114. elif c == 'U':
  115. universal = True
  116. elif c == 'b':
  117. binary = True
  118. else:
  119. break
  120. flag = OS_MODE[basemode, plus]
  121. flag |= O_BINARY
  122. reading = basemode == 'r' or plus
  123. writing = basemode != 'r' or plus
  124. return flag, universal, reading, writing, basemode, binary
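# Illustrative results of decode_mode(), assuming a POSIX platform where
# O_BINARY is 0 (returned tuple: os_flags, universal, reading, writing,
# basemode, binary):
#
#     decode_mode("r")   -> (O_RDONLY,               False, True,  False, 'r', False)
#     decode_mode("rb")  -> (O_RDONLY,               False, True,  False, 'r', True)
#     decode_mode("w+")  -> (O_RDWR|O_CREAT|O_TRUNC, False, True,  True,  'w', False)
#     decode_mode("U")   -> same as "rU": read-only with universal newlines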
  125. def construct_stream_tower(stream, buffering, universal, reading, writing,
  126. binary):
  127. if buffering == 0: # no buffering
  128. pass
  129. elif buffering == 1: # line-buffering
  130. if writing:
  131. stream = LineBufferingOutputStream(stream)
  132. if reading:
  133. stream = BufferingInputStream(stream)
  134. else: # default or explicit buffer sizes
  135. if buffering is not None and buffering < 0:
  136. buffering = -1
  137. if writing:
  138. stream = BufferingOutputStream(stream, buffering)
  139. if reading:
  140. stream = BufferingInputStream(stream, buffering)
  141. if universal: # Wants universal newlines
  142. if writing and os.linesep != '\n':
  143. stream = TextOutputFilter(stream)
  144. if reading:
  145. stream = TextInputFilter(stream)
  146. elif not binary and os.linesep == '\r\n':
  147. stream = TextCRLFFilter(stream)
  148. if nonconst.NonConstant(False):
  149. stream.flush_buffers() # annotation workaround for untranslated tests
  150. return stream
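# Typical towers built by construct_stream_tower(), for illustration:
#
#     "rb", default buffering          -> BufferingInputStream(DiskFile(fd))
#     "rU", default buffering          -> TextInputFilter(BufferingInputStream(DiskFile(fd)))
#     "w",  buffering=1 (line), POSIX  -> LineBufferingOutputStream(DiskFile(fd))
#     "r",  buffering=0, Windows       -> TextCRLFFilter(DiskFile(fd))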
  151. class StreamError(Exception):
  152. def __init__(self, message):
  153. self.message = message
  154. StreamErrors = (OSError, StreamError) # errors that can generally be raised
  155. if sys.platform == "win32":
  156. from rpython.rlib.rwin32 import BOOL, HANDLE, get_osfhandle
  157. from rpython.rlib.rwin32 import GetLastError_saved
  158. from rpython.translator.tool.cbuild import ExternalCompilationInfo
  159. from rpython.rtyper.lltypesystem import rffi
  160. _eci = ExternalCompilationInfo()
  161. _setmode = rffi.llexternal('_setmode', [rffi.INT, rffi.INT], rffi.INT,
  162. compilation_info=_eci)
  163. SetEndOfFile = rffi.llexternal('SetEndOfFile', [HANDLE], BOOL,
  164. compilation_info=_eci,
  165. save_err=rffi.RFFI_SAVE_LASTERROR)
  166. def _setfd_binary(fd):
  167. # Allow this to succeed on invalid fd's
  168. if rposix.is_valid_fd(fd):
  169. _setmode(fd, os.O_BINARY)
  170. def ftruncate_win32(fd, size):
  171. curpos = os.lseek(fd, 0, 1)
  172. try:
  173. # move to the position to be truncated
  174. os.lseek(fd, size, 0)
  175. # Truncate. Note that this may grow the file!
  176. handle = get_osfhandle(fd)
  177. if not SetEndOfFile(handle):
  178. raise OSError(GetLastError_saved(),
  179. "Could not truncate file")
  180. finally:
  181. # we restore the file pointer position in any case
  182. os.lseek(fd, curpos, 0)
  183. class Stream(object):
  184. """Base class for streams. Provides a default implementation of
  185. some methods."""
  186. def read(self, n):
  187. raise MyNotImplementedError
  188. def write(self, data):
  189. raise MyNotImplementedError
  190. def tell(self):
  191. raise MyNotImplementedError
  192. def seek(self, offset, whence):
  193. raise MyNotImplementedError
  194. def readall(self):
  195. bufsize = 8192
  196. result = []
  197. while True:
  198. try:
  199. data = self.read(bufsize)
  200. except OSError:
  201. # like CPython < 3.4, partial results followed by an error
  202. # are returned as data
  203. if not result:
  204. raise
  205. break
  206. if not data:
  207. break
  208. result.append(data)
  209. if bufsize < 4194304: # 4 Megs
  210. bufsize <<= 1
  211. return ''.join(result)
  212. def readline(self):
  213. # very inefficient unless there is a peek()
  214. result = []
  215. while True:
  216. # "peeks" on the underlying stream to see how many characters
  217. # we can safely read without reading past an end-of-line
  218. startindex, peeked = self.peek()
  219. assert 0 <= startindex <= len(peeked)
  220. pn = peeked.find("\n", startindex)
  221. if pn < 0:
  222. pn = len(peeked)
  223. c = self.read(pn - startindex + 1)
  224. if not c:
  225. break
  226. result.append(c)
  227. if c.endswith('\n'):
  228. break
  229. return ''.join(result)
  230. def truncate(self, size):
  231. raise MyNotImplementedError
  232. def flush_buffers(self):
  233. pass
  234. def flush(self):
  235. pass
  236. def flushable(self):
  237. return False
  238. def close(self):
  239. self.close1(True)
  240. def close1(self, closefileno):
  241. pass
  242. def peek(self):
  243. return (0, '')
  244. def count_buffered_bytes(self):
  245. pos, buf = self.peek()
  246. return len(buf) - pos
  247. def try_to_find_file_descriptor(self):
  248. return -1
  249. def getnewlines(self):
  250. return 0
  251. class DiskFile(Stream):
  252. """Standard I/O basis stream using os.open/close/read/write/lseek"""
  253. def __init__(self, fd, signal_checker=None):
  254. self.fd = fd
  255. self.signal_checker = signal_checker
  256. def seek(self, offset, whence):
  257. os.lseek(self.fd, offset, whence)
  258. def tell(self):
  259. return os.lseek(self.fd, 0, 1)
  260. def read(self, n):
  261. assert isinstance(n, int)
  262. while True:
  263. try:
  264. return os.read(self.fd, n)
  265. except OSError as e:
  266. if e.errno != errno.EINTR:
  267. raise
  268. if self.signal_checker is not None:
  269. self.signal_checker()
  270. # else try again
  271. def readline(self):
  272. # mostly inefficient, but not as laughably bad as with the default
  273. # readline() from Stream
  274. result = StringBuilder()
  275. while True:
  276. try:
  277. c = os.read(self.fd, 1)
  278. except OSError as e:
  279. if e.errno != errno.EINTR:
  280. raise
  281. if self.signal_checker is not None:
  282. self.signal_checker()
  283. continue # try again
  284. if not c:
  285. break
  286. c = c[0]
  287. result.append(c)
  288. if c == '\n':
  289. break
  290. return result.build()
  291. def write(self, data):
  292. while data:
  293. try:
  294. n = os.write(self.fd, data)
  295. except OSError as e:
  296. if e.errno != errno.EINTR:
  297. raise
  298. if self.signal_checker is not None:
  299. self.signal_checker()
  300. else:
  301. data = data[n:]
  302. def close1(self, closefileno):
  303. if closefileno:
  304. os.close(self.fd)
  305. if sys.platform == "win32":
  306. def truncate(self, size):
  307. ftruncate_win32(self.fd, size)
  308. else:
  309. def truncate(self, size):
  310. # Note: for consistency, in translated programs a failing
  311. # os.ftruncate() raises OSError. However, on top of
  312. # CPython, we get an IOError. As it is (as far as I know)
  313. # the only place that has this behavior, we just convert it
  314. # to an OSError instead of adding IOError to StreamErrors.
  315. if we_are_translated():
  316. os.ftruncate(self.fd, size)
  317. else:
  318. try:
  319. os.ftruncate(self.fd, size)
  320. except IOError as e:
  321. raise OSError(*e.args)
  322. def try_to_find_file_descriptor(self):
  323. return self.fd
  324. # next class is not RPython
  325. class MMapFile(Stream):
  326. """Standard I/O basis stream using mmap."""
  327. def __init__(self, fd, mmapaccess):
  328. """NOT_RPYTHON"""
  329. self.fd = fd
  330. self.access = mmapaccess
  331. self.pos = 0
  332. self.remapfile()
  333. def remapfile(self):
  334. import mmap
  335. size = os.fstat(self.fd).st_size
  336. self.mm = mmap.mmap(self.fd, size, access=self.access)
  337. def close1(self, closefileno):
  338. self.mm.close()
  339. if closefileno:
  340. os.close(self.fd)
  341. def tell(self):
  342. return self.pos
  343. def seek(self, offset, whence):
  344. if whence == 0:
  345. self.pos = max(0, offset)
  346. elif whence == 1:
  347. self.pos = max(0, self.pos + offset)
  348. elif whence == 2:
  349. self.pos = max(0, self.mm.size() + offset)
  350. else:
  351. raise StreamError("seek(): whence must be 0, 1 or 2")
  352. def readall(self):
  353. filesize = self.mm.size() # Actual file size, may be more than mapped
  354. n = filesize - self.pos
  355. data = self.mm[self.pos:]
  356. if len(data) < n:
  357. del data
  358. # File grew since opened; remap to get the new data
  359. self.remapfile()
  360. data = self.mm[self.pos:]
  361. self.pos += len(data)
  362. return data
  363. def read(self, n):
  364. assert isinstance(n, int)
  365. end = self.pos + n
  366. data = self.mm[self.pos:end]
  367. if not data:
  368. # is there more data to read?
  369. filesize = self.mm.size() #Actual file size, may be more than mapped
  370. if filesize > self.pos:
  371. # File grew since opened; remap to get the new data
  372. self.remapfile()
  373. data = self.mm[self.pos:end]
  374. self.pos += len(data)
  375. return data
  376. def readline(self):
  377. hit = self.mm.find("\n", self.pos) + 1
  378. if not hit:
  379. # is there more data to read?
  380. filesize = self.mm.size() #Actual file size, may be more than mapped
  381. if filesize > len(self.mm):
  382. # File grew since opened; remap to get the new data
  383. self.remapfile()
  384. hit = self.mm.find("\n", self.pos) + 1
  385. if hit:
  386. # Got a whole line
  387. data = self.mm[self.pos:hit]
  388. self.pos = hit
  389. else:
  390. # Read whatever we've got -- may be empty
  391. data = self.mm[self.pos:]
  392. self.pos += len(data)
  393. return data
  394. def write(self, data):
  395. end = self.pos + len(data)
  396. try:
  397. self.mm[self.pos:end] = data
  398. # This can raise IndexError on Windows, ValueError on Unix
  399. except (IndexError, ValueError):
  400. # XXX On Unix, this resize() call doesn't work
  401. self.mm.resize(end)
  402. self.mm[self.pos:end] = data
  403. self.pos = end
  404. def flush(self):
  405. self.mm.flush()
  406. def flushable(self):
  407. import mmap
  408. return self.access == mmap.ACCESS_WRITE
  409. def try_to_find_file_descriptor(self):
  410. return self.fd
  411. # ____________________________________________________________
  412. STREAM_METHODS = dict([
  413. ("read", [int]),
  414. ("write", [str]),
  415. ("tell", []),
  416. ("seek", [r_longlong, int]),
  417. ("readall", []),
  418. ("readline", []),
  419. ("truncate", [r_longlong]),
  420. ("flush", []),
  421. ("flushable", []),
  422. ("close1", [int]),
  423. ("peek", []),
  424. ("try_to_find_file_descriptor", []),
  425. ("getnewlines", []),
  426. ])
  427. def PassThrough(meth_name, flush_buffers):
  428. if meth_name in STREAM_METHODS:
  429. signature = STREAM_METHODS[meth_name]
  430. args = ", ".join(["v%s" % (i, ) for i in range(len(signature))])
  431. else:
  432. assert 0, "not a good idea"
  433. args = "*args"
  434. if flush_buffers:
  435. code = """def %s(self, %s):
  436. self.flush_buffers()
  437. return self.base.%s(%s)
  438. """
  439. else:
  440. code = """def %s(self, %s):
  441. return self.base.%s(%s)
  442. """
  443. d = {}
  444. exec code % (meth_name, args, meth_name, args) in d
  445. return d[meth_name]
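# For example, PassThrough("seek", flush_buffers=True) generates roughly:
#
#     def seek(self, v0, v1):
#         self.flush_buffers()
#         return self.base.seek(v0, v1)
#
# i.e. the call is forwarded to self.base, optionally flushing buffers first.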
  446. def offset2int(offset):
  447. intoffset = intmask(offset)
  448. if intoffset != offset:
  449. raise StreamError("seek() from a non-seekable source:"
  450. " this would read and discard more"
  451. " than sys.maxint bytes")
  452. return intoffset
  453. class BufferingInputStream(Stream):
  454. """Standard buffering input stream.
  455. This, and BufferingOutputStream if needed, are typically at the top of
  456. the stack of streams.
  457. """
  458. bigsize = 2**19 # Half a Meg
  459. bufsize = 2**13 # 8 K
  460. def __init__(self, base, bufsize=-1):
  461. self.base = base
  462. self.do_read = base.read # function to fill buffer some more
  463. self.do_tell = base.tell # return a byte offset
  464. self.do_seek = base.seek # seek to a byte offset
  465. if bufsize == -1: # Get default from the class
  466. bufsize = self.bufsize
  467. self.bufsize = bufsize # buffer size (hint only)
  468. self.buf = "" # raw data
  469. self.pos = 0
  470. def flush_buffers(self):
  471. if self.buf:
  472. try:
  473. self.do_seek(self.pos - len(self.buf), 1)
  474. except (MyNotImplementedError, OSError):
  475. pass
  476. else:
  477. self.buf = ""
  478. self.pos = 0
  479. def tell(self):
  480. tellpos = self.do_tell() # This may fail
  481. # Best-effort: to avoid extra system calls to tell() all the
  482. # time, and a more complicated logic in this class, we can
  483. # only assume that nobody changed the underlying file
  484. # descriptor position while we have buffered data. If they
  485. # do, we might get bogus results here (and the following
  486. # read() will still return the data cached at the old
  487. # position). Just make sure that we don't fail an assert.
  488. offset = len(self.buf) - self.pos
  489. if tellpos < offset:
  490. # bug! someone changed the fd position under our feet,
  491. # and moved it at or very close to the beginning of the
  492. # file, so that we have more buffered data than the
  493. # current offset.
  494. self.buf = ""
  495. self.pos = 0
  496. offset = 0
  497. return tellpos - offset
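# Worked example: if do_tell() reports 8192 because a full 8192-byte buffer
# was read from position 0, and the caller has consumed only 100 bytes of it
# (len(self.buf) == 8192, self.pos == 100), the logical position returned is
# 8192 - (8192 - 100) == 100.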
  498. def seek(self, offset, whence):
  499. # This may fail on the do_seek() or on the tell() call.
  500. # But neither call is needed for a relative forward seek,
  501. # nor for a seek to the very end.
  502. if whence == 0 or whence == 1:
  503. if whence == 0:
  504. difpos = offset - self.tell() # may clean up self.buf/self.pos
  505. else:
  506. difpos = offset
  507. currentsize = len(self.buf) - self.pos
  508. if -self.pos <= difpos <= currentsize:
  509. self.pos += intmask(difpos)
  510. return
  511. if whence == 1:
  512. offset -= currentsize
  513. try:
  514. self.do_seek(offset, whence)
  515. except MyNotImplementedError:
  516. self.buf = ""
  517. self.pos = 0
  518. if difpos < 0:
  519. raise
  520. if whence == 0:
  521. offset = difpos - currentsize
  522. intoffset = offset2int(offset)
  523. self.read(intoffset)
  524. else:
  525. self.buf = ""
  526. self.pos = 0
  527. return
  528. if whence == 2:
  529. try:
  530. self.do_seek(offset, 2)
  531. except MyNotImplementedError:
  532. pass
  533. else:
  534. self.pos = 0
  535. self.buf = ""
  536. return
  537. # Skip relative to EOF by reading and saving only just as
  538. # much as needed
  539. intoffset = offset2int(offset)
  540. pos = self.pos
  541. assert pos >= 0
  542. buffers = [self.buf[pos:]]
  543. total = len(buffers[0])
  544. self.buf = ""
  545. self.pos = 0
  546. while 1:
  547. data = self.do_read(self.bufsize)
  548. if not data:
  549. break
  550. buffers.append(data)
  551. total += len(data)
  552. while buffers and total >= len(buffers[0]) - intoffset:
  553. total -= len(buffers[0])
  554. del buffers[0]
  555. cutoff = total + intoffset
  556. if cutoff < 0:
  557. raise StreamError("cannot seek back")
  558. if buffers:
  559. assert cutoff >= 0
  560. buffers[0] = buffers[0][cutoff:]
  561. self.buf = "".join(buffers)
  562. return
  563. raise StreamError("whence should be 0, 1 or 2")
  564. def readall(self):
  565. pos = self.pos
  566. assert pos >= 0
  567. if self.buf:
  568. chunks = [self.buf[pos:]]
  569. else:
  570. chunks = []
  571. self.buf = ""
  572. self.pos = 0
  573. bufsize = self.bufsize
  574. while 1:
  575. try:
  576. data = self.do_read(bufsize)
  577. except OSError as o:
  578. # like CPython < 3.4, partial results followed by an error
  579. # are returned as data
  580. if not chunks:
  581. raise
  582. break
  583. if not data:
  584. break
  585. chunks.append(data)
  586. bufsize = min(bufsize*2, self.bigsize)
  587. return "".join(chunks)
  588. def read(self, n=-1):
  589. assert isinstance(n, int)
  590. if n < 0:
  591. return self.readall()
  592. currentsize = len(self.buf) - self.pos
  593. start = self.pos
  594. assert start >= 0
  595. if n <= currentsize:
  596. stop = start + n
  597. assert stop >= 0
  598. result = self.buf[start:stop]
  599. self.pos += n
  600. return result
  601. else:
  602. chunks = [self.buf[start:]]
  603. while 1:
  604. self.buf = self.do_read(self.bufsize)
  605. if not self.buf:
  606. self.pos = 0
  607. break
  608. currentsize += len(self.buf)
  609. if currentsize >= n:
  610. self.pos = len(self.buf) - (currentsize - n)
  611. stop = self.pos
  612. assert stop >= 0
  613. chunks.append(self.buf[:stop])
  614. break
  615. chunks.append(self.buf)
  616. return ''.join(chunks)
  617. def readline(self):
  618. pos = self.pos
  619. assert pos >= 0
  620. i = self.buf.find("\n", pos)
  621. start = self.pos
  622. assert start >= 0
  623. if i >= 0: # new line found
  624. i += 1
  625. result = self.buf[start:i]
  626. self.pos = i
  627. return result
  628. temp = self.buf[start:]
  629. # read one buffer and most of the time a new line will be found
  630. self.buf = self.do_read(self.bufsize)
  631. i = self.buf.find("\n")
  632. if i >= 0: # new line found
  633. i += 1
  634. result = temp + self.buf[:i]
  635. self.pos = i
  636. return result
  637. if not self.buf:
  638. self.pos = 0
  639. return temp
  640. # need to keep getting data until we find a new line
  641. chunks = [temp, self.buf]
  642. while 1:
  643. self.buf = self.do_read(self.bufsize)
  644. if not self.buf:
  645. self.pos = 0
  646. break
  647. i = self.buf.find("\n")
  648. if i >= 0:
  649. i += 1
  650. chunks.append(self.buf[:i])
  651. self.pos = i
  652. break
  653. chunks.append(self.buf)
  654. return "".join(chunks)
  655. def peek(self):
  656. return (self.pos, self.buf)
  657. write = PassThrough("write", flush_buffers=True)
  658. truncate = PassThrough("truncate", flush_buffers=True)
  659. flush = PassThrough("flush", flush_buffers=True)
  660. flushable = PassThrough("flushable", flush_buffers=False)
  661. close1 = PassThrough("close1", flush_buffers=False)
  662. try_to_find_file_descriptor = PassThrough("try_to_find_file_descriptor",
  663. flush_buffers=False)
  664. class BufferingOutputStream(Stream):
  665. """Standard buffering output stream.
  666. This, and BufferingInputStream if needed, are typically at the top of
  667. the stack of streams.
  668. """
  669. bigsize = 2**19 # Half a Meg
  670. bufsize = 2**13 # 8 K
  671. def __init__(self, base, bufsize=-1):
  672. self.base = base
  673. self.do_tell = base.tell # return a byte offset
  674. if bufsize == -1: # Get default from the class
  675. bufsize = self.bufsize
  676. self.bufsize = bufsize # buffer size (hint only)
  677. self.buf = []
  678. self.buflen = 0
  679. self.error = False
  680. def do_write(self, data):
  681. try:
  682. self.base.write(data)
  683. except:
  684. self.error = True
  685. raise
  686. def flush_buffers(self):
  687. if self.buf and not self.error:
  688. self.do_write(''.join(self.buf))
  689. self.buf = []
  690. self.buflen = 0
  691. def tell(self):
  692. return self.do_tell() + self.buflen
  693. def write(self, data):
  694. self.error = False
  695. buflen = self.buflen
  696. datalen = len(data)
  697. if datalen + buflen < self.bufsize:
  698. self.buf.append(data)
  699. self.buflen += datalen
  700. elif buflen:
  701. self.buf.append(data)
  702. self.do_write(''.join(self.buf))
  703. self.buf = []
  704. self.buflen = 0
  705. else:
  706. self.do_write(data)
  707. read = PassThrough("read", flush_buffers=True)
  708. readall = PassThrough("readall", flush_buffers=True)
  709. readline = PassThrough("readline", flush_buffers=True)
  710. seek = PassThrough("seek", flush_buffers=True)
  711. truncate = PassThrough("truncate", flush_buffers=True)
  712. flush = PassThrough("flush", flush_buffers=True)
  713. close1 = PassThrough("close1", flush_buffers=True)
  714. try_to_find_file_descriptor = PassThrough("try_to_find_file_descriptor",
  715. flush_buffers=False)
  716. def flushable(self):
  717. return True
  718. class LineBufferingOutputStream(BufferingOutputStream):
  719. """Line buffering output stream.
  720. This is typically the top of the stack.
  721. """
  722. def write(self, data):
  723. self.error = False
  724. p = data.rfind('\n') + 1
  725. assert p >= 0
  726. if self.buflen + len(data) < self.bufsize:
  727. if p == 0:
  728. self.buf.append(data)
  729. self.buflen += len(data)
  730. else:
  731. if self.buflen:
  732. self.buf.append(data[:p])
  733. self.do_write(''.join(self.buf))
  734. else:
  735. self.do_write(data[:p])
  736. self.buf = [data[p:]]
  737. self.buflen = len(self.buf[0])
  738. else:
  739. if self.buflen + p < self.bufsize:
  740. p = self.bufsize - self.buflen
  741. if self.buflen:
  742. self.do_write(''.join(self.buf))
  743. assert p >= 0
  744. self.do_write(data[:p])
  745. self.buf = [data[p:]]
  746. self.buflen = len(self.buf[0])
  747. # ____________________________________________________________
  748. class CRLFFilter(Stream):
  749. """Filtering stream for universal newlines.
  750. TextInputFilter is more general, but this is faster when you don't
  751. need tell/seek.
  752. """
  753. def __init__(self, base):
  754. self.base = base
  755. self.do_read = base.read
  756. self.atcr = False
  757. def read(self, n):
  758. data = self.do_read(n)
  759. if self.atcr:
  760. if data.startswith("\n"):
  761. data = data[1:] # Very rare case: in the middle of "\r\n"
  762. self.atcr = False
  763. if "\r" in data:
  764. self.atcr = data.endswith("\r") # Test this before removing \r
  765. data = replace_crlf_with_lf(data)
  766. return data
  767. flush = PassThrough("flush", flush_buffers=False)
  768. flushable = PassThrough("flushable", flush_buffers=False)
  769. close1 = PassThrough("close1", flush_buffers=False)
  770. try_to_find_file_descriptor = PassThrough("try_to_find_file_descriptor",
  771. flush_buffers=False)
  772. class TextCRLFFilter(Stream):
  773. """Filtering stream for universal newlines.
  774. TextInputFilter is more general, but this is faster when you don't
  775. need tell/seek.
  776. """
  777. def __init__(self, base):
  778. self.base = base
  779. self.do_read = base.read
  780. self.do_write = base.write
  781. self.do_flush = base.flush_buffers
  782. self.lfbuffer = ""
  783. def read(self, n=-1):
  784. data = self.lfbuffer + self.do_read(n)
  785. self.lfbuffer = ""
  786. if data.endswith("\r"):
  787. c = self.do_read(1)
  788. if c and c[0] == '\n':
  789. data = data + '\n'
  790. self.lfbuffer = c[1:]
  791. else:
  792. self.lfbuffer = c
  793. result = []
  794. offset = 0
  795. while True:
  796. newoffset = data.find('\r\n', offset)
  797. if newoffset < 0:
  798. result.append(data[offset:])
  799. break
  800. result.append(data[offset:newoffset])
  801. offset = newoffset + 2
  802. return '\n'.join(result)
  803. def readline(self):
  804. line = self.base.readline()
  805. limit = len(line) - 2
  806. if limit >= 0 and line[limit] == '\r' and line[limit + 1] == '\n':
  807. line = line[:limit] + '\n'
  808. return line
  809. def tell(self):
  810. pos = self.base.tell()
  811. return pos - len(self.lfbuffer)
  812. def seek(self, offset, whence):
  813. if whence == 1:
  814. offset -= len(self.lfbuffer) # correct for already-read-ahead character
  815. self.base.seek(offset, whence)
  816. self.lfbuffer = ""
  817. def flush_buffers(self):
  818. if self.lfbuffer:
  819. try:
  820. self.base.seek(-len(self.lfbuffer), 1)
  821. except (MyNotImplementedError, OSError):
  822. return
  823. self.lfbuffer = ""
  824. self.do_flush()
  825. def write(self, data):
  826. data = replace_char_with_str(data, '\n', '\r\n')
  827. self.flush_buffers()
  828. self.do_write(data)
  829. truncate = PassThrough("truncate", flush_buffers=True)
  830. flush = PassThrough("flush", flush_buffers=False)
  831. flushable = PassThrough("flushable", flush_buffers=False)
  832. close1 = PassThrough("close1", flush_buffers=False)
  833. try_to_find_file_descriptor = PassThrough("try_to_find_file_descriptor",
  834. flush_buffers=False)
  835. class TextInputFilter(Stream):
  836. """Filtering input stream for universal newline translation."""
  837. def __init__(self, base):
  838. self.base = base # must implement read, may implement tell, seek
  839. self.do_read = base.read
  840. self.atcr = False # Set when last char read was \r
  841. self.buf = "" # Optional one-character read-ahead buffer
  842. self.CR = False
  843. self.NL = False
  844. self.CRLF = False
  845. def getnewlines(self):
  846. return self.CR * 1 + self.NL * 2 + self.CRLF * 4
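# The returned value is a small bitmask of the newline kinds seen so far:
# bit 0 for a lone '\r', bit 1 for '\n', bit 2 for '\r\n'.  For example, a
# file mixing '\n' and '\r\n' endings yields 2 | 4 == 6.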
  847. def read(self, n):
  848. """Read up to n bytes."""
  849. if self.buf:
  850. assert not self.atcr
  851. data = self.buf
  852. self.buf = ""
  853. else:
  854. data = self.do_read(n)
  855. # The following whole ugly mess is because we need to keep track of
  856. # exactly which line separators we have seen for self.newlines,
  857. # grumble, grumble. This has an interesting corner-case.
  858. #
  859. # Consider a file consisting of exactly one line ending with '\r'.
  860. # The first time you read(), you will not know whether it is a
  861. # CR separator or half of a CRLF separator. Neither will be marked
  862. # as seen, since you are waiting for your next read to determine
  863. # what you have seen. But there's no more to read ...
  864. if self.atcr:
  865. if data.startswith("\n"):
  866. data = data[1:]
  867. self.CRLF = True
  868. if not data:
  869. data = self.do_read(n)
  870. else:
  871. self.CR = True
  872. self.atcr = False
  873. for i in range(len(data)):
  874. if data[i] == '\n':
  875. if i > 0 and data[i-1] == '\r':
  876. self.CRLF = True
  877. else:
  878. self.NL = True
  879. elif data[i] == '\r':
  880. if i < len(data)-1 and data[i+1] != '\n':
  881. self.CR = True
  882. if "\r" in data:
  883. self.atcr = data.endswith("\r")
  884. data = replace_crlf_with_lf(data)
  885. return data
  886. def readline(self):
  887. result = []
  888. while True:
  889. # "peeks" on the underlying stream to see how many characters
  890. # we can safely read without reading past an end-of-line
  891. startindex, peeked = self.base.peek()
  892. assert 0 <= startindex <= len(peeked)
  893. pn = peeked.find("\n", startindex)
  894. pr = peeked.find("\r", startindex)
  895. if pn < 0: pn = len(peeked)
  896. if pr < 0: pr = len(peeked)
  897. c = self.read(min(pn, pr) - startindex + 1)
  898. if not c:
  899. break
  900. result.append(c)
  901. if c.endswith('\n'):
  902. break
  903. return ''.join(result)
  904. def seek(self, offset, whence):
  905. """Seeks based on knowledge that does not come from a tell()
  906. may go to the wrong place, since the number of
  907. characters seen may not match the number of characters
  908. that are actually in the file (where \r\n is the
  909. line separator). Arithmetics on the result
  910. of a tell() that moves beyond a newline character may in the
  911. same way give the wrong result.
  912. """
  913. if whence == 1:
  914. offset -= len(self.buf) # correct for already-read-ahead character
  915. self.base.seek(offset, whence)
  916. self.atcr = False
  917. self.buf = ""
  918. def tell(self):
  919. pos = self.base.tell()
  920. if self.atcr:
  921. # Must read the next byte to see if it's \n,
  922. # because then we must report the next position.
  923. assert not self.buf
  924. self.buf = self.do_read(1)
  925. pos += 1
  926. self.atcr = False
  927. if self.buf == "\n":
  928. self.CRLF = True
  929. self.buf = ""
  930. return pos - len(self.buf)
  931. def flush_buffers(self):
  932. if self.atcr:
  933. assert not self.buf
  934. self.buf = self.do_read(1)
  935. self.atcr = False
  936. if self.buf == "\n":
  937. self.buf = ""
  938. if self.buf:
  939. try:
  940. self.base.seek(-len(self.buf), 1)
  941. except (MyNotImplementedError, OSError):
  942. pass
  943. else:
  944. self.buf = ""
  945. def peek(self):
  946. return (0, self.buf)
  947. write = PassThrough("write", flush_buffers=True)
  948. truncate = PassThrough("truncate", flush_buffers=True)
  949. flush = PassThrough("flush", flush_buffers=True)
  950. flushable = PassThrough("flushable", flush_buffers=False)
  951. close1 = PassThrough("close1", flush_buffers=False)
  952. try_to_find_file_descriptor = PassThrough("try_to_find_file_descriptor",
  953. flush_buffers=False)
  954. class TextOutputFilter(Stream):
  955. """Filtering output stream for universal newline translation."""
  956. def __init__(self, base, linesep=os.linesep):
  957. assert linesep in ["\n", "\r\n", "\r"]
  958. self.base = base # must implement write, may implement seek, tell
  959. self.linesep = linesep
  960. def write(self, data):
  961. data = replace_char_with_str(data, "\n", self.linesep)
  962. self.base.write(data)
  963. tell = PassThrough("tell", flush_buffers=False)
  964. seek = PassThrough("seek", flush_buffers=False)
  965. read = PassThrough("read", flush_buffers=False)
  966. readall = PassThrough("readall", flush_buffers=False)
  967. readline = PassThrough("readline", flush_buffers=False)
  968. truncate = PassThrough("truncate", flush_buffers=False)
  969. flush = PassThrough("flush", flush_buffers=False)
  970. flushable = PassThrough("flushable", flush_buffers=False)
  971. close1 = PassThrough("close1", flush_buffers=False)
  972. try_to_find_file_descriptor = PassThrough("try_to_find_file_descriptor",
  973. flush_buffers=False)
  974. class CallbackReadFilter(Stream):
  975. """Pseudo read filter that invokes a callback before blocking on a read.
  976. """
  977. def __init__(self, base, callback):
  978. self.base = base
  979. self.callback = callback
  980. def flush_buffers(self):
  981. self.callback()
  982. tell = PassThrough("tell", flush_buffers=False)
  983. seek = PassThrough("seek", flush_buffers=False)
  984. read = PassThrough("read", flush_buffers=True)
  985. readall = PassThrough("readall", flush_buffers=True)
  986. readline = PassThrough("readline", flush_buffers=True)
  987. peek = PassThrough("peek", flush_buffers=False)
  988. flush = PassThrough("flush", flush_buffers=False)
  989. flushable = PassThrough("flushable", flush_buffers=False)
  990. close1 = PassThrough("close1", flush_buffers=False)
  991. write = PassThrough("write", flush_buffers=False)
  992. truncate = PassThrough("truncate", flush_buffers=False)
  993. getnewlines = PassThrough("getnewlines", flush_buffers=False)
  994. try_to_find_file_descriptor = PassThrough("try_to_find_file_descriptor",
  995. flush_buffers=False)
  996. # _________________________________________________
  997. # The following functions are _not_ RPython!
  998. class DecodingInputFilter(Stream):
  999. """Filtering input stream that decodes an encoded file."""
  1000. def __init__(self, base, encoding="utf8", errors="strict"):
  1001. """NOT_RPYTHON"""
  1002. self.base = base
  1003. self.do_read = base.read
  1004. self.encoding = encoding
  1005. self.errors = errors
  1006. def read(self, n):
  1007. """Read *approximately* n bytes, then decode them.
  1008. Under extreme circumstances,
  1009. the return length could be longer than n!
  1010. Always return a unicode string.
  1011. This does *not* translate newlines;
  1012. you can stack TextInputFilter.
  1013. """
  1014. data = self.do_read(n)
  1015. try:
  1016. return data.decode(self.encoding, self.errors)
  1017. except ValueError:
  1018. # XXX Sigh. decode() doesn't handle incomplete strings well.
  1019. # Use the retry strategy from codecs.StreamReader.
  1020. for i in range(9):
  1021. more = self.do_read(1)
  1022. if not more:
  1023. raise
  1024. data += more
  1025. try:
  1026. return data.decode(self.encoding, self.errors)
  1027. except ValueError:
  1028. pass
  1029. raise
  1030. write = PassThrough("write", flush_buffers=False)
  1031. truncate = PassThrough("truncate", flush_buffers=False)
  1032. flush = PassThrough("flush", flush_buffers=False)
  1033. flushable = PassThrough("flushable", flush_buffers=False)
  1034. close1 = PassThrough("close1", flush_buffers=False)
  1035. try_to_find_file_descriptor = PassThrough("try_to_find_file_descriptor",
  1036. flush_buffers=False)
  1037. class EncodingOutputFilter(Stream):
  1038. """Filtering output stream that writes to an encoded file."""
  1039. def __init__(self, base, encoding="utf8", errors="strict"):
  1040. """NOT_RPYTHON"""
  1041. self.base = base
  1042. self.do_write = base.write
  1043. self.encoding = encoding
  1044. self.errors = errors
  1045. def write(self, chars):
  1046. if isinstance(chars, str):
  1047. chars = unicode(chars) # Fail if it's not ASCII
  1048. self.do_write(chars.encode(self.encoding, self.errors))
  1049. tell = PassThrough("tell", flush_buffers=False)
  1050. seek = PassThrough("seek", flush_buffers=False)
  1051. read = PassThrough("read", flush_buffers=False)
  1052. readall = PassThrough("readall", flush_buffers=False)
  1053. readline = PassThrough("readline", flush_buffers=False)
  1054. truncate = PassThrough("truncate", flush_buffers=False)
  1055. flush = PassThrough("flush", flush_buffers=False)
  1056. flushable = PassThrough("flushable", flush_buffers=False)
  1057. close1 = PassThrough("close1", flush_buffers=False)
  1058. try_to_find_file_descriptor = PassThrough("try_to_find_file_descriptor",
  1059. flush_buffers=False)