PageRenderTime 52ms CodeModel.GetById 18ms RepoModel.GetById 0ms app.codeStats 0ms

/lib-python/2.7/test/test_support.py

https://bitbucket.org/quangquach/pypy
Python | 1289 lines | 1142 code | 56 blank | 91 comment | 77 complexity | eac35509a06c0c877b95bfeb5ef08b17 MD5 | raw file
"""Supporting definitions for the Python regression tests."""

# Guard: this module must be imported as the test package's submodule
# ("import test.test_support"), never as a bare top-level "test_support".
if __name__ != 'test.test_support':
    raise ImportError('test_support must be imported from the test package')
  4. import contextlib
  5. import errno
  6. import functools
  7. import gc
  8. import socket
  9. import sys
  10. import os
  11. import platform
  12. import shutil
  13. import warnings
  14. import unittest
  15. import importlib
  16. import UserDict
  17. import re
  18. import time
  19. try:
  20. import thread
  21. except ImportError:
  22. thread = None
  23. __all__ = ["Error", "TestFailed", "ResourceDenied", "import_module",
  24. "verbose", "use_resources", "max_memuse", "record_original_stdout",
  25. "get_original_stdout", "unload", "unlink", "rmtree", "forget",
  26. "is_resource_enabled", "requires", "find_unused_port", "bind_port",
  27. "fcmp", "have_unicode", "is_jython", "TESTFN", "HOST", "FUZZ",
  28. "SAVEDCWD", "temp_cwd", "findfile", "sortdict", "check_syntax_error",
  29. "open_urlresource", "check_warnings", "check_py3k_warnings",
  30. "CleanImport", "EnvironmentVarGuard", "captured_output",
  31. "captured_stdout", "TransientResource", "transient_internet",
  32. "run_with_locale", "set_memlimit", "bigmemtest", "bigaddrspacetest",
  33. "BasicTestRunner", "run_unittest", "run_doctest", "threading_setup",
  34. "threading_cleanup", "reap_children", "cpython_only",
  35. "check_impl_detail", "get_attribute", "py3k_bytes",
  36. "import_fresh_module", "threading_cleanup", "reap_children",
  37. "strip_python_stderr"]
  38. class Error(Exception):
  39. """Base class for regression test exceptions."""
  40. class TestFailed(Error):
  41. """Test failed."""
  42. class ResourceDenied(unittest.SkipTest):
  43. """Test skipped because it requested a disallowed resource.
  44. This is raised when a test calls requires() for a resource that
  45. has not been enabled. It is used to distinguish between expected
  46. and unexpected skips.
  47. """
  48. @contextlib.contextmanager
  49. def _ignore_deprecated_imports(ignore=True):
  50. """Context manager to suppress package and module deprecation
  51. warnings when importing them.
  52. If ignore is False, this context manager has no effect."""
  53. if ignore:
  54. with warnings.catch_warnings():
  55. warnings.filterwarnings("ignore", ".+ (module|package)",
  56. DeprecationWarning)
  57. yield
  58. else:
  59. yield
  60. def import_module(name, deprecated=False):
  61. """Import and return the module to be tested, raising SkipTest if
  62. it is not available.
  63. If deprecated is True, any module or package deprecation messages
  64. will be suppressed."""
  65. with _ignore_deprecated_imports(deprecated):
  66. try:
  67. return importlib.import_module(name)
  68. except ImportError, msg:
  69. raise unittest.SkipTest(str(msg))
  70. def _save_and_remove_module(name, orig_modules):
  71. """Helper function to save and remove a module from sys.modules
  72. Raise ImportError if the module can't be imported."""
  73. # try to import the module and raise an error if it can't be imported
  74. if name not in sys.modules:
  75. __import__(name)
  76. del sys.modules[name]
  77. for modname in list(sys.modules):
  78. if modname == name or modname.startswith(name + '.'):
  79. orig_modules[modname] = sys.modules[modname]
  80. del sys.modules[modname]
  81. def _save_and_block_module(name, orig_modules):
  82. """Helper function to save and block a module in sys.modules
  83. Return True if the module was in sys.modules, False otherwise."""
  84. saved = True
  85. try:
  86. orig_modules[name] = sys.modules[name]
  87. except KeyError:
  88. saved = False
  89. sys.modules[name] = None
  90. return saved
  91. def import_fresh_module(name, fresh=(), blocked=(), deprecated=False):
  92. """Imports and returns a module, deliberately bypassing the sys.modules cache
  93. and importing a fresh copy of the module. Once the import is complete,
  94. the sys.modules cache is restored to its original state.
  95. Modules named in fresh are also imported anew if needed by the import.
  96. If one of these modules can't be imported, None is returned.
  97. Importing of modules named in blocked is prevented while the fresh import
  98. takes place.
  99. If deprecated is True, any module or package deprecation messages
  100. will be suppressed."""
  101. # NOTE: test_heapq, test_json, and test_warnings include extra sanity
  102. # checks to make sure that this utility function is working as expected
  103. with _ignore_deprecated_imports(deprecated):
  104. # Keep track of modules saved for later restoration as well
  105. # as those which just need a blocking entry removed
  106. orig_modules = {}
  107. names_to_remove = []
  108. _save_and_remove_module(name, orig_modules)
  109. try:
  110. for fresh_name in fresh:
  111. _save_and_remove_module(fresh_name, orig_modules)
  112. for blocked_name in blocked:
  113. if not _save_and_block_module(blocked_name, orig_modules):
  114. names_to_remove.append(blocked_name)
  115. fresh_module = importlib.import_module(name)
  116. except ImportError:
  117. fresh_module = None
  118. finally:
  119. for orig_name, module in orig_modules.items():
  120. sys.modules[orig_name] = module
  121. for name_to_remove in names_to_remove:
  122. del sys.modules[name_to_remove]
  123. return fresh_module
  124. def get_attribute(obj, name):
  125. """Get an attribute, raising SkipTest if AttributeError is raised."""
  126. try:
  127. attribute = getattr(obj, name)
  128. except AttributeError:
  129. raise unittest.SkipTest("module %s has no attribute %s" % (
  130. obj.__name__, name))
  131. else:
  132. return attribute
verbose = 1              # Flag set to 0 by regrtest.py
use_resources = None     # Flag set to [] by regrtest.py
max_memuse = 0           # Disable bigmem tests (they will still be run with
                         # small sizes, to make sure they work.)
real_max_memuse = 0      # Requested bigmem limit, before set_memlimit() caps
                         # it to sys.maxsize.

# _original_stdout is meant to hold stdout at the time regrtest began.
# This may be "the real" stdout, or IDLE's emulation of stdout, or whatever.
# The point is to have some flavor of stdout the user can actually see.
_original_stdout = None
  142. def record_original_stdout(stdout):
  143. global _original_stdout
  144. _original_stdout = stdout
  145. def get_original_stdout():
  146. return _original_stdout or sys.stdout
  147. def unload(name):
  148. try:
  149. del sys.modules[name]
  150. except KeyError:
  151. pass
  152. def unlink(filename):
  153. try:
  154. os.unlink(filename)
  155. except OSError:
  156. pass
  157. def rmtree(path):
  158. try:
  159. shutil.rmtree(path)
  160. except OSError, e:
  161. # Unix returns ENOENT, Windows returns ESRCH.
  162. if e.errno not in (errno.ENOENT, errno.ESRCH):
  163. raise
  164. def forget(modname):
  165. '''"Forget" a module was ever imported by removing it from sys.modules and
  166. deleting any .pyc and .pyo files.'''
  167. unload(modname)
  168. for dirname in sys.path:
  169. unlink(os.path.join(dirname, modname + os.extsep + 'pyc'))
  170. # Deleting the .pyo file cannot be within the 'try' for the .pyc since
  171. # the chance exists that there is no .pyc (and thus the 'try' statement
  172. # is exited) but there is a .pyo file.
  173. unlink(os.path.join(dirname, modname + os.extsep + 'pyo'))
  174. def is_resource_enabled(resource):
  175. """Test whether a resource is enabled. Known resources are set by
  176. regrtest.py."""
  177. return use_resources is not None and resource in use_resources
  178. def requires(resource, msg=None):
  179. """Raise ResourceDenied if the specified resource is not available.
  180. If the caller's module is __main__ then automatically return True. The
  181. possibility of False being returned occurs when regrtest.py is executing."""
  182. # see if the caller's module is __main__ - if so, treat as if
  183. # the resource was set
  184. if sys._getframe(1).f_globals.get("__name__") == "__main__":
  185. return
  186. if not is_resource_enabled(resource):
  187. if msg is None:
  188. msg = "Use of the `%s' resource not enabled" % resource
  189. raise ResourceDenied(msg)
# Loopback host used by bind_port() and find_unused_port() below.
HOST = 'localhost'
  191. def find_unused_port(family=socket.AF_INET, socktype=socket.SOCK_STREAM):
  192. """Returns an unused port that should be suitable for binding. This is
  193. achieved by creating a temporary socket with the same family and type as
  194. the 'sock' parameter (default is AF_INET, SOCK_STREAM), and binding it to
  195. the specified host address (defaults to 0.0.0.0) with the port set to 0,
  196. eliciting an unused ephemeral port from the OS. The temporary socket is
  197. then closed and deleted, and the ephemeral port is returned.
  198. Either this method or bind_port() should be used for any tests where a
  199. server socket needs to be bound to a particular port for the duration of
  200. the test. Which one to use depends on whether the calling code is creating
  201. a python socket, or if an unused port needs to be provided in a constructor
  202. or passed to an external program (i.e. the -accept argument to openssl's
  203. s_server mode). Always prefer bind_port() over find_unused_port() where
  204. possible. Hard coded ports should *NEVER* be used. As soon as a server
  205. socket is bound to a hard coded port, the ability to run multiple instances
  206. of the test simultaneously on the same host is compromised, which makes the
  207. test a ticking time bomb in a buildbot environment. On Unix buildbots, this
  208. may simply manifest as a failed test, which can be recovered from without
  209. intervention in most cases, but on Windows, the entire python process can
  210. completely and utterly wedge, requiring someone to log in to the buildbot
  211. and manually kill the affected process.
  212. (This is easy to reproduce on Windows, unfortunately, and can be traced to
  213. the SO_REUSEADDR socket option having different semantics on Windows versus
  214. Unix/Linux. On Unix, you can't have two AF_INET SOCK_STREAM sockets bind,
  215. listen and then accept connections on identical host/ports. An EADDRINUSE
  216. socket.error will be raised at some point (depending on the platform and
  217. the order bind and listen were called on each socket).
  218. However, on Windows, if SO_REUSEADDR is set on the sockets, no EADDRINUSE
  219. will ever be raised when attempting to bind two identical host/ports. When
  220. accept() is called on each socket, the second caller's process will steal
  221. the port from the first caller, leaving them both in an awkwardly wedged
  222. state where they'll no longer respond to any signals or graceful kills, and
  223. must be forcibly killed via OpenProcess()/TerminateProcess().
  224. The solution on Windows is to use the SO_EXCLUSIVEADDRUSE socket option
  225. instead of SO_REUSEADDR, which effectively affords the same semantics as
  226. SO_REUSEADDR on Unix. Given the propensity of Unix developers in the Open
  227. Source world compared to Windows ones, this is a common mistake. A quick
  228. look over OpenSSL's 0.9.8g source shows that they use SO_REUSEADDR when
  229. openssl.exe is called with the 's_server' option, for example. See
  230. http://bugs.python.org/issue2550 for more info. The following site also
  231. has a very thorough description about the implications of both REUSEADDR
  232. and EXCLUSIVEADDRUSE on Windows:
  233. http://msdn2.microsoft.com/en-us/library/ms740621(VS.85).aspx)
  234. XXX: although this approach is a vast improvement on previous attempts to
  235. elicit unused ports, it rests heavily on the assumption that the ephemeral
  236. port returned to us by the OS won't immediately be dished back out to some
  237. other process when we close and delete our temporary socket but before our
  238. calling code has a chance to bind the returned port. We can deal with this
  239. issue if/when we come across it."""
  240. tempsock = socket.socket(family, socktype)
  241. port = bind_port(tempsock)
  242. tempsock.close()
  243. del tempsock
  244. return port
  245. def bind_port(sock, host=HOST):
  246. """Bind the socket to a free port and return the port number. Relies on
  247. ephemeral ports in order to ensure we are using an unbound port. This is
  248. important as many tests may be running simultaneously, especially in a
  249. buildbot environment. This method raises an exception if the sock.family
  250. is AF_INET and sock.type is SOCK_STREAM, *and* the socket has SO_REUSEADDR
  251. or SO_REUSEPORT set on it. Tests should *never* set these socket options
  252. for TCP/IP sockets. The only case for setting these options is testing
  253. multicasting via multiple UDP sockets.
  254. Additionally, if the SO_EXCLUSIVEADDRUSE socket option is available (i.e.
  255. on Windows), it will be set on the socket. This will prevent anyone else
  256. from bind()'ing to our host/port for the duration of the test.
  257. """
  258. if sock.family == socket.AF_INET and sock.type == socket.SOCK_STREAM:
  259. if hasattr(socket, 'SO_REUSEADDR'):
  260. if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR) == 1:
  261. raise TestFailed("tests should never set the SO_REUSEADDR " \
  262. "socket option on TCP/IP sockets!")
  263. if hasattr(socket, 'SO_REUSEPORT'):
  264. if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT) == 1:
  265. raise TestFailed("tests should never set the SO_REUSEPORT " \
  266. "socket option on TCP/IP sockets!")
  267. if hasattr(socket, 'SO_EXCLUSIVEADDRUSE'):
  268. sock.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1)
  269. sock.bind((host, 0))
  270. port = sock.getsockname()[1]
  271. return port
  272. FUZZ = 1e-6
  273. def fcmp(x, y): # fuzzy comparison function
  274. if isinstance(x, float) or isinstance(y, float):
  275. try:
  276. fuzz = (abs(x) + abs(y)) * FUZZ
  277. if abs(x-y) <= fuzz:
  278. return 0
  279. except:
  280. pass
  281. elif type(x) == type(y) and isinstance(x, (tuple, list)):
  282. for i in range(min(len(x), len(y))):
  283. outcome = fcmp(x[i], y[i])
  284. if outcome != 0:
  285. return outcome
  286. return (len(x) > len(y)) - (len(x) < len(y))
  287. return (x > y) - (x < y)
# Does this build have the (Python 2) unicode type at all?  Merely
# evaluating the name raises NameError when it is absent.
try:
    unicode
    have_unicode = True
except NameError:
    have_unicode = False

is_jython = sys.platform.startswith('java')

# Filename used for testing
if os.name == 'java':
    # Jython disallows @ in module names
    TESTFN = '$test'
elif os.name == 'riscos':
    TESTFN = 'testfile'
else:
    TESTFN = '@test'

# Unicode name only used if TEST_FN_ENCODING exists for the platform.
if have_unicode:
    # Assuming sys.getfilesystemencoding()!=sys.getdefaultencoding()
    # TESTFN_UNICODE is a filename that can be encoded using the
    # file system encoding, but *not* with the default (ascii) encoding
    if isinstance('', unicode):
        # python -U
        # XXX perhaps unicode() should accept Unicode strings?
        TESTFN_UNICODE = "@test-\xe0\xf2"
    else:
        # 2 latin characters.
        TESTFN_UNICODE = unicode("@test-\xe0\xf2", "latin-1")
    TESTFN_ENCODING = sys.getfilesystemencoding()
    # TESTFN_UNENCODABLE is a filename that should *not* be
    # able to be encoded by *either* the default or filesystem encoding.
    # This test really only makes sense on Windows NT platforms
    # which have special Unicode support in posixmodule.
    if (not hasattr(sys, "getwindowsversion") or
            sys.getwindowsversion()[3] < 2):  # 0=win32s or 1=9x/ME
        TESTFN_UNENCODABLE = None
    else:
        # Japanese characters (I think - from bug 846133)
        TESTFN_UNENCODABLE = eval('u"@test-\u5171\u6709\u3055\u308c\u308b"')
        try:
            # XXX - Note - should be using TESTFN_ENCODING here - but for
            # Windows, "mbcs" currently always operates as if in
            # errors=ignore' mode - hence we get '?' characters rather than
            # the exception. 'Latin1' operates as we expect - ie, fails.
            # See [ 850997 ] mbcs encoding ignores errors
            TESTFN_UNENCODABLE.encode("Latin1")
        except UnicodeEncodeError:
            pass
        else:
            # Python 2 print statement; warns that the "unencodable" name
            # is in fact encodable on this filesystem.
            print \
            'WARNING: The filename %r CAN be encoded by the filesystem. ' \
            'Unicode filename tests may not be effective' \
            % TESTFN_UNENCODABLE

# Disambiguate TESTFN for parallel testing, while letting it remain a valid
# module name.
TESTFN = "{}_{}_tmp".format(TESTFN, os.getpid())

# Save the initial cwd
SAVEDCWD = os.getcwd()
  344. @contextlib.contextmanager
  345. def temp_cwd(name='tempcwd', quiet=False):
  346. """
  347. Context manager that creates a temporary directory and set it as CWD.
  348. The new CWD is created in the current directory and it's named *name*.
  349. If *quiet* is False (default) and it's not possible to create or change
  350. the CWD, an error is raised. If it's True, only a warning is raised
  351. and the original CWD is used.
  352. """
  353. if isinstance(name, unicode):
  354. try:
  355. name = name.encode(sys.getfilesystemencoding() or 'ascii')
  356. except UnicodeEncodeError:
  357. if not quiet:
  358. raise unittest.SkipTest('unable to encode the cwd name with '
  359. 'the filesystem encoding.')
  360. saved_dir = os.getcwd()
  361. is_temporary = False
  362. try:
  363. os.mkdir(name)
  364. os.chdir(name)
  365. is_temporary = True
  366. except OSError:
  367. if not quiet:
  368. raise
  369. warnings.warn('tests may fail, unable to change the CWD to ' + name,
  370. RuntimeWarning, stacklevel=3)
  371. try:
  372. yield os.getcwd()
  373. finally:
  374. os.chdir(saved_dir)
  375. if is_temporary:
  376. rmtree(name)
  377. def findfile(file, here=None, subdir=None):
  378. """Try to find a file on sys.path and the working directory. If it is not
  379. found the argument passed to the function is returned (this does not
  380. necessarily signal failure; could still be the legitimate path)."""
  381. import test
  382. if os.path.isabs(file):
  383. return file
  384. if subdir is not None:
  385. file = os.path.join(subdir, file)
  386. path = sys.path
  387. if here is None:
  388. path = test.__path__ + path
  389. else:
  390. path = [os.path.dirname(here)] + path
  391. for dn in path:
  392. fn = os.path.join(dn, file)
  393. if os.path.exists(fn): return fn
  394. return file
  395. def sortdict(dict):
  396. "Like repr(dict), but in sorted order."
  397. items = dict.items()
  398. items.sort()
  399. reprpairs = ["%r: %r" % pair for pair in items]
  400. withcommas = ", ".join(reprpairs)
  401. return "{%s}" % withcommas
  402. def make_bad_fd():
  403. """
  404. Create an invalid file descriptor by opening and closing a file and return
  405. its fd.
  406. """
  407. file = open(TESTFN, "wb")
  408. try:
  409. return file.fileno()
  410. finally:
  411. file.close()
  412. unlink(TESTFN)
  413. def check_syntax_error(testcase, statement):
  414. testcase.assertRaises(SyntaxError, compile, statement,
  415. '<test string>', 'exec')
def open_urlresource(url, check=None):
    """Return an open file for the test resource at *url*.

    The resource is cached in the test package's "data" directory under
    the URL's basename.  A download happens only when no valid cached
    copy exists; *check*, when given, is a predicate called with the
    open file to decide validity.  Requires the 'urlfetch' resource.
    Raises TestFailed when the freshly downloaded file still fails
    *check*.
    """
    import urlparse, urllib2
    filename = urlparse.urlparse(url)[2].split('/')[-1] # '/': it's URL!

    fn = os.path.join(os.path.dirname(__file__), "data", filename)

    def check_valid_file(fn):
        # Open the cached copy and return it when acceptable (rewound to
        # the start); otherwise close it and implicitly return None.
        f = open(fn)
        if check is None:
            return f
        elif check(f):
            f.seek(0)
            return f
        f.close()

    if os.path.exists(fn):
        f = check_valid_file(fn)
        if f is not None:
            return f
        # Cached copy failed *check*: discard it and re-download below.
        unlink(fn)

    # Verify the requirement before downloading the file
    requires('urlfetch')
    print >> get_original_stdout(), '\tfetching %s ...' % url
    f = urllib2.urlopen(url, timeout=15)
    try:
        # Stream the body to the cache file in read-sized chunks.
        with open(fn, "wb") as out:
            s = f.read()
            while s:
                out.write(s)
                s = f.read()
    finally:
        f.close()
    f = check_valid_file(fn)
    if f is not None:
        return f
    raise TestFailed('invalid resource "%s"' % fn)
  449. class WarningsRecorder(object):
  450. """Convenience wrapper for the warnings list returned on
  451. entry to the warnings.catch_warnings() context manager.
  452. """
  453. def __init__(self, warnings_list):
  454. self._warnings = warnings_list
  455. self._last = 0
  456. def __getattr__(self, attr):
  457. if len(self._warnings) > self._last:
  458. return getattr(self._warnings[-1], attr)
  459. elif attr in warnings.WarningMessage._WARNING_DETAILS:
  460. return None
  461. raise AttributeError("%r has no attribute %r" % (self, attr))
  462. @property
  463. def warnings(self):
  464. return self._warnings[self._last:]
  465. def reset(self):
  466. self._last = len(self._warnings)
def _filterwarnings(filters, quiet=False):
    """Catch the warnings, then check if all the expected
    warnings have been raised and re-raise unexpected warnings.
    If 'quiet' is True, only re-raise the unexpected warnings.
    """
    # Clear the warning registry of the calling module
    # in order to re-raise the warnings.
    # NOTE(review): depth 2 presumably targets the caller of the
    # check_warnings()/check_py3k_warnings() wrappers that drive this
    # generator - confirm before changing the call structure.
    frame = sys._getframe(2)
    registry = frame.f_globals.get('__warningregistry__')
    if registry:
        registry.clear()
    with warnings.catch_warnings(record=True) as w:
        # Set filter "always" to record all warnings. Because
        # test_warnings swap the module, we need to look up in
        # the sys.modules dictionary.
        sys.modules['warnings'].simplefilter("always")
        yield WarningsRecorder(w)
    # Filter the recorded warnings
    reraise = [warning.message for warning in w]
    missing = []
    for msg, cat in filters:
        seen = False
        for exc in reraise[:]:
            message = str(exc)
            # Filter out the matching messages
            if (re.match(msg, message, re.I) and
                issubclass(exc.__class__, cat)):
                seen = True
                reraise.remove(exc)
        if not seen and not quiet:
            # This filter caught nothing
            missing.append((msg, cat.__name__))
    # Any recorded warning no filter matched is unexpected: re-raise it.
    if reraise:
        raise AssertionError("unhandled warning %r" % reraise[0])
    if missing:
        raise AssertionError("filter (%r, %s) did not catch any warning" %
                             missing[0])
  504. @contextlib.contextmanager
  505. def check_warnings(*filters, **kwargs):
  506. """Context manager to silence warnings.
  507. Accept 2-tuples as positional arguments:
  508. ("message regexp", WarningCategory)
  509. Optional argument:
  510. - if 'quiet' is True, it does not fail if a filter catches nothing
  511. (default True without argument,
  512. default False if some filters are defined)
  513. Without argument, it defaults to:
  514. check_warnings(("", Warning), quiet=True)
  515. """
  516. quiet = kwargs.get('quiet')
  517. if not filters:
  518. filters = (("", Warning),)
  519. # Preserve backward compatibility
  520. if quiet is None:
  521. quiet = True
  522. return _filterwarnings(filters, quiet)
  523. @contextlib.contextmanager
  524. def check_py3k_warnings(*filters, **kwargs):
  525. """Context manager to silence py3k warnings.
  526. Accept 2-tuples as positional arguments:
  527. ("message regexp", WarningCategory)
  528. Optional argument:
  529. - if 'quiet' is True, it does not fail if a filter catches nothing
  530. (default False)
  531. Without argument, it defaults to:
  532. check_py3k_warnings(("", DeprecationWarning), quiet=False)
  533. """
  534. if sys.py3kwarning:
  535. if not filters:
  536. filters = (("", DeprecationWarning),)
  537. else:
  538. # It should not raise any py3k warning
  539. filters = ()
  540. return _filterwarnings(filters, kwargs.get('quiet'))
  541. class CleanImport(object):
  542. """Context manager to force import to return a new module reference.
  543. This is useful for testing module-level behaviours, such as
  544. the emission of a DeprecationWarning on import.
  545. Use like this:
  546. with CleanImport("foo"):
  547. importlib.import_module("foo") # new reference
  548. """
  549. def __init__(self, *module_names):
  550. self.original_modules = sys.modules.copy()
  551. for module_name in module_names:
  552. if module_name in sys.modules:
  553. module = sys.modules[module_name]
  554. # It is possible that module_name is just an alias for
  555. # another module (e.g. stub for modules renamed in 3.x).
  556. # In that case, we also need delete the real module to clear
  557. # the import cache.
  558. if module.__name__ != module_name:
  559. del sys.modules[module.__name__]
  560. del sys.modules[module_name]
  561. def __enter__(self):
  562. return self
  563. def __exit__(self, *ignore_exc):
  564. sys.modules.update(self.original_modules)
  565. class EnvironmentVarGuard(UserDict.DictMixin):
  566. """Class to help protect the environment variable properly. Can be used as
  567. a context manager."""
  568. def __init__(self):
  569. self._environ = os.environ
  570. self._changed = {}
  571. def __getitem__(self, envvar):
  572. return self._environ[envvar]
  573. def __setitem__(self, envvar, value):
  574. # Remember the initial value on the first access
  575. if envvar not in self._changed:
  576. self._changed[envvar] = self._environ.get(envvar)
  577. self._environ[envvar] = value
  578. def __delitem__(self, envvar):
  579. # Remember the initial value on the first access
  580. if envvar not in self._changed:
  581. self._changed[envvar] = self._environ.get(envvar)
  582. if envvar in self._environ:
  583. del self._environ[envvar]
  584. def keys(self):
  585. return self._environ.keys()
  586. def set(self, envvar, value):
  587. self[envvar] = value
  588. def unset(self, envvar):
  589. del self[envvar]
  590. def __enter__(self):
  591. return self
  592. def __exit__(self, *ignore_exc):
  593. for (k, v) in self._changed.items():
  594. if v is None:
  595. if k in self._environ:
  596. del self._environ[k]
  597. else:
  598. self._environ[k] = v
  599. os.environ = self._environ
  600. class DirsOnSysPath(object):
  601. """Context manager to temporarily add directories to sys.path.
  602. This makes a copy of sys.path, appends any directories given
  603. as positional arguments, then reverts sys.path to the copied
  604. settings when the context ends.
  605. Note that *all* sys.path modifications in the body of the
  606. context manager, including replacement of the object,
  607. will be reverted at the end of the block.
  608. """
  609. def __init__(self, *paths):
  610. self.original_value = sys.path[:]
  611. self.original_object = sys.path
  612. sys.path.extend(paths)
  613. def __enter__(self):
  614. return self
  615. def __exit__(self, *ignore_exc):
  616. sys.path = self.original_object
  617. sys.path[:] = self.original_value
  618. class TransientResource(object):
  619. """Raise ResourceDenied if an exception is raised while the context manager
  620. is in effect that matches the specified exception and attributes."""
  621. def __init__(self, exc, **kwargs):
  622. self.exc = exc
  623. self.attrs = kwargs
  624. def __enter__(self):
  625. return self
  626. def __exit__(self, type_=None, value=None, traceback=None):
  627. """If type_ is a subclass of self.exc and value has attributes matching
  628. self.attrs, raise ResourceDenied. Otherwise let the exception
  629. propagate (if any)."""
  630. if type_ is not None and issubclass(self.exc, type_):
  631. for attr, attr_value in self.attrs.iteritems():
  632. if not hasattr(value, attr):
  633. break
  634. if getattr(value, attr) != attr_value:
  635. break
  636. else:
  637. raise ResourceDenied("an optional resource is not available")
@contextlib.contextmanager
def transient_internet(resource_name, timeout=30.0, errnos=()):
    """Return a context manager that raises ResourceDenied when various issues
    with the Internet connection manifest themselves as exceptions."""
    # Errno names resolved through the errno module below; the numeric
    # fallbacks are used when a platform lacks the symbolic name.
    default_errnos = [
        ('ECONNREFUSED', 111),
        ('ECONNRESET', 104),
        ('EHOSTUNREACH', 113),
        ('ENETUNREACH', 101),
        ('ETIMEDOUT', 110),
    ]
    # getaddrinfo() failures, resolved through the socket module below.
    default_gai_errnos = [
        ('EAI_AGAIN', -3),
        ('EAI_FAIL', -4),
        ('EAI_NONAME', -2),
        ('EAI_NODATA', -5),
    ]

    denied = ResourceDenied("Resource '%s' is not available" % resource_name)
    captured_errnos = errnos
    gai_errnos = []
    # An explicit *errnos* argument overrides the defaults entirely.
    if not captured_errnos:
        captured_errnos = [getattr(errno, name, num)
                           for (name, num) in default_errnos]
        gai_errnos = [getattr(socket, name, num)
                      for (name, num) in default_gai_errnos]

    def filter_error(err):
        # Convert timeouts and the captured errnos into ResourceDenied.
        n = getattr(err, 'errno', None)
        if (isinstance(err, socket.timeout) or
            (isinstance(err, socket.gaierror) and n in gai_errnos) or
            n in captured_errnos):
            if not verbose:
                sys.stderr.write(denied.args[0] + "\n")
            raise denied

    old_timeout = socket.getdefaulttimeout()
    try:
        if timeout is not None:
            socket.setdefaulttimeout(timeout)
        yield
    except IOError as err:
        # urllib can wrap original socket errors multiple times (!), we must
        # unwrap to get at the original error.
        while True:
            a = err.args
            if len(a) >= 1 and isinstance(a[0], IOError):
                err = a[0]
            # The error can also be wrapped as args[1]:
            #    except socket.error as msg:
            #        raise IOError('socket error', msg).with_traceback(sys.exc_info()[2])
            elif len(a) >= 2 and isinstance(a[1], IOError):
                err = a[1]
            else:
                break
        filter_error(err)
        raise
    # XXX should we catch generic exceptions and look for their
    # __cause__ or __context__?
    finally:
        # Always restore the process-wide default socket timeout.
        socket.setdefaulttimeout(old_timeout)
  696. @contextlib.contextmanager
  697. def captured_output(stream_name):
  698. """Return a context manager used by captured_stdout and captured_stdin
  699. that temporarily replaces the sys stream *stream_name* with a StringIO."""
  700. import StringIO
  701. orig_stdout = getattr(sys, stream_name)
  702. setattr(sys, stream_name, StringIO.StringIO())
  703. try:
  704. yield getattr(sys, stream_name)
  705. finally:
  706. setattr(sys, stream_name, orig_stdout)
def captured_stdout():
    """Capture the output of sys.stdout:

       with captured_stdout() as s:
           print "hello"
       self.assertEqual(s.getvalue(), "hello")
    """
    return captured_output("stdout")

def captured_stderr():
    """Like captured_stdout(), but replaces sys.stderr."""
    return captured_output("stderr")

def captured_stdin():
    """Like captured_stdout(), but replaces sys.stdin."""
    return captured_output("stdin")
  718. def gc_collect():
  719. """Force as many objects as possible to be collected.
  720. In non-CPython implementations of Python, this is needed because timely
  721. deallocation is not guaranteed by the garbage collector. (Even in CPython
  722. this can be the case in case of reference cycles.) This means that __del__
  723. methods may be called later than expected and weakrefs may remain alive for
  724. longer than expected. This function tries its best to force all garbage
  725. objects to disappear.
  726. """
  727. gc.collect()
  728. if is_jython:
  729. time.sleep(0.1)
  730. gc.collect()
  731. gc.collect()
#=======================================================================
# Decorator for running a function in a different locale, correctly resetting
# it afterwards.

def run_with_locale(catstr, *locales):
    """Decorator: run the wrapped function under the first of *locales*
    that can be set for the locale category named *catstr* (e.g.
    'LC_ALL'), restoring the original locale afterwards."""
    def decorator(func):
        def inner(*args, **kwds):
            try:
                import locale
                category = getattr(locale, catstr)
                orig_locale = locale.setlocale(category)
            except AttributeError:
                # if the test author gives us an invalid category string
                raise
            except:
                # cannot retrieve original locale, so do nothing
                locale = orig_locale = None
            else:
                # Try each candidate until one sticks; when none can be
                # set, the function simply runs under the current locale.
                for loc in locales:
                    try:
                        locale.setlocale(category, loc)
                        break
                    except:
                        pass
            # now run the function, resetting the locale on exceptions
            try:
                return func(*args, **kwds)
            finally:
                if locale and orig_locale:
                    locale.setlocale(category, orig_locale)
        # Python 2 spelling of __name__; preserves the wrapped identity.
        inner.func_name = func.func_name
        inner.__doc__ = func.__doc__
        return inner
    return decorator
  765. #=======================================================================
  766. # Big-memory-test support. Separate from 'resources' because memory use should be configurable.
  767. # Some handy shorthands. Note that these are used for byte-limits as well
  768. # as size-limits, in the various bigmem tests
  769. _1M = 1024*1024
  770. _1G = 1024 * _1M
  771. _2G = 2 * _1G
  772. _4G = 4 * _1G
  773. MAX_Py_ssize_t = sys.maxsize
  774. def set_memlimit(limit):
  775. global max_memuse
  776. global real_max_memuse
  777. sizes = {
  778. 'k': 1024,
  779. 'm': _1M,
  780. 'g': _1G,
  781. 't': 1024*_1G,
  782. }
  783. m = re.match(r'(\d+(\.\d+)?) (K|M|G|T)b?$', limit,
  784. re.IGNORECASE | re.VERBOSE)
  785. if m is None:
  786. raise ValueError('Invalid memory limit %r' % (limit,))
  787. memlimit = int(float(m.group(1)) * sizes[m.group(3).lower()])
  788. real_max_memuse = memlimit
  789. if memlimit > MAX_Py_ssize_t:
  790. memlimit = MAX_Py_ssize_t
  791. if memlimit < _2G - 1:
  792. raise ValueError('Memory limit %r too low to be useful' % (limit,))
  793. max_memuse = memlimit
  794. def bigmemtest(minsize, memuse, overhead=5*_1M):
  795. """Decorator for bigmem tests.
  796. 'minsize' is the minimum useful size for the test (in arbitrary,
  797. test-interpreted units.) 'memuse' is the number of 'bytes per size' for
  798. the test, or a good estimate of it. 'overhead' specifies fixed overhead,
  799. independent of the testsize, and defaults to 5Mb.
  800. The decorator tries to guess a good value for 'size' and passes it to
  801. the decorated test function. If minsize * memuse is more than the
  802. allowed memory use (as defined by max_memuse), the test is skipped.
  803. Otherwise, minsize is adjusted upward to use up to max_memuse.
  804. """
  805. def decorator(f):
  806. def wrapper(self):
  807. if not max_memuse:
  808. # If max_memuse is 0 (the default),
  809. # we still want to run the tests with size set to a few kb,
  810. # to make sure they work. We still want to avoid using
  811. # too much memory, though, but we do that noisily.
  812. maxsize = 5147
  813. self.assertFalse(maxsize * memuse + overhead > 20 * _1M)
  814. else:
  815. maxsize = int((max_memuse - overhead) / memuse)
  816. if maxsize < minsize:
  817. # Really ought to print 'test skipped' or something
  818. if verbose:
  819. sys.stderr.write("Skipping %s because of memory "
  820. "constraint\n" % (f.__name__,))
  821. return
  822. # Try to keep some breathing room in memory use
  823. maxsize = max(maxsize - 50 * _1M, minsize)
  824. return f(self, maxsize)
  825. wrapper.minsize = minsize
  826. wrapper.memuse = memuse
  827. wrapper.overhead = overhead
  828. return wrapper
  829. return decorator
  830. def precisionbigmemtest(size, memuse, overhead=5*_1M):
  831. def decorator(f):
  832. def wrapper(self):
  833. if not real_max_memuse:
  834. maxsize = 5147
  835. else:
  836. maxsize = size
  837. if real_max_memuse and real_max_memuse < maxsize * memuse:
  838. if verbose:
  839. sys.stderr.write("Skipping %s because of memory "
  840. "constraint\n" % (f.__name__,))
  841. return
  842. return f(self, maxsize)
  843. wrapper.size = size
  844. wrapper.memuse = memuse
  845. wrapper.overhead = overhead
  846. return wrapper
  847. return decorator
  848. def bigaddrspacetest(f):
  849. """Decorator for tests that fill the address space."""
  850. def wrapper(self):
  851. if max_memuse < MAX_Py_ssize_t:
  852. if verbose:
  853. sys.stderr.write("Skipping %s because of memory "
  854. "constraint\n" % (f.__name__,))
  855. else:
  856. return f(self)
  857. return wrapper
  858. #=======================================================================
  859. # unittest integration.
  860. class BasicTestRunner:
  861. def run(self, test):
  862. result = unittest.TestResult()
  863. test(result)
  864. return result
  865. def _id(obj):
  866. return obj
  867. def requires_resource(resource):
  868. if is_resource_enabled(resource):
  869. return _id
  870. else:
  871. return unittest.skip("resource {0!r} is not enabled".format(resource))
def cpython_only(test):
    """
    Decorator for tests only applicable on CPython.

    Equivalent to impl_detail(cpython=True): the test is skipped on any
    other implementation.
    """
    return impl_detail(cpython=True)(test)
  877. def impl_detail(msg=None, **guards):
  878. if check_impl_detail(**guards):
  879. return _id
  880. if msg is None:
  881. guardnames, default = _parse_guards(guards)
  882. if default:
  883. msg = "implementation detail not available on {0}"
  884. else:
  885. msg = "implementation detail specific to {0}"
  886. guardnames = sorted(guardnames.keys())
  887. msg = msg.format(' or '.join(guardnames))
  888. return unittest.skip(msg)
  889. def _parse_guards(guards):
  890. # Returns a tuple ({platform_name: run_me}, default_value)
  891. if not guards:
  892. return ({'cpython': True}, False)
  893. is_true = guards.values()[0]
  894. assert guards.values() == [is_true] * len(guards) # all True or all False
  895. return (guards, not is_true)
  896. # Use the following check to guard CPython's implementation-specific tests --
  897. # or to run them only on the implementation(s) guarded by the arguments.
  898. def check_impl_detail(**guards):
  899. """This function returns True or False depending on the host platform.
  900. Examples:
  901. if check_impl_detail(): # only on CPython (default)
  902. if check_impl_detail(jython=True): # only on Jython
  903. if check_impl_detail(cpython=False): # everywhere except on CPython
  904. """
  905. guards, default = _parse_guards(guards)
  906. return guards.get(platform.python_implementation().lower(), default)
  907. # ----------------------------------
  908. # PyPy extension: you can run::
  909. # python ..../test_foo.py --pdb
  910. # to get a pdb prompt in case of exceptions
  911. ResultClass = unittest.TextTestRunner.resultclass
  912. class TestResultWithPdb(ResultClass):
  913. def addError(self, testcase, exc_info):
  914. ResultClass.addError(self, testcase, exc_info)
  915. if '--pdb' in sys.argv:
  916. import pdb, traceback
  917. traceback.print_tb(exc_info[2])
  918. pdb.post_mortem(exc_info[2])
  919. # ----------------------------------
  920. def _run_suite(suite):
  921. """Run tests from a unittest.TestSuite-derived class."""
  922. if verbose:
  923. runner = unittest.TextTestRunner(sys.stdout, verbosity=2,
  924. resultclass=TestResultWithPdb)
  925. else:
  926. runner = BasicTestRunner()
  927. result = runner.run(suite)
  928. if not result.wasSuccessful():
  929. if len(result.errors) == 1 and not result.failures:
  930. err = result.errors[0][1]
  931. elif len(result.failures) == 1 and not result.errors:
  932. err = result.failures[0][1]
  933. else:
  934. err = "multiple errors occurred"
  935. if not verbose:
  936. err += "; run in verbose mode for details"
  937. raise TestFailed(err)
  938. # ----------------------------------
  939. # PyPy extension: you can run::
  940. # python ..../test_foo.py --filter bar
  941. # to run only the test cases whose name contains bar
  942. def filter_maybe(suite):
  943. try:
  944. i = sys.argv.index('--filter')
  945. filter = sys.argv[i+1]
  946. except (ValueError, IndexError):
  947. return suite
  948. tests = []
  949. for test in linearize_suite(suite):
  950. if filter in test._testMethodName:
  951. tests.append(test)
  952. return unittest.TestSuite(tests)
  953. def linearize_suite(suite_or_test):
  954. try:
  955. it = iter(suite_or_test)
  956. except TypeError:
  957. yield suite_or_test
  958. return
  959. for subsuite in it:
  960. for item in linearize_suite(subsuite):
  961. yield item
  962. # ----------------------------------
  963. def run_unittest(*classes):
  964. """Run tests from unittest.TestCase-derived classes."""
  965. valid_types = (unittest.TestSuite, unittest.TestCase)
  966. suite = unittest.TestSuite()
  967. for cls in classes:
  968. if isinstance(cls, str):
  969. if cls in sys.modules:
  970. suite.addTest(unittest.findTestCases(sys.modules[cls]))
  971. else:
  972. raise ValueError("str arguments must be keys in sys.modules")
  973. elif isinstance(cls, valid_types):
  974. suite.addTest(cls)
  975. else:
  976. suite.addTest(unittest.makeSuite(cls))
  977. suite = filter_maybe(suite)
  978. _run_suite(suite)
  979. #=======================================================================
  980. # doctest driver.
def run_doctest(module, verbosity=None):
    """Run doctest on the given module.  Return (#failures, #tests).

    If optional argument verbosity is not specified (or is None), pass
    test_support's belief about verbosity on to doctest.  Else doctest's
    usual behavior is used (it searches sys.argv for -v).
    """
    import doctest
    if verbosity is None:
        # Follow this module's global 'verbose' flag.
        verbosity = verbose
    else:
        # Any explicit value means "let doctest decide for itself"
        # (testmod with verbose=None inspects sys.argv for -v).
        verbosity = None
    # Direct doctest output (normally just errors) to real stdout; doctest
    # output shouldn't be compared by regrtest.
    save_stdout = sys.stdout
    sys.stdout = get_original_stdout()
    try:
        f, t = doctest.testmod(module, verbose=verbosity)
        if f:
            raise TestFailed("%d of %d doctests failed" % (f, t))
    finally:
        # Always restore the captured stdout, even when doctest raised.
        sys.stdout = save_stdout
    if verbose:
        print 'doctest (%s) ... %d tests with zero failures' % (module.__name__, t)
    return f, t
  1005. #=======================================================================
  1006. # Threading support to prevent reporting refleaks when running regrtest.py -R
  1007. # NOTE: we use thread._count() rather than threading.enumerate() (or the
  1008. # moral equivalent thereof) because a threading.Thread object is still alive
  1009. # until its __bootstrap() method has returned, even after it has been
  1010. # unregistered from the threading module.
  1011. # thread._count(), on the other hand, only gets decremented *after* the
  1012. # __bootstrap() method has returned, which gives us reliable reference counts
  1013. # at the end of a test run.
  1014. def threading_setup():
  1015. if thread:
  1016. return thread._count(),
  1017. else:
  1018. return 1,
  1019. def threading_cleanup(nb_threads):
  1020. if not thread:
  1021. return
  1022. _MAX_COUNT = 10
  1023. for count in range(_MAX_COUNT):
  1024. n = thread._count()
  1025. if n == nb_threads:
  1026. break
  1027. time.sleep(0.1)
  1028. # XXX print a warning in case of failure?
  1029. def reap_threads(func):
  1030. """Use this function when threads are being used. This will
  1031. ensure that the threads are cleaned up even when the test fails.
  1032. If threading is unavailable this function does nothing.
  1033. """
  1034. if not thread:
  1035. return func
  1036. @functools.wraps(func)
  1037. def decorator(*args):
  1038. key = threading_setup()
  1039. try:
  1040. return func(*args)
  1041. finally:
  1042. threading_cleanup(*key)
  1043. return decorator
  1044. def reap_children():
  1045. """Use this function at the end of test_main() whenever sub-processes
  1046. are started. This will help ensure that no extra children (zombies)
  1047. stick around to hog resources and create problems when looking
  1048. for refleaks.
  1049. """
  1050. # Reap all our dead child processes so we don't leave zombies around.
  1051. # These hog resources and might be causing some of the buildbots to die.
  1052. if hasattr(os, 'waitpid'):
  1053. any_process = -1
  1054. while True:
  1055. try:
  1056. # This will raise an exception on Windows. That's ok.
  1057. pid, status = os.waitpid(any_process, os.WNOHANG)
  1058. if pid == 0:
  1059. break
  1060. except:
  1061. break
  1062. def py3k_bytes(b):
  1063. """Emulate the py3k bytes() constructor.
  1064. NOTE: This is only a best effort function.
  1065. """
  1066. try:
  1067. # memoryview?
  1068. return b.tobytes()
  1069. except AttributeError:
  1070. try:
  1071. # iterable of ints?
  1072. return b"".join(chr(x) for x in b)
  1073. except TypeError:
  1074. return bytes(b)
  1075. def args_from_interpreter_flags():
  1076. """Return a list of command-line arguments reproducing the current
  1077. settings in sys.flags."""
  1078. flag_opt_map = {
  1079. 'bytes_warning': 'b',
  1080. 'dont_write_bytecode': 'B',
  1081. 'ignore_environment': 'E',
  1082. 'no_user_site': 's',
  1083. 'no_site': 'S',
  1084. 'optimize': 'O',
  1085. 'py3k_warning': '3',
  1086. 'verbose': 'v',
  1087. }
  1088. args = []
  1089. for flag, opt in flag_opt_map.items():
  1090. v = getattr(sys.flags, flag)
  1091. if v > 0:
  1092. args.append('-' + opt * v)
  1093. return args
  1094. def strip_python_stderr(stderr):
  1095. """Strip the stderr of a Python process from potential debug output
  1096. emitted by the interpreter.
  1097. This will typically be run on the result of the communicate() method
  1098. of a subprocess.Popen object.
  1099. """
  1100. stderr = re.sub(br"\[\d+ refs\]\r?\n?$", b"", stderr).strip()
  1101. return stderr