/Lib/test/test_logging.py

http://unladen-swallow.googlecode.com/ · Python

#!/usr/bin/env python
#
# Copyright 2001-2004 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Vinay Sajip
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

"""Test harness for the logging module. Run all tests.

Copyright (C) 2001-2002 Vinay Sajip. All Rights Reserved.
"""

import logging
import logging.handlers
import logging.config

import codecs
import copy
import cPickle
import cStringIO
import gc
import os
import re
import select
import socket
from SocketServer import ThreadingTCPServer, StreamRequestHandler
import string
import struct
import sys
import tempfile
from test.test_support import captured_stdout, run_with_locale, run_unittest
import textwrap
import threading
import time
import types
import unittest
import weakref

class BaseTest(unittest.TestCase):

    """Base class for logging tests."""

    log_format = "%(name)s -> %(levelname)s: %(message)s"
    expected_log_pat = r"^([\w.]+) -> ([\w]+): ([\d]+)$"
    message_num = 0

    def setUp(self):
        """Setup the default logging stream to an internal StringIO instance,
        so that we can examine log output as we want."""
        logger_dict = logging.getLogger().manager.loggerDict
        logging._acquireLock()
        try:
            self.saved_handlers = logging._handlers.copy()
            self.saved_handler_list = logging._handlerList[:]
            self.saved_loggers = logger_dict.copy()
            self.saved_level_names = logging._levelNames.copy()
        finally:
            logging._releaseLock()

        self.root_logger = logging.getLogger("")
        self.original_logging_level = self.root_logger.getEffectiveLevel()

        self.stream = cStringIO.StringIO()
        self.root_logger.setLevel(logging.DEBUG)
        self.root_hdlr = logging.StreamHandler(self.stream)
        self.root_formatter = logging.Formatter(self.log_format)
        self.root_hdlr.setFormatter(self.root_formatter)
        self.root_logger.addHandler(self.root_hdlr)

    def tearDown(self):
        """Remove our logging stream, and restore the original logging
        level."""
        self.stream.close()
        self.root_logger.removeHandler(self.root_hdlr)
        self.root_logger.setLevel(self.original_logging_level)
        logging._acquireLock()
        try:
            logging._levelNames.clear()
            logging._levelNames.update(self.saved_level_names)
            logging._handlers.clear()
            logging._handlers.update(self.saved_handlers)
            logging._handlerList[:] = self.saved_handler_list
            loggerDict = logging.getLogger().manager.loggerDict
            loggerDict.clear()
            loggerDict.update(self.saved_loggers)
        finally:
            logging._releaseLock()

    def assert_log_lines(self, expected_values, stream=None):
        """Match the collected log lines against the regular expression
        self.expected_log_pat, and compare the extracted group values to
        the expected_values list of tuples."""
        stream = stream or self.stream
        pat = re.compile(self.expected_log_pat)
        try:
            stream.reset()
            actual_lines = stream.readlines()
        except AttributeError:
            # StringIO.StringIO lacks a reset() method.
            actual_lines = stream.getvalue().splitlines()
        self.assertEquals(len(actual_lines), len(expected_values))
        for actual, expected in zip(actual_lines, expected_values):
            match = pat.search(actual)
            if not match:
                self.fail("Log line does not match expected pattern:\n" +
                          actual)
            self.assertEquals(tuple(match.groups()), expected)
        s = stream.read()
        if s:
            self.fail("Remaining output at end of log stream:\n" + s)

    def next_message(self):
        """Generate a message consisting solely of an auto-incrementing
        integer."""
        self.message_num += 1
        return "%d" % self.message_num

class BuiltinLevelsTest(BaseTest):

    """Test builtin levels and their inheritance."""

    def test_flat(self):
        # Logging levels in a flat logger namespace.
        m = self.next_message

        ERR = logging.getLogger("ERR")
        ERR.setLevel(logging.ERROR)
        INF = logging.getLogger("INF")
        INF.setLevel(logging.INFO)
        DEB = logging.getLogger("DEB")
        DEB.setLevel(logging.DEBUG)

        # These should log.
        ERR.log(logging.CRITICAL, m())
        ERR.error(m())

        INF.log(logging.CRITICAL, m())
        INF.error(m())
        INF.warn(m())
        INF.info(m())

        DEB.log(logging.CRITICAL, m())
        DEB.error(m())
        DEB.warn(m())
        DEB.info(m())
        DEB.debug(m())

        # These should not log.
        ERR.warn(m())
        ERR.info(m())
        ERR.debug(m())

        INF.debug(m())

        self.assert_log_lines([
            ('ERR', 'CRITICAL', '1'),
            ('ERR', 'ERROR', '2'),
            ('INF', 'CRITICAL', '3'),
            ('INF', 'ERROR', '4'),
            ('INF', 'WARNING', '5'),
            ('INF', 'INFO', '6'),
            ('DEB', 'CRITICAL', '7'),
            ('DEB', 'ERROR', '8'),
            ('DEB', 'WARNING', '9'),
            ('DEB', 'INFO', '10'),
            ('DEB', 'DEBUG', '11'),
        ])

    def test_nested_explicit(self):
        # Logging levels in a nested namespace, all explicitly set.
        m = self.next_message

        INF = logging.getLogger("INF")
        INF.setLevel(logging.INFO)
        INF_ERR = logging.getLogger("INF.ERR")
        INF_ERR.setLevel(logging.ERROR)

        # These should log.
        INF_ERR.log(logging.CRITICAL, m())
        INF_ERR.error(m())

        # These should not log.
        INF_ERR.warn(m())
        INF_ERR.info(m())
        INF_ERR.debug(m())

        self.assert_log_lines([
            ('INF.ERR', 'CRITICAL', '1'),
            ('INF.ERR', 'ERROR', '2'),
        ])

    def test_nested_inherited(self):
        # Logging levels in a nested namespace, inherited from parent loggers.
        m = self.next_message

        INF = logging.getLogger("INF")
        INF.setLevel(logging.INFO)
        INF_ERR = logging.getLogger("INF.ERR")
        INF_ERR.setLevel(logging.ERROR)
        INF_UNDEF = logging.getLogger("INF.UNDEF")
        INF_ERR_UNDEF = logging.getLogger("INF.ERR.UNDEF")
        UNDEF = logging.getLogger("UNDEF")

        # These should log.
        INF_UNDEF.log(logging.CRITICAL, m())
        INF_UNDEF.error(m())
        INF_UNDEF.warn(m())
        INF_UNDEF.info(m())
        INF_ERR_UNDEF.log(logging.CRITICAL, m())
        INF_ERR_UNDEF.error(m())

        # These should not log.
        INF_UNDEF.debug(m())
        INF_ERR_UNDEF.warn(m())
        INF_ERR_UNDEF.info(m())
        INF_ERR_UNDEF.debug(m())

        self.assert_log_lines([
            ('INF.UNDEF', 'CRITICAL', '1'),
            ('INF.UNDEF', 'ERROR', '2'),
            ('INF.UNDEF', 'WARNING', '3'),
            ('INF.UNDEF', 'INFO', '4'),
            ('INF.ERR.UNDEF', 'CRITICAL', '5'),
            ('INF.ERR.UNDEF', 'ERROR', '6'),
        ])

    def test_nested_with_virtual_parent(self):
        # Logging levels when some parent does not exist yet.
        m = self.next_message

        INF = logging.getLogger("INF")
        GRANDCHILD = logging.getLogger("INF.BADPARENT.UNDEF")
        CHILD = logging.getLogger("INF.BADPARENT")
        INF.setLevel(logging.INFO)

        # These should log.
        GRANDCHILD.log(logging.FATAL, m())
        GRANDCHILD.info(m())
        CHILD.log(logging.FATAL, m())
        CHILD.info(m())

        # These should not log.
        GRANDCHILD.debug(m())
        CHILD.debug(m())

        self.assert_log_lines([
            ('INF.BADPARENT.UNDEF', 'CRITICAL', '1'),
            ('INF.BADPARENT.UNDEF', 'INFO', '2'),
            ('INF.BADPARENT', 'CRITICAL', '3'),
            ('INF.BADPARENT', 'INFO', '4'),
        ])
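
# What test_nested_inherited relies on: a logger left at NOTSET defers to the
# nearest ancestor with an explicit level (Logger.getEffectiveLevel() walks up
# the dotted-name hierarchy).  A minimal sketch of that behaviour:
#
#     logging.getLogger("INF").setLevel(logging.INFO)
#     logging.getLogger("INF.UNDEF").getEffectiveLevel()   # == logging.INFO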

class BasicFilterTest(BaseTest):

    """Test the bundled Filter class."""

    def test_filter(self):
        # Only messages satisfying the specified criteria pass through the
        # filter.
        filter_ = logging.Filter("spam.eggs")
        handler = self.root_logger.handlers[0]
        try:
            handler.addFilter(filter_)
            spam = logging.getLogger("spam")
            spam_eggs = logging.getLogger("spam.eggs")
            spam_eggs_fish = logging.getLogger("spam.eggs.fish")
            spam_bakedbeans = logging.getLogger("spam.bakedbeans")

            spam.info(self.next_message())
            spam_eggs.info(self.next_message())  # Good.
            spam_eggs_fish.info(self.next_message())  # Good.
            spam_bakedbeans.info(self.next_message())

            self.assert_log_lines([
                ('spam.eggs', 'INFO', '2'),
                ('spam.eggs.fish', 'INFO', '3'),
            ])
        finally:
            handler.removeFilter(filter_)

#
# First, we define our levels. There can be as many as you want - the only
# limitations are that they should be integers, the lowest should be > 0 and
# larger values mean less information being logged. If you need specific
# level values which do not fit into these limitations, you can use a
# mapping dictionary to convert between your application levels and the
# logging system.
#
SILENT = 120
TACITURN = 119
TERSE = 118
EFFUSIVE = 117
SOCIABLE = 116
VERBOSE = 115
TALKATIVE = 114
GARRULOUS = 113
CHATTERBOX = 112
BORING = 111

LEVEL_RANGE = range(BORING, SILENT + 1)

#
# Next, we define names for our levels. You don't need to do this - in which
# case the system will use "Level n" to denote the text for the level.
#
my_logging_levels = {
    SILENT: 'Silent',
    TACITURN: 'Taciturn',
    TERSE: 'Terse',
    EFFUSIVE: 'Effusive',
    SOCIABLE: 'Sociable',
    VERBOSE: 'Verbose',
    TALKATIVE: 'Talkative',
    GARRULOUS: 'Garrulous',
    CHATTERBOX: 'Chatterbox',
    BORING: 'Boring',
}
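
# A minimal usage sketch (not exercised directly at module level): once a
# level number is registered with logging.addLevelName(), records logged at
# that level carry the custom name in %(levelname)s.  The setUp() of
# CustomLevelsAndFiltersTest below registers every entry of my_logging_levels
# this way:
#
#     logging.addLevelName(TERSE, 'Terse')
#     logging.getLogger("demo").log(TERSE, "hello")   # levelname is "Terse"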

class GarrulousFilter(logging.Filter):

    """A filter which blocks garrulous messages."""

    def filter(self, record):
        return record.levelno != GARRULOUS

class VerySpecificFilter(logging.Filter):

    """A filter which blocks sociable and taciturn messages."""

    def filter(self, record):
        return record.levelno not in [SOCIABLE, TACITURN]

class CustomLevelsAndFiltersTest(BaseTest):

    """Test various filtering possibilities with custom logging levels."""

    # Skip the logger name group.
    expected_log_pat = r"^[\w.]+ -> ([\w]+): ([\d]+)$"

    def setUp(self):
        BaseTest.setUp(self)
        for k, v in my_logging_levels.items():
            logging.addLevelName(k, v)

    def log_at_all_levels(self, logger):
        for lvl in LEVEL_RANGE:
            logger.log(lvl, self.next_message())

    def test_logger_filter(self):
        # Filter at logger level.
        self.root_logger.setLevel(VERBOSE)
        # Levels >= 'Verbose' are good.
        self.log_at_all_levels(self.root_logger)
        self.assert_log_lines([
            ('Verbose', '5'),
            ('Sociable', '6'),
            ('Effusive', '7'),
            ('Terse', '8'),
            ('Taciturn', '9'),
            ('Silent', '10'),
        ])

    def test_handler_filter(self):
        # Filter at handler level.
        self.root_logger.handlers[0].setLevel(SOCIABLE)
        try:
            # Levels >= 'Sociable' are good.
            self.log_at_all_levels(self.root_logger)
            self.assert_log_lines([
                ('Sociable', '6'),
                ('Effusive', '7'),
                ('Terse', '8'),
                ('Taciturn', '9'),
                ('Silent', '10'),
            ])
        finally:
            self.root_logger.handlers[0].setLevel(logging.NOTSET)

    def test_specific_filters(self):
        # Set a specific filter object on the handler, and then add another
        # filter object on the logger itself.
        handler = self.root_logger.handlers[0]
        specific_filter = None
        garr = GarrulousFilter()
        handler.addFilter(garr)
        try:
            self.log_at_all_levels(self.root_logger)
            first_lines = [
                # Notice how 'Garrulous' is missing
                ('Boring', '1'),
                ('Chatterbox', '2'),
                ('Talkative', '4'),
                ('Verbose', '5'),
                ('Sociable', '6'),
                ('Effusive', '7'),
                ('Terse', '8'),
                ('Taciturn', '9'),
                ('Silent', '10'),
            ]
            self.assert_log_lines(first_lines)

            specific_filter = VerySpecificFilter()
            self.root_logger.addFilter(specific_filter)
            self.log_at_all_levels(self.root_logger)
            self.assert_log_lines(first_lines + [
                # Not only 'Garrulous' is still missing, but also 'Sociable'
                # and 'Taciturn'
                ('Boring', '11'),
                ('Chatterbox', '12'),
                ('Talkative', '14'),
                ('Verbose', '15'),
                ('Effusive', '17'),
                ('Terse', '18'),
                ('Silent', '20'),
            ])
        finally:
            if specific_filter:
                self.root_logger.removeFilter(specific_filter)
            handler.removeFilter(garr)

class MemoryHandlerTest(BaseTest):

    """Tests for the MemoryHandler."""

    # Do not bother with a logger name group.
    expected_log_pat = r"^[\w.]+ -> ([\w]+): ([\d]+)$"

    def setUp(self):
        BaseTest.setUp(self)
        self.mem_hdlr = logging.handlers.MemoryHandler(10, logging.WARNING,
                                                       self.root_hdlr)
        self.mem_logger = logging.getLogger('mem')
        self.mem_logger.propagate = 0
        self.mem_logger.addHandler(self.mem_hdlr)

    def tearDown(self):
        self.mem_hdlr.close()
        BaseTest.tearDown(self)

    def test_flush(self):
        # The memory handler flushes to its target handler based on specific
        # criteria (message count and message level).
        self.mem_logger.debug(self.next_message())
        self.assert_log_lines([])
        self.mem_logger.info(self.next_message())
        self.assert_log_lines([])
        # This will flush because the level is >= logging.WARNING
        self.mem_logger.warn(self.next_message())
        lines = [
            ('DEBUG', '1'),
            ('INFO', '2'),
            ('WARNING', '3'),
        ]
        self.assert_log_lines(lines)
        for n in (4, 14):
            for i in range(9):
                self.mem_logger.debug(self.next_message())
            self.assert_log_lines(lines)
            # This will flush because it's the 10th message since the last
            # flush.
            self.mem_logger.debug(self.next_message())
            lines = lines + [('DEBUG', str(i)) for i in range(n, n + 10)]
            self.assert_log_lines(lines)

        self.mem_logger.debug(self.next_message())
        self.assert_log_lines(lines)
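
# For context: MemoryHandler(capacity, flushLevel, target) buffers records and
# flushes them to `target` when either `capacity` records have accumulated or
# a record at `flushLevel` or above arrives -- the two criteria test_flush
# exercises above.  A minimal setup sketch:
#
#     target = logging.StreamHandler()
#     mem = logging.handlers.MemoryHandler(10, logging.WARNING, target)
#     logging.getLogger("buffered").addHandler(mem)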

class ExceptionFormatter(logging.Formatter):

    """A special exception formatter."""

    def formatException(self, ei):
        return "Got a [%s]" % ei[0].__name__

class ConfigFileTest(BaseTest):

    """Reading logging config from a .ini-style config file."""

    expected_log_pat = r"^([\w]+) \+\+ ([\w]+)$"

    # config0 is a standard configuration.
    config0 = """
    [loggers]
    keys=root

    [handlers]
    keys=hand1

    [formatters]
    keys=form1

    [logger_root]
    level=WARNING
    handlers=hand1

    [handler_hand1]
    class=StreamHandler
    level=NOTSET
    formatter=form1
    args=(sys.stdout,)

    [formatter_form1]
    format=%(levelname)s ++ %(message)s
    datefmt=
    """

    # config1 adds a little to the standard configuration.
    config1 = """
    [loggers]
    keys=root,parser

    [handlers]
    keys=hand1

    [formatters]
    keys=form1

    [logger_root]
    level=WARNING
    handlers=

    [logger_parser]
    level=DEBUG
    handlers=hand1
    propagate=1
    qualname=compiler.parser

    [handler_hand1]
    class=StreamHandler
    level=NOTSET
    formatter=form1
    args=(sys.stdout,)

    [formatter_form1]
    format=%(levelname)s ++ %(message)s
    datefmt=
    """

    # config2 has a subtle configuration error that should be reported
    config2 = config1.replace("sys.stdout", "sys.stbout")

    # config3 has a less subtle configuration error
    config3 = config1.replace("formatter=form1", "formatter=misspelled_name")

    # config4 specifies a custom formatter class to be loaded
    config4 = """
    [loggers]
    keys=root

    [handlers]
    keys=hand1

    [formatters]
    keys=form1

    [logger_root]
    level=NOTSET
    handlers=hand1

    [handler_hand1]
    class=StreamHandler
    level=NOTSET
    formatter=form1
    args=(sys.stdout,)

    [formatter_form1]
    class=""" + __name__ + """.ExceptionFormatter
    format=%(levelname)s:%(name)s:%(message)s
    datefmt=
    """

    # config5 specifies a custom handler class to be loaded
    config5 = config1.replace('class=StreamHandler', 'class=logging.StreamHandler')

    # config6 uses ', ' delimiters in the handlers and formatters sections
    config6 = """
    [loggers]
    keys=root,parser

    [handlers]
    keys=hand1, hand2

    [formatters]
    keys=form1, form2

    [logger_root]
    level=WARNING
    handlers=

    [logger_parser]
    level=DEBUG
    handlers=hand1
    propagate=1
    qualname=compiler.parser

    [handler_hand1]
    class=StreamHandler
    level=NOTSET
    formatter=form1
    args=(sys.stdout,)

    [handler_hand2]
    class=StreamHandler
    level=NOTSET
    formatter=form1
    args=(sys.stderr,)

    [formatter_form1]
    format=%(levelname)s ++ %(message)s
    datefmt=

    [formatter_form2]
    format=%(message)s
    datefmt=
    """

    def apply_config(self, conf):
        try:
            fn = tempfile.mktemp(".ini")
            f = open(fn, "w")
            f.write(textwrap.dedent(conf))
            f.close()
            logging.config.fileConfig(fn)
        finally:
            os.remove(fn)

    def test_config0_ok(self):
        # A simple config file which overrides the default settings.
        with captured_stdout() as output:
            self.apply_config(self.config0)
            logger = logging.getLogger()
            # Won't output anything
            logger.info(self.next_message())
            # Outputs a message
            logger.error(self.next_message())
            self.assert_log_lines([
                ('ERROR', '2'),
            ], stream=output)
        # Original logger output is empty.
        self.assert_log_lines([])

    def test_config1_ok(self, config=config1):
        # A config file defining a sub-parser as well.
        with captured_stdout() as output:
            self.apply_config(config)
            logger = logging.getLogger("compiler.parser")
            # Both will output a message
            logger.info(self.next_message())
            logger.error(self.next_message())
            self.assert_log_lines([
                ('INFO', '1'),
                ('ERROR', '2'),
            ], stream=output)
        # Original logger output is empty.
        self.assert_log_lines([])

    def test_config2_failure(self):
        # A simple config file which overrides the default settings.
        self.assertRaises(StandardError, self.apply_config, self.config2)

    def test_config3_failure(self):
        # A simple config file which overrides the default settings.
        self.assertRaises(StandardError, self.apply_config, self.config3)

    def test_config4_ok(self):
        # A config file specifying a custom formatter class.
        with captured_stdout() as output:
            self.apply_config(self.config4)
            logger = logging.getLogger()
            try:
                raise RuntimeError()
            except RuntimeError:
                logging.exception("just testing")
            sys.stdout.seek(0)
            self.assertEquals(output.getvalue(),
                "ERROR:root:just testing\nGot a [RuntimeError]\n")
            # Original logger output is empty
            self.assert_log_lines([])

    def test_config5_ok(self):
        self.test_config1_ok(config=self.config5)

    def test_config6_ok(self):
        self.test_config1_ok(config=self.config6)

class LogRecordStreamHandler(StreamRequestHandler):

    """Handler for a streaming logging request. It saves the log message in the
    TCP server's 'log_output' attribute."""

    TCP_LOG_END = "!!!END!!!"

    def handle(self):
        """Handle multiple requests - each expected to be of 4-byte length,
        followed by the LogRecord in pickle format. Logs the record
        according to whatever policy is configured locally."""
        while True:
            chunk = self.connection.recv(4)
            if len(chunk) < 4:
                break
            slen = struct.unpack(">L", chunk)[0]
            chunk = self.connection.recv(slen)
            while len(chunk) < slen:
                chunk = chunk + self.connection.recv(slen - len(chunk))
            obj = self.unpickle(chunk)
            record = logging.makeLogRecord(obj)
            self.handle_log_record(record)

    def unpickle(self, data):
        return cPickle.loads(data)

    def handle_log_record(self, record):
        # If the end-of-messages sentinel is seen, tell the server to
        # terminate.
        if self.TCP_LOG_END in record.msg:
            self.server.abort = 1
            return
        self.server.log_output += record.msg + "\n"
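
# The framing expected by handle() above is what logging.handlers.SocketHandler
# emits on the wire: a 4-byte big-endian length prefix followed by the pickled
# LogRecord attribute dict.  Roughly, on the sending side (sketch only):
#
#     data = cPickle.dumps(record.__dict__, 1)
#     frame = struct.pack(">L", len(data)) + data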

class LogRecordSocketReceiver(ThreadingTCPServer):

    """A simple-minded TCP socket-based logging receiver suitable for test
    purposes."""

    allow_reuse_address = 1
    log_output = ""

    def __init__(self, host='localhost',
                 port=logging.handlers.DEFAULT_TCP_LOGGING_PORT,
                 handler=LogRecordStreamHandler):
        ThreadingTCPServer.__init__(self, (host, port), handler)
        self.abort = False
        self.timeout = 0.1
        self.finished = threading.Event()

    def serve_until_stopped(self):
        while not self.abort:
            rd, wr, ex = select.select([self.socket.fileno()], [], [],
                                       self.timeout)
            if rd:
                self.handle_request()
        # Notify the main thread that we're about to exit
        self.finished.set()
        # close the listen socket
        self.server_close()

class SocketHandlerTest(BaseTest):

    """Test for SocketHandler objects."""

    def setUp(self):
        """Set up a TCP server to receive log messages, and a SocketHandler
        pointing to that server's address and port."""
        BaseTest.setUp(self)
        self.tcpserver = LogRecordSocketReceiver(port=0)
        self.port = self.tcpserver.socket.getsockname()[1]
        self.threads = [
            threading.Thread(target=self.tcpserver.serve_until_stopped)]
        for thread in self.threads:
            thread.start()

        self.sock_hdlr = logging.handlers.SocketHandler('localhost', self.port)
        self.sock_hdlr.setFormatter(self.root_formatter)
        self.root_logger.removeHandler(self.root_logger.handlers[0])
        self.root_logger.addHandler(self.sock_hdlr)

    def tearDown(self):
        """Shutdown the TCP server."""
        try:
            self.tcpserver.abort = True
            del self.tcpserver
            self.root_logger.removeHandler(self.sock_hdlr)
            self.sock_hdlr.close()
            for thread in self.threads:
                thread.join(2.0)
        finally:
            BaseTest.tearDown(self)

    def get_output(self):
        """Get the log output as received by the TCP server."""
        # Signal the TCP receiver and wait for it to terminate.
        self.root_logger.critical(LogRecordStreamHandler.TCP_LOG_END)
        self.tcpserver.finished.wait(2.0)
        return self.tcpserver.log_output

    def test_output(self):
        # The log message sent to the SocketHandler is properly received.
        logger = logging.getLogger("tcp")
        logger.error("spam")
        logger.debug("eggs")
        self.assertEquals(self.get_output(), "spam\neggs\n")

class MemoryTest(BaseTest):

    """Test memory persistence of logger objects."""

    def setUp(self):
        """Create a dict to remember potentially destroyed objects."""
        BaseTest.setUp(self)
        self._survivors = {}

    def _watch_for_survival(self, *args):
        """Watch the given objects for survival, by creating weakrefs to
        them."""
        for obj in args:
            key = id(obj), repr(obj)
            self._survivors[key] = weakref.ref(obj)

    def _assert_survival(self):
        """Assert that all objects watched for survival have survived."""
        # Trigger cycle breaking.
        gc.collect()
        dead = []
        for (id_, repr_), ref in self._survivors.items():
            if ref() is None:
                dead.append(repr_)
        if dead:
            self.fail("%d objects should have survived "
                      "but have been destroyed: %s" % (len(dead), ", ".join(dead)))

    def test_persistent_loggers(self):
        # Logger objects are persistent and retain their configuration, even
        # if visible references are destroyed.
        self.root_logger.setLevel(logging.INFO)
        foo = logging.getLogger("foo")
        self._watch_for_survival(foo)
        foo.setLevel(logging.DEBUG)
        self.root_logger.debug(self.next_message())
        foo.debug(self.next_message())
        self.assert_log_lines([
            ('foo', 'DEBUG', '2'),
        ])
        del foo
        # foo has survived.
        self._assert_survival()
        # foo has retained its settings.
        bar = logging.getLogger("foo")
        bar.debug(self.next_message())
        self.assert_log_lines([
            ('foo', 'DEBUG', '2'),
            ('foo', 'DEBUG', '3'),
        ])

class EncodingTest(BaseTest):

    def test_encoding_plain_file(self):
        # In Python 2.x, a plain file object is treated as having no encoding.
        log = logging.getLogger("test")
        fn = tempfile.mktemp(".log")
        # the non-ascii data we write to the log.
        data = "foo\x80"
        try:
            handler = logging.FileHandler(fn)
            log.addHandler(handler)
            try:
                # write non-ascii data to the log.
                log.warning(data)
            finally:
                log.removeHandler(handler)
                handler.close()
            # check we wrote exactly those bytes, ignoring trailing \n etc
            f = open(fn)
            try:
                self.failUnlessEqual(f.read().rstrip(), data)
            finally:
                f.close()
        finally:
            if os.path.isfile(fn):
                os.remove(fn)

    def test_encoding_cyrillic_unicode(self):
        log = logging.getLogger("test")
        # Get a message in Unicode: Do svidanya in Cyrillic (meaning goodbye)
        message = u'\u0434\u043e \u0441\u0432\u0438\u0434\u0430\u043d\u0438\u044f'
        # Ensure it's written in a Cyrillic encoding
        writer_class = codecs.getwriter('cp1251')
        writer_class.encoding = 'cp1251'
        stream = cStringIO.StringIO()
        writer = writer_class(stream, 'strict')
        handler = logging.StreamHandler(writer)
        log.addHandler(handler)
        try:
            log.warning(message)
        finally:
            log.removeHandler(handler)
            handler.close()
        # check we wrote exactly those bytes, ignoring trailing \n etc
        s = stream.getvalue()
        # Compare against what the data should be when encoded in CP-1251
        self.assertEqual(s, '\xe4\xee \xf1\xe2\xe8\xe4\xe0\xed\xe8\xff\n')

# Set the locale to the platform-dependent default. I have no idea
# why the test does this, but in any case we save the current locale
# first and restore it at the end.
@run_with_locale('LC_ALL', '')
def test_main():
    run_unittest(BuiltinLevelsTest, BasicFilterTest,
                 CustomLevelsAndFiltersTest, MemoryHandlerTest,
                 ConfigFileTest, SocketHandlerTest, MemoryTest,
                 EncodingTest)

if __name__ == "__main__":
    test_main()