
/tests/test_commands.py

https://gitlab.com/e0/scrapy

import os
import sys
import subprocess
import tempfile
from time import sleep
from os.path import exists, join, abspath
from shutil import rmtree, copytree
from tempfile import mkdtemp
from contextlib import contextmanager

from twisted.trial import unittest
from twisted.internet import defer

import scrapy
from scrapy.utils.python import to_native_str, retry_on_eintr
from scrapy.utils.test import get_testenv
from scrapy.utils.testsite import SiteTest
from scrapy.utils.testproc import ProcessTest
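

# Base test case: creates a scratch directory per test and provides two
# helpers that run ``scrapy`` subcommands in a subprocess -- ``call``
# returns the exit code, ``proc`` returns a finished Popen object with
# captured stdout/stderr (killed after a 15-second safety timeout).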
class ProjectTest(unittest.TestCase):
    project_name = 'testproject'

    def setUp(self):
        self.temp_path = mkdtemp()
        self.cwd = self.temp_path
        self.proj_path = join(self.temp_path, self.project_name)
        self.proj_mod_path = join(self.proj_path, self.project_name)
        self.env = get_testenv()

    def tearDown(self):
        rmtree(self.temp_path)

    def call(self, *new_args, **kwargs):
        with tempfile.TemporaryFile() as out:
            args = (sys.executable, '-m', 'scrapy.cmdline') + new_args
            return subprocess.call(args, stdout=out, stderr=out,
                                   cwd=self.cwd, env=self.env, **kwargs)

    def proc(self, *new_args, **kwargs):
        args = (sys.executable, '-m', 'scrapy.cmdline') + new_args
        p = subprocess.Popen(args, cwd=self.cwd, env=self.env,
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                             **kwargs)

        waited = 0
        interval = 0.2
        while p.poll() is None:
            sleep(interval)
            waited += interval
            if waited > 15:
                p.kill()
                assert False, 'Command took too much time to complete'

        return p
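

# ``startproject`` should scaffold the full project layout on the first
# run (exit code 0) and fail (exit code 1) when the project already
# exists, the name is invalid, or the name shadows an importable module.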
class StartprojectTest(ProjectTest):

    def test_startproject(self):
        self.assertEqual(0, self.call('startproject', self.project_name))

        assert exists(join(self.proj_path, 'scrapy.cfg'))
        assert exists(join(self.proj_path, 'testproject'))
        assert exists(join(self.proj_mod_path, '__init__.py'))
        assert exists(join(self.proj_mod_path, 'items.py'))
        assert exists(join(self.proj_mod_path, 'pipelines.py'))
        assert exists(join(self.proj_mod_path, 'settings.py'))
        assert exists(join(self.proj_mod_path, 'spiders', '__init__.py'))

        self.assertEqual(1, self.call('startproject', self.project_name))
        self.assertEqual(1, self.call('startproject', 'wrong---project---name'))
        self.assertEqual(1, self.call('startproject', 'sys'))
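

# A custom TEMPLATES_DIR (a copy of the bundled templates plus an extra
# ``root_template`` marker file) should be picked up by ``startproject``
# and reflected both in its output and in the generated project.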
class StartprojectTemplatesTest(ProjectTest):

    def setUp(self):
        super(StartprojectTemplatesTest, self).setUp()
        self.tmpl = join(self.temp_path, 'templates')
        self.tmpl_proj = join(self.tmpl, 'project')

    def test_startproject_template_override(self):
        copytree(join(scrapy.__path__[0], 'templates'), self.tmpl)
        with open(join(self.tmpl_proj, 'root_template'), 'w'):
            pass
        assert exists(join(self.tmpl_proj, 'root_template'))

        args = ['--set', 'TEMPLATES_DIR=%s' % self.tmpl]
        p = self.proc('startproject', self.project_name, *args)
        out = to_native_str(retry_on_eintr(p.stdout.read))
        self.assertIn("New Scrapy project %r, using template directory" % self.project_name, out)
        self.assertIn(self.tmpl_proj, out)
        assert exists(join(self.proj_path, 'root_template'))
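

# Like ProjectTest, but with a project already created and the
# environment pointed at its settings module, so that project-scoped
# commands can run from inside the project directory.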
class CommandTest(ProjectTest):

    def setUp(self):
        super(CommandTest, self).setUp()
        self.call('startproject', self.project_name)
        self.cwd = join(self.temp_path, self.project_name)
        self.env['SCRAPY_SETTINGS_MODULE'] = '%s.settings' % self.project_name
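

# ``genspider`` needs both <name> and <domain>, supports the bundled
# templates (basic, crawl, csvfeed, xmlfeed), reports an already-existing
# spider instead of recreating it, and rejects a spider named after the
# project itself.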
class GenspiderCommandTest(CommandTest):

    def test_arguments(self):
        # only pass one argument. spider script shouldn't be created
        self.assertEqual(2, self.call('genspider', 'test_name'))
        assert not exists(join(self.proj_mod_path, 'spiders', 'test_name.py'))
        # pass two arguments <name> <domain>. spider script should be created
        self.assertEqual(0, self.call('genspider', 'test_name', 'test.com'))
        assert exists(join(self.proj_mod_path, 'spiders', 'test_name.py'))

    def test_template(self, tplname='crawl'):
        args = ['--template=%s' % tplname] if tplname else []
        spname = 'test_spider'
        p = self.proc('genspider', spname, 'test.com', *args)
        out = to_native_str(retry_on_eintr(p.stdout.read))
        self.assertIn("Created spider %r using template %r in module" % (spname, tplname), out)
        self.assertTrue(exists(join(self.proj_mod_path, 'spiders', 'test_spider.py')))
        p = self.proc('genspider', spname, 'test.com', *args)
        out = to_native_str(retry_on_eintr(p.stdout.read))
        self.assertIn("Spider %r already exists in module" % spname, out)

    def test_template_basic(self):
        self.test_template('basic')

    def test_template_csvfeed(self):
        self.test_template('csvfeed')

    def test_template_xmlfeed(self):
        self.test_template('xmlfeed')

    def test_list(self):
        self.assertEqual(0, self.call('genspider', '--list'))

    def test_dump(self):
        self.assertEqual(0, self.call('genspider', '--dump=basic'))
        self.assertEqual(0, self.call('genspider', '-d', 'basic'))

    def test_same_name_as_project(self):
        self.assertEqual(2, self.call('genspider', self.project_name))
        assert not exists(join(self.proj_mod_path, 'spiders', '%s.py' % self.project_name))


class MiscCommandsTest(CommandTest):

    def test_list(self):
        self.assertEqual(0, self.call('list'))
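

# ``runspider`` executes a standalone spider file: the happy path logs
# the spider lifecycle, and each failure mode (no spider class in the
# file, missing file, non-.py extension, exception raised from
# start_requests) is reported on stderr.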
class RunSpiderCommandTest(CommandTest):

    @contextmanager
    def _create_file(self, content, name):
        tmpdir = self.mktemp()
        os.mkdir(tmpdir)
        fname = abspath(join(tmpdir, name))
        with open(fname, 'w') as f:
            f.write(content)
        try:
            yield fname
        finally:
            rmtree(tmpdir)

    def runspider(self, code, name='myspider.py'):
        with self._create_file(code, name) as fname:
            return self.proc('runspider', fname)

    def test_runspider(self):
        spider = """
import scrapy

class MySpider(scrapy.Spider):
    name = 'myspider'

    def start_requests(self):
        self.logger.debug("It Works!")
        return []
"""
        p = self.runspider(spider)
        log = to_native_str(p.stderr.read())

        self.assertIn("DEBUG: It Works!", log)
        self.assertIn("INFO: Spider opened", log)
        self.assertIn("INFO: Closing spider (finished)", log)
        self.assertIn("INFO: Spider closed (finished)", log)

    def test_runspider_no_spider_found(self):
        p = self.runspider("from scrapy.spiders import Spider\n")
        log = to_native_str(p.stderr.read())
        self.assertIn("No spider found in file", log)

    def test_runspider_file_not_found(self):
        p = self.proc('runspider', 'some_non_existent_file')
        log = to_native_str(p.stderr.read())
        self.assertIn("File not found: some_non_existent_file", log)

    def test_runspider_unable_to_load(self):
        p = self.runspider('', 'myspider.txt')
        log = to_native_str(p.stderr.read())
        self.assertIn('Unable to load', log)

    def test_start_requests_errors(self):
        p = self.runspider("""
import scrapy

class BadSpider(scrapy.Spider):
    name = "bad"

    def start_requests(self):
        raise Exception("oops!")
""", name="badspider.py")
        log = to_native_str(p.stderr.read())
        print(log)
        self.assertIn("start_requests", log)
        self.assertIn("badspider.py", log)
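

# ``parse`` fetches a URL with a project spider (served here by
# SiteTest's local test site): -a passes spider arguments, --pipelines
# enables the item pipelines written in setUp, and the scraped items
# are echoed on stdout.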
class ParseCommandTest(ProcessTest, SiteTest, CommandTest):
    command = 'parse'

    def setUp(self):
        super(ParseCommandTest, self).setUp()
        self.spider_name = 'parse_spider'
        fname = abspath(join(self.proj_mod_path, 'spiders', 'myspider.py'))
        with open(fname, 'w') as f:
            f.write("""
import scrapy

class MySpider(scrapy.Spider):
    name = '{0}'

    def parse(self, response):
        if getattr(self, 'test_arg', None):
            self.logger.debug('It Works!')
        return [scrapy.Item(), dict(foo='bar')]
""".format(self.spider_name))

        fname = abspath(join(self.proj_mod_path, 'pipelines.py'))
        with open(fname, 'w') as f:
            f.write("""
import logging

class MyPipeline(object):
    component_name = 'my_pipeline'

    def process_item(self, item, spider):
        logging.info('It Works!')
        return item
""")

        fname = abspath(join(self.proj_mod_path, 'settings.py'))
        with open(fname, 'a') as f:
            f.write("""
ITEM_PIPELINES = {'%s.pipelines.MyPipeline': 1}
""" % self.project_name)

    @defer.inlineCallbacks
    def test_spider_arguments(self):
        _, _, stderr = yield self.execute(['--spider', self.spider_name,
                                           '-a', 'test_arg=1',
                                           '-c', 'parse',
                                           self.url('/html')])
        self.assertIn("DEBUG: It Works!", to_native_str(stderr))

    @defer.inlineCallbacks
    def test_pipelines(self):
        _, _, stderr = yield self.execute(['--spider', self.spider_name,
                                           '--pipelines',
                                           '-c', 'parse',
                                           self.url('/html')])
        self.assertIn("INFO: It Works!", to_native_str(stderr))

    @defer.inlineCallbacks
    def test_parse_items(self):
        status, out, stderr = yield self.execute(
            ['--spider', self.spider_name, '-c', 'parse', self.url('/html')]
        )
        self.assertIn("""[{}, {'foo': 'bar'}]""", to_native_str(out))
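

# ``bench`` runs the built-in benchmark spider; a tiny CLOSESPIDER_TIMEOUT
# keeps the run short while still producing crawl stats in the log.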
class BenchCommandTest(CommandTest):

    def test_run(self):
        p = self.proc('bench', '-s', 'LOGSTATS_INTERVAL=0.001',
                      '-s', 'CLOSESPIDER_TIMEOUT=0.01')
        log = to_native_str(p.stderr.read())
        self.assertIn('INFO: Crawled', log)
        self.assertNotIn('Unhandled Error', log)