
/setup.py

https://gitlab.com/czm1129/pyspider
Python | 124 lines
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# vim: set et sw=4 ts=4 sts=4 ff=unix fenc=utf8:
# Author: Binux<roy@binux.me>
#         http://binux.me
# Created on 2014-11-24 22:27:45

import sys
from setuptools import setup, find_packages
from codecs import open
from os import path

here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()

import pyspider

install_requires = [
    'Flask>=0.10',
    'Jinja2>=2.7',
    'chardet>=2.2',
    'cssselect>=0.9',
    'lxml',
    'pycurl',
    'pyquery',
    'requests>=2.2',
    'tornado>=3.2',
    'Flask-Login>=0.2.11',
    'u-msgpack-python>=1.6',
    'click>=3.3',
    'six',
]
if sys.version_info < (3, 0):
    install_requires.extend([
        'wsgidav',
    ])

extras_require_all = [
    'mysql-connector-python>=1.2.2',
    'amqp>=1.3.0',
    'pymongo>=2.7.2',
    'SQLAlchemy>=0.9.7',
    'redis',
    'kombu',
    'psycopg2',
    'elasticsearch',
]
if sys.version_info < (3, 0):
    extras_require_all.extend([
        'pika>=0.9.14',
        'beanstalkc',
    ])

setup(
    name='pyspider',
    version=pyspider.__version__,

    description='A Powerful Spider System in Python',
    long_description=long_description,

    url='https://github.com/binux/pyspider',

    author='Roy Binux',
    author_email='roy@binux.me',

    license='Apache License, Version 2.0',

    classifiers=[
        'Development Status :: 4 - Beta',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'License :: OSI Approved :: Apache Software License',
        'Intended Audience :: Developers',
        'Operating System :: OS Independent',
        'Environment :: Web Environment',
        'Topic :: Internet :: WWW/HTTP',
        'Topic :: Software Development :: Libraries :: Application Frameworks',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],

    keywords='scrapy crawler spider webui',

    packages=find_packages(exclude=['data', 'tests*']),

    install_requires=install_requires,

    extras_require={
        'all': extras_require_all,
        'test': [
            'unittest2>=0.5.1',
            'coverage',
            'httpbin',
            'pyproxy>=0.1.6',
            'easywebdav',
        ]
    },

    package_data={
        'pyspider': [
            'logging.conf',
            'fetcher/phantomjs_fetcher.js',
            'webui/static/*',
            'webui/templates/*'
        ],
    },

    entry_points={
        'console_scripts': [
            'pyspider=pyspider.run:main'
        ]
    },

    test_suite='tests.all_suite',
)