PageRenderTime 96ms CodeModel.GetById 28ms RepoModel.GetById 0ms app.codeStats 0ms

/pip.py

https://bitbucket.org/jezdez/pip
Python | 4841 lines | 4605 code | 129 blank | 107 comment | 414 complexity | 2b5001e3688a34df698c0efd6a2a249f MD5 | raw file
  1. #!/usr/bin/env python
  2. import sys
  3. import os
  4. import errno
  5. import stat
  6. import optparse
  7. import pkg_resources
  8. import urllib2
  9. import urllib
  10. import mimetypes
  11. import zipfile
  12. import tarfile
  13. import tempfile
  14. import subprocess
  15. import posixpath
  16. import re
  17. import shutil
  18. import fnmatch
  19. import operator
  20. import copy
  21. try:
  22. from hashlib import md5
  23. except ImportError:
  24. import md5 as md5_module
  25. md5 = md5_module.new
  26. import urlparse
  27. from email.FeedParser import FeedParser
  28. import traceback
  29. from cStringIO import StringIO
  30. import socket
  31. from Queue import Queue
  32. from Queue import Empty as QueueEmpty
  33. import threading
  34. import httplib
  35. import time
  36. import logging
  37. import ConfigParser
  38. from distutils.util import strtobool
  39. from distutils import sysconfig
# Exception hierarchy used throughout pip to report failure modes.
class InstallationError(Exception):
    """General exception during installation"""


class UninstallationError(Exception):
    """General exception during uninstallation"""


class DistributionNotFound(InstallationError):
    """Raised when a distribution cannot be found to satisfy a requirement"""


class BadCommand(Exception):
    """Raised when virtualenv or a command is not found"""
# Compatibility shim: the builtin any() appeared in Python 2.5.  On older
# interpreters referencing the name raises NameError, so provide a
# pure-Python fallback with the same semantics.
try:
    any
except NameError:
    def any(seq):
        # True as soon as one truthy item is found; False for empty input.
        for item in seq:
            if item:
                return True
        return False
# Module-level path configuration.  virtualenv sets sys.real_prefix, so its
# presence means we are running inside a virtualenv and can keep build/src
# directories inside the environment.
if getattr(sys, 'real_prefix', None):
    ## FIXME: is build/ a good name?
    build_prefix = os.path.join(sys.prefix, 'build')
    src_prefix = os.path.join(sys.prefix, 'src')
else:
    ## FIXME: this isn't a very good default
    build_prefix = os.path.join(os.getcwd(), 'build')
    src_prefix = os.path.join(os.getcwd(), 'src')

# FIXME doesn't account for venv linked to global site-packages
site_packages = sysconfig.get_python_lib()
user_dir = os.path.expanduser('~')
if sys.platform == 'win32':
    bin_py = os.path.join(sys.prefix, 'Scripts')
    # buildout uses 'bin' on Windows too?
    if not os.path.exists(bin_py):
        bin_py = os.path.join(sys.prefix, 'bin')
    config_dir = os.environ.get('APPDATA', user_dir)  # Use %APPDATA% for roaming
    default_config_file = os.path.join(config_dir, 'pip', 'pip.ini')
else:
    bin_py = os.path.join(sys.prefix, 'bin')
    default_config_file = os.path.join(user_dir, '.pip', 'pip.conf')
    # Forcing to use /usr/local/bin for standard Mac OS X framework installs
    if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/':
        bin_py = '/usr/local/bin'
  80. class UpdatingDefaultsHelpFormatter(optparse.IndentedHelpFormatter):
  81. """Custom help formatter for use in ConfigOptionParser that updates
  82. the defaults before expanding them, allowing them to show up correctly
  83. in the help listing"""
  84. def expand_default(self, option):
  85. if self.parser is not None:
  86. self.parser.update_defaults(self.parser.defaults)
  87. return optparse.IndentedHelpFormatter.expand_default(self, option)
class ConfigOptionParser(optparse.OptionParser):
    """Custom option parser which updates its defaults by checking the
    configuration files and environmental variables"""

    def __init__(self, *args, **kwargs):
        # One RawConfigParser per parser instance; 'name' (required) selects
        # which config-file section, besides 'global', applies to it.
        self.config = ConfigParser.RawConfigParser()
        self.name = kwargs.pop('name')
        self.files = self.get_config_files()
        self.config.read(self.files)
        assert self.name
        optparse.OptionParser.__init__(self, *args, **kwargs)

    def get_config_files(self):
        # $PIP_CONFIG_FILE overrides the platform default, but only when it
        # points at an existing file.
        config_file = os.environ.get('PIP_CONFIG_FILE', False)
        if config_file and os.path.exists(config_file):
            return [config_file]
        return [default_config_file]

    def update_defaults(self, defaults):
        """Updates the given defaults with values from the config files and
        the environ. Does a little special handling for certain types of
        options (lists)."""
        # Then go and look for the other sources of configuration:
        config = {}
        # 1. config files ('global' first, so the command section wins)
        for section in ('global', self.name):
            config.update(dict(self.get_config_section(section)))
        # 2. environmental variables (highest precedence)
        config.update(dict(self.get_environ_vars()))
        # Then set the options with those values
        for key, val in config.iteritems():
            key = key.replace('_', '-')
            if not key.startswith('--'):
                key = '--%s' % key  # only prefer long opts
            option = self.get_option(key)
            if option is not None:
                # ignore empty values
                if not val:
                    continue
                # handle multiline configs
                if option.action == 'append':
                    val = val.split()
                else:
                    option.nargs = 1
                if option.action in ('store_true', 'store_false', 'count'):
                    val = strtobool(val)
                try:
                    val = option.convert_value(key, val)
                except optparse.OptionValueError, e:
                    print ("An error occured during configuration: %s" % e)
                    sys.exit(3)
                defaults[option.dest] = val
        return defaults

    def get_config_section(self, name):
        """Get a section of a configuration"""
        if self.config.has_section(name):
            return self.config.items(name)
        return []

    def get_environ_vars(self, prefix='PIP_'):
        """Returns a generator with all environmental vars with prefix PIP_"""
        for key, val in os.environ.iteritems():
            if key.startswith(prefix):
                yield (key.replace(prefix, '').lower(), val)

    def get_default_values(self):
        """Overriding to make updating the defaults after instantiation of
        the option parser possible, update_defaults() does the dirty work."""
        if not self.process_default_values:
            # Old, pre-Optik 1.5 behaviour.
            return optparse.Values(self.defaults)
        defaults = self.update_defaults(self.defaults.copy())  # ours
        for option in self._get_all_options():
            default = defaults.get(option.dest)
            if isinstance(default, basestring):
                opt_str = option.get_opt_string()
                defaults[option.dest] = option.check_value(opt_str, default)
        return optparse.Values(defaults)
# Determine the string shown by "pip --version".  When pip.py is run
# straight from a checkout there is no installed distribution metadata,
# so version stays None.
try:
    pip_dist = pkg_resources.get_distribution('pip')
    version = '%s from %s (python %s)' % (
        pip_dist, pip_dist.location, sys.version[:3])
except pkg_resources.DistributionNotFound:
    # when running pip.py without installing
    version = None
def rmtree_errorhandler(func, path, exc_info):
    """On Windows, the files in .svn are read-only, so when rmtree() tries to
    remove them, an exception is thrown.  We catch that here, remove the
    read-only attribute, and hopefully continue without problems.

    Signature matches the ``onerror`` callback of ``shutil.rmtree``.
    Re-raises the original exception for anything we cannot fix.
    """
    exctype, value = exc_info[:2]
    # looking for a windows access-denied error; anything else is fatal
    if exctype is not WindowsError or 'Access is denied' not in str(value):
        raise
    # file type should currently be read only, otherwise chmod won't help
    if ((os.stat(path).st_mode & stat.S_IREAD) != stat.S_IREAD):
        raise
    # convert to read/write
    os.chmod(path, stat.S_IWRITE)
    # use the original function to repeat the operation
    func(path)
  183. class VcsSupport(object):
  184. _registry = {}
  185. schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp']
  186. def __init__(self):
  187. # Register more schemes with urlparse for various version control systems
  188. urlparse.uses_netloc.extend(self.schemes)
  189. urlparse.uses_fragment.extend(self.schemes)
  190. super(VcsSupport, self).__init__()
  191. def __iter__(self):
  192. return self._registry.__iter__()
  193. @property
  194. def backends(self):
  195. return self._registry.values()
  196. @property
  197. def dirnames(self):
  198. return [backend.dirname for backend in self.backends]
  199. @property
  200. def all_schemes(self):
  201. schemes = []
  202. for backend in self.backends:
  203. schemes.extend(backend.schemes)
  204. return schemes
  205. def register(self, cls):
  206. if not hasattr(cls, 'name'):
  207. logger.warn('Cannot register VCS %s' % cls.__name__)
  208. return
  209. if cls.name not in self._registry:
  210. self._registry[cls.name] = cls
  211. def unregister(self, cls=None, name=None):
  212. if name in self._registry:
  213. del self._registry[name]
  214. elif cls in self._registry.values():
  215. del self._registry[cls.name]
  216. else:
  217. logger.warn('Cannot unregister because no class or name given')
  218. def get_backend_name(self, location):
  219. """
  220. Return the name of the version control backend if found at given
  221. location, e.g. vcs.get_backend_name('/path/to/vcs/checkout')
  222. """
  223. for vc_type in self._registry.values():
  224. path = os.path.join(location, vc_type.dirname)
  225. if os.path.exists(path):
  226. return vc_type.name
  227. return None
  228. def get_backend(self, name):
  229. name = name.lower()
  230. if name in self._registry:
  231. return self._registry[name]
  232. def get_backend_from_location(self, location):
  233. vc_type = self.get_backend_name(location)
  234. if vc_type:
  235. return self.get_backend(vc_type)
  236. return None
# Single shared VCS registry used by the whole module.
vcs = VcsSupport()

# Top-level option parser.  These global options are copied into each
# command's own parser by Command.__init__, so they may appear before or
# after the command name.
parser = ConfigOptionParser(
    usage='%prog COMMAND [OPTIONS]',
    version=version,
    add_help_option=False,
    formatter=UpdatingDefaultsHelpFormatter(),
    name='global')
parser.add_option(
    '-h', '--help',
    dest='help',
    action='store_true',
    help='Show help')
parser.add_option(
    '-E', '--environment',
    dest='venv',
    metavar='DIR',
    help='virtualenv environment to run pip in (either give the '
    'interpreter or the environment base directory)')
parser.add_option(
    '-s', '--enable-site-packages',
    dest='site_packages',
    action='store_true',
    help='Include site-packages in virtualenv if one is to be '
    'created. Ignored if --environment is not used or '
    'the virtualenv already exists.')
parser.add_option(
    # Defines a default root directory for virtualenvs, relative
    # virtualenvs names/paths are considered relative to it.
    '--virtualenv-base',
    dest='venv_base',
    type='str',
    default='',
    help=optparse.SUPPRESS_HELP)
parser.add_option(
    # Run only if inside a virtualenv, bail if not.
    '--require-virtualenv', '--require-venv',
    dest='require_venv',
    action='store_true',
    default=False,
    help=optparse.SUPPRESS_HELP)
parser.add_option(
    # Use automatically an activated virtualenv instead of installing
    # globally. -E will be ignored if used.
    '--respect-virtualenv', '--respect-venv',
    dest='respect_venv',
    action='store_true',
    default=False,
    help=optparse.SUPPRESS_HELP)
parser.add_option(
    '-v', '--verbose',
    dest='verbose',
    action='count',
    default=0,
    help='Give more output')
parser.add_option(
    '-q', '--quiet',
    dest='quiet',
    action='count',
    default=0,
    help='Give less output')
parser.add_option(
    '--log',
    dest='log',
    metavar='FILENAME',
    help='Log file where a complete (maximum verbosity) record will be kept')
parser.add_option(
    # Writes the log levels explicitly to the log
    '--log-explicit-levels',
    dest='log_explicit_levels',
    action='store_true',
    default=False,
    help=optparse.SUPPRESS_HELP)
parser.add_option(
    # The default log file
    '--local-log', '--log-file',
    dest='log_file',
    metavar='FILENAME',
    default='./pip-log.txt',
    help=optparse.SUPPRESS_HELP)
parser.add_option(
    '--proxy',
    dest='proxy',
    type='str',
    default='',
    help="Specify a proxy in the form user:passwd@proxy.server:port. "
    "Note that the user:password@ is optional and required only if you "
    "are behind an authenticated proxy. If you provide "
    "user@proxy.server:port then you will be prompted for a password.")
parser.add_option(
    '--timeout', '--default-timeout',
    metavar='SECONDS',
    dest='timeout',
    type='float',
    default=15,
    help='Set the socket timeout (default %default seconds)')
parser.add_option(
    # The default version control system for editables, e.g. 'svn'
    '--default-vcs',
    dest='default_vcs',
    type='str',
    default='',
    help=optparse.SUPPRESS_HELP)
parser.add_option(
    # A regex to be used to skip requirements
    '--skip-requirements-regex',
    dest='skip_requirements_regex',
    type='str',
    default='',
    help=optparse.SUPPRESS_HELP)
# Stop parsing global options at the first positional argument (the command
# name) so everything after it is handed to the command's own parser.
parser.disable_interspersed_args()

# Maps command name -> Command instance; populated by Command.__init__.
_commands = {}
class Command(object):
    """Base class for pip subcommands.

    Subclasses set ``name``, ``usage`` and ``summary``, add their own
    options in ``__init__`` and implement ``run(options, args)``.
    Instantiating a subclass registers it in the module-level
    ``_commands`` map.
    """
    name = None
    usage = None
    hidden = False  # hidden commands are omitted from 'pip help'

    def __init__(self):
        assert self.name
        self.parser = ConfigOptionParser(
            usage=self.usage,
            prog='%s %s' % (sys.argv[0], self.name),
            version=parser.version,
            formatter=UpdatingDefaultsHelpFormatter(),
            name=self.name)
        # Re-add all global options so they are accepted after the command
        # name as well as before it.
        for option in parser.option_list:
            if not option.dest or option.dest == 'help':
                # -h, --version, etc
                continue
            self.parser.add_option(option)
        _commands[self.name] = self

    def merge_options(self, initial_options, options):
        """Fold the globally-parsed options into the command's options.

        A global (pre-command) value wins over the command-level one for
        the listed attributes; verbosity counters are additive.
        """
        # Make sure we have all global options carried over
        for attr in ['log', 'venv', 'proxy', 'venv_base', 'require_venv',
                     'respect_venv', 'log_explicit_levels', 'log_file',
                     'timeout', 'default_vcs', 'skip_requirements_regex']:
            setattr(options, attr, getattr(initial_options, attr) or getattr(options, attr))
        options.quiet += initial_options.quiet
        options.verbose += initial_options.verbose

    def main(self, complete_args, args, initial_options):
        """Entry point for a command invocation.

        Handles virtualenv selection/restart, logger construction, proxy
        and timeout setup, then delegates to self.run().  Returns the
        process exit status (0 on success).
        """
        global logger
        options, args = self.parser.parse_args(args)
        self.merge_options(initial_options, options)

        if options.require_venv and not options.venv:
            # If a venv is required check if it can really be found
            if not os.environ.get('VIRTUAL_ENV'):
                print 'Could not find an activated virtualenv (required).'
                sys.exit(3)
            # Automatically install in currently activated venv if required
            options.respect_venv = True

        if args and args[-1] == '___VENV_RESTART___':
            # Sentinel appended by restart_in_venv: we are the re-executed
            # process inside the venv, so don't restart again.
            ## FIXME: We don't do anything this this value yet:
            venv_location = args[-2]
            args = args[:-2]
            options.venv = None
        else:
            # If given the option to respect the activated environment
            # check if no venv is given as a command line parameter
            if options.respect_venv and os.environ.get('VIRTUAL_ENV'):
                if options.venv and os.path.exists(options.venv):
                    # Make sure command line venv and environmental are the same
                    if (os.path.realpath(os.path.expanduser(options.venv)) !=
                            os.path.realpath(os.environ.get('VIRTUAL_ENV'))):
                        print ("Given virtualenv (%s) doesn't match "
                               "currently activated virtualenv (%s)."
                               % (options.venv, os.environ.get('VIRTUAL_ENV')))
                        sys.exit(3)
                else:
                    options.venv = os.environ.get('VIRTUAL_ENV')
                    print 'Using already activated environment %s' % options.venv

        # Map -v/-q counts onto the logger's level scale.
        level = 1  # Notify
        level += options.verbose
        level -= options.quiet
        level = Logger.level_for_integer(4 - level)
        complete_log = []
        logger = Logger([(level, sys.stdout),
                         (Logger.DEBUG, complete_log.append)])
        if options.log_explicit_levels:
            logger.explicit_levels = True

        if options.venv:
            if options.verbose > 0:
                # The logger isn't setup yet
                print 'Running in environment %s' % options.venv
            site_packages = False
            if options.site_packages:
                site_packages = True
            restart_in_venv(options.venv, options.venv_base, site_packages,
                            complete_args)
            # restart_in_venv should actually never return, but for clarity...
            return

        ## FIXME: not sure if this sure come before or after venv restart
        if options.log:
            log_fp = open_logfile_append(options.log)
            logger.consumers.append((logger.DEBUG, log_fp))
        else:
            log_fp = None

        socket.setdefaulttimeout(options.timeout or None)
        setup_proxy_handler(options.proxy)

        exit = 0
        try:
            self.run(options, args)
        except (InstallationError, UninstallationError), e:
            # Expected failures: report cleanly, exit status 1.
            logger.fatal(str(e))
            logger.info('Exception information:\n%s' % format_exc())
            exit = 1
        except:
            # Unexpected failures: full traceback, exit status 2.
            logger.fatal('Exception:\n%s' % format_exc())
            exit = 2

        if log_fp is not None:
            log_fp.close()
        if exit:
            # On failure, dump the complete (max verbosity) log to a file.
            log_fn = options.log_file
            text = '\n'.join(complete_log)
            logger.fatal('Storing complete log in %s' % log_fn)
            log_fp = open_logfile_append(log_fn)
            log_fp.write(text)
            log_fp.close()
        return exit
class HelpCommand(Command):
    """``pip help [COMMAND]``: show global help or a command's help."""
    name = 'help'
    usage = '%prog'
    summary = 'Show available commands'

    def run(self, options, args):
        if args:
            ## FIXME: handle errors better here
            command = args[0]
            if command not in _commands:
                raise InstallationError('No command with the name: %s' % command)
            command = _commands[command]
            command.parser.print_help()
            return
        # No command given: global help plus a summary of visible commands.
        parser.print_help()
        print
        print 'Commands available:'
        # set() de-duplicates aliases pointing at the same instance.
        commands = list(set(_commands.values()))
        commands.sort(key=lambda x: x.name)
        for command in commands:
            if command.hidden:
                continue
            print ' %s: %s' % (command.name, command.summary)

HelpCommand()
class InstallCommand(Command):
    """``pip install``: download, build and install packages.

    Also serves as the base class for BundleCommand, which flips the
    ``bundle`` flag to skip the final install step.
    """
    name = 'install'
    usage = '%prog [OPTIONS] PACKAGE_NAMES...'
    summary = 'Install packages'
    bundle = False  # overridden by BundleCommand

    def __init__(self):
        super(InstallCommand, self).__init__()
        self.parser.add_option(
            '-e', '--editable',
            dest='editables',
            action='append',
            default=[],
            metavar='VCS+REPOS_URL[@REV]#egg=PACKAGE',
            help='Install a package directly from a checkout. Source will be checked '
            'out into src/PACKAGE (lower-case) and installed in-place (using '
            'setup.py develop). You can run this on an existing directory/checkout (like '
            'pip install -e src/mycheckout). This option may be provided multiple times. '
            'Possible values for VCS are: svn, git, hg and bzr.')
        self.parser.add_option(
            '-r', '--requirement',
            dest='requirements',
            action='append',
            default=[],
            metavar='FILENAME',
            help='Install all the packages listed in the given requirements file. '
            'This option can be used multiple times.')
        self.parser.add_option(
            '-f', '--find-links',
            dest='find_links',
            action='append',
            default=[],
            metavar='URL',
            help='URL to look for packages at')
        self.parser.add_option(
            '-i', '--index-url', '--pypi-url',
            dest='index_url',
            metavar='URL',
            default='http://pypi.python.org/simple',
            help='Base URL of Python Package Index (default %default)')
        self.parser.add_option(
            '--extra-index-url',
            dest='extra_index_urls',
            metavar='URL',
            action='append',
            default=[],
            help='Extra URLs of package indexes to use in addition to --index-url')
        self.parser.add_option(
            '--no-index',
            dest='no_index',
            action='store_true',
            default=False,
            help='Ignore package index (only looking at --find-links URLs instead)')
        self.parser.add_option(
            '-b', '--build', '--build-dir', '--build-directory',
            dest='build_dir',
            metavar='DIR',
            default=None,
            help='Unpack packages into DIR (default %s) and build from there' % build_prefix)
        self.parser.add_option(
            '-d', '--download', '--download-dir', '--download-directory',
            dest='download_dir',
            metavar='DIR',
            default=None,
            help='Download packages into DIR instead of installing them')
        self.parser.add_option(
            '--download-cache',
            dest='download_cache',
            metavar='DIR',
            default=None,
            help='Cache downloaded packages in DIR')
        self.parser.add_option(
            '--src', '--source', '--source-dir', '--source-directory',
            dest='src_dir',
            metavar='DIR',
            default=None,
            help='Check out --editable packages into DIR (default %s)' % src_prefix)
        self.parser.add_option(
            '-U', '--upgrade',
            dest='upgrade',
            action='store_true',
            help='Upgrade all packages to the newest available version')
        self.parser.add_option(
            '-I', '--ignore-installed',
            dest='ignore_installed',
            action='store_true',
            help='Ignore the installed packages (reinstalling instead)')
        self.parser.add_option(
            '--no-deps', '--no-dependencies',
            dest='ignore_dependencies',
            action='store_true',
            default=False,
            help='Ignore package dependencies')
        self.parser.add_option(
            '--no-install',
            dest='no_install',
            action='store_true',
            help="Download and unpack all packages, but don't actually install them")
        self.parser.add_option(
            '--install-option',
            dest='install_options',
            action='append',
            help="Extra arguments to be supplied to the setup.py install "
            "command (use like --install-option=\"--install-scripts=/usr/local/bin\"). "
            "Use multiple --install-option options to pass multiple options to setup.py install. "
            "If you are using an option with a directory path, be sure to use absolute path.")

    def run(self, options, args):
        """Resolve, download and (unless --no-install) install the
        requested packages; returns the populated RequirementSet."""
        if not options.build_dir:
            options.build_dir = build_prefix
        if not options.src_dir:
            options.src_dir = src_prefix
        if options.download_dir:
            # Download-only mode: never install, always fetch fresh copies.
            options.no_install = True
            options.ignore_installed = True
        else:
            options.build_dir = os.path.abspath(options.build_dir)
            options.src_dir = os.path.abspath(options.src_dir)
        install_options = options.install_options or []
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
            index_urls = []
        finder = PackageFinder(
            find_links=options.find_links,
            index_urls=index_urls)
        requirement_set = RequirementSet(
            build_dir=options.build_dir,
            src_dir=options.src_dir,
            download_dir=options.download_dir,
            download_cache=options.download_cache,
            upgrade=options.upgrade,
            ignore_installed=options.ignore_installed,
            ignore_dependencies=options.ignore_dependencies)
        # Collect requirements from positional args, -e options and -r files.
        for name in args:
            requirement_set.add_requirement(
                InstallRequirement.from_line(name, None))
        for name in options.editables:
            requirement_set.add_requirement(
                InstallRequirement.from_editable(name, default_vcs=options.default_vcs))
        for filename in options.requirements:
            for req in parse_requirements(filename, finder=finder, options=options):
                requirement_set.add_requirement(req)
        requirement_set.install_files(finder, force_root_egg_info=self.bundle)
        if not options.no_install and not self.bundle:
            requirement_set.install(install_options)
            installed = ' '.join([req.name for req in
                                  requirement_set.successfully_installed])
            if installed:
                logger.notify('Successfully installed %s' % installed)
        elif not self.bundle:
            downloaded = ' '.join([req.name for req in
                                   requirement_set.successfully_downloaded])
            if downloaded:
                logger.notify('Successfully downloaded %s' % downloaded)
        return requirement_set

InstallCommand()
class UninstallCommand(Command):
    """``pip uninstall``: remove installed packages."""
    name = 'uninstall'
    usage = '%prog [OPTIONS] PACKAGE_NAMES ...'
    summary = 'Uninstall packages'

    def __init__(self):
        super(UninstallCommand, self).__init__()
        self.parser.add_option(
            '-r', '--requirement',
            dest='requirements',
            action='append',
            default=[],
            metavar='FILENAME',
            help='Uninstall all the packages listed in the given requirements file. '
            'This option can be used multiple times.')
        self.parser.add_option(
            '-y', '--yes',
            dest='yes',
            action='store_true',
            help="Don't ask for confirmation of uninstall deletions.")

    def run(self, options, args):
        # No build/src/download directories are needed just to uninstall.
        requirement_set = RequirementSet(
            build_dir=None,
            src_dir=None,
            download_dir=None)
        for name in args:
            requirement_set.add_requirement(
                InstallRequirement.from_line(name))
        for filename in options.requirements:
            for req in parse_requirements(filename, options=options):
                requirement_set.add_requirement(req)
        requirement_set.uninstall(auto_confirm=options.yes)

UninstallCommand()
class BundleCommand(InstallCommand):
    """``pip bundle``: like install, but packs everything into a
    .pybundle archive instead of installing (via bundle=True)."""
    name = 'bundle'
    usage = '%prog [OPTIONS] BUNDLE_NAME.pybundle PACKAGE_NAMES...'
    summary = 'Create pybundles (archives containing multiple packages)'
    bundle = True

    def __init__(self):
        super(BundleCommand, self).__init__()

    def run(self, options, args):
        if not args:
            raise InstallationError('You must give a bundle filename')
        # Use separate -bundle suffixed dirs so a bundle build never
        # collides with a normal install's build/src directories.
        if not options.build_dir:
            options.build_dir = backup_dir(build_prefix, '-bundle')
        if not options.src_dir:
            options.src_dir = backup_dir(src_prefix, '-bundle')
        # We have to get everything when creating a bundle:
        options.ignore_installed = True
        logger.notify('Putting temporary build files in %s and source/develop files in %s'
                      % (display_path(options.build_dir), display_path(options.src_dir)))
        # First positional arg is the bundle filename; the rest are packages.
        bundle_filename = args[0]
        args = args[1:]
        requirement_set = super(BundleCommand, self).run(options, args)
        # FIXME: here it has to do something
        requirement_set.create_bundle(bundle_filename)
        logger.notify('Created bundle in %s' % bundle_filename)
        return requirement_set

BundleCommand()
  689. class FreezeCommand(Command):
  690. name = 'freeze'
  691. usage = '%prog [OPTIONS]'
  692. summary = 'Output all currently installed packages (exact versions) to stdout'
  693. def __init__(self):
  694. super(FreezeCommand, self).__init__()
  695. self.parser.add_option(
  696. '-r', '--requirement',
  697. dest='requirement',
  698. action='store',
  699. default=None,
  700. metavar='FILENAME',
  701. help='Use the given requirements file as a hint about how to generate the new frozen requirements')
  702. self.parser.add_option(
  703. '-f', '--find-links',
  704. dest='find_links',
  705. action='append',
  706. default=[],
  707. metavar='URL',
  708. help='URL for finding packages, which will be added to the frozen requirements file')
  709. def run(self, options, args):
  710. requirement = options.requirement
  711. find_links = options.find_links or []
  712. ## FIXME: Obviously this should be settable:
  713. find_tags = False
  714. skip_match = None
  715. skip_regex = options.skip_requirements_regex
  716. if skip_regex:
  717. skip_match = re.compile(skip_regex)
  718. logger.move_stdout_to_stderr()
  719. dependency_links = []
  720. f = sys.stdout
  721. for dist in pkg_resources.working_set:
  722. if dist.has_metadata('dependency_links.txt'):
  723. dependency_links.extend(dist.get_metadata_lines('dependency_links.txt'))
  724. for link in find_links:
  725. if '#egg=' in link:
  726. dependency_links.append(link)
  727. for link in find_links:
  728. f.write('-f %s\n' % link)
  729. installations = {}
  730. for dist in pkg_resources.working_set:
  731. if dist.key in ('setuptools', 'pip', 'python'):
  732. ## FIXME: also skip virtualenv?
  733. continue
  734. req = FrozenRequirement.from_dist(dist, dependency_links, find_tags=find_tags)
  735. installations[req.name] = req
  736. if requirement:
  737. req_f = open(requirement)
  738. for line in req_f:
  739. if not line.strip() or line.strip().startswith('#'):
  740. f.write(line)
  741. continue
  742. if skip_match and skip_match.search(line):
  743. f.write(line)
  744. continue
  745. elif line.startswith('-e') or line.startswith('--editable'):
  746. if line.startswith('-e'):
  747. line = line[2:].strip()
  748. else:
  749. line = line[len('--editable'):].strip().lstrip('=')
  750. line_req = InstallRequirement.from_editable(line, default_vcs=options.default_vcs)
  751. elif (line.startswith('-r') or line.startswith('--requirement')
  752. or line.startswith('-Z') or line.startswith('--always-unzip')):
  753. logger.debug('Skipping line %r' % line.strip())
  754. continue
  755. else:
  756. line_req = InstallRequirement.from_line(line)
  757. if not line_req.name:
  758. logger.notify("Skipping line because it's not clear what it would install: %s"
  759. % line.strip())
  760. logger.notify(" (add #egg=PackageName to the URL to avoid this warning)")
  761. continue
  762. if line_req.name not in installations:
  763. logger.warn("Requirement file contains %s, but that package is not installed"
  764. % line.strip())
  765. continue
  766. f.write(str(installations[line_req.name]))
  767. del installations[line_req.name]
  768. f.write('## The following requirements were added by pip --freeze:\n')
  769. for installation in sorted(installations.values(), key=lambda x: x.name):
  770. f.write(str(installation))
  771. FreezeCommand()
  772. class ZipCommand(Command):
  773. name = 'zip'
  774. usage = '%prog [OPTIONS] PACKAGE_NAMES...'
  775. summary = 'Zip individual packages'
    def __init__(self):
        super(ZipCommand, self).__init__()
        # 'zip' and 'unzip' share this implementation; which direction is
        # the default depends on the command's name.
        if self.name == 'zip':
            self.parser.add_option(
                '--unzip',
                action='store_true',
                dest='unzip',
                help='Unzip (rather than zip) a package')
        else:
            self.parser.add_option(
                '--zip',
                action='store_false',
                dest='unzip',
                default=True,
                help='Zip (rather than unzip) a package')
        self.parser.add_option(
            '--no-pyc',
            action='store_true',
            dest='no_pyc',
            help='Do not include .pyc files in zip files (useful on Google App Engine)')
        self.parser.add_option(
            '-l', '--list',
            action='store_true',
            dest='list',
            help='List the packages available, and their zip status')
        self.parser.add_option(
            '--sort-files',
            action='store_true',
            dest='sort_files',
            help='With --list, sort packages according to how many files they contain')
        self.parser.add_option(
            '--path',
            action='append',
            dest='paths',
            help='Restrict operations to the given paths (may include wildcards)')
        self.parser.add_option(
            '-n', '--simulate',
            action='store_true',
            help='Do not actually perform the zip/unzip operation')
    def paths(self):
        """All the entries of sys.path, possibly restricted by --path"""
        if not self.select_paths:
            return sys.path
        result = []
        match_any = set()
        for path in sys.path:
            path = os.path.normcase(os.path.abspath(path))
            for match in self.select_paths:
                match = os.path.normcase(os.path.abspath(match))
                if '*' in match:
                    # Wildcard --path entry: fnmatch it against the
                    # sys.path entry (trailing '*' makes it a prefix glob).
                    if re.search(fnmatch.translate(match + '*'), path):
                        result.append(path)
                        match_any.add(match)
                        break
                else:
                    # Literal --path entry: plain prefix comparison.
                    if path.startswith(match):
                        result.append(path)
                        match_any.add(match)
                        break
            else:
                # for/else: no --path entry matched this sys.path entry.
                logger.debug("Skipping path %s because it doesn't match %s"
                             % (path, ', '.join(self.select_paths)))
        # Literal --path values that matched nothing on sys.path are kept
        # as extra candidate locations.
        for match in self.select_paths:
            if match not in match_any and '*' not in match:
                result.append(match)
                logger.debug("Adding path %s because it doesn't match anything already on sys.path"
                             % match)
        return result
  844. def run(self, options, args):
  845. self.select_paths = options.paths
  846. self.simulate = options.simulate
  847. if options.list:
  848. return self.list(options, args)
  849. if not args:
  850. raise InstallationError(
  851. 'You must give at least one package to zip or unzip')
  852. packages = []
  853. for arg in args:
  854. module_name, filename = self.find_package(arg)
  855. if options.unzip and os.path.isdir(filename):
  856. raise InstallationError(
  857. 'The module %s (in %s) is not a zip file; cannot be unzipped'
  858. % (module_name, filename))
  859. elif not options.unzip and not os.path.isdir(filename):
  860. raise InstallationError(
  861. 'The module %s (in %s) is not a directory; cannot be zipped'
  862. % (module_name, filename))
  863. packages.append((module_name, filename))
  864. last_status = None
  865. for module_name, filename in packages:
  866. if options.unzip:
  867. last_status = self.unzip_package(module_name, filename)
  868. else:
  869. last_status = self.zip_package(module_name, filename, options.no_pyc)
  870. return last_status
  871. def unzip_package(self, module_name, filename):
  872. zip_filename = os.path.dirname(filename)
  873. if not os.path.isfile(zip_filename) and zipfile.is_zipfile(zip_filename):
  874. raise InstallationError(
  875. 'Module %s (in %s) isn\'t located in a zip file in %s'
  876. % (module_name, filename, zip_filename))
  877. package_path = os.path.dirname(zip_filename)
  878. if not package_path in self.paths():
  879. logger.warn(
  880. 'Unpacking %s into %s, but %s is not on sys.path'
  881. % (display_path(zip_filename), display_path(package_path),
  882. display_path(package_path)))
  883. logger.notify('Unzipping %s (in %s)' % (module_name, display_path(zip_filename)))
  884. if self.simulate:
  885. logger.notify('Skipping remaining operations because of --simulate')
  886. return
  887. logger.indent += 2
  888. try:
  889. ## FIXME: this should be undoable:
  890. zip = zipfile.ZipFile(zip_filename)
  891. to_save = []
  892. for name in zip.namelist():
  893. if name.startswith('%s/' % module_name):
  894. content = zip.read(name)
  895. dest = os.path.join(package_path, name)
  896. if not os.path.exists(os.path.dirname(dest)):
  897. os.makedirs(os.path.dirname(dest))
  898. if not content and dest.endswith('/'):
  899. if not os.path.exists(dest):
  900. os.makedirs(dest)
  901. else:
  902. f = open(dest, 'wb')
  903. f.write(content)
  904. f.close()
  905. else:
  906. to_save.append((name, zip.read(name)))
  907. zip.close()
  908. if not to_save:
  909. logger.info('Removing now-empty zip file %s' % display_path(zip_filename))
  910. os.unlink(zip_filename)
  911. self.remove_filename_from_pth(zip_filename)
  912. else:
  913. logger.info('Removing entries in %s/ from zip file %s' % (module_name, display_path(zip_filename)))
  914. zip = zipfile.ZipFile(zip_filename, 'w')
  915. for name, content in to_save:
  916. zip.writestr(name, content)
  917. zip.close()
  918. finally:
  919. logger.indent -= 2
  920. def zip_package(self, module_name, filename, no_pyc):
  921. orig_filename = filename
  922. logger.notify('Zip %s (in %s)' % (module_name, display_path(filename)))
  923. logger.indent += 2
  924. if filename.endswith('.egg'):
  925. dest_filename = filename
  926. else:
  927. dest_filename = filename + '.zip'
  928. try:
  929. ## FIXME: I think this needs to be undoable:
  930. if filename == dest_filename:
  931. filename = backup_dir(orig_filename)
  932. logger.notify('Moving %s aside to %s' % (orig_filename, filename))
  933. if not self.simulate:
  934. shutil.move(orig_filename, filename)
  935. try:
  936. logger.info('Creating zip file in %s' % display_path(dest_filename))
  937. if not self.simulate:
  938. zip = zipfile.ZipFile(dest_filename, 'w')
  939. zip.writestr(module_name + '/', '')
  940. for dirpath, dirnames, filenames in os.walk(filename):
  941. if no_pyc:
  942. filenames = [f for f in filenames
  943. if not f.lower().endswith('.pyc')]
  944. for fns, is_dir in [(dirnames, True), (filenames, False)]:
  945. for fn in fns:
  946. full = os.path.join(dirpath, fn)
  947. dest = os.path.join(module_name, dirpath[len(filename):].lstrip(os.path.sep), fn)
  948. if is_dir:
  949. zip.writestr(dest+'/', '')
  950. else:
  951. zip.write(full, dest)
  952. zip.close()
  953. logger.info('Removing old directory %s' % display_path(filename))
  954. if not self.simulate:
  955. shutil.rmtree(filename)
  956. except:
  957. ## FIXME: need to do an undo here
  958. raise
  959. ## FIXME: should also be undone:
  960. self.add_filename_to_pth(dest_filename)
  961. finally:
  962. logger.indent -= 2
  963. def remove_filename_from_pth(self, filename):
  964. for pth in self.pth_files():
  965. f = open(pth, 'r')
  966. lines = f.readlines()
  967. f.close()
  968. new_lines = [
  969. l for l in lines if l.strip() != filename]
  970. if lines != new_lines:
  971. logger.info('Removing reference to %s from .pth file %s'
  972. % (display_path(filename), display_path(pth)))
  973. if not filter(None, new_lines):
  974. logger.info('%s file would be empty: deleting' % display_path(pth))
  975. if not self.simulate:
  976. os.unlink(pth)
  977. else:
  978. if not self.simulate:
  979. f = open(pth, 'w')
  980. f.writelines(new_lines)
  981. f.close()
  982. return
  983. logger.warn('Cannot find a reference to %s in any .pth file' % display_path(filename))
  984. def add_filename_to_pth(self, filename):
  985. path = os.path.dirname(filename)
  986. dest = os.path.join(path, filename + '.pth')
  987. if path not in self.paths():
  988. logger.warn('Adding .pth file %s, but it is not on sys.path' % display_path(dest))
  989. if not self.simulate:
  990. if os.path.exists(dest):
  991. f = open(dest)
  992. lines = f.readlines()
  993. f.close()
  994. if lines and not lines[-1].endswith('\n'):
  995. lines[-1] += '\n'
  996. lines.append(filename+'\n')
  997. else:
  998. lines = [filename + '\n']
  999. f = open(dest, 'w')
  1000. f.writelines(lines)
  1001. f.close()
  1002. def pth_files(self):
  1003. for path in self.paths():
  1004. if not os.path.exists(path) or not os.path.isdir(path):
  1005. continue
  1006. for filename in os.listdir(path):
  1007. if filename.endswith('.pth'):
  1008. yield os.path.join(path, filename)
  1009. def find_package(self, package):
  1010. for path in self.paths():
  1011. full = os.path.join(path, package)
  1012. if os.path.exists(full):
  1013. return package, full
  1014. if not os.path.isdir(path) and zipfile.is_zipfile(path):
  1015. zip = zipfile.ZipFile(path, 'r')
  1016. try:
  1017. zip.read('%s/__init__.py' % package)
  1018. except KeyError:
  1019. pass
  1020. else:
  1021. zip.close()
  1022. return package, full
  1023. zip.close()
  1024. ## FIXME: need special error for package.py case:
  1025. raise InstallationError(
  1026. 'No package with the name %s found' % package)
  1027. def list(self, options, args):
  1028. if args:
  1029. raise InstallationError(
  1030. 'You cannot give an argument with --list')
  1031. for path in sorted(self.paths()):
  1032. if not os.path.exists(path):
  1033. continue
  1034. basename = os.path.basename(path.rstrip(os.path.sep))
  1035. if os.path.isfile(path) and zipfile.is_zipfile(path):
  1036. if os.path.dirname(path) not in self.paths():
  1037. logger.notify('Zipped egg: %s' % display_path(path))
  1038. continue
  1039. if (basename != 'site-packages'
  1040. and not path.replace('\\', '/').endswith('lib/python')):
  1041. continue
  1042. logger.notify('In %s:' % display_path(path))
  1043. logger.indent += 2
  1044. zipped = []
  1045. unzipped = []
  1046. try:
  1047. for filename in sorted(os.listdir(path)):
  1048. ext = os.path.splitext(filename)[1].lower()
  1049. if ext in ('.pth', '.egg-info', '.egg-link'):
  1050. continue
  1051. if ext == '.py':
  1052. logger.info('Not displaying %s: not a package' % display_path(filename))
  1053. continue
  1054. full = os.path.join(path, filename)
  1055. if os.path.isdir(full):
  1056. unzipped.append((filename, self.count_package(full)))
  1057. elif zipfile.is_zipfile(full):
  1058. zipped.append(filename)
  1059. else:
  1060. logger.info('Unknown file: %s' % display_path(filename))
  1061. if zipped:
  1062. logger.notify('Zipped packages:')
  1063. logger.indent += 2
  1064. try:
  1065. for filename in zipped:
  1066. logger.notify(filename)
  1067. finally:
  1068. logger.indent -= 2
  1069. else:
  1070. logger.notify('No zipped packages.')
  1071. if unzipped:
  1072. if options.sort_files:
  1073. unzipped.sort(key=lambda x: -x[1])
  1074. logger.notify('Unzipped packages:')
  1075. logger.indent += 2
  1076. try:
  1077. for filename, count in unzipped:
  1078. logger.notify('%s (%i files)' % (filename, count))
  1079. finally:
  1080. logger.indent -= 2
  1081. else:
  1082. logger.notify('No unzipped packages.')
  1083. finally:
  1084. logger.indent -= 2
  1085. def count_package(self, path):
  1086. total = 0
  1087. for dirpath, dirnames, filenames in os.walk(path):
  1088. filenames = [f for f in filenames
  1089. if not f.lower().endswith('.pyc')]
  1090. total += len(filenames)
  1091. return total
  1092. ZipCommand()
class UnzipCommand(ZipCommand):
    # Identical implementation to ZipCommand: ZipCommand.__init__ keys off
    # ``self.name`` to flip the default zip/unzip direction.
    name = 'unzip'
    summary = 'Unzip individual packages'

# Instantiating registers the command in the global command table.
UnzipCommand()
# Wrapper around the per-shell scripts below; %(script)s is the snippet body
# and the start/end marker comments make the inserted block easy to locate
# in a user's shell rc file.
BASE_COMPLETION = """
# pip %(shell)s completion start%(script)s# pip %(shell)s completion end
"""

# Per-shell completion snippets emitted by the 'completion' command.  Each
# invokes pip with PIP_AUTO_COMPLETE=1 set, which makes autocomplete()
# (defined later in this file) print the candidate completions.
COMPLETION_SCRIPTS = {
    'bash': """
_pip_completion()
{
    COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\
                   COMP_CWORD=$COMP_CWORD \\
                   PIP_AUTO_COMPLETE=1 $1 ) )
}
complete -o default -F _pip_completion pip
""",
    'zsh': """
function _pip_completion {
  local words cword
  read -Ac words
  read -cn cword
  reply=( $( COMP_WORDS="$words[*]" \\
             COMP_CWORD=$(( cword-1 )) \\
             PIP_AUTO_COMPLETE=1 $words[1] ) )
}
compctl -K _pip_completion pip
"""
}
  1121. class CompletionCommand(Command):
  1122. name = 'completion'
  1123. summary = 'A helper command to be used for command completion'
  1124. hidden = True
  1125. def __init__(self):
  1126. super(CompletionCommand, self).__init__()
  1127. self.parser.add_option(
  1128. '--bash', '-b',
  1129. action='store_const',
  1130. const='bash',
  1131. dest='shell',
  1132. help='Emit completion code for bash')
  1133. self.parser.add_option(
  1134. '--zsh', '-z',
  1135. action='store_const',
  1136. const='zsh',
  1137. dest='shell',
  1138. help='Emit completion code for zsh')
  1139. def run(self, options, args):
  1140. """Prints the completion code of the given shell"""
  1141. if options.shell in ('bash', 'zsh'):
  1142. script = COMPLETION_SCRIPTS.get(options.shell, '')
  1143. print BASE_COMPLETION % {'script': script, 'shell': options.shell}
  1144. else:
  1145. print 'ERROR: You must pass --bash or --zsh'
  1146. CompletionCommand()
  1147. def autocomplete():
  1148. """Command and option completion for the main option parser (and options)
  1149. and its subcommands (and options).
  1150. Enable by sourcing one of the completion shell scripts (bash or zsh).
  1151. """
  1152. # Don't complete if user hasn't sourced bash_completion file.
  1153. if not os.environ.has_key('PIP_AUTO_COMPLETE'):
  1154. return
  1155. cwords = os.environ['COMP_WORDS'].split()[1:]
  1156. cword = int(os.environ['COMP_CWORD'])
  1157. try:
  1158. current = cwords[cword-1]
  1159. except IndexError:
  1160. current = ''
  1161. subcommands = [cmd for cmd, cls in _commands.items() if not cls.hidden]
  1162. options = []
  1163. # subcommand
  1164. if cword == 1:
  1165. # show options of main parser only when necessary
  1166. if current.startswith('-') or current.startswith('--'):
  1167. subcommands += [opt.get_opt_string()
  1168. for opt in parser.option_list
  1169. if opt.help != optparse.SUPPRESS_HELP]
  1170. print ' '.join(filter(lambda x: x.startswith(current), subcommands))
  1171. # subcommand options
  1172. # special case: the 'help' subcommand has no options
  1173. elif cwords[0] in subcommands and cwords[0] != 'help':
  1174. subcommand = _commands.get(cwords[0])
  1175. options += [(opt.get_opt_string(), opt.nargs)
  1176. for opt in subcommand.parser.option_list
  1177. if opt.help != optparse.SUPPRESS_HELP]
  1178. # filter out previously specified options from available options
  1179. prev_opts = [x.split('=')[0] for x in cwords[1:cword-1]]
  1180. options = filter(lambda (x, v): x not in prev_opts, options)
  1181. # filter options by current input
  1182. options = [(k, v) for k, v in options if k.startswith(current)]
  1183. for option in options:
  1184. opt_label = option[0]
  1185. # append '=' to options which require args
  1186. if option[1]:
  1187. opt_label += '='
  1188. print opt_label
  1189. sys.exit(1)
  1190. def main(initial_args=None):
  1191. if initial_args is None:
  1192. initial_args = sys.argv[1:]
  1193. autocomplete()
  1194. options, args = parser.parse_args(initial_args)
  1195. if options.help and not args:
  1196. args = ['help']
  1197. if not args:
  1198. parser.error('You must give a command (use "pip help" see a list of commands)')
  1199. command = args[0].lower()
  1200. ## FIXME: search for a command match?
  1201. if command not in _commands:
  1202. parser.error('No command by the name %(script)s %(arg)s\n (maybe you meant "%(script)s install %(arg)s")'
  1203. % dict(script=os.path.basename(sys.argv[0]), arg=command))
  1204. command = _commands[command]
  1205. return command.main(initial_args, args[1:], options)
  1206. def get_proxy(proxystr=''):
  1207. """Get the proxy given the option passed on the command line. If an
  1208. empty string is passed it looks at the HTTP_PROXY environment
  1209. variable."""
  1210. if not proxystr:
  1211. proxystr = os.environ.get('HTTP_PROXY', '')
  1212. if proxystr:
  1213. if '@' in proxystr:
  1214. user_password, server_port = proxystr.split('@', 1)
  1215. if ':' in user_password:
  1216. user, password = user_password.split(':', 1)
  1217. else:
  1218. user = user_password
  1219. import getpass
  1220. prompt = 'Password for %s@%s: ' % (user, server_port)
  1221. password = urllib.quote(getpass.getpass(prompt))
  1222. return '%s:%s@%s' % (user, password, server_port)
  1223. else:
  1224. return proxystr
  1225. else:
  1226. return None
  1227. def setup_proxy_handler(proxystr=''):
  1228. """Set the proxy handler given the option passed on the command
  1229. line. If an empty string is passed it looks at the HTTP_PROXY
  1230. environment variable. """
  1231. proxy = get_proxy(proxystr)
  1232. if proxy:
  1233. proxy_support = urllib2.ProxyHandler({"http": proxy, "ftp": proxy})
  1234. opener = urllib2.build_opener(proxy_support, urllib2.CacheFTPHandler)
  1235. urllib2.install_opener(opener)
  1236. def format_exc(exc_info=None):
  1237. if exc_info is None:
  1238. exc_info = sys.exc_info()
  1239. out = StringIO()
  1240. traceback.print_exception(*exc_info, **dict(file=out))
  1241. return out.getvalue()
def restart_in_venv(venv, base, site_packages, args):
    """
    Restart this script using the interpreter in the given virtual environment
    """
    # Resolve a relative venv name against ``base`` (a non-absolute,
    # non-'~' venv only).
    if base and not os.path.isabs(venv) and not venv.startswith('~'):
        base = os.path.expanduser(base)
        # ensure we have an abs basepath at this point:
        #    a relative one makes no sense (or does it?)
        if os.path.isabs(base):
            venv = os.path.join(base, venv)
    if venv.startswith('~'):
        venv = os.path.expanduser(venv)
    if not os.path.exists(venv):
        # Environment missing: create it on the fly if virtualenv is
        # importable, otherwise bail out with exit code 3.
        try:
            import virtualenv
        except ImportError:
            print 'The virtual environment does not exist: %s' % venv
            print 'and virtualenv is not installed, so a new environment cannot be created'
            sys.exit(3)
        print 'Creating new virtualenv environment in %s' % venv
        virtualenv.logger = logger
        logger.indent += 2
        virtualenv.create_environment(venv, site_packages=site_packages)
    if sys.platform == 'win32':
        python = os.path.join(venv, 'Scripts', 'python.exe')
        # check for bin directory which is used in buildouts
        if not os.path.exists(python):
            python = os.path.join(venv, 'bin', 'python.exe')
    else:
        python = os.path.join(venv, 'bin', 'python')
    if not os.path.exists(python):
        # Last resort: maybe the caller passed the interpreter path itself.
        python = venv
    if not os.path.exists(python):
        raise BadCommand('Cannot find virtual environment interpreter at %s' % python)
    base = os.path.dirname(os.path.dirname(python))
    file = __file__
    if file.endswith('.pyc'):
        # Re-run from the .py source, not the compiled file.
        file = file[:-1]
    # Re-invoke this script under the venv's interpreter; the trailing
    # ___VENV_RESTART___ marker presumably tells the restarted process it
    # is the second invocation -- confirm against the arg parsing above.
    proc = subprocess.Popen(
        [python, file] + args + [base, '___VENV_RESTART___'])
    proc.wait()
    # Propagate the child's exit status.
    sys.exit(proc.returncode)
class PackageFinder(object):
    """This finds packages.
    This is meant to match easy_install's technique for looking for
    packages, by reading pages and looking for appropriate links
    """
    # NOTE(review): not referenced inside this class -- presumably consumed
    # by the page-fetching helpers (PageCache/HTMLPage); confirm.
    failure_limit = 3

    def __init__(self, find_links, index_urls):
        # --find-links locations (local files/dirs or pages of links).
        self.find_links = find_links
        # PyPI-style index base URLs; the first entry is the main index.
        self.index_urls = index_urls
        # Links harvested from packages' dependency_links metadata.
        self.dependency_links = []
        self.cache = PageCache()
        # These are boring links that have already been logged somehow:
        self.logged_links = set()

    def add_dependency_links(self, links):
        """Record dependency_links metadata for later searching."""
        ## FIXME: this shouldn't be global list this, it should only
        ## apply to requirements of the package that specifies the
        ## dependency_links value
        ## FIXME: also, we should track comes_from (i.e., use Link)
        self.dependency_links.extend(links)

    def find_requirement(self, req, upgrade):
        """Return the Link to the best distribution satisfying ``req``.

        Returns None when the already-installed version is the best match
        and ``upgrade`` is false; raises DistributionNotFound when nothing
        usable is located.
        """
        url_name = req.url_name
        # Only check main index if index URL is given:
        main_index_url = None
        if self.index_urls:
            # Check that we have the url_name correctly spelled:
            main_index_url = Link(posixpath.join(self.index_urls[0], url_name))
            # This will also cache the page, so it's okay that we get it again later:
            page = self._get_page(main_index_url, req)
            if page is None:
                url_name = self._find_url_name(Link(self.index_urls[0]), url_name, req) or req.url_name
        def mkurl_pypi_url(url):
            loc = posixpath.join(url, url_name)
            # For maximum compatibility with easy_install, ensure the path
            # ends in a trailing slash.  Although this isn't in the spec
            # (and PyPI can handle it without the slash) some other index
            # implementations might break if they relied on easy_install's behavior.
            if not loc.endswith('/'):
                loc = loc + '/'
            return loc
        if url_name is not None:
            locations = [
                mkurl_pypi_url(url)
                for url in self.index_urls] + self.find_links
        else:
            locations = list(self.find_links)
        locations.extend(self.dependency_links)
        # For pinned (absolute) versions, also try the version-specific
        # page on the main index first.
        for version in req.absolute_versions:
            if url_name is not None and main_index_url is not None:
                locations = [
                    posixpath.join(main_index_url.url, version)] + locations
        # Split file: URLs (searched directly on disk) from remote URLs.
        file_locations = []
        url_locations = []
        for url in locations:
            if url.startswith('file:'):
                fn = url_to_filename(url)
                if os.path.isdir(fn):
                    path = os.path.realpath(fn)
                    for item in os.listdir(path):
                        file_locations.append(
                            filename_to_url2(os.path.join(path, item)))
                elif os.path.isfile(fn):
                    file_locations.append(filename_to_url2(fn))
            else:
                url_locations.append(url)
        locations = [Link(url) for url in url_locations]
        logger.debug('URLs to search for versions for %s:' % req)
        for location in locations:
            logger.debug('* %s' % location)
        # Candidate tuples are (parsed_version, link, version_string).
        found_versions = []
        found_versions.extend(
            self._package_versions(
                [Link(url, '-f') for url in self.find_links], req.name.lower()))
        page_versions = []
        for page in self._get_pages(locations, req):
            logger.debug('Analyzing links from page %s' % page.url)
            logger.indent += 2
            try:
                page_versions.extend(self._package_versions(page.links, req.name.lower()))
            finally:
                logger.indent -= 2
        dependency_versions = list(self._package_versions(
            [Link(url) for url in self.dependency_links], req.name.lower()))
        if dependency_versions:
            logger.info('dependency_links found: %s' % ', '.join([link.url for parsed, link, version in dependency_versions]))
        file_versions = list(self._package_versions(
            [Link(url) for url in file_locations], req.name.lower()))
        if not found_versions and not page_versions and not dependency_versions and not file_versions:
            logger.fatal('Could not find any downloads that satisfy the requirement %s' % req)
            raise DistributionNotFound('No distributions at all found for %s' % req)
        if req.satisfied_by is not None:
            # Represent the installed distribution with link=Inf so it sorts
            # as "best" unless a newer remote version beats it.
            found_versions.append((req.satisfied_by.parsed_version, Inf, req.satisfied_by.version))
        if file_versions:
            file_versions.sort(reverse=True)
            logger.info('Local files found: %s' % ', '.join([url_to_filename(link.url) for parsed, link, version in file_versions]))
            found_versions = file_versions + found_versions
        all_versions = found_versions + page_versions + dependency_versions
        applicable_versions = []
        for (parsed_version, link, version) in all_versions:
            if version not in req.req:
                logger.info("Ignoring link %s, version %s doesn't match %s"
                            % (link, version, ','.join([''.join(s) for s in req.req.specs])))
                continue
            applicable_versions.append((link, version))
        # Newest version first (key extracts the version string, cmp orders
        # by the parsed versions, descending).
        applicable_versions = sorted(applicable_versions, key=operator.itemgetter(1),
            cmp=lambda x, y: cmp(pkg_resources.parse_version(y), pkg_resources.parse_version(x))
        )
        existing_applicable = bool([link for link, version in applicable_versions if link is Inf])
        if not upgrade and existing_applicable:
            # NOTE(review): applicable_versions holds (link, version) pairs
            # and the installed distribution is marked by link=Inf, so this
            # check looks like it should read [0][0] (as done below) rather
            # than [0][1]; confirm before changing.
            if applicable_versions[0][1] is Inf:
                logger.info('Existing installed version (%s) is most up-to-date and satisfies requirement'
                            % req.satisfied_by.version)
            else:
                logger.info('Existing installed version (%s) satisfies requirement (most up-to-date version is %s)'
                            % (req.satisfied_by.version, applicable_versions[0][1]))
            return None
        if not applicable_versions:
            logger.fatal('Could not find a version that satisfies the requirement %s (from versions: %s)'
                         % (req, ', '.join([version for parsed_version, link, version in found_versions])))
            raise DistributionNotFound('No distributions matching the version for %s' % req)
        if applicable_versions[0][0] is Inf:
            # We have an existing version, and its the best version
            logger.info('Installed version (%s) is most up-to-date (past versions: %s)'
                        % (req.satisfied_by.version, ', '.join([version for link, version in applicable_versions[1:]]) or 'none'))
            return None
        if len(applicable_versions) > 1:
            logger.info('Using version %s (newest of versions: %s)' %
                        (applicable_versions[0][1], ', '.join([version for link, version in applicable_versions])))
        return applicable_versions[0][0]

    def _find_url_name(self, index_url, url_name, req):
        """Finds the true URL name of a package, when the given name isn't quite correct.
        This is usually used to implement case-insensitivity."""
        if not index_url.url.endswith('/'):
            # Vaguely part of the PyPI API... weird but true.
            ## FIXME: bad to modify this?
            index_url.url += '/'
        page = self._get_page(index_url, req)
        if page is None:
            logger.fatal('Cannot fetch index base URL %s' % index_url)
            return
        norm_name = normalize_name(req.url_name)
        for link in page.links:
            base = posixpath.basename(link.path.rstrip('/'))
            if norm_name == normalize_name(base):
                logger.notify('Real name of requirement %s is %s' % (url_name, base))
                return base
        return None

    def _get_pages(self, locations, req):
        """Yields (page, page_url) from the given locations, skipping
        locations that have errors, and adding download/homepage links"""
        pending_queue = Queue()
        for location in locations:
            pending_queue.put(location)
        done = []
        seen = set()
        threads = []
        # Fetch pages concurrently, at most 10 worker threads.
        for i in range(min(10, len(locations))):
            t = threading.Thread(target=self._get_queued_page, args=(req, pending_queue, done, seen))
            t.setDaemon(True)
            threads.append(t)
            t.start()
        for t in threads:
            t.join()
        return done

    _log_lock = threading.Lock()

    def _get_queued_page(self, req, pending_queue, done, seen):
        """Worker for _get_pages: drain the queue, fetching each page and
        enqueueing its rel links, until the queue is empty."""
        while 1:
            try:
                location = pending_queue.get(False)
            except QueueEmpty:
                return
            if location in seen:
                continue
            seen.add(location)
            page = self._get_page(location, req)
            if page is None:
                continue
            done.append(page)
            for link in page.rel_links():
                pending_queue.put(link)

    # '#egg=name' fragments on links.
    _egg_fragment_re = re.compile(r'#egg=([^&]*)')
    # 'name-version' stems of distribution filenames.
    _egg_info_re = re.compile(r'([a-z0-9_.]+)-([a-z0-9_.-]+)', re.I)
    # Trailing '-pyX.Y' tags on version strings.
    _py_version_re = re.compile(r'-py([123]\.[0-9])$')

    def _sort_links(self, links):
        "Brings links in order, non-egg links first, egg links second"
        eggs, no_eggs = [], []
        for link in links:
            if link.egg_fragment:
                eggs.append(link)
            else:
                no_eggs.append(link)
        return no_eggs + eggs

    def _package_versions(self, links, search_name):
        """Yield (parsed_version, link, version) for links matching
        ``search_name``, skipping duplicates and unusable links."""
        # Used as a set; only the keys matter.
        seen_links = {}
        for link in self._sort_links(links):
            if link.url in seen_links:
                continue
            seen_links[link.url] = None
            if link.egg_fragment:
                egg_info = link.egg_fragment
            else:
                path = link.path
                egg_info, ext = link.splitext()
                if not ext:
                    if link not in self.logged_links:
                        logger.debug('Skipping link %s; not a file' % link)
                        self.logged_links.add(link)
                    continue
                if egg_info.endswith('.tar'):
                    # Special double-extension case:
                    egg_info = egg_info[:-4]
                    ext = '.tar' + ext
                if ext not in ('.tar.gz', '.tar.bz2', '.tar', '.tgz', '.zip'):
                    if link not in self.logged_links:
                        logger.debug('Skipping link %s; unknown archive format: %s' % (link, ext))
                        self.logged_links.add(link)
                    continue
            version = self._egg_info_matches(egg_info, search_name, link)
            if version is None:
                logger.debug('Skipping link %s; wrong project name (not %s)' % (link, search_name))
                continue
            match = self._py_version_re.search(version)
            if match:
                # Strip a '-pyX.Y' tag and skip links built for another
                # Python version.
                version = version[:match.start()]
                py_version = match.group(1)
                if py_version != sys.version[:3]:
                    logger.debug('Skipping %s because Python version is incorrect' % link)
                    continue
            logger.debug('Found link %s, version: %s' % (link, version))
            yield (pkg_resources.parse_version(version),
                   link,
                   version)

    def _egg_info_matches(self, egg_info, search_name, link):
        """Return the version part of a 'name-version' stem when its name
        matches ``search_name``, else None."""
        match = self._egg_info_re.search(egg_info)
        if not match:
            logger.debug('Could not parse version from link: %s' % link)
            return None
        name = match.group(0).lower()
        # To match the "safe" name that pkg_resources creates:
        name = name.replace('_', '-')
        if name.startswith(search_name.lower()):
            return match.group(0)[len(search_name):].lstrip('-')
        else:
            return None

    def _get_page(self, link, req):
        # Delegates to HTMLPage, sharing this finder's page cache.
        return HTMLPage.get_page(link, req, cache=self.cache)
  1529. class InstallRequirement(object):
    def __init__(self, req, comes_from, source_dir=None, editable=False,
                 url=None, update=True):
        """One requirement to be installed.

        ``req`` is a pkg_resources.Requirement or a specifier string
        (parsed here); it may be None when only ``url`` is known.
        ``comes_from`` is the requirement (or a string) that caused this
        one, used in messages.
        """
        if isinstance(req, basestring):
            req = pkg_resources.Requirement.parse(req)
        self.req = req
        self.comes_from = comes_from
        self.source_dir = source_dir
        self.editable = editable
        self.url = url
        # Lazily-computed cache (reset when the source is moved).
        self._egg_info_path = None
        # This holds the pkg_resources.Distribution object if this requirement
        # is already available:
        self.satisfied_by = None
        # This hold the pkg_resources.Distribution object if this requirement
        # conflicts with another installed distribution:
        self.conflicts_with = None
        # Temporary build dir used when no name is known yet (see build_location).
        self._temp_build_dir = None
        # None until determined from the unpacked source -- presumably pip's
        # bundle support; confirm against the rest of the class.
        self._is_bundle = None
        # True if the editable should be updated:
        self.update = update
        # Set to True after successful installation
        self.install_succeeded = None
        # UninstallPathSet of uninstalled distribution (for possible rollback)
        self.uninstalled = None
  1554. @classmethod
  1555. def from_editable(cls, editable_req, comes_from=None, default_vcs=None):
  1556. name, url = parse_editable(editable_req, default_vcs)
  1557. if url.startswith('file:'):
  1558. source_dir = url_to_filename(url)
  1559. else:
  1560. source_dir = None
  1561. return cls(name, comes_from, source_dir=source_dir, editable=True, url=url)
  1562. @classmethod
  1563. def from_line(cls, name, comes_from=None):
  1564. """Creates an InstallRequirement from a name, which might be a
  1565. requirement, filename, or URL.
  1566. """
  1567. url = None
  1568. name = name.strip()
  1569. req = name
  1570. if is_url(name):
  1571. url = name
  1572. ## FIXME: I think getting the requirement here is a bad idea:
  1573. #req = get_requirement_from_url(url)
  1574. req = None
  1575. elif is_filename(name):
  1576. if not os.path.exists(name):
  1577. logger.warn('Requirement %r looks like a filename, but the file does not exist'
  1578. % name)
  1579. url = filename_to_url(name)
  1580. #req = get_requirement_from_url(url)
  1581. req = None
  1582. return cls(req, comes_from, url=url)
  1583. def __str__(self):
  1584. if self.req:
  1585. s = str(self.req)
  1586. if self.url:
  1587. s += ' from %s' % self.url
  1588. else:
  1589. s = self.url
  1590. if self.satisfied_by is not None:
  1591. s += ' in %s' % display_path(self.satisfied_by.location)
  1592. if self.comes_from:
  1593. if isinstance(self.comes_from, basestring):
  1594. comes_from = self.comes_from
  1595. else:
  1596. comes_from = self.comes_from.from_path()
  1597. if comes_from:
  1598. s += ' (from %s)' % comes_from
  1599. return s
  1600. def from_path(self):
  1601. if self.req is None:
  1602. return None
  1603. s = str(self.req)
  1604. if self.comes_from:
  1605. if isinstance(self.comes_from, basestring):
  1606. comes_from = self.comes_from
  1607. else:
  1608. comes_from = self.comes_from.from_path()
  1609. if comes_from:
  1610. s += '->' + comes_from
  1611. return s
  1612. def build_location(self, build_dir, unpack=True):
  1613. if self._temp_build_dir is not None:
  1614. return self._temp_build_dir
  1615. if self.req is None:
  1616. self._temp_build_dir = tempfile.mkdtemp('-build', 'pip-')
  1617. self._ideal_build_dir = build_dir
  1618. return self._temp_build_dir
  1619. if self.editable:
  1620. name = self.name.lower()
  1621. else:
  1622. name = self.name
  1623. # FIXME: Is there a better place to create the build_dir? (hg and bzr need this)
  1624. if not os.path.exists(build_dir):
  1625. os.makedirs(build_dir)
  1626. return os.path.join(build_dir, name)
def correct_build_location(self):
    """If the build location was a temporary directory, this will move it
    to a new more permanent location.

    Used after run_egg_info() has discovered the package name for a
    requirement that started out unnamed (URL/file) and was therefore
    unpacked into a mkdtemp() directory by build_location().
    """
    if self.source_dir is not None:
        # Already has a real source location; nothing to do.
        return
    assert self.req is not None
    assert self._temp_build_dir
    old_location = self._temp_build_dir
    # _ideal_build_dir was stashed by build_location(); consume it.
    new_build_dir = self._ideal_build_dir
    del self._ideal_build_dir
    if self.editable:
        name = self.name.lower()
    else:
        name = self.name
    new_location = os.path.join(new_build_dir, name)
    if not os.path.exists(new_build_dir):
        logger.debug('Creating directory %s' % new_build_dir)
        os.makedirs(new_build_dir)
    if os.path.exists(new_location):
        raise InstallationError(
            'A package already exists in %s; please remove it to continue'
            % display_path(new_location))
    logger.debug('Moving package %s from %s to new location %s'
                 % (self, display_path(old_location), display_path(new_location)))
    shutil.move(old_location, new_location)
    self._temp_build_dir = new_location
    self.source_dir = new_location
    # Cached egg-info path pointed into the old location; invalidate it.
    self._egg_info_path = None
  1655. @property
  1656. def name(self):
  1657. if self.req is None:
  1658. return None
  1659. return self.req.project_name
  1660. @property
  1661. def url_name(self):
  1662. if self.req is None:
  1663. return None
  1664. return urllib.quote(self.req.unsafe_name)
  1665. @property
  1666. def setup_py(self):
  1667. return os.path.join(self.source_dir, 'setup.py')
def run_egg_info(self, force_root_egg_info=False):
    """Run ``setup.py egg_info`` in the source tree to generate metadata.

    Afterwards, if the requirement had no name yet, it is parsed out of
    the generated PKG-INFO and the temporary build directory is moved to
    its permanent location.

    force_root_egg_info: write the .egg-info at the source root instead
    of the pip-egg-info/ subdirectory (used when packing bundles).
    """
    assert self.source_dir
    if self.name:
        logger.notify('Running setup.py egg_info for package %s' % self.name)
    else:
        logger.notify('Running setup.py egg_info for package from %s' % self.url)
    logger.indent += 2
    try:
        # _run_setup_py is a ``python -c`` template; substitute the real
        # setup.py path and package name before running it.
        script = self._run_setup_py
        script = script.replace('__SETUP_PY__', repr(self.setup_py))
        script = script.replace('__PKG_NAME__', repr(self.name))
        # We can't put the .egg-info files at the root, because then the source code will be mistaken
        # for an installed egg, causing problems
        if self.editable or force_root_egg_info:
            egg_base_option = []
        else:
            egg_info_dir = os.path.join(self.source_dir, 'pip-egg-info')
            if not os.path.exists(egg_info_dir):
                os.makedirs(egg_info_dir)
            egg_base_option = ['--egg-base', 'pip-egg-info']
        call_subprocess(
            [sys.executable, '-c', script, 'egg_info'] + egg_base_option,
            cwd=self.source_dir, filter_stdout=self._filter_install, show_stdout=False,
            command_level=Logger.VERBOSE_DEBUG,
            command_desc='python setup.py egg_info')
    finally:
        logger.indent -= 2
    if not self.req:
        # The name was unknown (URL/file requirement): take it from the
        # freshly generated PKG-INFO metadata.
        self.req = pkg_resources.Requirement.parse(self.pkg_info()['Name'])
        self.correct_build_location()
## FIXME: This is a lame hack, entirely for PasteScript which has
## a self-provided entry point that causes this awkwardness
# Template executed via ``python -c`` by run_egg_info(); the __SETUP_PY__
# (and __PKG_NAME__) placeholders are textually substituted first.  It
# monkeypatches setuptools' egg_info command so entry-point writers are
# loaded with require=False, avoiding dependency resolution while the
# metadata is being generated.
_run_setup_py = """
__file__ = __SETUP_PY__
from setuptools.command import egg_info
def replacement_run(self):
    self.mkpath(self.egg_info)
    installer = self.distribution.fetch_build_egg
    for ep in egg_info.iter_entry_points('egg_info.writers'):
        # require=False is the change we're making:
        writer = ep.load(require=False)
        if writer:
            writer(self, ep.name, egg_info.os.path.join(self.egg_info,ep.name))
    self.find_sources()
egg_info.egg_info.run = replacement_run
execfile(__file__)
"""
  1715. def egg_info_data(self, filename):
  1716. if self.satisfied_by is not None:
  1717. if not self.satisfied_by.has_metadata(filename):
  1718. return None
  1719. return self.satisfied_by.get_metadata(filename)
  1720. assert self.source_dir
  1721. filename = self.egg_info_path(filename)
  1722. if not os.path.exists(filename):
  1723. return None
  1724. fp = open(filename, 'r')
  1725. data = fp.read()
  1726. fp.close()
  1727. return data
def egg_info_path(self, filename):
    """Return the absolute path of *filename* inside this requirement's
    .egg-info directory, locating (and caching) that directory on the
    first call.

    Asserts that exactly one .egg-info directory exists.
    """
    if self._egg_info_path is None:
        if self.editable:
            base = self.source_dir
        else:
            base = os.path.join(self.source_dir, 'pip-egg-info')
        filenames = os.listdir(base)
        if self.editable:
            # Editable checkouts may keep the .egg-info anywhere in the
            # tree: walk it, pruning VCS control directories.
            filenames = []
            for root, dirs, files in os.walk(base):
                for dir in vcs.dirnames:
                    if dir in dirs:
                        dirs.remove(dir)
                filenames.extend([os.path.join(root, dir)
                                  for dir in dirs])
        filenames = [f for f in filenames if f.endswith('.egg-info')]
        assert filenames, "No files/directories in %s (from %s)" % (base, filename)
        assert len(filenames) == 1, "Unexpected files/directories in %s: %s" % (base, ' '.join(filenames))
        self._egg_info_path = os.path.join(base, filenames[0])
    return os.path.join(self._egg_info_path, filename)
  1748. def egg_info_lines(self, filename):
  1749. data = self.egg_info_data(filename)
  1750. if not data:
  1751. return []
  1752. result = []
  1753. for line in data.splitlines():
  1754. line = line.strip()
  1755. if not line or line.startswith('#'):
  1756. continue
  1757. result.append(line)
  1758. return result
  1759. def pkg_info(self):
  1760. p = FeedParser()
  1761. data = self.egg_info_data('PKG-INFO')
  1762. if not data:
  1763. logger.warn('No PKG-INFO file found in %s' % display_path(self.egg_info_path('PKG-INFO')))
  1764. p.feed(data or '')
  1765. return p.close()
@property
def dependency_links(self):
    """URLs listed in the package's dependency_links.txt metadata."""
    return self.egg_info_lines('dependency_links.txt')
  1769. _requirements_section_re = re.compile(r'\[(.*?)\]')
  1770. def requirements(self, extras=()):
  1771. in_extra = None
  1772. for line in self.egg_info_lines('requires.txt'):
  1773. match = self._requirements_section_re.match(line)
  1774. if match:
  1775. in_extra = match.group(1)
  1776. continue
  1777. if in_extra and in_extra not in extras:
  1778. # Skip requirement for an extra we aren't requiring
  1779. continue
  1780. yield line
  1781. @property
  1782. def absolute_versions(self):
  1783. for qualifier, version in self.req.specs:
  1784. if qualifier == '==':
  1785. yield version
@property
def installed_version(self):
    """Version string as recorded in the source tree's PKG-INFO."""
    return self.pkg_info()['version']
  1789. def assert_source_matches_version(self):
  1790. assert self.source_dir
  1791. if self.comes_from is None:
  1792. # We don't check the versions of things explicitly installed.
  1793. # This makes, e.g., "pip Package==dev" possible
  1794. return
  1795. version = self.installed_version
  1796. if version not in self.req:
  1797. logger.fatal(
  1798. 'Source in %s has the version %s, which does not match the requirement %s'
  1799. % (display_path(self.source_dir), version, self))
  1800. raise InstallationError(
  1801. 'Source in %s has version %s that conflicts with %s'
  1802. % (display_path(self.source_dir), version, self))
  1803. else:
  1804. logger.debug('Source in %s has version %s, which satisfies requirement %s'
  1805. % (display_path(self.source_dir), version, self))
def update_editable(self, obtain=True):
    """Refresh an editable checkout from its version-control URL.

    obtain: when True, check out / update a working copy; when False,
    export the sources instead.
    """
    if not self.url:
        logger.info("Cannot update repository at %s; repository location is unknown" % self.source_dir)
        return
    assert self.editable
    assert self.source_dir
    if self.url.startswith('file:'):
        # Static paths don't get updated
        return
    # Editable VCS URLs use the ``<vcs>+<real-url>`` scheme.
    assert '+' in self.url, "bad url: %r" % self.url
    if not self.update:
        # update=False is set for requirements restored from a bundle.
        return
    vc_type, url = self.url.split('+', 1)
    backend = vcs.get_backend(vc_type)
    if backend:
        vcs_backend = backend(self.url)
        if obtain:
            vcs_backend.obtain(self.source_dir)
        else:
            vcs_backend.export(self.source_dir)
    else:
        assert 0, (
            'Unexpected version control type (in %s): %s'
            % (self.url, vc_type))
def uninstall(self, auto_confirm=False):
    """
    Uninstall the distribution currently satisfying this requirement.

    Prompts before removing or modifying files unless
    ``auto_confirm`` is True.

    Refuses to delete or modify files outside of ``sys.prefix`` -
    thus uninstallation within a virtual environment can only
    modify that virtual environment, even if the virtualenv is
    linked to global site-packages.

    On success the collected UninstallPathSet is kept in
    ``self.uninstalled`` so rollback_uninstall() can restore it.
    """
    if not self.check_if_exists():
        raise UninstallationError("Cannot uninstall requirement %s, not installed" % (self.name,))
    dist = self.satisfied_by or self.conflicts_with
    # Restricting to sys.prefix keeps changes inside the active
    # (possibly virtual) environment.
    paths_to_remove = UninstallPathSet(dist, sys.prefix)
    pip_egg_info_path = os.path.join(dist.location,
                                     dist.egg_name()) + '.egg-info'
    easy_install_egg = dist.egg_name() + '.egg'
    # This won't find a globally-installed develop egg if
    # we're in a virtualenv.
    # (There doesn't seem to be any metadata in the
    # Distribution object for a develop egg that points back
    # to its .egg-link and easy-install.pth files).  That's
    # OK, because we restrict ourselves to making changes
    # within sys.prefix anyway.
    develop_egg_link = os.path.join(site_packages,
                                    dist.project_name) + '.egg-link'
    # The three install flavors are distinguished by their on-disk
    # artifacts: pip's .egg-info dir, easy_install's .egg, or an
    # .egg-link for a develop install.
    if os.path.exists(pip_egg_info_path):
        # package installed by pip
        paths_to_remove.add(pip_egg_info_path)
        if dist.has_metadata('installed-files.txt'):
            for installed_file in dist.get_metadata('installed-files.txt').splitlines():
                path = os.path.normpath(os.path.join(pip_egg_info_path, installed_file))
                if os.path.exists(path):
                    paths_to_remove.add(path)
        if dist.has_metadata('top_level.txt'):
            for top_level_pkg in [p for p
                                  in dist.get_metadata('top_level.txt').splitlines()
                                  if p]:
                path = os.path.join(dist.location, top_level_pkg)
                if os.path.exists(path):
                    paths_to_remove.add(path)
                elif os.path.exists(path + '.py'):
                    paths_to_remove.add(path + '.py')
                    if os.path.exists(path + '.pyc'):
                        paths_to_remove.add(path + '.pyc')
    elif dist.location.endswith(easy_install_egg):
        # package installed by easy_install
        paths_to_remove.add(dist.location)
        easy_install_pth = os.path.join(os.path.dirname(dist.location),
                                        'easy-install.pth')
        paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)
    elif os.path.isfile(develop_egg_link):
        # develop egg
        fh = open(develop_egg_link, 'r')
        link_pointer = os.path.normcase(fh.readline().strip())
        fh.close()
        assert (link_pointer == dist.location), 'Egg-link %s does not match installed location of %s (at %s)' % (link_pointer, self.name, dist.location)
        paths_to_remove.add(develop_egg_link)
        easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
                                        'easy-install.pth')
        paths_to_remove.add_pth(easy_install_pth, dist.location)
        # fix location (so we can uninstall links to sources outside venv)
        paths_to_remove.location = develop_egg_link
    # find distutils scripts= scripts
    if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
        for script in dist.metadata_listdir('scripts'):
            paths_to_remove.add(os.path.join(bin_py, script))
            if sys.platform == 'win32':
                paths_to_remove.add(os.path.join(bin_py, script) + '.bat')
    # find console_scripts
    if dist.has_metadata('entry_points.txt'):
        config = ConfigParser.SafeConfigParser()
        config.readfp(FakeFile(dist.get_metadata_lines('entry_points.txt')))
        for name, value in config.items('console_scripts'):
            paths_to_remove.add(os.path.join(bin_py, name))
            if sys.platform == 'win32':
                paths_to_remove.add(os.path.join(bin_py, name) + '.exe')
                paths_to_remove.add(os.path.join(bin_py, name) + '-script.py')
    paths_to_remove.remove(auto_confirm)
    self.uninstalled = paths_to_remove
  1910. def rollback_uninstall(self):
  1911. if self.uninstalled:
  1912. self.uninstalled.rollback()
  1913. else:
  1914. logger.error("Can't rollback %s, nothing uninstalled."
  1915. % (self.project_name,))
def archive(self, build_dir):
    """Zip the source tree into ``<name>-<version>.zip`` in *build_dir*,
    prompting for (i)gnore/(w)ipe/(b)ackup when the file exists."""
    assert self.source_dir
    create_archive = True
    archive_name = '%s-%s.zip' % (self.name, self.installed_version)
    archive_path = os.path.join(build_dir, archive_name)
    if os.path.exists(archive_path):
        response = ask('The file %s exists. (i)gnore, (w)ipe, (b)ackup '
                       % display_path(archive_path), ('i', 'w', 'b'))
        if response == 'i':
            create_archive = False
        elif response == 'w':
            logger.warn('Deleting %s' % display_path(archive_path))
            os.remove(archive_path)
        elif response == 'b':
            dest_file = backup_dir(archive_path)
            logger.warn('Backing up %s to %s'
                        % (display_path(archive_path), display_path(dest_file)))
            shutil.move(archive_path, dest_file)
    if create_archive:
        zip = zipfile.ZipFile(archive_path, 'w', zipfile.ZIP_DEFLATED)
        dir = os.path.normcase(os.path.abspath(self.source_dir))
        for dirpath, dirnames, filenames in os.walk(dir):
            if 'pip-egg-info' in dirnames:
                # Don't ship pip's private metadata directory.
                dirnames.remove('pip-egg-info')
            for dirname in dirnames:
                dirname = os.path.join(dirpath, dirname)
                name = self._clean_zip_name(dirname, dir)
                zipdir = zipfile.ZipInfo(self.name + '/' + name + '/')
                # Unix mode bits live in the upper 16 bits of
                # external_attr; mark directory entries rwxr-xr-x.
                zipdir.external_attr = 0755 << 16L
                zip.writestr(zipdir, '')
            for filename in filenames:
                if filename == 'pip-delete-this-directory.txt':
                    continue
                filename = os.path.join(dirpath, filename)
                name = self._clean_zip_name(filename, dir)
                zip.write(filename, self.name + '/' + name)
        zip.close()
    # NOTE(review): there is no matching ``logger.indent += 2`` in this
    # method -- presumably the caller indented before invoking archive();
    # confirm against call sites.
    logger.indent -= 2
    logger.notify('Saved %s' % display_path(archive_path))
  1955. def _clean_zip_name(self, name, prefix):
  1956. assert name.startswith(prefix+'/'), (
  1957. "name %r doesn't start with prefix %r" % (name, prefix))
  1958. name = name[len(prefix)+1:]
  1959. name = name.replace(os.path.sep, '/')
  1960. return name
  1961. def install(self, install_options):
  1962. if self.editable:
  1963. self.install_editable()
  1964. return
  1965. temp_location = tempfile.mkdtemp('-record', 'pip-')
  1966. record_filename = os.path.join(temp_location, 'install-record.txt')
  1967. ## FIXME: I'm not sure if this is a reasonable location; probably not
  1968. ## but we can't put it in the default location, as that is a virtualenv symlink that isn't writable
  1969. header_dir = os.path.join(os.path.dirname(os.path.dirname(self.source_dir)), 'lib', 'include')
  1970. logger.notify('Running setup.py install for %s' % self.name)
  1971. logger.indent += 2
  1972. try:
  1973. call_subprocess(
  1974. [sys.executable, '-c',
  1975. "import setuptools; __file__=%r; execfile(%r)" % (self.setup_py, self.setup_py),
  1976. 'install', '--single-version-externally-managed', '--record', record_filename,
  1977. '--install-headers', header_dir] + install_options,
  1978. cwd=self.source_dir, filter_stdout=self._filter_install, show_stdout=False)
  1979. finally:
  1980. logger.indent -= 2
  1981. self.install_succeeded = True
  1982. f = open(record_filename)
  1983. for line in f:
  1984. line = line.strip()
  1985. if line.endswith('.egg-info'):
  1986. egg_info_dir = line
  1987. break
  1988. else:
  1989. logger.warn('Could not find .egg-info directory in install record for %s' % self)
  1990. ## FIXME: put the record somewhere
  1991. return
  1992. f.close()
  1993. new_lines = []
  1994. f = open(record_filename)
  1995. for line in f:
  1996. filename = line.strip()
  1997. if os.path.isdir(filename):
  1998. filename += os.path.sep
  1999. new_lines.append(make_path_relative(filename, egg_info_dir))
  2000. f.close()
  2001. f = open(os.path.join(egg_info_dir, 'installed-files.txt'), 'w')
  2002. f.write('\n'.join(new_lines)+'\n')
  2003. f.close()
def remove_temporary_source(self):
    """Remove the source files from this requirement, if they are marked
    for deletion (bundles, or a pip-delete-this-directory.txt marker)."""
    if self.is_bundle or os.path.exists(self.delete_marker_filename):
        logger.info('Removing source in %s' % self.source_dir)
        if self.source_dir:
            # NOTE(review): with ignore_errors=True the onerror handler
            # is never invoked (shutil honors only one of the two) --
            # confirm which was intended.
            shutil.rmtree(self.source_dir, ignore_errors=True, onerror=rmtree_errorhandler)
        self.source_dir = None
    if self._temp_build_dir and os.path.exists(self._temp_build_dir):
        shutil.rmtree(self._temp_build_dir, ignore_errors=True, onerror=rmtree_errorhandler)
    self._temp_build_dir = None
def install_editable(self):
    """Run ``setup.py develop`` to install this requirement in editable
    mode; dependencies are skipped here (--no-deps) because the
    RequirementSet handles them itself."""
    logger.notify('Running setup.py develop for %s' % self.name)
    logger.indent += 2
    try:
        ## FIXME: should we do --install-headers here too?
        call_subprocess(
            [sys.executable, '-c',
             "import setuptools; __file__=%r; execfile(%r)" % (self.setup_py, self.setup_py),
             'develop', '--no-deps'], cwd=self.source_dir, filter_stdout=self._filter_install,
            show_stdout=False)
    finally:
        logger.indent -= 2
    self.install_succeeded = True
  2028. def _filter_install(self, line):
  2029. level = Logger.NOTIFY
  2030. for regex in [r'^running .*', r'^writing .*', '^creating .*', '^[Cc]opying .*',
  2031. r'^reading .*', r"^removing .*\.egg-info' \(and everything under it\)$",
  2032. r'^byte-compiling ',
  2033. # Not sure what this warning is, but it seems harmless:
  2034. r"^warning: manifest_maker: standard file '-c' not found$"]:
  2035. if re.search(regex, line.strip()):
  2036. level = Logger.INFO
  2037. break
  2038. return (level, line)
def check_if_exists(self):
    """Find an installed distribution that satisfies or conflicts
    with this requirement, and set self.satisfied_by or
    self.conflicts_with appropriately.  Returns True when some version
    (matching or conflicting) is installed."""
    if self.req is None:
        return False
    try:
        self.satisfied_by = pkg_resources.get_distribution(self.req)
    except pkg_resources.DistributionNotFound:
        return False
    except pkg_resources.VersionConflict:
        # Some other version is installed: record it so upgrade and
        # uninstall logic can act on the conflict.
        self.conflicts_with = pkg_resources.get_distribution(self.req.project_name)
    return True
  2052. @property
  2053. def is_bundle(self):
  2054. if self._is_bundle is not None:
  2055. return self._is_bundle
  2056. base = self._temp_build_dir
  2057. if not base:
  2058. ## FIXME: this doesn't seem right:
  2059. return False
  2060. self._is_bundle = (os.path.exists(os.path.join(base, 'pip-manifest.txt'))
  2061. or os.path.exists(os.path.join(base, 'pyinstall-manifest.txt')))
  2062. return self._is_bundle
def bundle_requirements(self):
    """Yield an InstallRequirement for each package carried in this
    bundle: editable checkouts from src/ first, then sdists from build/.

    Relies on move_bundle_files() having populated
    _bundle_editable_dirs and _bundle_build_dirs.
    """
    for dest_dir in self._bundle_editable_dirs:
        package = os.path.basename(dest_dir)
        ## FIXME: svnism:
        for vcs_backend in vcs.backends:
            url = rev = None
            vcs_bundle_file = os.path.join(
                dest_dir, vcs_backend.bundle_file)
            if os.path.exists(vcs_bundle_file):
                vc_type = vcs_backend.name
                fp = open(vcs_bundle_file)
                content = fp.read()
                fp.close()
                url, rev = vcs_backend().parse_vcs_bundle_file(content)
                break
        if url:
            # Reconstruct the pip-style editable URL: <vcs>+<url>@<rev>.
            url = '%s+%s@%s' % (vc_type, url, rev)
        else:
            url = None
        yield InstallRequirement(
            package, self, editable=True, url=url,
            update=False, source_dir=dest_dir)
    for dest_dir in self._bundle_build_dirs:
        package = os.path.basename(dest_dir)
        yield InstallRequirement(
            package, self,
            source_dir=dest_dir)
def move_bundle_files(self, dest_build_dir, dest_src_dir):
    """Move the bundle's src/ (editable) and build/ (sdist) package
    directories out of the temporary build dir into the real source and
    build dirs, recording the destinations for bundle_requirements()."""
    base = self._temp_build_dir
    assert base
    src_dir = os.path.join(base, 'src')
    build_dir = os.path.join(base, 'build')
    bundle_build_dirs = []
    bundle_editable_dirs = []
    for source_dir, dest_dir, dir_collection in [
        (src_dir, dest_src_dir, bundle_editable_dirs),
        (build_dir, dest_build_dir, bundle_build_dirs)]:
        if os.path.exists(source_dir):
            for dirname in os.listdir(source_dir):
                dest = os.path.join(dest_dir, dirname)
                # Record the destination even when the move is skipped,
                # so the package is still picked up later.
                dir_collection.append(dest)
                if os.path.exists(dest):
                    logger.warn('The directory %s (containing package %s) already exists; cannot move source from bundle %s'
                                % (dest, dirname, self))
                    continue
                if not os.path.exists(dest_dir):
                    logger.info('Creating directory %s' % dest_dir)
                    os.makedirs(dest_dir)
                shutil.move(os.path.join(source_dir, dirname), dest)
            if not os.listdir(source_dir):
                # Tidy away the now-empty src/ or build/ directory.
                os.rmdir(source_dir)
    self._temp_build_dir = None
    self._bundle_build_dirs = bundle_build_dirs
    self._bundle_editable_dirs = bundle_editable_dirs
  2117. @property
  2118. def delete_marker_filename(self):
  2119. assert self.source_dir
  2120. return os.path.join(self.source_dir, 'pip-delete-this-directory.txt')
# Contents of the pip-delete-this-directory.txt marker written into each
# unpacked source tree; its presence tells pip the directory may be removed
# after a successful install (see remove_temporary_source()).
DELETE_MARKER_MESSAGE = '''\
This file is placed here by pip to indicate the source was put
here by pip.

Once this package is successfully installed this source code will be
deleted (unless you remove this file).
'''
  2127. class RequirementSet(object):
def __init__(self, build_dir, src_dir, download_dir, download_cache=None,
             upgrade=False, ignore_installed=False,
             ignore_dependencies=False):
    """Container that collects requirements and drives their
    download/unpack and (un)install.

    build_dir: where sdists are unpacked and built
    src_dir: where editable checkouts are placed
    download_dir: when set, packages are only downloaded (see is_download)
    download_cache: optional directory caching downloaded archives
    """
    self.build_dir = build_dir
    self.src_dir = src_dir
    self.download_dir = download_dir
    self.download_cache = download_cache
    self.upgrade = upgrade
    self.ignore_installed = ignore_installed
    # name -> InstallRequirement for named requirements
    self.requirements = {}
    # Mapping of alias: real_name
    self.requirement_aliases = {}
    # Requirements whose name is not yet known (bare URLs/paths)
    self.unnamed_requirements = []
    self.ignore_dependencies = ignore_dependencies
    self.successfully_downloaded = []
    self.successfully_installed = []
  2144. def __str__(self):
  2145. reqs = [req for req in self.requirements.values()
  2146. if not req.comes_from]
  2147. reqs.sort(key=lambda req: req.name.lower())
  2148. return ' '.join([str(req.req) for req in reqs])
  2149. def add_requirement(self, install_req):
  2150. name = install_req.name
  2151. if not name:
  2152. self.unnamed_requirements.append(install_req)
  2153. else:
  2154. if self.has_requirement(name):
  2155. raise InstallationError(
  2156. 'Double requirement given: %s (aready in %s, name=%r)'
  2157. % (install_req, self.get_requirement(name), name))
  2158. self.requirements[name] = install_req
  2159. ## FIXME: what about other normalizations? E.g., _ vs. -?
  2160. if name.lower() != name:
  2161. self.requirement_aliases[name.lower()] = name
  2162. def has_requirement(self, project_name):
  2163. for name in project_name, project_name.lower():
  2164. if name in self.requirements or name in self.requirement_aliases:
  2165. return True
  2166. return False
@property
def is_download(self):
    """True when this run only downloads packages (a download_dir was
    given).  As a side effect, expands ``~`` in download_dir; raises
    InstallationError when the directory does not exist."""
    if self.download_dir:
        self.download_dir = os.path.expanduser(self.download_dir)
        if os.path.exists(self.download_dir):
            return True
        else:
            logger.fatal('Could not find download directory')
            raise InstallationError(
                "Could not find or access download directory '%s'"
                % display_path(self.download_dir))
    return False
  2179. def get_requirement(self, project_name):
  2180. for name in project_name, project_name.lower():
  2181. if name in self.requirements:
  2182. return self.requirements[name]
  2183. if name in self.requirement_aliases:
  2184. return self.requirements[self.requirement_aliases[name]]
  2185. raise KeyError("No project with the name %r" % project_name)
  2186. def uninstall(self, auto_confirm=False):
  2187. for req in self.requirements.values():
  2188. req.uninstall(auto_confirm=auto_confirm)
def install_files(self, finder, force_root_egg_info=False):
    """Download and unpack every requirement in the set, discovering and
    queueing dependencies as their metadata becomes available.

    finder: PackageFinder used to locate distributions.
    force_root_egg_info: also generate root-level .egg-info (needed when
    the result will be packed into a bundle).

    Requirements that were downloaded/unpacked are appended to
    self.successfully_downloaded.
    """
    unnamed = list(self.unnamed_requirements)
    reqs = self.requirements.values()
    # Worklist loop: dependencies found along the way are appended to
    # ``reqs`` and processed in turn.
    while reqs or unnamed:
        if unnamed:
            req_to_install = unnamed.pop(0)
        else:
            req_to_install = reqs.pop(0)
        install = True
        if not self.ignore_installed and not req_to_install.editable:
            req_to_install.check_if_exists()
            if req_to_install.satisfied_by:
                if self.upgrade:
                    # Treat the installed copy as a conflict so that the
                    # new version replaces it.
                    req_to_install.conflicts_with = req_to_install.satisfied_by
                    req_to_install.satisfied_by = None
                else:
                    install = False
            if req_to_install.satisfied_by:
                logger.notify('Requirement already satisfied '
                              '(use --upgrade to upgrade): %s'
                              % req_to_install)
        if req_to_install.editable:
            logger.notify('Obtaining %s' % req_to_install)
        elif install:
            if req_to_install.url and req_to_install.url.lower().startswith('file:'):
                logger.notify('Unpacking %s' % display_path(url_to_filename(req_to_install.url)))
            else:
                logger.notify('Downloading/unpacking %s' % req_to_install)
        logger.indent += 2
        is_bundle = False
        try:
            if req_to_install.editable:
                if req_to_install.source_dir is None:
                    location = req_to_install.build_location(self.src_dir)
                    req_to_install.source_dir = location
                else:
                    location = req_to_install.source_dir
                if not os.path.exists(self.build_dir):
                    os.makedirs(self.build_dir)
                req_to_install.update_editable(not self.is_download)
                if self.is_download:
                    req_to_install.run_egg_info()
                    req_to_install.archive(self.download_dir)
                else:
                    req_to_install.run_egg_info()
            elif install:
                location = req_to_install.build_location(self.build_dir, not self.is_download)
                ## FIXME: is the existance of the checkout good enough to use it? I don't think so.
                unpack = True
                if not os.path.exists(os.path.join(location, 'setup.py')):
                    ## FIXME: this won't upgrade when there's an existing package unpacked in `location`
                    if req_to_install.url is None:
                        url = finder.find_requirement(req_to_install, upgrade=self.upgrade)
                    else:
                        ## FIXME: should req_to_install.url already be a link?
                        url = Link(req_to_install.url)
                        assert url
                    if url:
                        try:
                            self.unpack_url(url, location, self.is_download)
                        except urllib2.HTTPError, e:
                            logger.fatal('Could not install requirement %s because of error %s'
                                         % (req_to_install, e))
                            raise InstallationError(
                                'Could not install requirement %s because of HTTP error %s for URL %s'
                                % (req_to_install, e, url))
                    else:
                        unpack = False
                if unpack:
                    is_bundle = req_to_install.is_bundle
                    url = None
                    if is_bundle:
                        # A bundle expands into further requirements that
                        # are queued onto the worklist.
                        req_to_install.move_bundle_files(self.build_dir, self.src_dir)
                        for subreq in req_to_install.bundle_requirements():
                            reqs.append(subreq)
                            self.add_requirement(subreq)
                    elif self.is_download:
                        req_to_install.source_dir = location
                        if url and url.scheme in vcs.all_schemes:
                            req_to_install.run_egg_info()
                            req_to_install.archive(self.download_dir)
                    else:
                        req_to_install.source_dir = location
                        req_to_install.run_egg_info()
                        if force_root_egg_info:
                            # We need to run this to make sure that the .egg-info/
                            # directory is created for packing in the bundle
                            req_to_install.run_egg_info(force_root_egg_info=True)
                        req_to_install.assert_source_matches_version()
                        # Mark the unpacked tree for deletion after install.
                        f = open(req_to_install.delete_marker_filename, 'w')
                        f.write(DELETE_MARKER_MESSAGE)
                        f.close()
            if not is_bundle and not self.is_download:
                ## FIXME: shouldn't be globally added:
                finder.add_dependency_links(req_to_install.dependency_links)
                ## FIXME: add extras in here:
                if not self.ignore_dependencies:
                    for req in req_to_install.requirements():
                        try:
                            name = pkg_resources.Requirement.parse(req).project_name
                        except ValueError, e:
                            ## FIXME: proper warning
                            logger.error('Invalid requirement: %r (%s) in requirement %s' % (req, e, req_to_install))
                            continue
                        if self.has_requirement(name):
                            ## FIXME: check for conflict
                            continue
                        subreq = InstallRequirement(req, req_to_install)
                        reqs.append(subreq)
                        self.add_requirement(subreq)
                if req_to_install.name not in self.requirements:
                    self.requirements[req_to_install.name] = req_to_install
            else:
                req_to_install.remove_temporary_source()
            if install:
                self.successfully_downloaded.append(req_to_install)
        finally:
            logger.indent -= 2
  2307. def unpack_url(self, link, location, only_download=False):
  2308. if only_download:
  2309. location = self.download_dir
  2310. for backend in vcs.backends:
  2311. if link.scheme in backend.schemes:
  2312. vcs_backend = backend(link.url)
  2313. if only_download:
  2314. vcs_backend.export(location)
  2315. else:
  2316. vcs_backend.unpack(location)
  2317. return
  2318. dir = tempfile.mkdtemp()
  2319. if link.url.lower().startswith('file:'):
  2320. source = url_to_filename(link.url)
  2321. content_type = mimetypes.guess_type(source)[0]
  2322. self.unpack_file(source, location, content_type, link)
  2323. return
  2324. md5_hash = link.md5_hash
  2325. target_url = link.url.split('#', 1)[0]
  2326. target_file = None
  2327. if self.download_cache:
  2328. if not os.path.isdir(self.download_cache):
  2329. logger.indent -= 2
  2330. logger.notify('Creating supposed download cache at %s' % self.download_cache)
  2331. logger.indent += 2
  2332. os.makedirs(self.download_cache)
  2333. target_file = os.path.join(self.download_cache,
  2334. urllib.quote(target_url, ''))
  2335. if (target_file and os.path.exists(target_file)
  2336. and os.path.exists(target_file+'.content-type')):
  2337. fp = open(target_file+'.content-type')
  2338. content_type = fp.read().strip()
  2339. fp.close()
  2340. if md5_hash:
  2341. download_hash = md5()
  2342. fp = open(target_file, 'rb')
  2343. while 1:
  2344. chunk = fp.read(4096)
  2345. if not chunk:
  2346. break
  2347. download_hash.update(chunk)
  2348. fp.close()
  2349. temp_location = target_file
  2350. logger.notify('Using download cache from %s' % target_file)
  2351. else:
  2352. try:
  2353. resp = urllib2.urlopen(target_url)
  2354. except urllib2.HTTPError, e:
  2355. logger.fatal("HTTP error %s while getting %s" % (e.code, link))
  2356. raise
  2357. except IOError, e:
  2358. # Typically an FTP error
  2359. logger.fatal("Error %s while getting %s" % (e, link))
  2360. raise
  2361. content_type = resp.info()['content-type']
  2362. filename = link.filename
  2363. ext = splitext(filename)[1]
  2364. if not ext:
  2365. ext = mimetypes.guess_extension(content_type)
  2366. if ext:
  2367. filename += ext
  2368. if not ext and link.url != resp.geturl():
  2369. ext = os.path.splitext(resp.geturl())[1]
  2370. if ext:
  2371. filename += ext
  2372. temp_location = os.path.join(dir, filename)
  2373. fp = open(temp_location, 'wb')
  2374. if md5_hash:
  2375. download_hash = md5()
  2376. try:
  2377. total_length = int(resp.info()['content-length'])
  2378. except (ValueError, KeyError):
  2379. total_length = 0
  2380. downloaded = 0
  2381. show_progress = total_length > 40*1000 or not total_length
  2382. show_url = link.show_url
  2383. try:
  2384. if show_progress:
  2385. ## FIXME: the URL can get really long in this message:
  2386. if total_length:
  2387. logger.start_progress('Downloading %s (%s): ' % (show_url, format_size(total_length)))
  2388. else:
  2389. logger.start_progress('Downloading %s (unknown size): ' % show_url)
  2390. else:
  2391. logger.notify('Downloading %s' % show_url)
  2392. logger.debug('Downloading from URL %s' % link)
  2393. while 1:
  2394. chunk = resp.read(4096)
  2395. if not chunk:
  2396. break
  2397. downloaded += len(chunk)
  2398. if show_progress:
  2399. if not total_length:
  2400. logger.show_progress('%s' % format_size(downloaded))
  2401. else:
  2402. logger.show_progress('%3i%% %s' % (100*downloaded/total_length, format_size(downloaded)))
  2403. if md5_hash:
  2404. download_hash.update(chunk)
  2405. fp.write(chunk)
  2406. fp.close()
  2407. finally:
  2408. if show_progress:
  2409. logger.end_progress('%s downloaded' % format_size(downloaded))
  2410. if md5_hash:
  2411. download_hash = download_hash.hexdigest()
  2412. if download_hash != md5_hash:
  2413. logger.fatal("MD5 hash of the package %s (%s) doesn't match the expected hash %s!"
  2414. % (link, download_hash, md5_hash))
  2415. raise InstallationError('Bad MD5 hash for package %s' % link)
  2416. if only_download:
  2417. self.copy_file(temp_location, location, content_type, link)
  2418. else:
  2419. self.unpack_file(temp_location, location, content_type, link)
  2420. if target_file and target_file != temp_location:
  2421. logger.notify('Storing download in cache at %s' % display_path(target_file))
  2422. shutil.copyfile(temp_location, target_file)
  2423. fp = open(target_file+'.content-type', 'w')
  2424. fp.write(content_type)
  2425. fp.close()
  2426. os.unlink(temp_location)
  2427. if target_file is None:
  2428. os.unlink(temp_location)
  2429. def copy_file(self, filename, location, content_type, link):
  2430. copy = True
  2431. download_location = os.path.join(location, link.filename)
  2432. if os.path.exists(download_location):
  2433. response = ask('The file %s exists. (i)gnore, (w)ipe, (b)ackup '
  2434. % display_path(download_location), ('i', 'w', 'b'))
  2435. if response == 'i':
  2436. copy = False
  2437. elif response == 'w':
  2438. logger.warn('Deleting %s' % display_path(download_location))
  2439. os.remove(download_location)
  2440. elif response == 'b':
  2441. dest_file = backup_dir(download_location)
  2442. logger.warn('Backing up %s to %s'
  2443. % (display_path(download_location), display_path(dest_file)))
  2444. shutil.move(download_location, dest_file)
  2445. if copy:
  2446. shutil.copy(filename, download_location)
  2447. logger.indent -= 2
  2448. logger.notify('Saved %s' % display_path(download_location))
    def unpack_file(self, filename, location, content_type, link):
        """Dispatch ``filename`` to the appropriate unpacker based on its
        content type and extension, extracting into ``location``.

        The order of the checks is significant: zip-like formats first
        (a ``.pybundle`` is a zip that keeps its leading directory),
        then tarballs, then an HTML page that is actually an svn
        directory listing; anything else is a fatal error.
        """
        if (content_type == 'application/zip'
            or filename.endswith('.zip')
            or filename.endswith('.pybundle')
            or zipfile.is_zipfile(filename)):
            # Plain zips are flattened; bundles keep their top directory.
            self.unzip_file(filename, location, flatten=not filename.endswith('.pybundle'))
        elif (content_type == 'application/x-gzip'
              or tarfile.is_tarfile(filename)
              or splitext(filename)[1].lower() in ('.tar', '.tar.gz', '.tar.bz2', '.tgz', '.tbz')):
            self.untar_file(filename, location)
        elif (content_type and content_type.startswith('text/html')
              and is_svn_page(file_contents(filename))):
            # The "page" is an svn server's directory listing: check it out.
            # We don't really care about this
            Subversion('svn+' + link.url).unpack(location)
        else:
            ## FIXME: handle?
            ## FIXME: magic signatures?
            logger.fatal('Cannot unpack file %s (downloaded from %s, content-type: %s); cannot detect archive format'
                         % (filename, location, content_type))
            raise InstallationError('Cannot determine archive format of %s' % location)
  2469. def unzip_file(self, filename, location, flatten=True):
  2470. """Unzip the file (zip file located at filename) to the destination
  2471. location"""
  2472. if not os.path.exists(location):
  2473. os.makedirs(location)
  2474. zipfp = open(filename, 'rb')
  2475. try:
  2476. zip = zipfile.ZipFile(zipfp)
  2477. leading = has_leading_dir(zip.namelist()) and flatten
  2478. for name in zip.namelist():
  2479. data = zip.read(name)
  2480. fn = name
  2481. if leading:
  2482. fn = split_leading_dir(name)[1]
  2483. fn = os.path.join(location, fn)
  2484. dir = os.path.dirname(fn)
  2485. if not os.path.exists(dir):
  2486. os.makedirs(dir)
  2487. if fn.endswith('/') or fn.endswith('\\'):
  2488. # A directory
  2489. if not os.path.exists(fn):
  2490. os.makedirs(fn)
  2491. else:
  2492. fp = open(fn, 'wb')
  2493. try:
  2494. fp.write(data)
  2495. finally:
  2496. fp.close()
  2497. finally:
  2498. zipfp.close()
    def untar_file(self, filename, location):
        """Untar the file (tar file located at filename) to the destination location"""
        if not os.path.exists(location):
            os.makedirs(location)
        # Choose the tarfile mode from the extension; fall back to
        # transparent compression detection ('r:*') when unknown.
        if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
            mode = 'r:gz'
        elif filename.lower().endswith('.bz2') or filename.lower().endswith('.tbz'):
            mode = 'r:bz2'
        elif filename.lower().endswith('.tar'):
            mode = 'r'
        else:
            logger.warn('Cannot determine compression type for file %s' % filename)
            mode = 'r:*'
        tar = tarfile.open(filename, mode)
        try:
            # If every member sits under one top-level directory, strip it.
            leading = has_leading_dir([member.name for member in tar.getmembers()])
            for member in tar.getmembers():
                fn = member.name
                if leading:
                    fn = split_leading_dir(fn)[1]
                path = os.path.join(location, fn)
                if member.isdir():
                    if not os.path.exists(path):
                        os.makedirs(path)
                else:
                    try:
                        fp = tar.extractfile(member)
                    except (KeyError, AttributeError), e:
                        # Some corrupt tar files seem to produce this
                        # (specifically bad symlinks)
                        logger.warn(
                            'In the tar file %s the member %s is invalid: %s'
                            % (filename, member.name, e))
                        continue
                    # NOTE(review): tarfile.extractfile() can also return
                    # None for some special members (e.g. links) -- confirm
                    # copyfileobj below can never receive fp=None.
                    if not os.path.exists(os.path.dirname(path)):
                        os.makedirs(os.path.dirname(path))
                    destfp = open(path, 'wb')
                    try:
                        shutil.copyfileobj(fp, destfp)
                    finally:
                        destfp.close()
                    fp.close()
        finally:
            tar.close()
    def install(self, install_options):
        """Install everything in this set (after having downloaded and unpacked the packages)"""
        # Only requirements not already satisfied are installed, unless
        # this is an upgrade run; ordered by name for stable output.
        to_install = sorted([r for r in self.requirements.values()
                             if self.upgrade or not r.satisfied_by],
                            key=lambda p: p.name.lower())
        if to_install:
            logger.notify('Installing collected packages: %s' % (', '.join([req.name for req in to_install])))
        logger.indent += 2
        try:
            for requirement in to_install:
                if requirement.conflicts_with:
                    # A conflicting installation exists: uninstall it first,
                    # keeping enough state to roll back if the new install
                    # fails below.
                    logger.notify('Found existing installation: %s'
                                  % requirement.conflicts_with)
                    logger.indent += 2
                    try:
                        requirement.uninstall(auto_confirm=True)
                    finally:
                        logger.indent -= 2
                try:
                    requirement.install(install_options)
                except:
                    # if install did not succeed, rollback previous uninstall
                    if requirement.conflicts_with and not requirement.install_succeeded:
                        requirement.rollback_uninstall()
                    raise
                requirement.remove_temporary_source()
        finally:
            logger.indent -= 2
        # Record what was installed for reporting by callers.
        self.successfully_installed = to_install
    def create_bundle(self, bundle_filename):
        """Write a .pybundle zip containing the build/ and src/ trees plus
        a pip-manifest.txt describing the included requirements.

        VCS metadata directories are stripped and replaced by a small
        text file explaining how to restore the checkout.
        """
        ## FIXME: can't decide which is better; zip is easier to read
        ## random files from, but tar.bz2 is smaller and not as lame a
        ## format.
        ## FIXME: this file should really include a manifest of the
        ## packages, maybe some other metadata files. It would make
        ## it easier to detect as well.
        zip = zipfile.ZipFile(bundle_filename, 'w', zipfile.ZIP_DEFLATED)
        vcs_dirs = []
        for dir, basename in (self.build_dir, 'build'), (self.src_dir, 'src'):
            dir = os.path.normcase(os.path.abspath(dir))
            for dirpath, dirnames, filenames in os.walk(dir):
                for backend in vcs.backends:
                    vcs_backend = backend()
                    vcs_url = vcs_rev = None
                    if vcs_backend.dirname in dirnames:
                        for vcs_dir in vcs_dirs:
                            if dirpath.startswith(vcs_dir):
                                # vcs bundle file already in parent directory
                                break
                        else:
                            vcs_url, vcs_rev = vcs_backend.get_info(
                                os.path.join(dir, dirpath))
                            vcs_dirs.append(dirpath)
                        vcs_bundle_file = vcs_backend.bundle_file
                        vcs_guide = vcs_backend.guide % {'url': vcs_url,
                                                         'rev': vcs_rev}
                        # Prune the metadata dir so os.walk skips it.
                        dirnames.remove(vcs_backend.dirname)
                        break
                if 'pip-egg-info' in dirnames:
                    dirnames.remove('pip-egg-info')
                for dirname in dirnames:
                    dirname = os.path.join(dirpath, dirname)
                    name = self._clean_zip_name(dirname, dir)
                    # Empty entry creates the directory inside the zip.
                    zip.writestr(basename + '/' + name + '/', '')
                for filename in filenames:
                    if filename == 'pip-delete-this-directory.txt':
                        continue
                    filename = os.path.join(dirpath, filename)
                    name = self._clean_zip_name(filename, dir)
                    zip.write(filename, basename + '/' + name)
                if vcs_url:
                    # Drop in the "how to restore this checkout" note.
                    name = os.path.join(dirpath, vcs_bundle_file)
                    name = self._clean_zip_name(name, dir)
                    zip.writestr(basename + '/' + name, vcs_guide)
        zip.writestr('pip-manifest.txt', self.bundle_requirements())
        zip.close()
        # Unlike installation, this will always delete the build directories
        logger.info('Removing temporary build dir %s and source dir %s'
                    % (self.build_dir, self.src_dir))
        for dir in self.build_dir, self.src_dir:
            if os.path.exists(dir):
                shutil.rmtree(dir)
    # Header text written verbatim at the top of pip-manifest.txt inside a
    # bundle; it is a runtime string, so its content must not be edited.
    BUNDLE_HEADER = '''\
# This is a pip bundle file, that contains many source packages
# that can be installed as a group. You can install this like:
# pip this_file.zip
# The rest of the file contains a list of all the packages included:
'''
    def bundle_requirements(self):
        """Return the pip-manifest.txt text: the header, then
        ``name==version`` lines for top-level requirements, then the
        dependencies that were pulled in to satisfy them."""
        parts = [self.BUNDLE_HEADER]
        for req in sorted(
            [req for req in self.requirements.values()
             if not req.comes_from],
            key=lambda x: x.name):
            parts.append('%s==%s\n' % (req.name, req.installed_version))
        parts.append('# These packages were installed to satisfy the above requirements:\n')
        for req in sorted(
            [req for req in self.requirements.values()
             if req.comes_from],
            key=lambda x: x.name):
            parts.append('%s==%s\n' % (req.name, req.installed_version))
        ## FIXME: should we do something with self.unnamed_requirements?
        return ''.join(parts)
  2646. def _clean_zip_name(self, name, prefix):
  2647. assert name.startswith(prefix+'/'), (
  2648. "name %r doesn't start with prefix %r" % (name, prefix))
  2649. name = name[len(prefix)+1:]
  2650. name = name.replace(os.path.sep, '/')
  2651. return name
  2652. class HTMLPage(object):
  2653. """Represents one page, along with its URL"""
  2654. ## FIXME: these regexes are horrible hacks:
  2655. _homepage_re = re.compile(r'<th>\s*home\s*page', re.I)
  2656. _download_re = re.compile(r'<th>\s*download\s+url', re.I)
  2657. ## These aren't so aweful:
  2658. _rel_re = re.compile("""<[^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*>""", re.I)
  2659. _href_re = re.compile('href=(?:"([^"]*)"|\'([^\']*)\'|([^>\\s\\n]*))', re.I|re.S)
  2660. _base_re = re.compile(r"""<base\s+href\s*=\s*['"]?([^'">]+)""", re.I)
  2661. def __init__(self, content, url, headers=None):
  2662. self.content = content
  2663. self.url = url
  2664. self.headers = headers
  2665. def __str__(self):
  2666. return self.url
  2667. @classmethod
  2668. def get_page(cls, link, req, cache=None, skip_archives=True):
  2669. url = link.url
  2670. url = url.split('#', 1)[0]
  2671. if cache.too_many_failures(url):
  2672. return None
  2673. if url.lower().startswith('svn'):
  2674. logger.debug('Cannot look at svn URL %s' % link)
  2675. return None
  2676. if cache is not None:
  2677. inst = cache.get_page(url)
  2678. if inst is not None:
  2679. return inst
  2680. try:
  2681. if skip_archives:
  2682. if cache is not None:
  2683. if cache.is_archive(url):
  2684. return None
  2685. filename = link.filename
  2686. for bad_ext in ['.tar', '.tar.gz', '.tar.bz2', '.tgz', '.zip']:
  2687. if filename.endswith(bad_ext):
  2688. content_type = cls._get_content_type(url)
  2689. if content_type.lower().startswith('text/html'):
  2690. break
  2691. else:
  2692. logger.debug('Skipping page %s because of Content-Type: %s' % (link, content_type))
  2693. if cache is not None:
  2694. cache.set_is_archive(url)
  2695. return None
  2696. logger.debug('Getting page %s' % url)
  2697. resp = urllib2.urlopen(url)
  2698. real_url = resp.geturl()
  2699. headers = resp.info()
  2700. inst = cls(resp.read(), real_url, headers)
  2701. except (urllib2.HTTPError, urllib2.URLError, socket.timeout, socket.error), e:
  2702. desc = str(e)
  2703. if isinstance(e, socket.timeout):
  2704. log_meth = logger.info
  2705. level =1
  2706. desc = 'timed out'
  2707. elif isinstance(e, urllib2.URLError):
  2708. log_meth = logger.info
  2709. if hasattr(e, 'reason') and isinstance(e.reason, socket.timeout):
  2710. desc = 'timed out'
  2711. level = 1
  2712. else:
  2713. level = 2
  2714. elif isinstance(e, urllib2.HTTPError) and e.code == 404:
  2715. ## FIXME: notify?
  2716. log_meth = logger.info
  2717. level = 2
  2718. else:
  2719. log_meth = logger.info
  2720. level = 1
  2721. log_meth('Could not fetch URL %s: %s' % (link, desc))
  2722. log_meth('Will skip URL %s when looking for download links for %s' % (link.url, req))
  2723. if cache is not None:
  2724. cache.add_page_failure(url, level)
  2725. return None
  2726. if cache is not None:
  2727. cache.add_page([url, real_url], inst)
  2728. return inst
  2729. @staticmethod
  2730. def _get_content_type(url):
  2731. """Get the Content-Type of the given url, using a HEAD request"""
  2732. scheme, netloc, path, query, fragment = urlparse.urlsplit(url)
  2733. if scheme == 'http':
  2734. ConnClass = httplib.HTTPConnection
  2735. elif scheme == 'https':
  2736. ConnClass = httplib.HTTPSConnection
  2737. else:
  2738. ## FIXME: some warning or something?
  2739. ## assertion error?
  2740. return ''
  2741. if query:
  2742. path += '?' + query
  2743. conn = ConnClass(netloc)
  2744. try:
  2745. conn.request('HEAD', path, headers={'Host': netloc})
  2746. resp = conn.getresponse()
  2747. if resp.status != 200:
  2748. ## FIXME: doesn't handle redirects
  2749. return ''
  2750. return resp.getheader('Content-Type') or ''
  2751. finally:
  2752. conn.close()
  2753. @property
  2754. def base_url(self):
  2755. if not hasattr(self, "_base_url"):
  2756. match = self._base_re.search(self.content)
  2757. if match:
  2758. self._base_url = match.group(1)
  2759. else:
  2760. self._base_url = self.url
  2761. return self._base_url
  2762. @property
  2763. def links(self):
  2764. """Yields all links in the page"""
  2765. for match in self._href_re.finditer(self.content):
  2766. url = match.group(1) or match.group(2) or match.group(3)
  2767. url = self.clean_link(urlparse.urljoin(self.base_url, url))
  2768. yield Link(url, self)
  2769. def rel_links(self):
  2770. for url in self.explicit_rel_links():
  2771. yield url
  2772. for url in self.scraped_rel_links():
  2773. yield url
  2774. def explicit_rel_links(self, rels=('homepage', 'download')):
  2775. """Yields all links with the given relations"""
  2776. for match in self._rel_re.finditer(self.content):
  2777. found_rels = match.group(1).lower().split()
  2778. for rel in rels:
  2779. if rel in found_rels:
  2780. break
  2781. else:
  2782. continue
  2783. match = self._href_re.search(match.group(0))
  2784. if not match:
  2785. continue
  2786. url = match.group(1) or match.group(2) or match.group(3)
  2787. url = self.clean_link(urlparse.urljoin(self.base_url, url))
  2788. yield Link(url, self)
  2789. def scraped_rel_links(self):
  2790. for regex in (self._homepage_re, self._download_re):
  2791. match = regex.search(self.content)
  2792. if not match:
  2793. continue
  2794. href_match = self._href_re.search(self.content, pos=match.end())
  2795. if not href_match:
  2796. continue
  2797. url = match.group(1) or match.group(2) or match.group(3)
  2798. if not url:
  2799. continue
  2800. url = self.clean_link(urlparse.urljoin(self.base_url, url))
  2801. yield Link(url, self)
  2802. _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)
  2803. def clean_link(self, url):
  2804. """Makes sure a link is fully encoded. That is, if a ' ' shows up in
  2805. the link, it will be rewritten to %20 (while not over-quoting
  2806. % or other characters)."""
  2807. return self._clean_re.sub(
  2808. lambda match: '%%%2x' % ord(match.group(0)), url)
  2809. class PageCache(object):
  2810. """Cache of HTML pages"""
  2811. failure_limit = 3
  2812. def __init__(self):
  2813. self._failures = {}
  2814. self._pages = {}
  2815. self._archives = {}
  2816. def too_many_failures(self, url):
  2817. return self._failures.get(url, 0) >= self.failure_limit
  2818. def get_page(self, url):
  2819. return self._pages.get(url)
  2820. def is_archive(self, url):
  2821. return self._archives.get(url, False)
  2822. def set_is_archive(self, url, value=True):
  2823. self._archives[url] = value
  2824. def add_page_failure(self, url, level):
  2825. self._failures[url] = self._failures.get(url, 0)+level
  2826. def add_page(self, urls, page):
  2827. for url in urls:
  2828. self._pages[url] = page
  2829. class Link(object):
  2830. def __init__(self, url, comes_from=None):
  2831. self.url = url
  2832. self.comes_from = comes_from
  2833. def __str__(self):
  2834. if self.comes_from:
  2835. return '%s (from %s)' % (self.url, self.comes_from)
  2836. else:
  2837. return self.url
  2838. def __repr__(self):
  2839. return '<Link %s>' % self
  2840. @property
  2841. def filename(self):
  2842. url = self.url
  2843. url = url.split('#', 1)[0]
  2844. url = url.split('?', 1)[0]
  2845. url = url.rstrip('/')
  2846. name = posixpath.basename(url)
  2847. assert name, (
  2848. 'URL %r produced no filename' % url)
  2849. return name
  2850. @property
  2851. def scheme(self):
  2852. return urlparse.urlsplit(self.url)[0]
  2853. @property
  2854. def path(self):
  2855. return urlparse.urlsplit(self.url)[2]
  2856. def splitext(self):
  2857. return splitext(posixpath.basename(self.path.rstrip('/')))
  2858. _egg_fragment_re = re.compile(r'#egg=([^&]*)')
  2859. @property
  2860. def egg_fragment(self):
  2861. match = self._egg_fragment_re.search(self.url)
  2862. if not match:
  2863. return None
  2864. return match.group(1)
  2865. _md5_re = re.compile(r'md5=([a-f0-9]+)')
  2866. @property
  2867. def md5_hash(self):
  2868. match = self._md5_re.search(self.url)
  2869. if match:
  2870. return match.group(1)
  2871. return None
  2872. @property
  2873. def show_url(self):
  2874. return posixpath.basename(self.url.split('#', 1)[0].split('?', 1)[0])
  2875. ############################################################
  2876. ## Writing freeze files
class FrozenRequirement(object):
    """One entry of ``pip freeze`` output: a requirement string plus the
    comments and editable flag needed to reproduce the install."""

    def __init__(self, name, req, editable, comments=()):
        self.name = name
        self.req = req
        self.editable = editable
        self.comments = comments

    # Version tags setuptools appends for svn snapshots: a revision
    # (-r1234) or a date stamp (-20YYMMDD).
    _rev_re = re.compile(r'-r(\d+)$')
    _date_re = re.compile(r'-(20\d\d\d\d\d\d)$')

    @classmethod
    def from_dist(cls, dist, dependency_links, find_tags=False):
        """Build a FrozenRequirement from an installed distribution,
        emitting VCS checkouts (and svn snapshot versions that can be
        traced through ``dependency_links``) as editable requirements."""
        location = os.path.normcase(os.path.abspath(dist.location))
        comments = []
        if vcs.get_backend_name(location):
            editable = True
            req = get_src_requirement(dist, location, find_tags)
            if req is None:
                logger.warn('Could not determine repository location of %s' % location)
                comments.append('## !! Could not determine repository location')
                req = dist.as_requirement()
                editable = False
        else:
            editable = False
            req = dist.as_requirement()
            specs = req.specs
            assert len(specs) == 1 and specs[0][0] == '=='
            version = specs[0][1]
            ver_match = cls._rev_re.search(version)
            date_match = cls._date_re.search(version)
            if ver_match or date_match:
                # The version encodes an svn snapshot; try to recover the
                # repository URL from dependency_links so the freeze line
                # can pin the exact revision.
                svn_backend = vcs.get_backend('svn')
                if svn_backend:
                    svn_location = svn_backend(
                        ).get_location(dist, dependency_links)
                if not svn_location:
                    logger.warn(
                        'Warning: cannot find svn location for %s' % req)
                    comments.append('## FIXME: could not find svn URL in dependency_links for this package:')
                else:
                    comments.append('# Installing as editable to satisfy requirement %s:' % req)
                    if ver_match:
                        rev = ver_match.group(1)
                    else:
                        rev = '{%s}' % date_match.group(1)
                    editable = True
                    req = 'svn+%s@%s#egg=%s' % (svn_location, rev, cls.egg_name(dist))
        return cls(dist.project_name, req, editable, comments)

    @staticmethod
    def egg_name(dist):
        # Strip a trailing "-pyX.Y" so the egg fragment is not tied to one
        # Python version.
        name = dist.egg_name()
        match = re.search(r'-py\d\.\d$', name)
        if match:
            name = name[:match.start()]
        return name

    def __str__(self):
        req = self.req
        if self.editable:
            req = '-e %s' % req
        return '\n'.join(list(self.comments)+[str(req)])+'\n'
  2935. class VersionControl(object):
  2936. name = ''
  2937. dirname = ''
  2938. def __init__(self, url=None, *args, **kwargs):
  2939. self.url = url
  2940. self._cmd = None
  2941. super(VersionControl, self).__init__(*args, **kwargs)
  2942. def _filter(self, line):
  2943. return (Logger.INFO, line)
  2944. @property
  2945. def cmd(self):
  2946. if self._cmd is not None:
  2947. return self._cmd
  2948. command = find_command(self.name)
  2949. if command is None:
  2950. raise BadCommand('Cannot find command %s' % self.name)
  2951. logger.info('Found command %s at %s' % (self.name, command))
  2952. self._cmd = command
  2953. return command
  2954. def get_url_rev(self):
  2955. """
  2956. Returns the correct repository URL and revision by parsing the given
  2957. repository URL
  2958. """
  2959. url = self.url.split('+', 1)[1]
  2960. scheme, netloc, path, query, frag = urlparse.urlsplit(url)
  2961. rev = None
  2962. if '@' in path:
  2963. path, rev = path.rsplit('@', 1)
  2964. url = urlparse.urlunsplit((scheme, netloc, path, query, ''))
  2965. return url, rev
  2966. def get_info(self, location):
  2967. """
  2968. Returns (url, revision), where both are strings
  2969. """
  2970. assert not location.rstrip('/').endswith(self.dirname), 'Bad directory: %s' % location
  2971. return self.get_url(location), self.get_revision(location)
  2972. def normalize_url(self, url):
  2973. """
  2974. Normalize a URL for comparison by unquoting it and removing any trailing slash.
  2975. """
  2976. return urllib.unquote(url).rstrip('/')
  2977. def compare_urls(self, url1, url2):
  2978. """
  2979. Compare two repo URLs for identity, ignoring incidental differences.
  2980. """
  2981. return (self.normalize_url(url1) == self.normalize_url(url2))
  2982. def parse_vcs_bundle_file(self, content):
  2983. """
  2984. Takes the contents of the bundled text file that explains how to revert
  2985. the stripped off version control data of the given package and returns
  2986. the URL and revision of it.
  2987. """
  2988. raise NotImplementedError
  2989. def obtain(self, dest):
  2990. """
  2991. Called when installing or updating an editable package, takes the
  2992. source path of the checkout.
  2993. """
  2994. raise NotImplementedError
  2995. def switch(self, dest, url, rev_options):
  2996. """
  2997. Switch the repo at ``dest`` to point to ``URL``.
  2998. """
  2999. raise NotImplemented
  3000. def update(self, dest, rev_options):
  3001. """
  3002. Update an already-existing repo to the given ``rev_options``.
  3003. """
  3004. raise NotImplementedError
  3005. def check_destination(self, dest, url, rev_options, rev_display):
  3006. """
  3007. Prepare a location to receive a checkout/clone.
  3008. Return True if the location is ready for (and requires) a
  3009. checkout/clone, False otherwise.
  3010. """
  3011. checkout = True
  3012. prompt = False
  3013. if os.path.exists(dest):
  3014. checkout = False
  3015. if os.path.exists(os.path.join(dest, self.dirname)):
  3016. existing_url = self.get_url(dest)
  3017. if self.compare_urls(existing_url, url):
  3018. logger.info('%s in %s exists, and has correct URL (%s)'
  3019. % (self.repo_name.title(), display_path(dest), url))
  3020. logger.notify('Updating %s %s%s'
  3021. % (display_path(dest), self.repo_name, rev_display))
  3022. self.update(dest, rev_options)
  3023. else:
  3024. logger.warn('%s %s in %s exists with URL %s'
  3025. % (self.name, self.repo_name, display_path(dest), existing_url))
  3026. prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ', ('s', 'i', 'w', 'b'))
  3027. else:
  3028. logger.warn('Directory %s already exists, and is not a %s %s.'
  3029. % (dest, self.name, self.repo_name))
  3030. prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b'))
  3031. if prompt:
  3032. logger.warn('The plan is to install the %s repository %s'
  3033. % (self.name, url))
  3034. response = ask('What to do? %s' % prompt[0], prompt[1])
  3035. if response == 's':
  3036. logger.notify('Switching %s %s to %s%s'
  3037. % (self.repo_name, display_path(dest), url, rev_display))
  3038. self.switch(dest, url, rev_options)
  3039. elif response == 'i':
  3040. # do nothing
  3041. pass
  3042. elif response == 'w':
  3043. logger.warn('Deleting %s' % display_path(dest))
  3044. shutil.rmtree(dest)
  3045. checkout = True
  3046. elif response == 'b':
  3047. dest_dir = backup_dir(dest)
  3048. logger.warn('Backing up %s to %s'
  3049. % (display_path(dest), dest_dir))
  3050. shutil.move(dest, dest_dir)
  3051. checkout = True
  3052. return checkout
  3053. def unpack(self, location):
  3054. raise NotImplementedError
  3055. def get_src_requirement(self, dist, location, find_tags=False):
  3056. raise NotImplementedError
# Patterns for scraping svn metadata: the first two read the attribute form
# used by XML-style .svn/entries files; the last two read the labeled lines
# of plain `svn info` output.  (The first two are non-raw strings; the \d
# escape still reaches the regex engine unchanged.)
_svn_xml_url_re = re.compile('url="([^"]+)"')
_svn_rev_re = re.compile('committed-rev="(\d+)"')
_svn_url_re = re.compile(r'URL: (.+)')
_svn_revision_re = re.compile(r'Revision: (.+)')
class Subversion(VersionControl):
    """Version-control backend for Subversion checkouts."""
    name = 'svn'
    dirname = '.svn'
    repo_name = 'checkout'
    schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https')
    bundle_file = 'svn-checkout.txt'
    # ``guide`` is written verbatim into bundle files; it is a runtime
    # string, so its content must not be edited.
    guide = ('# This was an svn checkout; to make it a checkout again run:\n'
             'svn checkout --force -r %(rev)s %(url)s .\n')
    def get_info(self, location):
        """Returns (url, revision), where both are strings"""
        assert not location.rstrip('/').endswith(self.dirname), 'Bad directory: %s' % location
        # LANG=C so the 'URL:' / 'Revision:' labels are not localized.
        output = call_subprocess(
            ['svn', 'info', location], show_stdout=False, extra_environ={'LANG': 'C'})
        match = _svn_url_re.search(output)
        if not match:
            logger.warn('Cannot determine URL of svn checkout %s' % display_path(location))
            logger.info('Output that cannot be parsed: \n%s' % output)
            return None, None
        url = match.group(1).strip()
        match = _svn_revision_re.search(output)
        if not match:
            logger.warn('Cannot determine revision of svn checkout %s' % display_path(location))
            logger.info('Output that cannot be parsed: \n%s' % output)
            return url, None
        return url, match.group(1)
    def get_url(self, location):
        # NOTE(review): a later ``get_url`` definition further down this
        # class shadows this forwarding version -- confirm which is intended.
        return self.get_info(location)[0]
    def get_revision(self, location):
        # NOTE(review): a later ``get_revision`` definition further down
        # this class shadows this forwarding version -- confirm intent.
        return self.get_info(location)[1]
  3090. def parse_vcs_bundle_file(self, content):
  3091. for line in content.splitlines():
  3092. if not line.strip() or line.strip().startswith('#'):
  3093. continue
  3094. match = re.search(r'^-r\s*([^ ])?', line)
  3095. if not match:
  3096. return None, None
  3097. rev = match.group(1)
  3098. rest = line[match.end():].strip().split(None, 1)[0]
  3099. return rest, rev
  3100. return None, None
    def unpack(self, location):
        """Check out the svn repository at the url to the destination location"""
        url, rev = self.get_url_rev()
        # NOTE(review): ``rev`` is parsed but never passed to the checkout
        # command below -- confirm whether revision pinning was intended.
        logger.notify('Checking out svn repository %s to %s' % (url, location))
        logger.indent += 2
        try:
            if os.path.exists(location):
                # Subversion doesn't like to check out over an existing directory
                # --force fixes this, but was only added in svn 1.5
                shutil.rmtree(location, onerror=rmtree_errorhandler)
            call_subprocess(
                ['svn', 'checkout', url, location],
                filter_stdout=self._filter, show_stdout=False)
        finally:
            logger.indent -= 2
    def export(self, location):
        """Export the svn repository at the url to the destination location"""
        url, rev = self.get_url_rev()
        # NOTE(review): the log message says "Checking out" although this
        # runs `svn export`; ``rev`` is also parsed but unused here.
        logger.notify('Checking out svn repository %s to %s' % (url, location))
        logger.indent += 2
        try:
            if os.path.exists(location):
                # Subversion doesn't like to check out over an existing directory
                # --force fixes this, but was only added in svn 1.5
                shutil.rmtree(location, onerror=rmtree_errorhandler)
            call_subprocess(
                ['svn', 'export', url, location],
                filter_stdout=self._filter, show_stdout=False)
        finally:
            logger.indent -= 2
    def switch(self, dest, url, rev_options):
        # Repoint the existing checkout at ``dest`` to a different URL.
        call_subprocess(
            ['svn', 'switch'] + rev_options + [url, dest])
    def update(self, dest, rev_options):
        # Bring the checkout at ``dest`` up to the requested revision.
        call_subprocess(
            ['svn', 'update'] + rev_options + [dest])
  3137. def obtain(self, dest):
  3138. url, rev = self.get_url_rev()
  3139. if rev:
  3140. rev_options = ['-r', rev]
  3141. rev_display = ' (to revision %s)' % rev
  3142. else:
  3143. rev_options = []
  3144. rev_display = ''
  3145. if self.check_destination(dest, url, rev_options, rev_display):
  3146. logger.notify('Checking out %s%s to %s'
  3147. % (url, rev_display, display_path(dest)))
  3148. call_subprocess(
  3149. ['svn', 'checkout', '-q'] + rev_options + [url, dest])
  3150. def get_location(self, dist, dependency_links):
  3151. egg_fragment_re = re.compile(r'#egg=(.*)$')
  3152. for url in dependency_links:
  3153. egg_fragment = Link(url).egg_fragment
  3154. if not egg_fragment:
  3155. continue
  3156. if '-' in egg_fragment:
  3157. ## FIXME: will this work when a package has - in the name?
  3158. key = '-'.join(egg_fragment.split('-')[:-1]).lower()
  3159. else:
  3160. key = egg_fragment
  3161. if key == dist.key:
  3162. return url.split('#', 1)[0]
  3163. return None
    def get_revision(self, location):
        """
        Return the maximum revision for all files under a given location
        """
        # NOTE(review): this definition shadows the earlier forwarding
        # ``get_revision`` in this class.
        # Note: taken from setuptools.command.egg_info
        revision = 0
        for base, dirs, files in os.walk(location):
            if self.dirname not in dirs:
                dirs[:] = []
                continue # no sense walking uncontrolled subdirs
            dirs.remove(self.dirname)
            entries_fn = os.path.join(base, self.dirname, 'entries')
            if not os.path.exists(entries_fn):
                ## FIXME: should we warn?
                continue
            f = open(entries_fn)
            data = f.read()
            f.close()
            if data.startswith('8') or data.startswith('9') or data.startswith('10'):
                # Plain-text entries format (format numbers 8/9/10):
                # records separated by form feeds, fields on lines.
                data = map(str.splitlines,data.split('\n\x0c\n'))
                del data[0][0] # get rid of the '8'
                dirurl = data[0][3]
                revs = [int(d[9]) for d in data if len(d)>9 and d[9]]+[0]
                if revs:
                    localrev = max(revs)
                else:
                    localrev = 0
            elif data.startswith('<?xml'):
                # Older XML entries format.
                dirurl = _svn_xml_url_re.search(data).group(1) # get repository URL
                revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)]+[0]
                if revs:
                    localrev = max(revs)
                else:
                    localrev = 0
            else:
                logger.warn("Unrecognized .svn/entries format; skipping %s", base)
                dirs[:] = []
                continue
            if base == location:
                base_url = dirurl+'/' # save the root url
            elif not dirurl.startswith(base_url):
                dirs[:] = []
                continue # not part of the same svn tree, skip it
            revision = max(revision, localrev)
        return revision
def get_url(self, location):
    """Return the repository URL recorded in the checkout's .svn/entries
    file, walking up from *location* to the directory holding setup.py;
    None if no setup.py or no recognizable entries format is found."""
    # In cases where the source is in a subdirectory, not alongside setup.py
    # we have to look up in the location until we find a real setup.py
    orig_location = location
    while not os.path.exists(os.path.join(location, 'setup.py')):
        last_location = location
        location = os.path.dirname(location)
        if location == last_location:
            # We've traversed up to the root of the filesystem without finding setup.py
            logger.warn("Could not find setup.py for directory %s (tried all parent directories)"
                        % orig_location)
            return None
    f = open(os.path.join(location, self.dirname, 'entries'))
    data = f.read()
    f.close()
    # Plain-text entries format: first line is the format number 8/9/10.
    if data.startswith('8') or data.startswith('9') or data.startswith('10'):
        data = map(str.splitlines,data.split('\n\x0c\n'))
        del data[0][0] # get rid of the '8'
        # Field 3 of the first record is this directory's repository URL.
        return data[0][3]
    elif data.startswith('<?xml'):
        # Older XML-based entries format.
        match = _svn_xml_url_re.search(data)
        if not match:
            raise ValueError('Badly formatted data: %r' % data)
        return match.group(1) # get repository URL
    else:
        logger.warn("Unrecognized .svn/entries format in %s" % location)
        # Or raise exception?
        return None
  3237. def get_tag_revs(self, svn_tag_url):
  3238. stdout = call_subprocess(
  3239. ['svn', 'ls', '-v', svn_tag_url], show_stdout=False)
  3240. results = []
  3241. for line in stdout.splitlines():
  3242. parts = line.split()
  3243. rev = int(parts[0])
  3244. tag = parts[-1].strip('/')
  3245. results.append((tag, rev))
  3246. return results
  3247. def find_tag_match(self, rev, tag_revs):
  3248. best_match_rev = None
  3249. best_tag = None
  3250. for tag, tag_rev in tag_revs:
  3251. if (tag_rev > rev and
  3252. (best_match_rev is None or best_match_rev > tag_rev)):
  3253. # FIXME: Is best_match > tag_rev really possible?
  3254. # or is it a sign something is wacky?
  3255. best_match_rev = tag_rev
  3256. best_tag = tag
  3257. return best_tag
  3258. def get_src_requirement(self, dist, location, find_tags=False):
  3259. repo = self.get_url(location)
  3260. if repo is None:
  3261. return None
  3262. parts = repo.split('/')
  3263. ## FIXME: why not project name?
  3264. egg_project_name = dist.egg_name().split('-', 1)[0]
  3265. rev = self.get_revision(location)
  3266. if parts[-2] in ('tags', 'tag'):
  3267. # It's a tag, perfect!
  3268. full_egg_name = '%s-%s' % (egg_project_name, parts[-1])
  3269. elif parts[-2] in ('branches', 'branch'):
  3270. # It's a branch :(
  3271. full_egg_name = '%s-%s-r%s' % (dist.egg_name(), parts[-1], rev)
  3272. elif parts[-1] == 'trunk':
  3273. # Trunk :-/
  3274. full_egg_name = '%s-dev_r%s' % (dist.egg_name(), rev)
  3275. if find_tags:
  3276. tag_url = '/'.join(parts[:-1]) + '/tags'
  3277. tag_revs = self.get_tag_revs(tag_url)
  3278. match = self.find_tag_match(rev, tag_revs)
  3279. if match:
  3280. logger.notify('trunk checkout %s seems to be equivalent to tag %s' % match)
  3281. repo = '%s/%s' % (tag_url, match)
  3282. full_egg_name = '%s-%s' % (egg_project_name, match)
  3283. else:
  3284. # Don't know what it is
  3285. logger.warn('svn URL does not fit normal structure (tags/branches/trunk): %s' % repo)
  3286. full_egg_name = '%s-dev_r%s' % (egg_project_name, rev)
  3287. return 'svn+%s@%s#egg=%s' % (repo, rev, full_egg_name)
vcs.register(Subversion)  # expose the 'svn+' URL schemes through the vcs registry
  3289. class Git(VersionControl):
  3290. name = 'git'
  3291. dirname = '.git'
  3292. repo_name = 'clone'
  3293. schemes = ('git', 'git+http', 'git+ssh', 'git+git')
  3294. bundle_file = 'git-clone.txt'
  3295. guide = ('# This was a Git repo; to make it a repo again run:\n'
  3296. 'git init\ngit remote add origin %(url)s -f\ngit checkout %(rev)s\n')
  3297. def parse_vcs_bundle_file(self, content):
  3298. url = rev = None
  3299. for line in content.splitlines():
  3300. if not line.strip() or line.strip().startswith('#'):
  3301. continue
  3302. url_match = re.search(r'git\s*remote\s*add\s*origin(.*)\s*-f', line)
  3303. if url_match:
  3304. url = url_match.group(1).strip()
  3305. rev_match = re.search(r'^git\s*checkout\s*-q\s*(.*)\s*', line)
  3306. if rev_match:
  3307. rev = rev_match.group(1).strip()
  3308. if url and rev:
  3309. return url, rev
  3310. return None, None
  3311. def unpack(self, location):
  3312. """Clone the Git repository at the url to the destination location"""
  3313. url, rev = self.get_url_rev()
  3314. logger.notify('Cloning Git repository %s to %s' % (url, location))
  3315. logger.indent += 2
  3316. try:
  3317. if os.path.exists(location):
  3318. os.rmdir(location)
  3319. call_subprocess(
  3320. [self.cmd, 'clone', url, location],
  3321. filter_stdout=self._filter, show_stdout=False)
  3322. finally:
  3323. logger.indent -= 2
  3324. def export(self, location):
  3325. """Export the Git repository at the url to the destination location"""
  3326. temp_dir = tempfile.mkdtemp('-export', 'pip-')
  3327. self.unpack(temp_dir)
  3328. try:
  3329. if not location.endswith('/'):
  3330. location = location + '/'
  3331. call_subprocess(
  3332. [self.cmd, 'checkout-index', '-a', '-f', '--prefix', location],
  3333. filter_stdout=self._filter, show_stdout=False, cwd=temp_dir)
  3334. finally:
  3335. shutil.rmtree(temp_dir)
  3336. def check_rev_options(self, rev, dest, rev_options):
  3337. """Check the revision options before checkout to compensate that tags
  3338. and branches may need origin/ as a prefix"""
  3339. if rev is None:
  3340. # bail and use preset
  3341. return rev_options
  3342. revisions = self.get_tag_revs(dest)
  3343. revisions.update(self.get_branch_revs(dest))
  3344. if rev in revisions:
  3345. # if rev is a sha
  3346. return [rev]
  3347. inverse_revisions = dict((v,k) for k, v in revisions.iteritems())
  3348. if rev not in inverse_revisions: # is rev a name or tag?
  3349. origin_rev = 'origin/%s' % rev
  3350. if origin_rev in inverse_revisions:
  3351. rev = inverse_revisions[origin_rev]
  3352. else:
  3353. logger.warn("Could not find a tag or branch '%s', assuming commit." % rev)
  3354. return [rev]
  3355. def switch(self, dest, url, rev_options):
  3356. call_subprocess(
  3357. [self.cmd, 'config', 'remote.origin.url', url], cwd=dest)
  3358. call_subprocess(
  3359. [self.cmd, 'checkout', '-q'] + rev_options, cwd=dest)
  3360. def update(self, dest, rev_options):
  3361. call_subprocess([self.cmd, 'fetch', '-q'], cwd=dest)
  3362. call_subprocess(
  3363. [self.cmd, 'checkout', '-q', '-f'] + rev_options, cwd=dest)
  3364. def obtain(self, dest):
  3365. url, rev = self.get_url_rev()
  3366. if rev:
  3367. rev_options = [rev]
  3368. rev_display = ' (to %s)' % rev
  3369. else:
  3370. rev_options = ['origin/master']
  3371. rev_display = ''
  3372. if self.check_destination(dest, url, rev_options, rev_display):
  3373. logger.notify('Cloning %s%s to %s' % (url, rev_display, display_path(dest)))
  3374. call_subprocess(
  3375. [self.cmd, 'clone', '-q', url, dest])
  3376. rev_options = self.check_rev_options(rev, dest, rev_options)
  3377. call_subprocess(
  3378. [self.cmd, 'checkout', '-q'] + rev_options, cwd=dest)
  3379. def get_url(self, location):
  3380. url = call_subprocess(
  3381. [self.cmd, 'config', 'remote.origin.url'],
  3382. show_stdout=False, cwd=location)
  3383. return url.strip()
  3384. def get_revision(self, location):
  3385. current_rev = call_subprocess(
  3386. [self.cmd, 'rev-parse', 'HEAD'], show_stdout=False, cwd=location)
  3387. return current_rev.strip()
  3388. def get_tag_revs(self, location):
  3389. tags = call_subprocess(
  3390. [self.cmd, 'tag'], show_stdout=False, cwd=location)
  3391. tag_revs = []
  3392. for line in tags.splitlines():
  3393. tag = line.strip()
  3394. rev = call_subprocess(
  3395. [self.cmd, 'rev-parse', tag], show_stdout=False, cwd=location)
  3396. tag_revs.append((rev.strip(), tag))
  3397. tag_revs = dict(tag_revs)
  3398. return tag_revs
  3399. def get_branch_revs(self, location):
  3400. branches = call_subprocess(
  3401. [self.cmd, 'branch', '-r'], show_stdout=False, cwd=location)
  3402. branch_revs = []
  3403. for line in branches.splitlines():
  3404. line = line.split('->')[0].strip()
  3405. branch = "".join([b for b in line.split() if b != '*'])
  3406. rev = call_subprocess(
  3407. [self.cmd, 'rev-parse', branch], show_stdout=False, cwd=location)
  3408. branch_revs.append((rev.strip(), branch))
  3409. branch_revs = dict(branch_revs)
  3410. return branch_revs
  3411. def get_src_requirement(self, dist, location, find_tags):
  3412. repo = self.get_url(location)
  3413. if not repo.lower().startswith('git:'):
  3414. repo = 'git+' + repo
  3415. egg_project_name = dist.egg_name().split('-', 1)[0]
  3416. if not repo:
  3417. return None
  3418. current_rev = self.get_revision(location)
  3419. tag_revs = self.get_tag_revs(location)
  3420. branch_revs = self.get_branch_revs(location)
  3421. if current_rev in tag_revs:
  3422. # It's a tag
  3423. full_egg_name = '%s-%s' % (egg_project_name, tag_revs[current_rev])
  3424. elif (current_rev in branch_revs and
  3425. branch_revs[current_rev] != 'origin/master'):
  3426. # It's the head of a branch
  3427. full_egg_name = '%s-%s' % (dist.egg_name(),
  3428. branch_revs[current_rev].replace('origin/', ''))
  3429. else:
  3430. full_egg_name = '%s-dev' % dist.egg_name()
  3431. return '%s@%s#egg=%s' % (repo, current_rev, full_egg_name)
  3432. def get_url_rev(self):
  3433. """
  3434. Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
  3435. That's required because although they use SSH they sometimes doesn't
  3436. work with a ssh:// scheme (e.g. Github). But we need a scheme for
  3437. parsing. Hence we remove it again afterwards and return it as a stub.
  3438. """
  3439. if not '://' in self.url:
  3440. self.url = self.url.replace('git+', 'git+ssh://')
  3441. url, rev = super(Git, self).get_url_rev()
  3442. url = url.replace('ssh://', '')
  3443. return url, rev
  3444. return super(Git, self).get_url_rev()
vcs.register(Git)  # expose the 'git+' URL schemes through the vcs registry
  3446. class Mercurial(VersionControl):
  3447. name = 'hg'
  3448. dirname = '.hg'
  3449. repo_name = 'clone'
  3450. schemes = ('hg', 'hg+http', 'hg+ssh')
  3451. bundle_file = 'hg-clone.txt'
  3452. guide = ('# This was a Mercurial repo; to make it a repo again run:\n'
  3453. 'hg init\nhg pull %(url)s\nhg update -r %(rev)s\n')
  3454. def parse_vcs_bundle_file(self, content):
  3455. url = rev = None
  3456. for line in content.splitlines():
  3457. if not line.strip() or line.strip().startswith('#'):
  3458. continue
  3459. url_match = re.search(r'hg\s*pull\s*(.*)\s*', line)
  3460. if url_match:
  3461. url = url_match.group(1).strip()
  3462. rev_match = re.search(r'^hg\s*update\s*-r\s*(.*)\s*', line)
  3463. if rev_match:
  3464. rev = rev_match.group(1).strip()
  3465. if url and rev:
  3466. return url, rev
  3467. return None, None
  3468. def unpack(self, location):
  3469. """Clone the Hg repository at the url to the destination location"""
  3470. url, rev = self.get_url_rev()
  3471. logger.notify('Cloning Mercurial repository %s to %s' % (url, location))
  3472. logger.indent += 2
  3473. try:
  3474. if os.path.exists(location):
  3475. os.rmdir(location)
  3476. call_subprocess(
  3477. ['hg', 'clone', url, location],
  3478. filter_stdout=self._filter, show_stdout=False)
  3479. finally:
  3480. logger.indent -= 2
  3481. def export(self, location):
  3482. """Export the Hg repository at the url to the destination location"""
  3483. temp_dir = tempfile.mkdtemp('-export', 'pip-')
  3484. self.unpack(temp_dir)
  3485. try:
  3486. call_subprocess(
  3487. ['hg', 'archive', location],
  3488. filter_stdout=self._filter, show_stdout=False, cwd=temp_dir)
  3489. finally:
  3490. shutil.rmtree(temp_dir)
  3491. def switch(self, dest, url, rev_options):
  3492. repo_config = os.path.join(dest, self.dirname, 'hgrc')
  3493. config = ConfigParser.SafeConfigParser()
  3494. try:
  3495. config.read(repo_config)
  3496. config.set('paths', 'default', url)
  3497. config_file = open(repo_config, 'w')
  3498. config.write(config_file)
  3499. config_file.close()
  3500. except (OSError, ConfigParser.NoSectionError), e:
  3501. logger.warn(
  3502. 'Could not switch Mercurial repository to %s: %s'
  3503. % (url, e))
  3504. else:
  3505. call_subprocess(['hg', 'update', '-q'] + rev_options, cwd=dest)
  3506. def update(self, dest, rev_options):
  3507. call_subprocess(['hg', 'pull', '-q'], cwd=dest)
  3508. call_subprocess(
  3509. ['hg', 'update', '-q'] + rev_options, cwd=dest)
  3510. def obtain(self, dest):
  3511. url, rev = self.get_url_rev()
  3512. if rev:
  3513. rev_options = [rev]
  3514. rev_display = ' (to revision %s)' % rev
  3515. else:
  3516. rev_options = []
  3517. rev_display = ''
  3518. if self.check_destination(dest, url, rev_options, rev_display):
  3519. logger.notify('Cloning hg %s%s to %s'
  3520. % (url, rev_display, display_path(dest)))
  3521. call_subprocess(['hg', 'clone', '-q', url, dest])
  3522. call_subprocess(['hg', 'update', '-q'] + rev_options, cwd=dest)
  3523. def get_url(self, location):
  3524. url = call_subprocess(
  3525. ['hg', 'showconfig', 'paths.default'],
  3526. show_stdout=False, cwd=location).strip()
  3527. if url.startswith('/') or url.startswith('\\'):
  3528. url = filename_to_url(url)
  3529. return url.strip()
  3530. def get_tag_revs(self, location):
  3531. tags = call_subprocess(
  3532. ['hg', 'tags'], show_stdout=False, cwd=location)
  3533. tag_revs = []
  3534. for line in tags.splitlines():
  3535. tags_match = re.search(r'([\w\d\.-]+)\s*([\d]+):.*$', line)
  3536. if tags_match:
  3537. tag = tags_match.group(1)
  3538. rev = tags_match.group(2)
  3539. tag_revs.append((rev.strip(), tag.strip()))
  3540. return dict(tag_revs)
  3541. def get_branch_revs(self, location):
  3542. branches = call_subprocess(
  3543. ['hg', 'branches'], show_stdout=False, cwd=location)
  3544. branch_revs = []
  3545. for line in branches.splitlines():
  3546. branches_match = re.search(r'([\w\d\.-]+)\s*([\d]+):.*$', line)
  3547. if branches_match:
  3548. branch = branches_match.group(1)
  3549. rev = branches_match.group(2)
  3550. branch_revs.append((rev.strip(), branch.strip()))
  3551. return dict(branch_revs)
  3552. def get_revision(self, location):
  3553. current_revision = call_subprocess(
  3554. ['hg', 'parents', '--template={rev}'],
  3555. show_stdout=False, cwd=location).strip()
  3556. return current_revision
  3557. def get_revision_hash(self, location):
  3558. current_rev_hash = call_subprocess(
  3559. ['hg', 'parents', '--template={node}'],
  3560. show_stdout=False, cwd=location).strip()
  3561. return current_rev_hash
  3562. def get_src_requirement(self, dist, location, find_tags):
  3563. repo = self.get_url(location)
  3564. if not repo.lower().startswith('hg:'):
  3565. repo = 'hg+' + repo
  3566. egg_project_name = dist.egg_name().split('-', 1)[0]
  3567. if not repo:
  3568. return None
  3569. current_rev = self.get_revision(location)
  3570. current_rev_hash = self.get_revision_hash(location)
  3571. tag_revs = self.get_tag_revs(location)
  3572. branch_revs = self.get_branch_revs(location)
  3573. if current_rev in tag_revs:
  3574. # It's a tag
  3575. full_egg_name = '%s-%s' % (egg_project_name, tag_revs[current_rev])
  3576. elif current_rev in branch_revs:
  3577. # It's the tip of a branch
  3578. full_egg_name = '%s-%s' % (dist.egg_name(), branch_revs[current_rev])
  3579. else:
  3580. full_egg_name = '%s-dev' % dist.egg_name()
  3581. return '%s@%s#egg=%s' % (repo, current_rev_hash, full_egg_name)
vcs.register(Mercurial)  # expose the 'hg+' URL schemes through the vcs registry
  3583. class Bazaar(VersionControl):
  3584. name = 'bzr'
  3585. dirname = '.bzr'
  3586. repo_name = 'branch'
  3587. bundle_file = 'bzr-branch.txt'
  3588. schemes = ('bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp')
  3589. guide = ('# This was a Bazaar branch; to make it a branch again run:\n'
  3590. 'bzr branch -r %(rev)s %(url)s .\n')
  3591. def parse_vcs_bundle_file(self, content):
  3592. url = rev = None
  3593. for line in content.splitlines():
  3594. if not line.strip() or line.strip().startswith('#'):
  3595. continue
  3596. match = re.search(r'^bzr\s*branch\s*-r\s*(\d*)', line)
  3597. if match:
  3598. rev = match.group(1).strip()
  3599. url = line[match.end():].strip().split(None, 1)[0]
  3600. if url and rev:
  3601. return url, rev
  3602. return None, None
  3603. def unpack(self, location):
  3604. """Get the bzr branch at the url to the destination location"""
  3605. url, rev = self.get_url_rev()
  3606. logger.notify('Checking out bzr repository %s to %s' % (url, location))
  3607. logger.indent += 2
  3608. try:
  3609. if os.path.exists(location):
  3610. os.rmdir(location)
  3611. call_subprocess(
  3612. [self.cmd, 'branch', url, location],
  3613. filter_stdout=self._filter, show_stdout=False)
  3614. finally:
  3615. logger.indent -= 2
  3616. def export(self, location):
  3617. """Export the Bazaar repository at the url to the destination location"""
  3618. temp_dir = tempfile.mkdtemp('-export', 'pip-')
  3619. self.unpack(temp_dir)
  3620. if os.path.exists(location):
  3621. # Remove the location to make sure Bazaar can export it correctly
  3622. shutil.rmtree(location, onerror=rmtree_errorhandler)
  3623. try:
  3624. call_subprocess([self.cmd, 'export', location], cwd=temp_dir,
  3625. filter_stdout=self._filter, show_stdout=False)
  3626. finally:
  3627. shutil.rmtree(temp_dir)
  3628. def switch(self, dest, url, rev_options):
  3629. call_subprocess([self.cmd, 'switch', url], cwd=dest)
  3630. def update(self, dest, rev_options):
  3631. call_subprocess(
  3632. [self.cmd, 'pull', '-q'] + rev_options, cwd=dest)
  3633. def obtain(self, dest):
  3634. url, rev = self.get_url_rev()
  3635. if rev:
  3636. rev_options = ['-r', rev]
  3637. rev_display = ' (to revision %s)' % rev
  3638. else:
  3639. rev_options = []
  3640. rev_display = ''
  3641. if self.check_destination(dest, url, rev_options, rev_display):
  3642. logger.notify('Checking out %s%s to %s'
  3643. % (url, rev_display, display_path(dest)))
  3644. call_subprocess(
  3645. [self.cmd, 'branch', '-q'] + rev_options + [url, dest])
  3646. def get_url_rev(self):
  3647. # hotfix the URL scheme after removing bzr+ from bzr+ssh:// readd it
  3648. url, rev = super(Bazaar, self).get_url_rev()
  3649. if url.startswith('ssh://'):
  3650. url = 'bzr+' + url
  3651. return url, rev
  3652. def get_url(self, location):
  3653. urls = call_subprocess(
  3654. [self.cmd, 'info'], show_stdout=False, cwd=location)
  3655. for line in urls.splitlines():
  3656. line = line.strip()
  3657. for x in ('checkout of branch: ',
  3658. 'parent branch: '):
  3659. if line.startswith(x):
  3660. return line.split(x)[1]
  3661. return None
  3662. def get_revision(self, location):
  3663. revision = call_subprocess(
  3664. [self.cmd, 'revno'], show_stdout=False, cwd=location)
  3665. return revision.splitlines()[-1]
  3666. def get_tag_revs(self, location):
  3667. tags = call_subprocess(
  3668. [self.cmd, 'tags'], show_stdout=False, cwd=location)
  3669. tag_revs = []
  3670. for line in tags.splitlines():
  3671. tags_match = re.search(r'([.\w-]+)\s*(.*)$', line)
  3672. if tags_match:
  3673. tag = tags_match.group(1)
  3674. rev = tags_match.group(2)
  3675. tag_revs.append((rev.strip(), tag.strip()))
  3676. return dict(tag_revs)
  3677. def get_src_requirement(self, dist, location, find_tags):
  3678. repo = self.get_url(location)
  3679. if not repo.lower().startswith('bzr:'):
  3680. repo = 'bzr+' + repo
  3681. egg_project_name = dist.egg_name().split('-', 1)[0]
  3682. if not repo:
  3683. return None
  3684. current_rev = self.get_revision(location)
  3685. tag_revs = self.get_tag_revs(location)
  3686. if current_rev in tag_revs:
  3687. # It's a tag
  3688. tag = tag_revs.get(current_rev, current_rev)
  3689. full_egg_name = '%s-%s' % (egg_project_name, tag_revs[current_rev])
  3690. else:
  3691. full_egg_name = '%s-dev_r%s' % (dist.egg_name(), current_rev)
  3692. return '%s@%s#egg=%s' % (repo, current_rev, full_egg_name)
vcs.register(Bazaar)  # expose the 'bzr+' URL schemes through the vcs registry
  3694. def get_src_requirement(dist, location, find_tags):
  3695. version_control = vcs.get_backend_from_location(location)
  3696. if version_control:
  3697. return version_control().get_src_requirement(dist, location, find_tags)
  3698. logger.warn('cannot determine version of editable source in %s (is not SVN checkout, Git clone, Mercurial clone or Bazaar branch)' % location)
  3699. return dist.as_requirement()
############################################################
## Requirement files
# Matches URL schemes from which a requirements file may be fetched.
_scheme_re = re.compile(r'^(http|https|file):', re.I)
# Matches a Windows drive spec in a file: URL path, e.g. '/C|/dir'.
_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)
def get_file_content(url, comes_from=None):
    """Gets the content of a file; it may be a filename, file: URL, or
    http: URL.  Returns (location, content)"""
    match = _scheme_re.search(url)
    if match:
        scheme = match.group(1).lower()
        if (scheme == 'file' and comes_from
            and comes_from.startswith('http')):
            # A remotely-fetched requirements file must not pull in
            # local files from the machine running pip.
            raise InstallationError(
                'Requirements file %s references URL %s, which is local'
                % (comes_from, url))
        if scheme == 'file':
            # Convert the file: URL into a plain filesystem path.
            path = url.split(':', 1)[1]
            path = path.replace('\\', '/')
            match = _url_slash_drive_re.match(path)
            if match:
                # Windows drive spelled as '/C|/dir' -> 'C:/dir'
                path = match.group(1) + ':' + path.split('|', 1)[1]
            path = urllib.unquote(path)
            if path.startswith('/'):
                # Collapse any run of leading slashes into one.
                path = '/' + path.lstrip('/')
            url = path
        else:
            ## FIXME: catch some errors
            resp = urllib2.urlopen(url)
            return resp.geturl(), resp.read()
    f = open(url)
    content = f.read()
    f.close()
    return url, content
  3733. def parse_requirements(filename, finder=None, comes_from=None, options=None):
  3734. skip_match = None
  3735. skip_regex = options.skip_requirements_regex
  3736. if skip_regex:
  3737. skip_match = re.compile(skip_regex)
  3738. filename, content = get_file_content(filename, comes_from=comes_from)
  3739. for line_number, line in enumerate(content.splitlines()):
  3740. line_number += 1
  3741. line = line.strip()
  3742. if not line or line.startswith('#'):
  3743. continue
  3744. if skip_match and skip_match.search(line):
  3745. continue
  3746. if line.startswith('-r') or line.startswith('--requirement'):
  3747. if line.startswith('-r'):
  3748. req_url = line[2:].strip()
  3749. else:
  3750. req_url = line[len('--requirement'):].strip().strip('=')
  3751. if _scheme_re.search(filename):
  3752. # Relative to a URL
  3753. req_url = urlparse.urljoin(filename, url)
  3754. elif not _scheme_re.search(req_url):
  3755. req_url = os.path.join(os.path.dirname(filename), req_url)
  3756. for item in parse_requirements(req_url, finder, comes_from=filename, options=options):
  3757. yield item
  3758. elif line.startswith('-Z') or line.startswith('--always-unzip'):
  3759. # No longer used, but previously these were used in
  3760. # requirement files, so we'll ignore.
  3761. pass
  3762. elif finder and line.startswith('-f') or line.startswith('--find-links'):
  3763. if line.startswith('-f'):
  3764. line = line[2:].strip()
  3765. else:
  3766. line = line[len('--find-links'):].strip().lstrip('=')
  3767. ## FIXME: it would be nice to keep track of the source of
  3768. ## the find_links:
  3769. finder.find_links.append(line)
  3770. elif line.startswith('-i') or line.startswith('--index-url'):
  3771. if line.startswith('-i'):
  3772. line = line[2:].strip()
  3773. else:
  3774. line = line[len('--index-url'):].strip().lstrip('=')
  3775. finder.index_urls = [line]
  3776. elif line.startswith('--extra-index-url'):
  3777. line = line[len('--extra-index-url'):].strip().lstrip('=')
  3778. finder.index_urls.append(line)
  3779. else:
  3780. comes_from = '-r %s (line %s)' % (filename, line_number)
  3781. if line.startswith('-e') or line.startswith('--editable'):
  3782. if line.startswith('-e'):
  3783. line = line[2:].strip()
  3784. else:
  3785. line = line[len('--editable'):].strip()
  3786. req = InstallRequirement.from_editable(
  3787. line, comes_from=comes_from, default_vcs=options.default_vcs)
  3788. else:
  3789. req = InstallRequirement.from_line(line, comes_from)
  3790. yield req
  3791. ############################################################
  3792. ## Logging
  3793. class Logger(object):
  3794. """
  3795. Logging object for use in command-line script. Allows ranges of
  3796. levels, to avoid some redundancy of displayed information.
  3797. """
  3798. VERBOSE_DEBUG = logging.DEBUG-1
  3799. DEBUG = logging.DEBUG
  3800. INFO = logging.INFO
  3801. NOTIFY = (logging.INFO+logging.WARN)/2
  3802. WARN = WARNING = logging.WARN
  3803. ERROR = logging.ERROR
  3804. FATAL = logging.FATAL
  3805. LEVELS = [VERBOSE_DEBUG, DEBUG, INFO, NOTIFY, WARN, ERROR, FATAL]
  3806. def __init__(self, consumers):
  3807. self.consumers = consumers
  3808. self.indent = 0
  3809. self.explicit_levels = False
  3810. self.in_progress = None
  3811. self.in_progress_hanging = False
  3812. def debug(self, msg, *args, **kw):
  3813. self.log(self.DEBUG, msg, *args, **kw)
  3814. def info(self, msg, *args, **kw):
  3815. self.log(self.INFO, msg, *args, **kw)
  3816. def notify(self, msg, *args, **kw):
  3817. self.log(self.NOTIFY, msg, *args, **kw)
  3818. def warn(self, msg, *args, **kw):
  3819. self.log(self.WARN, msg, *args, **kw)
  3820. def error(self, msg, *args, **kw):
  3821. self.log(self.WARN, msg, *args, **kw)
  3822. def fatal(self, msg, *args, **kw):
  3823. self.log(self.FATAL, msg, *args, **kw)
  3824. def log(self, level, msg, *args, **kw):
  3825. if args:
  3826. if kw:
  3827. raise TypeError(
  3828. "You may give positional or keyword arguments, not both")
  3829. args = args or kw
  3830. rendered = None
  3831. for consumer_level, consumer in self.consumers:
  3832. if self.level_matches(level, consumer_level):
  3833. if (self.in_progress_hanging
  3834. and consumer in (sys.stdout, sys.stderr)):
  3835. self.in_progress_hanging = False
  3836. sys.stdout.write('\n')
  3837. sys.stdout.flush()
  3838. if rendered is None:
  3839. if args:
  3840. rendered = msg % args
  3841. else:
  3842. rendered = msg
  3843. rendered = ' '*self.indent + rendered
  3844. if self.explicit_levels:
  3845. ## FIXME: should this be a name, not a level number?
  3846. rendered = '%02i %s' % (level, rendered)
  3847. if hasattr(consumer, 'write'):
  3848. consumer.write(rendered+'\n')
  3849. else:
  3850. consumer(rendered)
  3851. def start_progress(self, msg):
  3852. assert not self.in_progress, (
  3853. "Tried to start_progress(%r) while in_progress %r"
  3854. % (msg, self.in_progress))
  3855. if self.level_matches(self.NOTIFY, self._stdout_level()):
  3856. sys.stdout.write(' '*self.indent + msg)
  3857. sys.stdout.flush()
  3858. self.in_progress_hanging = True
  3859. else:
  3860. self.in_progress_hanging = False
  3861. self.in_progress = msg
  3862. self.last_message = None
  3863. def end_progress(self, msg='done.'):
  3864. assert self.in_progress, (
  3865. "Tried to end_progress without start_progress")
  3866. if self.stdout_level_matches(self.NOTIFY):
  3867. if not self.in_progress_hanging:
  3868. # Some message has been printed out since start_progress
  3869. sys.stdout.write('...' + self.in_progress + msg + '\n')
  3870. sys.stdout.flush()
  3871. else:
  3872. # These erase any messages shown with show_progress (besides .'s)
  3873. logger.show_progress('')
  3874. logger.show_progress('')
  3875. sys.stdout.write(msg + '\n')
  3876. sys.stdout.flush()
  3877. self.in_progress = None
  3878. self.in_progress_hanging = False
  3879. def show_progress(self, message=None):
  3880. """If we are in a progress scope, and no log messages have been
  3881. shown, write out another '.'"""
  3882. if self.in_progress_hanging:
  3883. if message is None:
  3884. sys.stdout.write('.')
  3885. sys.stdout.flush()
  3886. else:
  3887. if self.last_message:
  3888. padding = ' ' * max(0, len(self.last_message)-len(message))
  3889. else:
  3890. padding = ''
  3891. sys.stdout.write('\r%s%s%s%s' % (' '*self.indent, self.in_progress, message, padding))
  3892. sys.stdout.flush()
  3893. self.last_message = message
  3894. def stdout_level_matches(self, level):
  3895. """Returns true if a message at this level will go to stdout"""
  3896. return self.level_matches(level, self._stdout_level())
  3897. def _stdout_level(self):
  3898. """Returns the level that stdout runs at"""
  3899. for level, consumer in self.consumers:
  3900. if consumer is sys.stdout:
  3901. return level
  3902. return self.FATAL
  3903. def level_matches(self, level, consumer_level):
  3904. """
  3905. >>> l = Logger()
  3906. >>> l.level_matches(3, 4)
  3907. False
  3908. >>> l.level_matches(3, 2)
  3909. True
  3910. >>> l.level_matches(slice(None, 3), 3)
  3911. False
  3912. >>> l.level_matches(slice(None, 3), 2)
  3913. True
  3914. >>> l.level_matches(slice(1, 3), 1)
  3915. True
  3916. >>> l.level_matches(slice(2, 3), 1)
  3917. False
  3918. """
  3919. if isinstance(level, slice):
  3920. start, stop = level.start, level.stop
  3921. if start is not None and start > consumer_level:
  3922. return False
  3923. if stop is not None or stop <= consumer_level:
  3924. return False
  3925. return True
  3926. else:
  3927. return level >= consumer_level
  3928. @classmethod
  3929. def level_for_integer(cls, level):
  3930. levels = cls.LEVELS
  3931. if level < 0:
  3932. return levels[0]
  3933. if level >= len(levels):
  3934. return levels[-1]
  3935. return levels[level]
  3936. def move_stdout_to_stderr(self):
  3937. to_remove = []
  3938. to_add = []
  3939. for consumer_level, consumer in self.consumers:
  3940. if consumer == sys.stdout:
  3941. to_remove.append((consumer_level, consumer))
  3942. to_add.append((consumer_level, sys.stderr))
  3943. for item in to_remove:
  3944. self.consumers.remove(item)
  3945. self.consumers.extend(to_add)
def call_subprocess(cmd, show_stdout=True,
                    filter_stdout=None, cwd=None,
                    raise_on_returncode=True,
                    command_level=Logger.DEBUG, command_desc=None,
                    extra_environ=None):
    """Run ``cmd`` (an argv list) in a subprocess, logging its output.

    cmd: list of arguments passed to subprocess.Popen.
    show_stdout: when True the child inherits our stdout and nothing is
        captured; when False output is piped, logged line by line, and
        returned as a single string.
    filter_stdout: optional callable mapping an output line to a log
        level, or to a (level, replacement_line) tuple.
    cwd: working directory for the child process.
    raise_on_returncode: raise InstallationError on a non-zero exit code
        (otherwise only a warning is logged).
    command_level: log level for the "Running command" message.
    command_desc: human-readable command description; derived from ``cmd``
        (with quoting) when not given.
    extra_environ: dict of extra environment variables for the child.
    """
    if command_desc is None:
        # Build a shell-like description, quoting any argument containing
        # whitespace or quote characters.
        cmd_parts = []
        for part in cmd:
            if ' ' in part or '\n' in part or '"' in part or "'" in part:
                part = '"%s"' % part.replace('"', '\\"')
            cmd_parts.append(part)
        command_desc = ' '.join(cmd_parts)
    if show_stdout:
        # None means: child writes straight to our stdout, nothing captured
        stdout = None
    else:
        stdout = subprocess.PIPE
    logger.log(command_level, "Running command %s" % command_desc)
    env = os.environ.copy()
    if extra_environ:
        env.update(extra_environ)
    try:
        proc = subprocess.Popen(
            cmd, stderr=subprocess.STDOUT, stdin=None, stdout=stdout,
            cwd=cwd, env=env)
    except Exception, e:
        logger.fatal(
            "Error %s while executing command %s" % (e, command_desc))
        raise
    all_output = []
    if stdout is not None:
        # Piped mode: stream the child's output through the logger,
        # optionally transformed/leveled by filter_stdout.
        stdout = proc.stdout
        while 1:
            line = stdout.readline()
            if not line:
                break
            line = line.rstrip()
            all_output.append(line + '\n')
            if filter_stdout:
                level = filter_stdout(line)
                if isinstance(level, tuple):
                    level, line = level
                logger.log(level, line)
                if not logger.stdout_level_matches(level):
                    # line was suppressed from stdout; keep the progress
                    # indicator alive instead
                    logger.show_progress()
            else:
                logger.info(line)
    else:
        returned_stdout, returned_stderr = proc.communicate()
        all_output = [returned_stdout or '']
    proc.wait()
    if proc.returncode:
        if raise_on_returncode:
            if all_output:
                logger.notify('Complete output from command %s:' % command_desc)
                logger.notify('\n'.join(all_output) + '\n----------------------------------------')
            raise InstallationError(
                "Command %s failed with error code %s"
                % (command_desc, proc.returncode))
        else:
            logger.warn(
                "Command %s had error code %s"
                % (command_desc, proc.returncode))
    # stdout was rebound to the pipe above, so this is the captured case
    if stdout is not None:
        return ''.join(all_output)
  4010. ############################################################
  4011. ## Utility functions
  4012. def is_svn_page(html):
  4013. """Returns true if the page appears to be the index page of an svn repository"""
  4014. return (re.search(r'<title>[^<]*Revision \d+:', html)
  4015. and re.search(r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I))
  4016. def file_contents(filename):
  4017. fp = open(filename, 'rb')
  4018. try:
  4019. return fp.read()
  4020. finally:
  4021. fp.close()
  4022. def split_leading_dir(path):
  4023. path = str(path)
  4024. path = path.lstrip('/').lstrip('\\')
  4025. if '/' in path and (('\\' in path and path.find('/') < path.find('\\'))
  4026. or '\\' not in path):
  4027. return path.split('/', 1)
  4028. elif '\\' in path:
  4029. return path.split('\\', 1)
  4030. else:
  4031. return path, ''
  4032. def has_leading_dir(paths):
  4033. """Returns true if all the paths have the same leading path name
  4034. (i.e., everything is in one subdirectory in an archive)"""
  4035. common_prefix = None
  4036. for path in paths:
  4037. prefix, rest = split_leading_dir(path)
  4038. if not prefix:
  4039. return False
  4040. elif common_prefix is None:
  4041. common_prefix = prefix
  4042. elif prefix != common_prefix:
  4043. return False
  4044. return True
  4045. def format_size(bytes):
  4046. if bytes > 1000*1000:
  4047. return '%.1fMb' % (bytes/1000.0/1000)
  4048. elif bytes > 10*1000:
  4049. return '%iKb' % (bytes/1000)
  4050. elif bytes > 1000:
  4051. return '%.1fKb' % (bytes/1000.0)
  4052. else:
  4053. return '%ibytes' % bytes
# matches every character that is not an ASCII letter (case-insensitive)
_normalize_re = re.compile(r'[^a-z]', re.I)
def normalize_name(name):
    """Normalize a project name: lower-case it and replace every
    non-letter character (digits, '_', '.', spaces, ...) with '-'."""
    return _normalize_re.sub('-', name.lower())
  4057. def make_path_relative(path, rel_to):
  4058. """
  4059. Make a filename relative, where the filename path, and it is
  4060. relative to rel_to
  4061. >>> make_relative_path('/usr/share/something/a-file.pth',
  4062. ... '/usr/share/another-place/src/Directory')
  4063. '../../../something/a-file.pth'
  4064. >>> make_relative_path('/usr/share/something/a-file.pth',
  4065. ... '/home/user/src/Directory')
  4066. '../../../usr/share/something/a-file.pth'
  4067. >>> make_relative_path('/usr/share/a-file.pth', '/usr/share/')
  4068. 'a-file.pth'
  4069. """
  4070. path_filename = os.path.basename(path)
  4071. path = os.path.dirname(path)
  4072. path = os.path.normpath(os.path.abspath(path))
  4073. rel_to = os.path.normpath(os.path.abspath(rel_to))
  4074. path_parts = path.strip(os.path.sep).split(os.path.sep)
  4075. rel_to_parts = rel_to.strip(os.path.sep).split(os.path.sep)
  4076. while path_parts and rel_to_parts and path_parts[0] == rel_to_parts[0]:
  4077. path_parts.pop(0)
  4078. rel_to_parts.pop(0)
  4079. full_parts = ['..']*len(rel_to_parts) + path_parts + [path_filename]
  4080. if full_parts == ['']:
  4081. return '.' + os.path.sep
  4082. return os.path.sep.join(full_parts)
  4083. def display_path(path):
  4084. """Gives the display value for a given path, making it relative to cwd
  4085. if possible."""
  4086. path = os.path.normcase(os.path.abspath(path))
  4087. if path.startswith(os.getcwd() + os.path.sep):
  4088. path = '.' + path[len(os.getcwd()):]
  4089. return path
def parse_editable(editable_req, default_vcs=None):
    """Parses svn+http://blahblah@rev#egg=Foobar into a requirement
    (Foobar) and a URL.

    Returns a (requirement_or_None, url) tuple. The requirement is None
    for plain local file: checkouts. Raises InstallationError when no
    VCS scheme can be determined or no package name can be derived.
    """
    url = editable_req
    if os.path.isdir(url) and os.path.exists(os.path.join(url, 'setup.py')):
        # Treating it as code that has already been checked out
        url = filename_to_url(url)
    if url.lower().startswith('file:'):
        # local checkout: no requirement name needed
        return None, url
    # bare VCS URLs such as svn://host/path get their scheme doubled,
    # e.g. svn+svn://host/path, to carry both the VCS and the transport
    for version_control in vcs:
        if url.lower().startswith('%s:' % version_control):
            url = '%s+%s' % (version_control, url)
    if '+' not in url:
        if default_vcs:
            url = default_vcs + '+' + url
        else:
            raise InstallationError(
                '--editable=%s should be formatted with svn+URL, git+URL, hg+URL or bzr+URL' % editable_req)
    vc_type = url.split('+', 1)[0].lower()
    if not vcs.get_backend(vc_type):
        raise InstallationError(
            'For --editable=%s only svn (svn+URL), Git (git+URL), Mercurial (hg+URL) and Bazaar (bzr+URL) is currently supported' % editable_req)
    match = re.search(r'(?:#|#.*?&)egg=([^&]*)', editable_req)
    if (not match or not match.group(1)) and vcs.get_backend(vc_type):
        # No #egg= fragment: guess the name from svn-style URL layout
        # (.../tags/<x>, .../branches/<x>, .../trunk)
        # NOTE(review): assumes at least two path components remain —
        # a very short URL would raise IndexError here; confirm callers
        parts = [p for p in editable_req.split('#', 1)[0].split('/') if p]
        if parts[-2] in ('tags', 'branches', 'tag', 'branch'):
            req = parts[-3]
        elif parts[-1] == 'trunk':
            req = parts[-2]
        else:
            raise InstallationError(
                '--editable=%s is not the right format; it must have #egg=Package'
                % editable_req)
    else:
        req = match.group(1)
    ## FIXME: use package_to_requirement?
    match = re.search(r'^(.*?)(?:-dev|-\d.*)', req)
    if match:
        # Strip off -dev, -0.2, etc.
        req = match.group(1)
    return req, url
  4131. def backup_dir(dir, ext='.bak'):
  4132. """Figure out the name of a directory to back up the given dir to
  4133. (adding .bak, .bak2, etc)"""
  4134. n = 1
  4135. extension = ext
  4136. while os.path.exists(dir + extension):
  4137. n += 1
  4138. extension = ext + str(n)
  4139. return dir + extension
def ask(message, options):
    """Ask the message interactively, with the given possible responses.

    Loops until the user's (stripped, lower-cased) answer is one of
    ``options`` and returns it. Raises when $PIP_NO_INPUT is set, since
    prompting is then explicitly forbidden.
    """
    while 1:
        if os.environ.get('PIP_NO_INPUT'):
            raise Exception('No input was expected ($PIP_NO_INPUT set); question: %s' % message)
        response = raw_input(message)
        response = response.strip().lower()
        if response not in options:
            print 'Your response (%r) was not one of the expected responses: %s' % (
                response, ', '.join(options))
        else:
            return response
def open_logfile_append(filename):
    """Open the named log file in append mode.

    If the file already exists, a separator will also be printed to
    the file to separate past activity from current activity.
    """
    exists = os.path.exists(filename)
    log_fp = open(filename, 'a')
    if exists:
        # visually separate this run from the previous one
        print >> log_fp, '-'*60
        print >> log_fp, '%s run on %s' % (sys.argv[0], time.strftime('%c'))
    return log_fp
  4163. def is_url(name):
  4164. """Returns true if the name looks like a URL"""
  4165. if ':' not in name:
  4166. return False
  4167. scheme = name.split(':', 1)[0].lower()
  4168. return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes
  4169. def is_filename(name):
  4170. if (splitext(name)[1].lower() in ('.zip', '.tar.gz', '.tar.bz2', '.tgz', '.tar', '.pybundle')
  4171. and os.path.exists(name)):
  4172. return True
  4173. if os.path.sep not in name and '/' not in name:
  4174. # Doesn't have any path components, probably a requirement like 'Foo'
  4175. return False
  4176. return True
  4177. _drive_re = re.compile('^([a-z]):', re.I)
  4178. _url_drive_re = re.compile('^([a-z])[:|]', re.I)
  4179. def filename_to_url(filename):
  4180. """
  4181. Convert a path to a file: URL. The path will be made absolute.
  4182. """
  4183. filename = os.path.normcase(os.path.abspath(filename))
  4184. if _drive_re.match(filename):
  4185. filename = filename[0] + '|' + filename[2:]
  4186. url = urllib.quote(filename)
  4187. url = url.replace(os.path.sep, '/')
  4188. url = url.lstrip('/')
  4189. return 'file:///' + url
  4190. def filename_to_url2(filename):
  4191. """
  4192. Convert a path to a file: URL. The path will be made absolute and have
  4193. quoted path parts.
  4194. """
  4195. filename = os.path.normcase(os.path.abspath(filename))
  4196. drive, filename = os.path.splitdrive(filename)
  4197. filepath = filename.split(os.path.sep)
  4198. url = '/'.join([urllib.quote(part) for part in filepath])
  4199. if not drive:
  4200. url = url.lstrip('/')
  4201. return 'file:///' + drive + url
  4202. def url_to_filename(url):
  4203. """
  4204. Convert a file: URL to a path.
  4205. """
  4206. assert url.startswith('file:'), (
  4207. "You can only turn file: urls into filenames (not %r)" % url)
  4208. filename = url[len('file:'):].lstrip('/')
  4209. filename = urllib.unquote(filename)
  4210. if _url_drive_re.match(filename):
  4211. filename = filename[0] + ':' + filename[2:]
  4212. else:
  4213. filename = '/' + filename
  4214. return filename
  4215. def get_requirement_from_url(url):
  4216. """Get a requirement from the URL, if possible. This looks for #egg
  4217. in the URL"""
  4218. link = Link(url)
  4219. egg_info = link.egg_fragment
  4220. if not egg_info:
  4221. egg_info = splitext(link.filename)[0]
  4222. return package_to_requirement(egg_info)
  4223. def package_to_requirement(package_name):
  4224. """Translate a name like Foo-1.2 to Foo==1.3"""
  4225. match = re.search(r'^(.*?)(-dev|-\d.*)', package_name)
  4226. if match:
  4227. name = match.group(1)
  4228. version = match.group(2)
  4229. else:
  4230. name = package_name
  4231. version = ''
  4232. if version:
  4233. return '%s==%s' % (name, version)
  4234. else:
  4235. return name
  4236. def is_framework_layout(path):
  4237. """Return True if the current platform is the default Python of Mac OS X
  4238. which installs scripts in /usr/local/bin"""
  4239. return (sys.platform[:6] == 'darwin' and
  4240. (path[:9] == '/Library/' or path[:16] == '/System/Library/'))
  4241. def strip_prefix(path, prefix):
  4242. """ If ``path`` begins with ``prefix``, return ``path`` with
  4243. ``prefix`` stripped off. Otherwise return None."""
  4244. prefixes = [prefix]
  4245. # Yep, we are special casing the framework layout of MacPython here
  4246. if is_framework_layout(sys.prefix):
  4247. for location in ('/Library', '/usr/local'):
  4248. if path.startswith(location):
  4249. prefixes.append(location)
  4250. for prefix in prefixes:
  4251. if path.startswith(prefix):
  4252. return prefix, path.replace(prefix + os.path.sep, '')
  4253. return None, None
  4254. class UninstallPathSet(object):
  4255. """A set of file paths to be removed in the uninstallation of a
  4256. requirement."""
  4257. def __init__(self, dist, restrict_to_prefix):
  4258. self.paths = set()
  4259. self._refuse = set()
  4260. self.pth = {}
  4261. self.prefix = os.path.normcase(os.path.realpath(restrict_to_prefix))
  4262. self.dist = dist
  4263. self.location = dist.location
  4264. self.save_dir = None
  4265. self._moved_paths = []
  4266. def _can_uninstall(self):
  4267. prefix, stripped = strip_prefix(self.location, self.prefix)
  4268. if not stripped:
  4269. logger.notify("Not uninstalling %s at %s, outside environment %s"
  4270. % (self.dist.project_name, self.dist.location,
  4271. self.prefix))
  4272. return False
  4273. return True
  4274. def add(self, path):
  4275. path = os.path.abspath(path)
  4276. if not os.path.exists(path):
  4277. return
  4278. prefix, stripped = strip_prefix(os.path.normcase(path), self.prefix)
  4279. if stripped:
  4280. self.paths.add((prefix, stripped))
  4281. else:
  4282. self._refuse.add((prefix, path))
  4283. def add_pth(self, pth_file, entry):
  4284. prefix, stripped = strip_prefix(os.path.normcase(pth_file), self.prefix)
  4285. if stripped:
  4286. entry = os.path.normcase(entry)
  4287. if stripped not in self.pth:
  4288. self.pth[stripped] = UninstallPthEntries(os.path.join(prefix, stripped))
  4289. self.pth[stripped].add(os.path.normcase(entry))
  4290. else:
  4291. self._refuse.add((prefix, pth_file))
  4292. def compact(self, paths):
  4293. """Compact a path set to contain the minimal number of paths
  4294. necessary to contain all paths in the set. If /a/path/ and
  4295. /a/path/to/a/file.txt are both in the set, leave only the
  4296. shorter path."""
  4297. short_paths = set()
  4298. def sort_set(x, y):
  4299. prefix_x, path_x = x
  4300. prefix_y, path_y = y
  4301. return cmp(len(path_x), len(path_y))
  4302. for prefix, path in sorted(paths, sort_set):
  4303. if not any([(path.startswith(shortpath) and
  4304. path[len(shortpath.rstrip(os.path.sep))] == os.path.sep)
  4305. for shortprefix, shortpath in short_paths]):
  4306. short_paths.add((prefix, path))
  4307. return short_paths
  4308. def remove(self, auto_confirm=False):
  4309. """Remove paths in ``self.paths`` with confirmation (unless
  4310. ``auto_confirm`` is True)."""
  4311. if not self._can_uninstall():
  4312. return
  4313. logger.notify('Uninstalling %s:' % self.dist.project_name)
  4314. logger.indent += 2
  4315. paths = sorted(self.compact(self.paths))
  4316. try:
  4317. if auto_confirm:
  4318. response = 'y'
  4319. else:
  4320. for prefix, path in paths:
  4321. logger.notify(os.path.join(prefix, path))
  4322. response = ask('Proceed (y/n)? ', ('y', 'n'))
  4323. if self._refuse:
  4324. logger.notify('Not removing or modifying (outside of prefix):')
  4325. for prefix, path in self.compact(self._refuse):
  4326. logger.notify(os.path.join(prefix, path))
  4327. if response == 'y':
  4328. self.save_dir = tempfile.mkdtemp('-uninstall', 'pip-')
  4329. for prefix, path in paths:
  4330. full_path = os.path.join(prefix, path)
  4331. new_path = os.path.join(self.save_dir, path)
  4332. new_dir = os.path.dirname(new_path)
  4333. logger.info('Removing file or directory %s' % full_path)
  4334. self._moved_paths.append((prefix, path))
  4335. os.renames(full_path, new_path)
  4336. for pth in self.pth.values():
  4337. pth.remove()
  4338. logger.notify('Successfully uninstalled %s' % self.dist.project_name)
  4339. finally:
  4340. logger.indent -= 2
  4341. def rollback(self):
  4342. """Rollback the changes previously made by remove()."""
  4343. if self.save_dir is None:
  4344. logger.error("Can't roll back %s; was not uninstalled" % self.dist.project_name)
  4345. return False
  4346. logger.notify('Rolling back uninstall of %s' % self.dist.project_name)
  4347. for prefix, path in self._moved_paths:
  4348. tmp_path = os.path.join(self.save_dir, path)
  4349. real_path = os.path.join(prefix, path)
  4350. logger.info('Replacing %s' % real_path)
  4351. os.renames(tmp_path, real_path)
  4352. for pth in self.pth:
  4353. pth.rollback()
  4354. def commit(self):
  4355. """Remove temporary save dir: rollback will no longer be possible."""
  4356. if self.save_dir is not None:
  4357. shutil.rmtree(self.save_dir)
  4358. self.save_dir = None
  4359. self._moved_paths = []
  4360. class UninstallPthEntries(object):
  4361. def __init__(self, pth_file):
  4362. if not os.path.isfile(pth_file):
  4363. raise UninstallationError("Cannot remove entries from nonexistent file %s" % pth_file)
  4364. self.file = pth_file
  4365. self.entries = set()
  4366. self._saved_lines = None
  4367. def add(self, entry):
  4368. self.entries.add(entry)
  4369. def remove(self):
  4370. logger.info('Removing pth entries from %s:' % self.file)
  4371. fh = open(self.file, 'r')
  4372. lines = fh.readlines()
  4373. self._saved_lines = lines
  4374. fh.close()
  4375. try:
  4376. for entry in self.entries:
  4377. logger.info('Removing entry: %s' % entry)
  4378. try:
  4379. lines.remove(entry + '\n')
  4380. except ValueError:
  4381. pass
  4382. finally:
  4383. pass
  4384. fh = open(self.file, 'w')
  4385. fh.writelines(lines)
  4386. fh.close()
  4387. def rollback(self):
  4388. if self._saved_lines is None:
  4389. logger.error('Cannot roll back changes to %s, none were made' % self.file)
  4390. return False
  4391. logger.info('Rolling %s back to previous state' % self.file)
  4392. fh = open(self.file, 'w')
  4393. fh.writelines(self._saved_lines)
  4394. fh.close()
  4395. return True
  4396. class FakeFile(object):
  4397. """Wrap a list of lines in an object with readline() to make
  4398. ConfigParser happy."""
  4399. def __init__(self, lines):
  4400. self._gen = (l for l in lines)
  4401. def readline(self):
  4402. try:
  4403. return self._gen.next()
  4404. except StopIteration:
  4405. return ''
  4406. def splitext(path):
  4407. """Like os.path.splitext, but take off .tar too"""
  4408. base, ext = posixpath.splitext(path)
  4409. if base.lower().endswith('.tar'):
  4410. ext = base[-4:] + ext
  4411. base = base[:-4]
  4412. return base, ext
  4413. def find_command(cmd, paths=None, pathext=None):
  4414. """Searches the PATH for the given command and returns its path"""
  4415. if paths is None:
  4416. paths = os.environ.get('PATH', []).split(os.pathsep)
  4417. if isinstance(paths, basestring):
  4418. paths = [paths]
  4419. # check if there are funny path extensions for executables, e.g. Windows
  4420. if pathext is None:
  4421. pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
  4422. pathext = [ext for ext in pathext.lower().split(os.pathsep)]
  4423. # don't use extensions if the command ends with one of them
  4424. if os.path.splitext(cmd)[1].lower() in pathext:
  4425. pathext = ['']
  4426. # check if we find the command on PATH
  4427. for path in paths:
  4428. # try without extension first
  4429. cmd_path = os.path.join(path, cmd)
  4430. for ext in pathext:
  4431. # then including the extension
  4432. cmd_path_ext = cmd_path + ext
  4433. if os.path.exists(cmd_path_ext):
  4434. return cmd_path_ext
  4435. if os.path.exists(cmd_path):
  4436. return cmd_path
  4437. return None
  4438. class _Inf(object):
  4439. """I am bigger than everything!"""
  4440. def __cmp__(self, a):
  4441. if self is a:
  4442. return 0
  4443. return 1
  4444. def __repr__(self):
  4445. return 'Inf'
  4446. Inf = _Inf()
  4447. del _Inf
  4448. if __name__ == '__main__':
  4449. exit = main()
  4450. if exit:
  4451. sys.exit(exit)