PageRenderTime 59ms CodeModel.GetById 28ms RepoModel.GetById 0ms app.codeStats 1ms

/mercurial/scmutil.py

https://bitbucket.org/mirror/mercurial/
Python | 970 lines | 905 code | 30 blank | 35 comment | 84 complexity | 09bf1a793bccd764ec77be26df84e9f0 MD5 | raw file
Possible License(s): GPL-2.0
  1. # scmutil.py - Mercurial core utility functions
  2. #
  3. # Copyright Matt Mackall <mpm@selenic.com>
  4. #
  5. # This software may be used and distributed according to the terms of the
  6. # GNU General Public License version 2 or any later version.
  7. from i18n import _
  8. from mercurial.node import nullrev
  9. import util, error, osutil, revset, similar, encoding, phases, parsers
  10. import pathutil
  11. import match as matchmod
  12. import os, errno, re, glob, tempfile
  13. if os.name == 'nt':
  14. import scmwindows as scmplatform
  15. else:
  16. import scmposix as scmplatform
  17. systemrcpath = scmplatform.systemrcpath
  18. userrcpath = scmplatform.userrcpath
  19. def itersubrepos(ctx1, ctx2):
  20. """find subrepos in ctx1 or ctx2"""
  21. # Create a (subpath, ctx) mapping where we prefer subpaths from
  22. # ctx1. The subpaths from ctx2 are important when the .hgsub file
  23. # has been modified (in ctx2) but not yet committed (in ctx1).
  24. subpaths = dict.fromkeys(ctx2.substate, ctx2)
  25. subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
  26. for subpath, ctx in sorted(subpaths.iteritems()):
  27. yield subpath, ctx.sub(subpath)
  28. def nochangesfound(ui, repo, excluded=None):
  29. '''Report no changes for push/pull, excluded is None or a list of
  30. nodes excluded from the push/pull.
  31. '''
  32. secretlist = []
  33. if excluded:
  34. for n in excluded:
  35. if n not in repo:
  36. # discovery should not have included the filtered revision,
  37. # we have to explicitly exclude it until discovery is cleanup.
  38. continue
  39. ctx = repo[n]
  40. if ctx.phase() >= phases.secret and not ctx.extinct():
  41. secretlist.append(n)
  42. if secretlist:
  43. ui.status(_("no changes found (ignored %d secret changesets)\n")
  44. % len(secretlist))
  45. else:
  46. ui.status(_("no changes found\n"))
  47. def checknewlabel(repo, lbl, kind):
  48. # Do not use the "kind" parameter in ui output.
  49. # It makes strings difficult to translate.
  50. if lbl in ['tip', '.', 'null']:
  51. raise util.Abort(_("the name '%s' is reserved") % lbl)
  52. for c in (':', '\0', '\n', '\r'):
  53. if c in lbl:
  54. raise util.Abort(_("%r cannot be used in a name") % c)
  55. try:
  56. int(lbl)
  57. raise util.Abort(_("cannot use an integer as a name"))
  58. except ValueError:
  59. pass
  60. def checkfilename(f):
  61. '''Check that the filename f is an acceptable filename for a tracked file'''
  62. if '\r' in f or '\n' in f:
  63. raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
  64. def checkportable(ui, f):
  65. '''Check if filename f is portable and warn or abort depending on config'''
  66. checkfilename(f)
  67. abort, warn = checkportabilityalert(ui)
  68. if abort or warn:
  69. msg = util.checkwinfilename(f)
  70. if msg:
  71. msg = "%s: %r" % (msg, f)
  72. if abort:
  73. raise util.Abort(msg)
  74. ui.warn(_("warning: %s\n") % msg)
  75. def checkportabilityalert(ui):
  76. '''check if the user's config requests nothing, a warning, or abort for
  77. non-portable filenames'''
  78. val = ui.config('ui', 'portablefilenames', 'warn')
  79. lval = val.lower()
  80. bval = util.parsebool(val)
  81. abort = os.name == 'nt' or lval == 'abort'
  82. warn = bval or lval == 'warn'
  83. if bval is None and not (warn or abort or lval == 'ignore'):
  84. raise error.ConfigError(
  85. _("ui.portablefilenames value is invalid ('%s')") % val)
  86. return abort, warn
  87. class casecollisionauditor(object):
  88. def __init__(self, ui, abort, dirstate):
  89. self._ui = ui
  90. self._abort = abort
  91. allfiles = '\0'.join(dirstate._map)
  92. self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
  93. self._dirstate = dirstate
  94. # The purpose of _newfiles is so that we don't complain about
  95. # case collisions if someone were to call this object with the
  96. # same filename twice.
  97. self._newfiles = set()
  98. def __call__(self, f):
  99. if f in self._newfiles:
  100. return
  101. fl = encoding.lower(f)
  102. if fl in self._loweredfiles and f not in self._dirstate:
  103. msg = _('possible case-folding collision for %s') % f
  104. if self._abort:
  105. raise util.Abort(msg)
  106. self._ui.warn(_("warning: %s\n") % msg)
  107. self._loweredfiles.add(fl)
  108. self._newfiles.add(f)
class abstractvfs(object):
    """Abstract base class; cannot be instantiated"""
    def __init__(self, *args, **kwargs):
        '''Prevent instantiation; don't call this from subclasses.'''
        raise NotImplementedError('attempted instantiating ' + str(type(self)))
    def tryread(self, path):
        '''gracefully return an empty string for missing files'''
        try:
            return self.read(path)
        except IOError, inst:
            # only swallow "file not found"; re-raise real I/O errors
            if inst.errno != errno.ENOENT:
                raise
        return ""
    def open(self, path, mode="r", text=False, atomictemp=False):
        # the first call rebinds 'open' to __call__ on the instance, so
        # later opens skip this extra level of indirection
        self.open = self.__call__
        return self.__call__(path, mode, text, atomictemp)
    def read(self, path):
        # read the entire file in binary mode, always closing the handle
        fp = self(path, 'rb')
        try:
            return fp.read()
        finally:
            fp.close()
    def write(self, path, data):
        # overwrite path with data (binary mode)
        fp = self(path, 'wb')
        try:
            return fp.write(data)
        finally:
            fp.close()
    def append(self, path, data):
        # append data to path (binary mode)
        fp = self(path, 'ab')
        try:
            return fp.write(data)
        finally:
            fp.close()
    # The methods below are thin wrappers delegating to os/util/osutil
    # on the joined (base-relative) path; subclasses supply join().
    def chmod(self, path, mode):
        return os.chmod(self.join(path), mode)
    def exists(self, path=None):
        return os.path.exists(self.join(path))
    def fstat(self, fp):
        return util.fstat(fp)
    def isdir(self, path=None):
        return os.path.isdir(self.join(path))
    def isfile(self, path=None):
        return os.path.isfile(self.join(path))
    def islink(self, path=None):
        return os.path.islink(self.join(path))
    def lexists(self, path=None):
        return os.path.lexists(self.join(path))
    def lstat(self, path=None):
        return os.lstat(self.join(path))
    def listdir(self, path=None):
        return os.listdir(self.join(path))
    def makedir(self, path=None, notindexed=True):
        return util.makedir(self.join(path), notindexed)
    def makedirs(self, path=None, mode=None):
        return util.makedirs(self.join(path), mode)
    def makelock(self, info, path):
        return util.makelock(info, self.join(path))
    def mkdir(self, path=None):
        return os.mkdir(self.join(path))
    def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False):
        '''Create a temp file under the vfs; returns (fd, vfs-relative name).'''
        fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
                                    dir=self.join(dir), text=text)
        dname, fname = util.split(name)
        # return a path relative to this vfs, not the absolute tempfile path
        if dir:
            return fd, os.path.join(dir, fname)
        else:
            return fd, fname
    def readdir(self, path=None, stat=None, skip=None):
        return osutil.listdir(self.join(path), stat, skip)
    def readlock(self, path):
        return util.readlock(self.join(path))
    def rename(self, src, dst):
        return util.rename(self.join(src), self.join(dst))
    def readlink(self, path):
        return os.readlink(self.join(path))
    def setflags(self, path, l, x):
        return util.setflags(self.join(path), l, x)
    def stat(self, path=None):
        return os.stat(self.join(path))
    def unlink(self, path=None):
        return util.unlink(self.join(path))
    def unlinkpath(self, path=None, ignoremissing=False):
        return util.unlinkpath(self.join(path), ignoremissing)
    def utime(self, path=None, t=None):
        return os.utime(self.join(path), t)
class vfs(abstractvfs):
    '''Operate files relative to a base directory
    This class is used to hide the details of COW semantics and
    remote file access from higher level code.
    '''
    def __init__(self, base, audit=True, expandpath=False, realpath=False):
        # base: the directory all relative paths are resolved against
        if expandpath:
            base = util.expandpath(base)
        if realpath:
            base = os.path.realpath(base)
        self.base = base
        self._setmustaudit(audit)
        # mode applied to files created through this vfs (None = default)
        self.createmode = None
        # tri-state: None means we don't yet know whether st_nlink is
        # trustworthy on this filesystem (see __call__)
        self._trustnlink = None
    def _getmustaudit(self):
        return self._audit
    def _setmustaudit(self, onoff):
        self._audit = onoff
        if onoff:
            self.audit = pathutil.pathauditor(self.base)
        else:
            # auditing disabled: accept every path unconditionally
            self.audit = util.always
    mustaudit = property(_getmustaudit, _setmustaudit)
    @util.propertycache
    def _cansymlink(self):
        # probed once: can the base filesystem hold symlinks?
        return util.checklink(self.base)
    @util.propertycache
    def _chmod(self):
        # probed once: does the base filesystem honor exec bits?
        return util.checkexec(self.base)
    def _fixfilemode(self, name):
        # apply createmode to a newly created file, masking to 0666
        if self.createmode is None or not self._chmod:
            return
        os.chmod(name, self.createmode & 0666)
    def __call__(self, path, mode="r", text=False, atomictemp=False):
        '''Open path relative to base; handles auditing, hardlink
        breaking (copy-on-write) and atomic-temp writes.'''
        if self._audit:
            r = util.checkosfilename(path)
            if r:
                raise util.Abort("%s: %r" % (r, path))
            self.audit(path)
        f = self.join(path)
        if not text and "b" not in mode:
            mode += "b" # for that other OS
        nlink = -1
        if mode not in ('r', 'rb'):
            dirname, basename = util.split(f)
            # If basename is empty, then the path is malformed because it points
            # to a directory. Let the posixfile() call below raise IOError.
            if basename:
                if atomictemp:
                    util.ensuredirs(dirname, self.createmode)
                    return util.atomictempfile(f, mode, self.createmode)
                try:
                    if 'w' in mode:
                        util.unlink(f)
                        nlink = 0
                    else:
                        # nlinks() may behave differently for files on Windows
                        # shares if the file is open.
                        fd = util.posixfile(f)
                        nlink = util.nlinks(f)
                        if nlink < 1:
                            nlink = 2 # force mktempcopy (issue1922)
                        fd.close()
                except (OSError, IOError), e:
                    if e.errno != errno.ENOENT:
                        raise
                    nlink = 0
                util.ensuredirs(dirname, self.createmode)
                if nlink > 0:
                    if self._trustnlink is None:
                        self._trustnlink = nlink > 1 or util.checknlink(f)
                    if nlink > 1 or not self._trustnlink:
                        # break up any hardlink before modifying in place
                        util.rename(util.mktempcopy(f), f)
        fp = util.posixfile(f, mode)
        if nlink == 0:
            self._fixfilemode(f)
        return fp
    def symlink(self, src, dst):
        '''Create a symlink at dst pointing to src; falls back to writing
        src as the file content when symlinks are unsupported.'''
        self.audit(dst)
        linkname = self.join(dst)
        try:
            os.unlink(linkname)
        except OSError:
            pass
        util.ensuredirs(os.path.dirname(linkname), self.createmode)
        if self._cansymlink:
            try:
                os.symlink(src, linkname)
            except OSError, err:
                raise OSError(err.errno, _('could not symlink to %r: %s') %
                              (src, err.strerror), linkname)
        else:
            self.write(dst, src)
    def join(self, path):
        # empty/None path means the base directory itself
        if path:
            return os.path.join(self.base, path)
        else:
            return self.base

# historical alias kept for callers that predate the vfs naming
opener = vfs
  294. class auditvfs(object):
  295. def __init__(self, vfs):
  296. self.vfs = vfs
  297. def _getmustaudit(self):
  298. return self.vfs.mustaudit
  299. def _setmustaudit(self, onoff):
  300. self.vfs.mustaudit = onoff
  301. mustaudit = property(_getmustaudit, _setmustaudit)
  302. class filtervfs(abstractvfs, auditvfs):
  303. '''Wrapper vfs for filtering filenames with a function.'''
  304. def __init__(self, vfs, filter):
  305. auditvfs.__init__(self, vfs)
  306. self._filter = filter
  307. def __call__(self, path, *args, **kwargs):
  308. return self.vfs(self._filter(path), *args, **kwargs)
  309. def join(self, path):
  310. if path:
  311. return self.vfs.join(self._filter(path))
  312. else:
  313. return self.vfs.join(path)
  314. filteropener = filtervfs
  315. class readonlyvfs(abstractvfs, auditvfs):
  316. '''Wrapper vfs preventing any writing.'''
  317. def __init__(self, vfs):
  318. auditvfs.__init__(self, vfs)
  319. def __call__(self, path, mode='r', *args, **kw):
  320. if mode not in ('r', 'rb'):
  321. raise util.Abort('this vfs is read only')
  322. return self.vfs(path, mode, *args, **kw)
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # only errors on the root path itself are fatal; errors deeper in
        # the walk are silently skipped
        if err.filename == path:
            raise err
    # samestat may be missing on some platforms; without it we cannot
    # safely detect symlink cycles, so symlink following is disabled
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # record dirname's stat in dirlst; returns True only if it was
            # not already present (i.e. this is not a cycle)
            match = False
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False
    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        # sort for deterministic yield order
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                # adddir returns False on a cycle: skip that subtree
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # recurse manually through the symlink target
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
  369. def osrcpath():
  370. '''return default os-specific hgrc search path'''
  371. path = systemrcpath()
  372. path.extend(userrcpath())
  373. path = [os.path.normpath(f) for f in path]
  374. return path
  375. _rcpath = None
  376. def rcpath():
  377. '''return hgrc search path. if env var HGRCPATH is set, use it.
  378. for each item in path, if directory, use files ending in .rc,
  379. else use item.
  380. make HGRCPATH empty to only look in .hg/hgrc of current repo.
  381. if no HGRCPATH, use default os-specific path.'''
  382. global _rcpath
  383. if _rcpath is None:
  384. if 'HGRCPATH' in os.environ:
  385. _rcpath = []
  386. for p in os.environ['HGRCPATH'].split(os.pathsep):
  387. if not p:
  388. continue
  389. p = util.expandpath(p)
  390. if os.path.isdir(p):
  391. for f, kind in osutil.listdir(p):
  392. if f.endswith('.rc'):
  393. _rcpath.append(os.path.join(p, f))
  394. else:
  395. _rcpath.append(p)
  396. else:
  397. _rcpath = osrcpath()
  398. return _rcpath
  399. def revsingle(repo, revspec, default='.'):
  400. if not revspec and revspec != 0:
  401. return repo[default]
  402. l = revrange(repo, [revspec])
  403. if len(l) < 1:
  404. raise util.Abort(_('empty revision set'))
  405. return repo[l[-1]]
  406. def revpair(repo, revs):
  407. if not revs:
  408. return repo.dirstate.p1(), None
  409. l = revrange(repo, revs)
  410. if not l:
  411. first = second = None
  412. elif l.isascending():
  413. first = l.min()
  414. second = l.max()
  415. elif l.isdescending():
  416. first = l.max()
  417. second = l.min()
  418. else:
  419. l = list(l)
  420. first = l[0]
  421. second = l[-1]
  422. if first is None:
  423. raise util.Abort(_('empty revision range'))
  424. if first == second and len(revs) == 1 and _revrangesep not in revs[0]:
  425. return repo.lookup(first), None
  426. return repo.lookup(first), repo.lookup(second)
# separator used by old-style revision ranges like "1:5"
_revrangesep = ':'

def revrange(repo, revs):
    """Yield revision as strings from a list of revision specifications."""
    def revfix(repo, val, defval):
        # empty component of a range ("x:" / ":x") falls back to defval
        if not val and val != 0 and defval is not None:
            return defval
        return repo[val].rev()
    # seen: revisions already emitted (for de-duplication across specs)
    # l: accumulated result set, in spec order
    seen, l = set(), revset.baseset([])
    for spec in revs:
        # sync 'seen' lazily: the single-range fast path below leaves it
        # empty, so rebuild it from l on the next iteration
        if l and not seen:
            seen = set(l)
        # attempt to parse old-style ranges first to deal with
        # things like old-tag which contain query metacharacters
        try:
            if isinstance(spec, int):
                seen.add(spec)
                l = l + revset.baseset([spec])
                continue
            if _revrangesep in spec:
                start, end = spec.split(_revrangesep, 1)
                start = revfix(repo, start, 0)
                end = revfix(repo, end, len(repo) - 1)
                if end == nullrev and start < 0:
                    start = nullrev
                rangeiter = repo.changelog.revs(start, end)
                if not seen and not l:
                    # by far the most common case: revs = ["-1:0"]
                    l = revset.baseset(rangeiter)
                    # defer syncing seen until next iteration
                    continue
                newrevs = set(rangeiter)
                if seen:
                    newrevs.difference_update(seen)
                    seen.update(newrevs)
                else:
                    seen = newrevs
                # keep the user's requested direction for this range
                l = l + revset.baseset(sorted(newrevs, reverse=start > end))
                continue
            elif spec and spec in repo: # single unquoted rev
                rev = revfix(repo, spec, None)
                if rev in seen:
                    continue
                seen.add(rev)
                l = l + revset.baseset([rev])
                continue
        except error.RepoLookupError:
            pass
        # fall through to new-style queries if old-style fails
        m = revset.match(repo.ui, spec, repo)
        if seen or l:
            dl = [r for r in m(repo, revset.spanset(repo)) if r not in seen]
            l = l + revset.baseset(dl)
            seen.update(dl)
        else:
            l = m(repo, revset.spanset(repo))
    return l
  483. def expandpats(pats):
  484. '''Expand bare globs when running on windows.
  485. On posix we assume it already has already been done by sh.'''
  486. if not util.expandglobs:
  487. return list(pats)
  488. ret = []
  489. for kindpat in pats:
  490. kind, pat = matchmod._patsplit(kindpat, None)
  491. if kind is None:
  492. try:
  493. globbed = glob.glob(pat)
  494. except re.error:
  495. globbed = [pat]
  496. if globbed:
  497. ret.extend(globbed)
  498. continue
  499. ret.append(kindpat)
  500. return ret
  501. def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
  502. '''Return a matcher and the patterns that were used.
  503. The matcher will warn about bad matches.'''
  504. if pats == ("",):
  505. pats = []
  506. if not globbed and default == 'relpath':
  507. pats = expandpats(pats or [])
  508. m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
  509. default)
  510. def badfn(f, msg):
  511. ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
  512. m.bad = badfn
  513. return m, pats
  514. def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
  515. '''Return a matcher that will warn about bad matches.'''
  516. return matchandpats(ctx, pats, opts, globbed, default)[0]
  517. def matchall(repo):
  518. '''Return a matcher that will efficiently match everything.'''
  519. return matchmod.always(repo.root, repo.getcwd())
  520. def matchfiles(repo, files):
  521. '''Return a matcher that will efficiently match exactly these files.'''
  522. return matchmod.exact(repo.root, repo.getcwd(), files)
  523. def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
  524. if dry_run is None:
  525. dry_run = opts.get('dry_run')
  526. if similarity is None:
  527. similarity = float(opts.get('similarity') or 0)
  528. # we'd use status here, except handling of symlinks and ignore is tricky
  529. m = match(repo[None], pats, opts)
  530. rejected = []
  531. m.bad = lambda x, y: rejected.append(x)
  532. added, unknown, deleted, removed = _interestingfiles(repo, m)
  533. unknownset = set(unknown)
  534. toprint = unknownset.copy()
  535. toprint.update(deleted)
  536. for abs in sorted(toprint):
  537. if repo.ui.verbose or not m.exact(abs):
  538. rel = m.rel(abs)
  539. if abs in unknownset:
  540. status = _('adding %s\n') % ((pats and rel) or abs)
  541. else:
  542. status = _('removing %s\n') % ((pats and rel) or abs)
  543. repo.ui.status(status)
  544. renames = _findrenames(repo, m, added + unknown, removed + deleted,
  545. similarity)
  546. if not dry_run:
  547. _markchanges(repo, unknown, deleted, renames)
  548. for f in rejected:
  549. if f in m.files():
  550. return 1
  551. return 0
  552. def marktouched(repo, files, similarity=0.0):
  553. '''Assert that files have somehow been operated upon. files are relative to
  554. the repo root.'''
  555. m = matchfiles(repo, files)
  556. rejected = []
  557. m.bad = lambda x, y: rejected.append(x)
  558. added, unknown, deleted, removed = _interestingfiles(repo, m)
  559. if repo.ui.verbose:
  560. unknownset = set(unknown)
  561. toprint = unknownset.copy()
  562. toprint.update(deleted)
  563. for abs in sorted(toprint):
  564. if abs in unknownset:
  565. status = _('adding %s\n') % abs
  566. else:
  567. status = _('removing %s\n') % abs
  568. repo.ui.status(status)
  569. renames = _findrenames(repo, m, added + unknown, removed + deleted,
  570. similarity)
  571. _markchanges(repo, unknown, deleted, renames)
  572. for f in rejected:
  573. if f in m.files():
  574. return 1
  575. return 0
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.
    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns (added, unknown, deleted, removed) lists of repo-relative
    paths.'''
    added, unknown, deleted, removed = [], [], [], []
    audit_path = pathutil.pathauditor(repo.root)
    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
                                full=False)
    # NOTE: the branch ORDER below matters. A file whose stat (st) is
    # missing must be classified as deleted before the 'r'/'a' states are
    # considered, and an untracked ('?') file must also pass the path
    # audit to count as unknown.
    for abs, st in walkresults.iteritems():
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != 'r' and not st:
            deleted.append(abs)
        # for finding renames
        elif dstate == 'r':
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)
    return added, unknown, deleted, removed
  599. def _findrenames(repo, matcher, added, removed, similarity):
  600. '''Find renames from removed files to added ones.'''
  601. renames = {}
  602. if similarity > 0:
  603. for old, new, score in similar.findrenames(repo, added, removed,
  604. similarity):
  605. if (repo.ui.verbose or not matcher.exact(old)
  606. or not matcher.exact(new)):
  607. repo.ui.status(_('recording removal of %s as rename to %s '
  608. '(%d%% similar)\n') %
  609. (matcher.rel(old), matcher.rel(new),
  610. score * 100))
  611. renames[new] = old
  612. return renames
  613. def _markchanges(repo, unknown, deleted, renames):
  614. '''Marks the files in unknown as added, the files in deleted as removed,
  615. and the files in renames as copied.'''
  616. wctx = repo[None]
  617. wlock = repo.wlock()
  618. try:
  619. wctx.forget(deleted)
  620. wctx.add(unknown)
  621. for new, old in renames.iteritems():
  622. wctx.copy(old, new)
  623. finally:
  624. wlock.release()
  625. def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
  626. """Update the dirstate to reflect the intent of copying src to dst. For
  627. different reasons it might not end with dst being marked as copied from src.
  628. """
  629. origsrc = repo.dirstate.copied(src) or src
  630. if dst == origsrc: # copying back a copy?
  631. if repo.dirstate[dst] not in 'mn' and not dryrun:
  632. repo.dirstate.normallookup(dst)
  633. else:
  634. if repo.dirstate[origsrc] == 'a' and origsrc == src:
  635. if not ui.quiet:
  636. ui.warn(_("%s has not been committed yet, so no copy "
  637. "data will be stored for %s.\n")
  638. % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
  639. if repo.dirstate[dst] in '?r' and not dryrun:
  640. wctx.add([dst])
  641. elif not dryrun:
  642. wctx.copy(origsrc, dst)
  643. def readrequires(opener, supported):
  644. '''Reads and parses .hg/requires and checks if all entries found
  645. are in the list of supported features.'''
  646. requirements = set(opener.read("requires").splitlines())
  647. missings = []
  648. for r in requirements:
  649. if r not in supported:
  650. if not r or not r[0].isalnum():
  651. raise error.RequirementError(_(".hg/requires file is corrupt"))
  652. missings.append(r)
  653. missings.sort()
  654. if missings:
  655. raise error.RequirementError(
  656. _("repository requires features unknown to this Mercurial: %s")
  657. % " ".join(missings),
  658. hint=_("see http://mercurial.selenic.com/wiki/MissingRequirement"
  659. " for more information"))
  660. return requirements
class filecachesubentry(object):
    '''Stat-based change detection for a single file path.

    _cacheable is tri-state: True/False once known, None while the file
    has never been successfully stat()ed.'''
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None
        if stat:
            self.cachestat = filecachesubentry.stat(self.path)
            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None
    def refresh(self):
        # record the current stat as the new baseline
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)
    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable
        # we don't know yet, assume it is for now
        return True
    def changed(self):
        '''True when the file's stat differs from the recorded baseline
        (or when change detection is impossible on this filesystem).'''
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True
        newstat = filecachesubentry.stat(self.path)
        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()
            # check again
            if not self._cacheable:
                return True
        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False
    @staticmethod
    def stat(path):
        # returns None (implicitly) for a missing file; other OS errors
        # propagate to the caller
        try:
            return util.cachestat(path)
        except OSError, e:
            if e.errno != errno.ENOENT:
                raise
  704. class filecacheentry(object):
  705. def __init__(self, paths, stat=True):
  706. self._entries = []
  707. for path in paths:
  708. self._entries.append(filecachesubentry(path, stat))
  709. def changed(self):
  710. '''true if any entry has changed'''
  711. for entry in self._entries:
  712. if entry.changed():
  713. return True
  714. return False
  715. def refresh(self):
  716. for entry in self._entries:
  717. entry.refresh()
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behaviour as
    propertycache).
    '''
    def __init__(self, *paths):
        self.paths = paths
    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.
        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        return obj.join(fname)
    def __call__(self, func):
        # decorator protocol: remember the wrapped function and its name
        self.func = func
        self.name = func.__name__
        return self
    def __get__(self, obj, type=None):
        # do we need to check if the file changed?
        # invariant: name in obj.__dict__ implies name in obj._filecache
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]
        entry = obj._filecache.get(self.name)
        if entry:
            if entry.changed():
                # underlying file changed: recompute the cached object
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]
            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)
            obj._filecache[self.name] = entry
        obj.__dict__[self.name] = entry.obj
        return entry.obj
    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]
        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x
    def __delete__(self, obj):
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
  777. class dirs(object):
  778. '''a multiset of directory names from a dirstate or manifest'''
  779. def __init__(self, map, skip=None):
  780. self._dirs = {}
  781. addpath = self.addpath
  782. if util.safehasattr(map, 'iteritems') and skip is not None:
  783. for f, s in map.iteritems():
  784. if s[0] != skip:
  785. addpath(f)
  786. else:
  787. for f in map:
  788. addpath(f)
  789. def addpath(self, path):
  790. dirs = self._dirs
  791. for base in finddirs(path):
  792. if base in dirs:
  793. dirs[base] += 1
  794. return
  795. dirs[base] = 1
  796. def delpath(self, path):
  797. dirs = self._dirs
  798. for base in finddirs(path):
  799. if dirs[base] > 1:
  800. dirs[base] -= 1
  801. return
  802. del dirs[base]
  803. def __iter__(self):
  804. return self._dirs.iterkeys()
  805. def __contains__(self, d):
  806. return d in self._dirs
# prefer the C implementation of dirs from parsers when available
if util.safehasattr(parsers, 'dirs'):
    dirs = parsers.dirs
  809. def finddirs(path):
  810. pos = path.rfind('/')
  811. while pos != -1:
  812. yield path[:pos]
  813. pos = path.rfind('/', 0, pos)