
/mercurial/context.py

https://bitbucket.org/mirror/mercurial/
Python | 1645 lines | 1606 code | 11 blank | 28 comment | 11 complexity | 6ed0bdc7a9883ed47a743b413b50b394 MD5
Possible License(s): GPL-2.0

Large files are truncated; this listing shows only the first portion of context.py.

  1. # context.py - changeset and file context objects for mercurial
  2. #
  3. # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
  4. #
  5. # This software may be used and distributed according to the terms of the
  6. # GNU General Public License version 2 or any later version.
  7. from node import nullid, nullrev, short, hex, bin
  8. from i18n import _
  9. import mdiff, error, util, scmutil, subrepo, patch, encoding, phases
  10. import match as matchmod
  11. import os, errno, stat
  12. import obsolete as obsmod
  13. import repoview
  14. import fileset
  15. import revlog
  16. propertycache = util.propertycache
  17. class basectx(object):
  18. """A basectx object represents the common logic for its children:
  19. changectx: read-only context that is already present in the repo,
  20. workingctx: a context that represents the working directory and can
  21. be committed,
  22. memctx: a context that represents changes in-memory and can also
  23. be committed."""
  24. def __new__(cls, repo, changeid='', *args, **kwargs):
  25. if isinstance(changeid, basectx):
  26. return changeid
  27. o = super(basectx, cls).__new__(cls)
  28. o._repo = repo
  29. o._rev = nullrev
  30. o._node = nullid
  31. return o
  32. def __str__(self):
  33. return short(self.node())
  34. def __int__(self):
  35. return self.rev()
  36. def __repr__(self):
  37. return "<%s %s>" % (type(self).__name__, str(self))
  38. def __eq__(self, other):
  39. try:
  40. return type(self) == type(other) and self._rev == other._rev
  41. except AttributeError:
  42. return False
  43. def __ne__(self, other):
  44. return not (self == other)
  45. def __contains__(self, key):
  46. return key in self._manifest
  47. def __getitem__(self, key):
  48. return self.filectx(key)
  49. def __iter__(self):
  50. for f in sorted(self._manifest):
  51. yield f
  52. def _manifestmatches(self, match, s):
  53. """generate a new manifest filtered by the match argument
  54. This method is for internal use only and mainly exists to provide an
  55. object oriented way for other contexts to customize the manifest
  56. generation.
  57. """
  58. mf = self.manifest().copy()
  59. if match.always():
  60. return mf
  61. for fn in mf.keys():
  62. if not match(fn):
  63. del mf[fn]
  64. return mf
  65. def _matchstatus(self, other, s, match, listignored, listclean,
  66. listunknown):
  67. """return match.always if match is none
  68. This internal method provides a way for child objects to override the
  69. match operator.
  70. """
  71. return match or matchmod.always(self._repo.root, self._repo.getcwd())
  72. def _prestatus(self, other, s, match, listignored, listclean, listunknown):
  73. """provide a hook to allow child objects to preprocess status results
  74. For example, this allows other contexts, such as workingctx, to query
  75. the dirstate before comparing the manifests.
  76. """
  77. # load earliest manifest first for caching reasons
  78. if self.rev() < other.rev():
  79. self.manifest()
  80. return s
  81. def _poststatus(self, other, s, match, listignored, listclean, listunknown):
  82. """provide a hook to allow child objects to postprocess status results
  83. For example, this allows other contexts, such as workingctx, to filter
85. suspect symlinks in the case of FAT32 and NTFS filesystems.
  85. """
  86. return s
  87. def _buildstatus(self, other, s, match, listignored, listclean,
  88. listunknown):
  89. """build a status with respect to another context"""
  90. mf1 = other._manifestmatches(match, s)
  91. mf2 = self._manifestmatches(match, s)
  92. modified, added, clean = [], [], []
  93. deleted, unknown, ignored = s[3], [], []
  94. withflags = mf1.withflags() | mf2.withflags()
  95. for fn, mf2node in mf2.iteritems():
  96. if fn in mf1:
  97. if (fn not in deleted and
  98. ((fn in withflags and mf1.flags(fn) != mf2.flags(fn)) or
  99. (mf1[fn] != mf2node and
  100. (mf2node or self[fn].cmp(other[fn]))))):
  101. modified.append(fn)
  102. elif listclean:
  103. clean.append(fn)
  104. del mf1[fn]
  105. elif fn not in deleted:
  106. added.append(fn)
  107. removed = mf1.keys()
  108. return [modified, added, removed, deleted, unknown, ignored, clean]
  109. @propertycache
  110. def substate(self):
  111. return subrepo.state(self, self._repo.ui)
  112. def subrev(self, subpath):
  113. return self.substate[subpath][1]
  114. def rev(self):
  115. return self._rev
  116. def node(self):
  117. return self._node
  118. def hex(self):
  119. return hex(self.node())
  120. def manifest(self):
  121. return self._manifest
  122. def phasestr(self):
  123. return phases.phasenames[self.phase()]
  124. def mutable(self):
  125. return self.phase() > phases.public
  126. def getfileset(self, expr):
  127. return fileset.getfileset(self, expr)
  128. def obsolete(self):
  129. """True if the changeset is obsolete"""
  130. return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
  131. def extinct(self):
  132. """True if the changeset is extinct"""
  133. return self.rev() in obsmod.getrevs(self._repo, 'extinct')
  134. def unstable(self):
  135. """True if the changeset is not obsolete but it's ancestor are"""
  136. return self.rev() in obsmod.getrevs(self._repo, 'unstable')
  137. def bumped(self):
  138. """True if the changeset try to be a successor of a public changeset
  139. Only non-public and non-obsolete changesets may be bumped.
  140. """
  141. return self.rev() in obsmod.getrevs(self._repo, 'bumped')
  142. def divergent(self):
  143. """Is a successors of a changeset with multiple possible successors set
  144. Only non-public and non-obsolete changesets may be divergent.
  145. """
  146. return self.rev() in obsmod.getrevs(self._repo, 'divergent')
  147. def troubled(self):
  148. """True if the changeset is either unstable, bumped or divergent"""
  149. return self.unstable() or self.bumped() or self.divergent()
  150. def troubles(self):
  151. """return the list of troubles affecting this changesets.
  152. Troubles are returned as strings. possible values are:
  153. - unstable,
  154. - bumped,
  155. - divergent.
  156. """
  157. troubles = []
  158. if self.unstable():
  159. troubles.append('unstable')
  160. if self.bumped():
  161. troubles.append('bumped')
  162. if self.divergent():
  163. troubles.append('divergent')
  164. return troubles
  165. def parents(self):
  166. """return contexts for each parent changeset"""
  167. return self._parents
  168. def p1(self):
  169. return self._parents[0]
  170. def p2(self):
  171. if len(self._parents) == 2:
  172. return self._parents[1]
  173. return changectx(self._repo, -1)
  174. def _fileinfo(self, path):
  175. if '_manifest' in self.__dict__:
  176. try:
  177. return self._manifest[path], self._manifest.flags(path)
  178. except KeyError:
  179. raise error.ManifestLookupError(self._node, path,
  180. _('not found in manifest'))
  181. if '_manifestdelta' in self.__dict__ or path in self.files():
  182. if path in self._manifestdelta:
  183. return (self._manifestdelta[path],
  184. self._manifestdelta.flags(path))
  185. node, flag = self._repo.manifest.find(self._changeset[0], path)
  186. if not node:
  187. raise error.ManifestLookupError(self._node, path,
  188. _('not found in manifest'))
  189. return node, flag
  190. def filenode(self, path):
  191. return self._fileinfo(path)[0]
  192. def flags(self, path):
  193. try:
  194. return self._fileinfo(path)[1]
  195. except error.LookupError:
  196. return ''
  197. def sub(self, path):
  198. return subrepo.subrepo(self, path)
  199. def match(self, pats=[], include=None, exclude=None, default='glob'):
  200. r = self._repo
  201. return matchmod.match(r.root, r.getcwd(), pats,
  202. include, exclude, default,
  203. auditor=r.auditor, ctx=self)
  204. def diff(self, ctx2=None, match=None, **opts):
  205. """Returns a diff generator for the given contexts and matcher"""
  206. if ctx2 is None:
  207. ctx2 = self.p1()
  208. if ctx2 is not None:
  209. ctx2 = self._repo[ctx2]
  210. diffopts = patch.diffopts(self._repo.ui, opts)
  211. return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)
  212. @propertycache
  213. def _dirs(self):
  214. return scmutil.dirs(self._manifest)
  215. def dirs(self):
  216. return self._dirs
  217. def dirty(self):
  218. return False
  219. def status(self, other=None, match=None, listignored=False,
  220. listclean=False, listunknown=False, listsubrepos=False):
  221. """return status of files between two nodes or node and working
  222. directory.
  223. If other is None, compare this node with working directory.
  224. returns (modified, added, removed, deleted, unknown, ignored, clean)
  225. """
  226. ctx1 = self
  227. ctx2 = self._repo[other]
  228. # This next code block is, admittedly, fragile logic that tests for
  229. # reversing the contexts and wouldn't need to exist if it weren't for
  230. # the fast (and common) code path of comparing the working directory
  231. # with its first parent.
  232. #
  233. # What we're aiming for here is the ability to call:
  234. #
  235. # workingctx.status(parentctx)
  236. #
  237. # If we always built the manifest for each context and compared those,
  238. # then we'd be done. But the special case of the above call means we
  239. # just copy the manifest of the parent.
  240. reversed = False
  241. if (not isinstance(ctx1, changectx)
  242. and isinstance(ctx2, changectx)):
  243. reversed = True
  244. ctx1, ctx2 = ctx2, ctx1
  245. r = [[], [], [], [], [], [], []]
  246. match = ctx2._matchstatus(ctx1, r, match, listignored, listclean,
  247. listunknown)
  248. r = ctx2._prestatus(ctx1, r, match, listignored, listclean, listunknown)
  249. r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
  250. listunknown)
  251. r = ctx2._poststatus(ctx1, r, match, listignored, listclean,
  252. listunknown)
  253. if reversed:
  254. r[1], r[2], r[3], r[4] = r[2], r[1], r[4], r[3]
  255. if listsubrepos:
  256. for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
  257. rev2 = ctx2.subrev(subpath)
  258. try:
  259. submatch = matchmod.narrowmatcher(subpath, match)
  260. s = sub.status(rev2, match=submatch, ignored=listignored,
  261. clean=listclean, unknown=listunknown,
  262. listsubrepos=True)
  263. for rfiles, sfiles in zip(r, s):
  264. rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
  265. except error.LookupError:
  266. self._repo.ui.status(_("skipping missing "
  267. "subrepository: %s\n") % subpath)
  268. for l in r:
  269. l.sort()
  270. # we return a tuple to signify that this list isn't changing
  271. return tuple(r)
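
The following sketch (not part of context.py) shows how the read-only context API above is typically driven; the repository path is a placeholder:

from mercurial import ui as uimod, hg

repo = hg.repository(uimod.ui(), '/path/to/repo')  # placeholder path
ctx = repo['tip']                    # localrepo.__getitem__ builds a changectx
rev, node, branch = ctx.rev(), ctx.hex(), ctx.branch()
touched = ctx.files()                # files touched by the changeset
# status() with other=None compares the changeset against the working directory
modified, added, removed, deleted, unknown, ignored, clean = ctx.status()
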
  272. def makememctx(repo, parents, text, user, date, branch, files, store,
  273. editor=None):
  274. def getfilectx(repo, memctx, path):
  275. data, (islink, isexec), copied = store.getfile(path)
  276. return memfilectx(repo, path, data, islink=islink, isexec=isexec,
  277. copied=copied, memctx=memctx)
  278. extra = {}
  279. if branch:
  280. extra['branch'] = encoding.fromlocal(branch)
  281. ctx = memctx(repo, parents, text, files, getfilectx, user,
  282. date, extra, editor)
  283. return ctx
  284. class changectx(basectx):
  285. """A changecontext object makes access to data related to a particular
  286. changeset convenient. It represents a read-only context already present in
  287. the repo."""
  288. def __init__(self, repo, changeid=''):
  289. """changeid is a revision number, node, or tag"""
  290. # since basectx.__new__ already took care of copying the object, we
  291. # don't need to do anything in __init__, so we just exit here
  292. if isinstance(changeid, basectx):
  293. return
  294. if changeid == '':
  295. changeid = '.'
  296. self._repo = repo
  297. if isinstance(changeid, int):
  298. try:
  299. self._node = repo.changelog.node(changeid)
  300. except IndexError:
  301. raise error.RepoLookupError(
  302. _("unknown revision '%s'") % changeid)
  303. self._rev = changeid
  304. return
  305. if isinstance(changeid, long):
  306. changeid = str(changeid)
  307. if changeid == '.':
  308. self._node = repo.dirstate.p1()
  309. self._rev = repo.changelog.rev(self._node)
  310. return
  311. if changeid == 'null':
  312. self._node = nullid
  313. self._rev = nullrev
  314. return
  315. if changeid == 'tip':
  316. self._node = repo.changelog.tip()
  317. self._rev = repo.changelog.rev(self._node)
  318. return
  319. if len(changeid) == 20:
  320. try:
  321. self._node = changeid
  322. self._rev = repo.changelog.rev(changeid)
  323. return
  324. except LookupError:
  325. pass
  326. try:
  327. r = int(changeid)
  328. if str(r) != changeid:
  329. raise ValueError
  330. l = len(repo.changelog)
  331. if r < 0:
  332. r += l
  333. if r < 0 or r >= l:
  334. raise ValueError
  335. self._rev = r
  336. self._node = repo.changelog.node(r)
  337. return
  338. except (ValueError, OverflowError, IndexError):
  339. pass
  340. if len(changeid) == 40:
  341. try:
  342. self._node = bin(changeid)
  343. self._rev = repo.changelog.rev(self._node)
  344. return
  345. except (TypeError, LookupError):
  346. pass
  347. if changeid in repo._bookmarks:
  348. self._node = repo._bookmarks[changeid]
  349. self._rev = repo.changelog.rev(self._node)
  350. return
  351. if changeid in repo._tagscache.tags:
  352. self._node = repo._tagscache.tags[changeid]
  353. self._rev = repo.changelog.rev(self._node)
  354. return
  355. try:
  356. self._node = repo.branchtip(changeid)
  357. self._rev = repo.changelog.rev(self._node)
  358. return
  359. except error.RepoLookupError:
  360. pass
  361. self._node = repo.changelog._partialmatch(changeid)
  362. if self._node is not None:
  363. self._rev = repo.changelog.rev(self._node)
  364. return
  365. # lookup failed
  366. # check if it might have come from damaged dirstate
  367. #
  368. # XXX we could avoid the unfiltered if we had a recognizable exception
  369. # for filtered changeset access
  370. if changeid in repo.unfiltered().dirstate.parents():
  371. raise error.Abort(_("working directory has unknown parent '%s'!")
  372. % short(changeid))
  373. try:
  374. if len(changeid) == 20:
  375. changeid = hex(changeid)
  376. except TypeError:
  377. pass
  378. raise error.RepoLookupError(
  379. _("unknown revision '%s'") % changeid)
  380. def __hash__(self):
  381. try:
  382. return hash(self._rev)
  383. except AttributeError:
  384. return id(self)
  385. def __nonzero__(self):
  386. return self._rev != nullrev
  387. @propertycache
  388. def _changeset(self):
  389. return self._repo.changelog.read(self.rev())
  390. @propertycache
  391. def _manifest(self):
  392. return self._repo.manifest.read(self._changeset[0])
  393. @propertycache
  394. def _manifestdelta(self):
  395. return self._repo.manifest.readdelta(self._changeset[0])
  396. @propertycache
  397. def _parents(self):
  398. p = self._repo.changelog.parentrevs(self._rev)
  399. if p[1] == nullrev:
  400. p = p[:-1]
  401. return [changectx(self._repo, x) for x in p]
  402. def changeset(self):
  403. return self._changeset
  404. def manifestnode(self):
  405. return self._changeset[0]
  406. def user(self):
  407. return self._changeset[1]
  408. def date(self):
  409. return self._changeset[2]
  410. def files(self):
  411. return self._changeset[3]
  412. def description(self):
  413. return self._changeset[4]
  414. def branch(self):
  415. return encoding.tolocal(self._changeset[5].get("branch"))
  416. def closesbranch(self):
  417. return 'close' in self._changeset[5]
  418. def extra(self):
  419. return self._changeset[5]
  420. def tags(self):
  421. return self._repo.nodetags(self._node)
  422. def bookmarks(self):
  423. return self._repo.nodebookmarks(self._node)
  424. def phase(self):
  425. return self._repo._phasecache.phase(self._repo, self._rev)
  426. def hidden(self):
  427. return self._rev in repoview.filterrevs(self._repo, 'visible')
  428. def children(self):
  429. """return contexts for each child changeset"""
  430. c = self._repo.changelog.children(self._node)
  431. return [changectx(self._repo, x) for x in c]
  432. def ancestors(self):
  433. for a in self._repo.changelog.ancestors([self._rev]):
  434. yield changectx(self._repo, a)
  435. def descendants(self):
  436. for d in self._repo.changelog.descendants([self._rev]):
  437. yield changectx(self._repo, d)
  438. def filectx(self, path, fileid=None, filelog=None):
  439. """get a file context from this changeset"""
  440. if fileid is None:
  441. fileid = self.filenode(path)
  442. return filectx(self._repo, path, fileid=fileid,
  443. changectx=self, filelog=filelog)
  444. def ancestor(self, c2, warn=False):
  445. """
  446. return the "best" ancestor context of self and c2
  447. """
  448. # deal with workingctxs
  449. n2 = c2._node
  450. if n2 is None:
  451. n2 = c2._parents[0]._node
  452. cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
  453. if not cahs:
  454. anc = nullid
  455. elif len(cahs) == 1:
  456. anc = cahs[0]
  457. else:
  458. for r in self._repo.ui.configlist('merge', 'preferancestor'):
  459. ctx = changectx(self._repo, r)
  460. anc = ctx.node()
  461. if anc in cahs:
  462. break
  463. else:
  464. anc = self._repo.changelog.ancestor(self._node, n2)
  465. if warn:
  466. self._repo.ui.status(
  467. (_("note: using %s as ancestor of %s and %s\n") %
  468. (short(anc), short(self._node), short(n2))) +
  469. ''.join(_(" alternatively, use --config "
  470. "merge.preferancestor=%s\n") %
  471. short(n) for n in sorted(cahs) if n != anc))
  472. return changectx(self._repo, anc)
  473. def descendant(self, other):
  474. """True if other is descendant of this changeset"""
  475. return self._repo.changelog.descendant(self._rev, other._rev)
  476. def walk(self, match):
  477. fset = set(match.files())
  478. # for dirstate.walk, files=['.'] means "walk the whole tree".
  479. # follow that here, too
  480. fset.discard('.')
  481. # avoid the entire walk if we're only looking for specific files
  482. if fset and not match.anypats():
  483. if util.all([fn in self for fn in fset]):
  484. for fn in sorted(fset):
  485. if match(fn):
  486. yield fn
  487. raise StopIteration
  488. for fn in self:
  489. if fn in fset:
  490. # specified pattern is the exact name
  491. fset.remove(fn)
  492. if match(fn):
  493. yield fn
  494. for fn in sorted(fset):
  495. if fn in self._dirs:
  496. # specified pattern is a directory
  497. continue
  498. match.bad(fn, _('no such file in rev %s') % self)
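
The lookup cascade in changectx.__init__ above accepts several identifier forms; a sketch, reusing the hypothetical repo from the earlier example (the hex prefix is made up):

ctx = repo[0]                 # integer revision number
ctx = repo['.']               # first parent of the working directory
ctx = repo['null']            # the null revision
ctx = repo['tip']             # repository tip
ctx = repo['deadbeef0123']    # hex node prefix, resolved via _partialmatch
ctx = repo['default']         # bookmark, tag, or branch name, tried in that order
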
  499. class basefilectx(object):
  500. """A filecontext object represents the common logic for its children:
  501. filectx: read-only access to a filerevision that is already present
  502. in the repo,
  503. workingfilectx: a filecontext that represents files from the working
  504. directory,
  505. memfilectx: a filecontext that represents files in-memory."""
  506. def __new__(cls, repo, path, *args, **kwargs):
  507. return super(basefilectx, cls).__new__(cls)
  508. @propertycache
  509. def _filelog(self):
  510. return self._repo.file(self._path)
  511. @propertycache
  512. def _changeid(self):
  513. if '_changeid' in self.__dict__:
  514. return self._changeid
  515. elif '_changectx' in self.__dict__:
  516. return self._changectx.rev()
  517. else:
  518. return self._filelog.linkrev(self._filerev)
  519. @propertycache
  520. def _filenode(self):
  521. if '_fileid' in self.__dict__:
  522. return self._filelog.lookup(self._fileid)
  523. else:
  524. return self._changectx.filenode(self._path)
  525. @propertycache
  526. def _filerev(self):
  527. return self._filelog.rev(self._filenode)
  528. @propertycache
  529. def _repopath(self):
  530. return self._path
  531. def __nonzero__(self):
  532. try:
  533. self._filenode
  534. return True
  535. except error.LookupError:
  536. # file is missing
  537. return False
  538. def __str__(self):
  539. return "%s@%s" % (self.path(), self._changectx)
  540. def __repr__(self):
  541. return "<%s %s>" % (type(self).__name__, str(self))
  542. def __hash__(self):
  543. try:
  544. return hash((self._path, self._filenode))
  545. except AttributeError:
  546. return id(self)
  547. def __eq__(self, other):
  548. try:
  549. return (type(self) == type(other) and self._path == other._path
  550. and self._filenode == other._filenode)
  551. except AttributeError:
  552. return False
  553. def __ne__(self, other):
  554. return not (self == other)
  555. def filerev(self):
  556. return self._filerev
  557. def filenode(self):
  558. return self._filenode
  559. def flags(self):
  560. return self._changectx.flags(self._path)
  561. def filelog(self):
  562. return self._filelog
  563. def rev(self):
  564. return self._changeid
  565. def linkrev(self):
  566. return self._filelog.linkrev(self._filerev)
  567. def node(self):
  568. return self._changectx.node()
  569. def hex(self):
  570. return self._changectx.hex()
  571. def user(self):
  572. return self._changectx.user()
  573. def date(self):
  574. return self._changectx.date()
  575. def files(self):
  576. return self._changectx.files()
  577. def description(self):
  578. return self._changectx.description()
  579. def branch(self):
  580. return self._changectx.branch()
  581. def extra(self):
  582. return self._changectx.extra()
  583. def phase(self):
  584. return self._changectx.phase()
  585. def phasestr(self):
  586. return self._changectx.phasestr()
  587. def manifest(self):
  588. return self._changectx.manifest()
  589. def changectx(self):
  590. return self._changectx
  591. def path(self):
  592. return self._path
  593. def isbinary(self):
  594. try:
  595. return util.binary(self.data())
  596. except IOError:
  597. return False
  598. def cmp(self, fctx):
  599. """compare with other file context
600. returns True if different from fctx.
  601. """
  602. if (fctx._filerev is None
  603. and (self._repo._encodefilterpats
  604. # if file data starts with '\1\n', empty metadata block is
  605. # prepended, which adds 4 bytes to filelog.size().
  606. or self.size() - 4 == fctx.size())
  607. or self.size() == fctx.size()):
  608. return self._filelog.cmp(self._filenode, fctx.data())
  609. return True
  610. def parents(self):
  611. p = self._path
  612. fl = self._filelog
  613. pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]
  614. r = self._filelog.renamed(self._filenode)
  615. if r:
  616. pl[0] = (r[0], r[1], None)
  617. return [filectx(self._repo, p, fileid=n, filelog=l)
  618. for p, n, l in pl if n != nullid]
  619. def p1(self):
  620. return self.parents()[0]
  621. def p2(self):
  622. p = self.parents()
  623. if len(p) == 2:
  624. return p[1]
  625. return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
  626. def annotate(self, follow=False, linenumber=None, diffopts=None):
  627. '''returns a list of tuples of (ctx, line) for each line
  628. in the file, where ctx is the filectx of the node where
  629. that line was last changed.
  630. This returns tuples of ((ctx, linenumber), line) for each line,
  631. if "linenumber" parameter is NOT "None".
632. In such tuples, linenumber is the line's number at its first
633. appearance in the managed file.
634. To reduce annotation cost,
635. a fixed value (False) is returned as linenumber
636. if the "linenumber" parameter is "False".'''
  637. def decorate_compat(text, rev):
  638. return ([rev] * len(text.splitlines()), text)
  639. def without_linenumber(text, rev):
  640. return ([(rev, False)] * len(text.splitlines()), text)
  641. def with_linenumber(text, rev):
  642. size = len(text.splitlines())
  643. return ([(rev, i) for i in xrange(1, size + 1)], text)
  644. decorate = (((linenumber is None) and decorate_compat) or
  645. (linenumber and with_linenumber) or
  646. without_linenumber)
  647. def pair(parent, child):
  648. blocks = mdiff.allblocks(parent[1], child[1], opts=diffopts,
  649. refine=True)
  650. for (a1, a2, b1, b2), t in blocks:
  651. # Changed blocks ('!') or blocks made only of blank lines ('~')
  652. # belong to the child.
  653. if t == '=':
  654. child[0][b1:b2] = parent[0][a1:a2]
  655. return child
  656. getlog = util.lrucachefunc(lambda x: self._repo.file(x))
  657. def parents(f):
  658. pl = f.parents()
  659. # Don't return renamed parents if we aren't following.
  660. if not follow:
  661. pl = [p for p in pl if p.path() == f.path()]
  662. # renamed filectx won't have a filelog yet, so set it
  663. # from the cache to save time
  664. for p in pl:
  665. if not '_filelog' in p.__dict__:
  666. p._filelog = getlog(p.path())
  667. return pl
  668. # use linkrev to find the first changeset where self appeared
  669. if self.rev() != self.linkrev():
  670. base = self.filectx(self.filenode())
  671. else:
  672. base = self
  673. # This algorithm would prefer to be recursive, but Python is a
  674. # bit recursion-hostile. Instead we do an iterative
  675. # depth-first search.
  676. visit = [base]
  677. hist = {}
  678. pcache = {}
  679. needed = {base: 1}
  680. while visit:
  681. f = visit[-1]
  682. pcached = f in pcache
  683. if not pcached:
  684. pcache[f] = parents(f)
  685. ready = True
  686. pl = pcache[f]
  687. for p in pl:
  688. if p not in hist:
  689. ready = False
  690. visit.append(p)
  691. if not pcached:
  692. needed[p] = needed.get(p, 0) + 1
  693. if ready:
  694. visit.pop()
  695. reusable = f in hist
  696. if reusable:
  697. curr = hist[f]
  698. else:
  699. curr = decorate(f.data(), f)
  700. for p in pl:
  701. if not reusable:
  702. curr = pair(hist[p], curr)
  703. if needed[p] == 1:
  704. del hist[p]
  705. del needed[p]
  706. else:
  707. needed[p] -= 1
  708. hist[f] = curr
  709. pcache[f] = []
  710. return zip(hist[base][0], hist[base][1].splitlines(True))
  711. def ancestors(self, followfirst=False):
  712. visit = {}
  713. c = self
  714. cut = followfirst and 1 or None
  715. while True:
  716. for parent in c.parents()[:cut]:
  717. visit[(parent.rev(), parent.node())] = parent
  718. if not visit:
  719. break
  720. c = visit.pop(max(visit))
  721. yield c
  722. class filectx(basefilectx):
  723. """A filecontext object makes access to data related to a particular
  724. filerevision convenient."""
  725. def __init__(self, repo, path, changeid=None, fileid=None,
  726. filelog=None, changectx=None):
  727. """changeid can be a changeset revision, node, or tag.
  728. fileid can be a file revision or node."""
  729. self._repo = repo
  730. self._path = path
  731. assert (changeid is not None
  732. or fileid is not None
  733. or changectx is not None), \
  734. ("bad args: changeid=%r, fileid=%r, changectx=%r"
  735. % (changeid, fileid, changectx))
  736. if filelog is not None:
  737. self._filelog = filelog
  738. if changeid is not None:
  739. self._changeid = changeid
  740. if changectx is not None:
  741. self._changectx = changectx
  742. if fileid is not None:
  743. self._fileid = fileid
  744. @propertycache
  745. def _changectx(self):
  746. try:
  747. return changectx(self._repo, self._changeid)
  748. except error.RepoLookupError:
  749. # Linkrev may point to any revision in the repository. When the
  750. # repository is filtered this may lead to `filectx` trying to build
751. # `changectx` for a filtered revision. In such a case we fall back to
752. # creating `changectx` on the unfiltered version of the repository.
753. # This fallback should not be an issue because `changectx` objects from
754. # `filectx` are not used in complex operations that care about
755. # filtering.
756. #
757. # This fallback is a cheap and dirty fix that prevents several
758. # crashes. It does not ensure the behavior is correct. However the
759. # behavior was not correct before filtering either, and "incorrect
760. # behavior" is seen as better than a "crash".
761. #
762. # Linkrevs have several serious problems with filtering that are
763. # complicated to solve. Proper handling of the issue here should be
764. # considered when fixing the linkrev issues is on the table.
  765. return changectx(self._repo.unfiltered(), self._changeid)
  766. def filectx(self, fileid):
  767. '''opens an arbitrary revision of the file without
  768. opening a new filelog'''
  769. return filectx(self._repo, self._path, fileid=fileid,
  770. filelog=self._filelog)
  771. def data(self):
  772. return self._filelog.read(self._filenode)
  773. def size(self):
  774. return self._filelog.size(self._filerev)
  775. def renamed(self):
  776. """check if file was actually renamed in this changeset revision
777. If a rename is logged in the file revision, we report the copy for the
778. changeset only if the file revision's linkrev points back to the changeset
779. in question or both changeset parents contain different file revisions.
  780. """
  781. renamed = self._filelog.renamed(self._filenode)
  782. if not renamed:
  783. return renamed
  784. if self.rev() == self.linkrev():
  785. return renamed
  786. name = self.path()
  787. fnode = self._filenode
  788. for p in self._changectx.parents():
  789. try:
  790. if fnode == p.filenode(name):
  791. return None
  792. except error.LookupError:
  793. pass
  794. return renamed
  795. def children(self):
  796. # hard for renames
  797. c = self._filelog.children(self._filenode)
  798. return [filectx(self._repo, self._path, fileid=x,
  799. filelog=self._filelog) for x in c]
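
A sketch of file-level access through filectx, again with the hypothetical repo; 'README' stands in for any tracked path:

fctx = repo['tip']['README']      # basectx.__getitem__ returns a filectx
data = fctx.data()                # file contents at that revision
flags = fctx.flags()              # '', 'x' (executable) or 'l' (symlink)
fparents = fctx.parents()         # parent filectxs, following recorded renames
annotated = fctx.annotate()       # list of (filectx, line) pairs
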
  800. class committablectx(basectx):
  801. """A committablectx object provides common functionality for a context that
  802. wants the ability to commit, e.g. workingctx or memctx."""
  803. def __init__(self, repo, text="", user=None, date=None, extra=None,
  804. changes=None):
  805. self._repo = repo
  806. self._rev = None
  807. self._node = None
  808. self._text = text
  809. if date:
  810. self._date = util.parsedate(date)
  811. if user:
  812. self._user = user
  813. if changes:
  814. self._status = changes
  815. self._extra = {}
  816. if extra:
  817. self._extra = extra.copy()
  818. if 'branch' not in self._extra:
  819. try:
  820. branch = encoding.fromlocal(self._repo.dirstate.branch())
  821. except UnicodeDecodeError:
  822. raise util.Abort(_('branch name not in UTF-8!'))
  823. self._extra['branch'] = branch
  824. if self._extra['branch'] == '':
  825. self._extra['branch'] = 'default'
  826. def __str__(self):
  827. return str(self._parents[0]) + "+"
  828. def __nonzero__(self):
  829. return True
  830. def _buildflagfunc(self):
  831. # Create a fallback function for getting file flags when the
  832. # filesystem doesn't support them
  833. copiesget = self._repo.dirstate.copies().get
  834. if len(self._parents) < 2:
  835. # when we have one parent, it's easy: copy from parent
  836. man = self._parents[0].manifest()
  837. def func(f):
  838. f = copiesget(f, f)
  839. return man.flags(f)
  840. else:
  841. # merges are tricky: we try to reconstruct the unstored
  842. # result from the merge (issue1802)
  843. p1, p2 = self._parents
  844. pa = p1.ancestor(p2)
  845. m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
  846. def func(f):
  847. f = copiesget(f, f) # may be wrong for merges with copies
  848. fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
  849. if fl1 == fl2:
  850. return fl1
  851. if fl1 == fla:
  852. return fl2
  853. if fl2 == fla:
  854. return fl1
  855. return '' # punt for conflicts
  856. return func
  857. @propertycache
  858. def _flagfunc(self):
  859. return self._repo.dirstate.flagfunc(self._buildflagfunc)
  860. @propertycache
  861. def _manifest(self):
  862. """generate a manifest corresponding to the values in self._status"""
  863. man = self._parents[0].manifest().copy()
  864. if len(self._parents) > 1:
  865. man2 = self.p2().manifest()
  866. def getman(f):
  867. if f in man:
  868. return man
  869. return man2
  870. else:
  871. getman = lambda f: man
  872. copied = self._repo.dirstate.copies()
  873. ff = self._flagfunc
  874. modified, added, removed, deleted = self._status[:4]
  875. for i, l in (("a", added), ("m", modified)):
  876. for f in l:
  877. orig = copied.get(f, f)
  878. man[f] = getman(orig).get(orig, nullid) + i
  879. try:
  880. man.set(f, ff(f))
  881. except OSError:
  882. pass
  883. for f in deleted + removed:
  884. if f in man:
  885. del man[f]
  886. return man
  887. @propertycache
  888. def _status(self):
  889. return self._repo.status()
  890. @propertycache
  891. def _user(self):
  892. return self._repo.ui.username()
  893. @propertycache
  894. def _date(self):
  895. return util.makedate()
  896. def subrev(self, subpath):
  897. return None
  898. def user(self):
  899. return self._user or self._repo.ui.username()
  900. def date(self):
  901. return self._date
  902. def description(self):
  903. return self._text
  904. def files(self):
  905. return sorted(self._status[0] + self._status[1] + self._status[2])
  906. def modified(self):
  907. return self._status[0]
  908. def added(self):
  909. return self._status[1]
  910. def removed(self):
  911. return self._status[2]
  912. def deleted(self):
  913. return self._status[3]
  914. def unknown(self):
  915. return self._status[4]
  916. def ignored(self):
  917. return self._status[5]
  918. def clean(self):
  919. return self._status[6]
  920. def branch(self):
  921. return encoding.tolocal(self._extra['branch'])
  922. def closesbranch(self):
  923. return 'close' in self._extra
  924. def extra(self):
  925. return self._extra
  926. def tags(self):
  927. t = []
  928. for p in self.parents():
  929. t.extend(p.tags())
  930. return t
  931. def bookmarks(self):
  932. b = []
  933. for p in self.parents():
  934. b.extend(p.bookmarks())
  935. return b
  936. def phase(self):
  937. phase = phases.draft # default phase to draft
  938. for p in self.parents():
  939. phase = max(phase, p.phase())
  940. return phase
  941. def hidden(self):
  942. return False
  943. def children(self):
  944. return []
  945. def flags(self, path):
  946. if '_manifest' in self.__dict__:
  947. try:
  948. return self._manifest.flags(path)
  949. except KeyError:
  950. return ''
  951. try:
  952. return self._flagfunc(path)
  953. except OSError:
  954. return ''
  955. def ancestor(self, c2):
  956. """return the ancestor context of self and c2"""
  957. return self._parents[0].ancestor(c2) # punt on two parents for now
  958. def walk(self, match):
  959. return sorted(self._repo.dirstate.walk(match, sorted(self.substate),
  960. True, False))
  961. def ancestors(self):
  962. for a in self._repo.changelog.ancestors(
  963. [p.rev() for p in self._parents]):
  964. yield changectx(self._repo, a)
  965. def markcommitted(self, node):
  966. """Perform post-commit cleanup necessary after committing this ctx
  967. Specifically, this updates backing stores this working context
  968. wraps to reflect the fact that the changes reflected by this
  969. workingctx have been committed. For example, it marks
  970. modified and added files as normal in the dirstate.
  971. """
  972. for f in self.modified() + self.added():
  973. self._repo.dirstate.normal(f)
  974. for f in self.removed():
  975. self._repo.dirstate.drop(f)
  976. self._repo.dirstate.setparents(node)
  977. def dirs(self):
  978. return self._repo.dirstate.dirs()
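
Finally, a sketch of working-directory access through workingctx (defined just below), which inherits the accessors of committablectx above; hypothetical repo as before:

wctx = repo[None]                    # localrepo.__getitem__(None) gives a workingctx
if wctx.dirty(missing=True):         # uncommitted changes, including missing files
    changed = wctx.modified() + wctx.added() + wctx.removed()
st = wctx.status(listunknown=True)   # compares against '.' by default
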
  979. class workingctx(committablectx):
  980. """A workingctx object makes access to data related to
  981. the current working directory convenient.
  982. date - any valid date string or (unixtime, offset), or None.
  983. user - username string, or None.
  984. extra - a dictionary of extra values, or None.
  985. changes - a list of file lists as returned by localrepo.status()
  986. or None to use the repository status.
  987. """
  988. def __init__(self, repo, text="", user=None, date=None, extra=None,
  989. changes=None):
  990. super(workingctx, self).__init__(repo, text, user, date, extra, changes)
  991. def __iter__(self):
  992. d = self._repo.dirstate
  993. for f in d:
  994. if d[f] != 'r':
  995. yield f
  996. def __contains__(self, key):
  997. return self._repo.dirstate[key] not in "?r"
  998. @propertycache
  999. def _parents(self):
  1000. p = self._repo.dirstate.parents()
  1001. if p[1] == nullid:
  1002. p = p[:-1]
  1003. return [changectx(self._repo, x) for x in p]
  1004. def filectx(self, path, filelog=None):
  1005. """get a file context from the working directory"""
  1006. return workingfilectx(self._repo, path, workingctx=self,
  1007. filelog=filelog)
  1008. def dirty(self, missing=False, merge=True, branch=True):
  1009. "check whether a working directory is modified"
  1010. # check subrepos first
  1011. for s in sorted(self.substate):
  1012. if self.sub(s).dirty():
  1013. return True
  1014. # check current working dir
  1015. return ((merge and self.p2()) or
  1016. (branch and self.branch() != self.p1().branch()) or
  1017. self.modified() or self.added() or self.removed() or
  1018. (missing and self.deleted()))
  1019. def add(self, list, prefix=""):
  1020. join = lambda f: os.path.join(prefix, f)
  1021. wlock = self._repo.wlock()
  1022. ui, ds = self._repo.ui, self._repo.dirstate
  1023. try:
  1024. rejected = []
  1025. lstat = self._repo.wvfs.lstat
  1026. for f in list:
  1027. scmutil.checkportable(ui, join(f))
  1028. try:
  1029. st = lstat(f)
  1030. except OSError:
  1031. ui.warn(_("%s does not exist!\n") % join(f))
  1032. rejected.append(f)
  1033. continue
  1034. if st.st_size > 10000000:
  1035. ui.warn(_("%s: up to %d MB of RAM may be required "
  1036. "to manage this file\n"
  1037. "(use 'hg revert %s' to cancel the "
  1038. "pending addition)\n")
  1039. % (f, 3 * st.st_size // 1000000, join(f)))
  1040. if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
  1041. ui.warn(_("%s not added: only files and symlinks "
  1042. "supported currently\n") % join(f))
  1043. rejected.append(f)
  1044. elif ds[f] in 'amn':
  1045. ui.warn(_("%s already tracked!\n") % join(f))
  1046. elif ds[f] == 'r':
  1047. ds.normallookup(f)
  1048. else:
  1049. ds.add(f)
  1050. return rejected
  1051. finally:
  1052. wlock.release()
  1053. def forget(self, files, prefix=""):
  1054. join = lambda f: os.path.join(prefix, f)
  1055. wlock = self._repo.wlock()
  1056. try:
  1057. rejected = []
  1058. for f in files:
  1059. if f not in self._repo.dirstate:
  1060. self._repo.ui.warn(_("%s not tracked!\n") % join(f))
  1061. rejected.append(f)
  1062. elif self._repo.dirstate[f] != 'a':
  1063. self._repo.dirstate.remove(f)
  1064. else:
  1065. self._repo.dirstate.drop(f)
  1066. return rejected
  1067. finally:
  1068. wlock.release()
  1069. def undelete(self, list):
  1070. pctxs = self.parents()
  1071. wlock = self._repo.wlock()
  1072. try:
  1073. for f in list:
  1074. if self._repo.dirstate[f] != 'r':
  1075. self._repo.ui.warn(_("%s not removed!\n") % f)
  1076. else:
  1077. fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
  1078. t = fctx.data()
  1079. self._repo.wwrite(f, t, fctx.flags())
  1080. self._repo.dirstate.normal(f)
  1081. finally:
  1082. wlock.release()
  1083. def copy(self, source, dest):
  1084. try:
  1085. st = self._repo.wvfs.lstat(dest)
  1086. except OSError, err:
  1087. if err.errno != errno.ENOENT:
  1088. raise
  1089. self._repo.ui.warn(_("%s does not exist!\n") % dest)
  1090. return
  1091. if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
  1092. self._repo.ui.warn(_("copy failed: %s is not a file or a "
  1093. "symbolic link\n") % dest)
  1094. else:
  1095. wlock = self._repo.wlock()
  1096. try:
  1097. if self._repo.dirstate[dest] in '?r':
  1098. self._repo.dirstate.add(dest)
  1099. self._repo.dirstate.copy(source, dest)
  1100. finally:
  1101. wlock.release()
  1102. def _filtersuspectsymlink(self, files):
  1103. if not files or self._repo.dirstate._checklink:
  1104. return files
  1105. # Symlink placeholders may get non-symlink-like contents
  1106. # via user error or dereferencing by NFS or Samba servers,
  1107. # so we filter out any placeholders that don't look like a
  1108. # symlink
  1109. sane = []
  1110. for f in files:
  1111. if self.flags(f) == 'l':
  1112. d = self[f].data()
  1113. if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
  1114. self._repo.ui.debug('ignoring suspect symlink placeholder'
  1115. ' "%s"\n' % f)
  1116. continue
  1117. sane.append(f)
  1118. return sane
  1119. def _checklookup(self, files):
  1120. # check for any possibly clean files
  1121. if not files:
  1122. return [], []
  1123. modified = []
  1124. fixup = []
  1125. pctx = self._parents[0]
  1126. # do a full compare of any files that might have changed
  1127. for f in sorted(files):
  1128. if (f not in pctx or self.flags(f) != pctx.flags(f)
  1129. or pctx[f].cmp(self[f])):
  1130. modified.append(f)
  1131. else:
  1132. fixup.append(f)
  1133. # update dirstate for files that are actually clean
  1134. if fixup:
  1135. try:
  1136. # updating the dirstate is optional
  1137. # so we don't wait on the lock
  1138. normal = self._repo.dirstate.normal
  1139. wlock = self._repo.wlock(False)
  1140. try:
  1141. for f in fixup:
  1142. normal(f)
  1143. finally:
  1144. wlock.release()
  1145. except error.LockError:
  1146. pass
  1147. return modified, fixup
  1148. def _manifestmatches(self, match, s):
  1149. """Slow path for workingctx
  1150. The fast path is when we compare the working directory to its parent
  1151. which means this function is comparing with a non-parent; therefore we
  1152. need to build a manifest and return what matches.
  1153. """
  1154. mf = self._repo['.']._manifestmatches(match, s)
  1155. modified, added, removed = s[0:3]
  1156. for f in modified + added:
  1157. mf[f] = None
  1158. mf.set(f, self.flags(f))
  1159. for f in removed:
  1160. if f in mf:
  1161. del mf[f]
  1162. return mf
  1163. def _prestatus(self, other, s, match, listignored, listclean, listunknown):
  1164. """override the parent hook with a dirstate query
  1165. We use this prestatus hook to populate the status with information from
  1166. the dirstate.
  1167. """
  1168. # doesn't need to call super; if that changes, be aware that super
  1169. # calls self.manifest which would slow down the common case of calling
  1170. # status against a workingctx's parent
  1171. return self._dirstatestatus(match, listignored, listclean, listunknown)
  1172. def _poststatus(self, other, s, match, listignored, listclean, listunknown):
  1173. """override the parent hook with a filter for suspect symlinks
  1174. We use this poststatus hook to filter out symlinks that might have
  1175. accidentally ended up with the entire contents of the file they are
1176. supposed to be linking to.
  1177. """
  1178. s[0] = self._filtersuspectsymlink(s[0])
  1179. self._status = s[:]
  1180. return s
  1181. def _dirstatestatus(self, match=None, ignored=False, clean=False,
  1182. unknown=False):
  1183. '''Gets the status from the dirstate -- internal use only.'''
  1184. listignored, listclean, listunknown = ignored, clean, unknown
  1185. match = match or matchmod.always(self._repo.root, self._repo.getcwd())
  1186. subrepos = []
  1187. if '.hgsub' in self:
  1188. subrepos = sorted(self.substate)
  1189. s = self._repo.dirstate.status(match, subrepos, listignored,
  1190. listclean, listunknown)
  1191. cmp, modified, added, removed, deleted, unknown, ignored, clean = s
  1192. # check for any possibly clean files
  1193. if cmp:
  1194. modified2, fixup = self._checklookup(cmp)
  1195. modified += modified2
  1196. # update dirstate for files that are actually clean
  1197. if fixup and listclean:
  1198. clean += fixup
  1199. return [modified, added, removed, deleted, unknown, ignored, clean]
  1200. def _buildstatus(self, other, s, match, listignored, listclean,
  1201. listunknown):
  1202. """build a status with respect to another context
  1203. This includes logic for maintaining the fast path of status when
1204. comparing the working directory against its parent: building a new
1205. manifest is skipped when self (the working directory) is compared
1206. against its parent (repo['.']).
  1207. """
  1208. if other != self._repo['.']:
  1209. s = super(workingctx, self)._buildstatus(other, s, match,
  1210. listignored, listclean,
  1211. listunknown)
  1212. return s
  1213. def _matchstatus(self, other, s, match, listignored, listclean,
  1214. listunknown):
  1215. """override the match method with a filter for directory patterns
  1216. We use inheritance to customize the match.bad method only in cases of
  1217. workingctx since it belongs only to the working directory when
  1218. comparing against the parent changeset.
  1219. If we aren't comparing against the working directory's parent, then we
  1220. just use the default match object sent to us.
  1221. """
  1222. superself = super(workingctx, self)
  1223. match = superself._matchstatus(other, s, match, listignored, listclean,
  1224. listunknown)
  1225. if other != self._repo['.']:
  1226. def bad(f, msg):
  1227. # 'f' may be a directory pattern from 'match.files()',
  1228. # so 'f not in ctx1' is not enough
  1229. if f not in other and f not in other.dirs():
  1230. self._repo.ui.warn('%s: %s\n' %
  1231. (self._repo.dirstate.pathto(f), msg))
  1232. match.bad = bad
  1233. return match
  1234. def status(self, other='.', match=None, listignored=False,
  1235. listclean=False, listunknown=False, listsubrepos=False):
  1236. # yet to be determined: what to do if 'other' is a 'workingctx' or a
  1237. # 'memctx'?
  1238. s = super(workingctx, s

Large files are truncated; the remainder of context.py is not shown.