hgext/obsolete.py @ 307:9ac56d36d6ff

obsolete: add latecomer computation and display

author:   Pierre-Yves David <pierre-yves.david@logilab.fr>
date:     Tue, 26 Jun 2012 11:33:39 +0200
parents:  8cfa3163dfaa
children: 23ef1c71d164

# obsolete.py - introduce the obsolete concept in mercurial.
#
# Copyright 2011 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
#                Logilab SA        <contact@logilab.fr>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
"""Introduce the Obsolete concept to mercurial

General concept
===============

This extension introduces the *obsolete* concept. It adds a new *obsolete*
relation between two changesets. A relation ``<changeset B> obsolete
<changeset A>`` is set to denote that ``<changeset B>`` is a new version of
``<changeset A>``.

The *obsolete* relation acts as a **perpendicular history** to the standard
changeset history. Standard changeset history versions files. The *obsolete*
relation versions changesets.

:obsolete:  a changeset that has been replaced by another one.
:unstable:  a changeset that is not obsolete but has an obsolete ancestor.
:suspended: an obsolete changeset with unstable descendants.
:extinct:   an obsolete changeset without unstable descendants.
            (subject to garbage collection)

Another name for unstable could be out of sync.

Usage and Feature
=================

Display and Exchange
--------------------

Obsolete changesets are hidden (except if they have non obsolete descendants).

Obsolete changesets are not exchanged. This will probably change later, but
it was the simplest solution for now.

New commands
------------

Note that rebased changesets are now marked obsolete rather than being
stripped. In this experimental extension, this is done by forcing the --keep
option. Trying to use the --keep option of rebase with this experimental
extension will cause such a call to abort. Until a better release, please use
the graft command to rebase and copy changesets.

Context object
--------------

Context gains an ``obsolete`` method that returns True if a changeset is
obsolete, False otherwise.

revset
------

Add an ``obsolete()`` entry.
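
A minimal usage sketch (illustration only; the repository path is
hypothetical and the extension is assumed to be enabled)::

    from mercurial import hg, ui as uimod

    repo = hg.repository(uimod.ui(), '/path/to/repo')
    print repo['tip'].obsolete()     # truthy if tip has been rewritten
    print repo.revs('obsolete()')    # non public changesets that are obsolete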

repo extension
--------------

To Do
~~~~~

- refuse to obsolete published changesets
- handle split
- handle conflict
- handle unstable // out of sync

"""

import os
try:
    from cStringIO import StringIO
except ImportError:
    from StringIO import StringIO

from mercurial.i18n import _

import base64
import json

from mercurial import util
from mercurial import context
from mercurial import revset
from mercurial import scmutil
from mercurial import extensions
from mercurial import pushkey
from mercurial import discovery
from mercurial import error
from mercurial import commands
from mercurial import changelog
from mercurial import phases
from mercurial.node import hex, bin, short, nullid
from mercurial.lock import release
from mercurial import localrepo
from mercurial import cmdutil
from mercurial import templatekw

try:
    from mercurial.localrepo import storecache
    storecache('babar') # to trigger import
except (TypeError, ImportError):
    def storecache(*args):
        return scmutil.filecache(*args, instore=True)


### Patch changectx
#############################

def obsolete(ctx):
    """is the changeset obsoleted by another one"""
    if ctx.node() is None:
        return False
    return bool(ctx._repo.obsoletedby(ctx.node())) and ctx.phase()

context.changectx.obsolete = obsolete

def unstable(ctx):
    """is the changeset unstable (has an obsolete ancestor)"""
    if ctx.node() is None:
        return False
    return ctx.rev() in ctx._repo._unstableset

context.changectx.unstable = unstable

def extinct(ctx):
    """is the changeset extinct (obsolete without unstable descendants)"""
    if ctx.node() is None:
        return False
    return ctx.rev() in ctx._repo._extinctset

context.changectx.extinct = extinct

def latecomer(ctx):
    """is the changeset a latecomer (tries to succeed a public changeset)"""
    if ctx.node() is None:
        return False
    return ctx.rev() in ctx._repo._latecomerset

context.changectx.latecomer = latecomer

### revset
#############################

def revsetobsolete(repo, subset, x):
    """obsolete changesets"""
    args = revset.getargs(x, 0, 0, 'obsolete takes no argument')
    return [r for r in subset if r in repo._obsoleteset
            and repo._phasecache.phase(repo, r) > 0]

# XXX Backward compatibility, to be removed once stabilized
if '_phasecache' not in vars(localrepo.localrepository): # new api
    def revsetobsolete(repo, subset, x):
        """obsolete changesets"""
        args = revset.getargs(x, 0, 0, 'obsolete takes no argument')
        return [r for r in subset if r in repo._obsoleteset
                and repo._phaserev[r] > 0]

def revsetunstable(repo, subset, x):
    """non obsolete changesets descendant of obsolete ones"""
    args = revset.getargs(x, 0, 0, 'unstable takes no arguments')
    return [r for r in subset if r in repo._unstableset]

def revsetsuspended(repo, subset, x):
    """obsolete changesets with non obsolete descendants"""
    args = revset.getargs(x, 0, 0, 'suspended takes no arguments')
    return [r for r in subset if r in repo._suspendedset]

def revsetextinct(repo, subset, x):
    """obsolete changesets without non obsolete descendants"""
    args = revset.getargs(x, 0, 0, 'extinct takes no arguments')
    return [r for r in subset if r in repo._extinctset]

def revsetlatecomer(repo, subset, x):
    """latecomer changesets (trying to succeed public changesets)"""
    args = revset.getargs(x, 0, 0, 'latecomer takes no arguments')
    return [r for r in subset if r in repo._latecomerset]

def _precursors(repo, s):
    """Precursors of a changeset"""
    cs = set()
    nm = repo.changelog.nodemap
    markerbysubj = repo.obsoletestore.subjects
    for r in s:
        for p in markerbysubj.get(repo[r].node(), ()):
            pr = nm.get(p['object'])
            if pr is not None:
                cs.add(pr)
    return cs
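
# Illustrative sketch only, not used by the extension: how the ``_precursors``
# helper above could be queried from other code. The ``ui``, ``repo`` and
# ``rev`` values are hypothetical.
def _example_show_precursors(ui, repo, rev):
    """print the short hashes of the changesets that ``rev`` rewrites"""
    for pr in sorted(_precursors(repo, [rev])):
        ui.write('%s\n' % short(repo[pr].node()))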
"""precursors of a subset""" s = revset.getset(repo, range(len(repo)), x) cs = _precursors(repo, s) return [r for r in subset if r in cs] def _allprecursors(repo, s): # XXX we need a better naming """transitive precursors of a subset""" toproceed = [repo[r].node() for r in s] seen = set() allsubjects = repo.obsoletestore.subjects while toproceed: nc = toproceed.pop() for mark in allsubjects.get(nc, ()): np = mark['object'] if np not in seen: seen.add(np) toproceed.append(np) nm = repo.changelog.nodemap cs = set() for p in seen: pr = nm.get(p) if pr is not None: cs.add(pr) return cs def revsetallprecursors(repo, subset, x): """obsolete parents""" s = revset.getset(repo, range(len(repo)), x) cs = _allprecursors(repo, s) return [r for r in subset if r in cs] def _successors(repo, s): """Successors of a changeset""" cs = set() nm = repo.changelog.nodemap markerbyobj = repo.obsoletestore.objects for r in s: for p in markerbyobj.get(repo[r].node(), ()): for sub in p['subjects']: sr = nm.get(sub) if sr is not None: cs.add(sr) return cs def revsetsuccessors(repo, subset, x): """successors of a subset""" s = revset.getset(repo, range(len(repo)), x) cs = _successors(repo, s) return [r for r in subset if r in cs] def _allsuccessors(repo, s): # XXX we need a better naming """transitive successors of a subset""" toproceed = [repo[r].node() for r in s] seen = set() allobjects = repo.obsoletestore.objects while toproceed: nc = toproceed.pop() for mark in allobjects.get(nc, ()): for sub in mark['subjects']: if sub not in seen: seen.add(sub) toproceed.append(sub) nm = repo.changelog.nodemap cs = set() for s in seen: sr = nm.get(s) if sr is not None: cs.add(sr) return cs def revsetallsuccessors(repo, subset, x): """obsolete parents""" s = revset.getset(repo, range(len(repo)), x) cs = _allsuccessors(repo, s) return [r for r in subset if r in cs] ### template keywords ##################### def obsoletekw(repo, ctx, templ, **args): """:obsolete: String. The obsolescence level of the node, could be ``stable``, ``unstable``, ``suspended`` or ``extinct``. """ rev = ctx.rev() if rev in repo._extinctset: return 'extinct' if rev in repo._suspendedset: return 'suspended' if rev in repo._unstableset: return 'unstable' return 'stable' ### Other Extension compat ############################ def buildstate(orig, repo, dest, rebaseset, *ags, **kws): """wrapper for rebase 's buildstate that exclude obsolete changeset""" rebaseset = repo.revs('%ld - extinct()', rebaseset) return orig(repo, dest, rebaseset, *ags, **kws) def defineparents(orig, repo, rev, target, state, *args, **kwargs): rebasestate = getattr(repo, '_rebasestate', None) if rebasestate is not None: repo._rebasestate = dict(state) repo._rebasetarget = target return orig(repo, rev, target, state, *args, **kwargs) def concludenode(orig, repo, rev, p1, *args, **kwargs): """wrapper for rebase 's concludenode that set obsolete relation""" newrev = orig(repo, rev, p1, *args, **kwargs) rebasestate = getattr(repo, '_rebasestate', None) if rebasestate is not None: if newrev is not None: nrev = repo[newrev].rev() else: nrev = p1 repo._rebasestate[rev] = nrev return newrev def cmdrebase(orig, ui, repo, *args, **kwargs): if kwargs.get('keep', False): raise util.Abort(_('rebase --keep option is unsupported with obsolete ' 'extension'), hint=_("see 'hg help obsolete'")) kwargs = dict(kwargs) kwargs['keep'] = True # We want to mark rebased revision as obsolete and set their # replacements if any. 
    # Doing it in concludenode() prevents aborting the rebase, and is not
    # called with all relevant revisions in the --collapse case. Instead, we
    # try to track the rebase state structure by sampling/updating it in
    # defineparents() and concludenode(). The obsolete markers are added from
    # this state after a successful call.
    repo._rebasestate = {}
    repo._rebasetarget = None
    try:
        res = orig(ui, repo, *args, **kwargs)
        # Filter nullmerge or unrebased entries
        repo._rebasestate = dict(p for p in repo._rebasestate.iteritems()
                                 if p[1] >= 0)
        if not res and not kwargs.get('abort') and repo._rebasestate:
            # Rebased revisions are assumed to be descendants of
            # targetrev. If a source revision is mapped to targetrev
            # or to another rebased revision, it must have been
            # removed.
            targetrev = repo[repo._rebasetarget].rev()
            newrevs = set([targetrev])
            replacements = {}
            for rev, newrev in sorted(repo._rebasestate.items()):
                oldnode = repo[rev].node()
                if newrev not in newrevs:
                    newnode = repo[newrev].node()
                    newrevs.add(newrev)
                else:
                    newnode = nullid
                replacements[oldnode] = newnode

            if kwargs.get('collapse'):
                newnodes = set(n for n in replacements.values()
                               if n != nullid)
                if newnodes:
                    # Collapsing into more than one revision?
                    assert len(newnodes) == 1, newnodes
                    newnode = newnodes.pop()
                else:
                    newnode = nullid
                repo.addcollapsedobsolete(replacements, newnode)
            else:
                for oldnode, newnode in replacements.iteritems():
                    repo.addobsolete(newnode, oldnode)
        return res
    finally:
        delattr(repo, '_rebasestate')
        delattr(repo, '_rebasetarget')

def extsetup(ui):
    revset.symbols["obsolete"] = revsetobsolete
    revset.symbols["unstable"] = revsetunstable
    revset.symbols["suspended"] = revsetsuspended
    revset.symbols["extinct"] = revsetextinct
    revset.symbols["latecomer"] = revsetlatecomer
    revset.symbols["obsparents"] = revsetprecursors  # DEPR
    revset.symbols["precursors"] = revsetprecursors
    revset.symbols["obsancestors"] = revsetallprecursors  # DEPR
    revset.symbols["allprecursors"] = revsetallprecursors  # bad name
    revset.symbols["successors"] = revsetsuccessors
    revset.symbols["allsuccessors"] = revsetallsuccessors  # bad name
    templatekw.keywords['obsolete'] = obsoletekw
    try:
        rebase = extensions.find('rebase')
        if rebase:
            extensions.wrapfunction(rebase, 'buildstate', buildstate)
            extensions.wrapfunction(rebase, 'defineparents', defineparents)
            extensions.wrapfunction(rebase, 'concludenode', concludenode)
            extensions.wrapcommand(rebase.cmdtable, "rebase", cmdrebase)
    except KeyError:
        pass  # rebase not found

# Pushkey mechanism for mutable
#########################################

def pushobsolete(repo, key, old, raw):
    """push obsolete relations through pushkey"""
    assert key == "markers"
    l = repo.lock()
    try:
        tmp = StringIO()
        tmp.write(raw)
        tmp.seek(0)
        repo.obsoletestore.load(tmp)
        repo.obsoletestore._dirty = True  # XXX meh
        return 1
    finally:
        l.release()

def listobsolete(repo):
    """dump all obsolete relations

    XXX this will have to be improved"""
    tmp = StringIO()
    repo.obsoletestore.save(tmp)
    return {'markers': base64.b64encode(tmp.getvalue())}

pushkey.register('obsolete', pushobsolete, listobsolete)
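
# Illustrative sketch only, not used by the extension: decoding the value
# exposed by ``listobsolete`` above back into an ``obsoletestore``. The
# ``remote`` peer object is hypothetical.
def _example_load_remote_markers(remote):
    """return an obsoletestore filled with the markers listed by a peer"""
    data = remote.listkeys('obsolete')['markers']
    tmp = StringIO()
    tmp.write(base64.b64decode(data))
    tmp.seek(0)
    store = obsoletestore()
    store.load(tmp)
    return store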
""" outgoing = orig(repo, *args, **kwargs) orig = outgoing.excluded outgoing.excluded = blist(n for n in orig if not repo[n].extinct()) # when no revision is specified (push everything) a shortcut is taken when # nothign was exclude. taking this code path when extinct changeset have # been excluded leads to repository corruption. outgoing.excluded.orig = orig return outgoing def wrapcheckheads(orig, repo, remote, outgoing, *args, **kwargs): """wrap mercurial.discovery.checkheads * prevent unstability to be pushed * patch remote to ignore obsolete heads on remote """ # do not push instability for h in outgoing.missingheads: # checking heads only is enought because any thing base on obsolete # changeset is either obsolete or unstable. ctx = repo[h] hint = _("use 'hg stabilize' to get a stable history (or --force to proceed)") if ctx.unstable(): raise util.Abort(_("Trying to push unstable changeset: %s!") % ctx, hint=hint) if ctx.obsolete(): raise util.Abort(_("Trying to push obsolete changeset: %s!") % ctx, hint=hint) ### patch remote branch map # do not read it this burn eyes try: if 'oldbranchmap' not in vars(remote): remote.oldbranchmap = remote.branchmap def branchmap(): newbm = {} oldbm = None if (util.safehasattr(phases, 'visiblebranchmap') and not util.safehasattr(remote, 'ignorevisiblebranchmap') ): remote.ignorevisiblebranchmap = False remote.branchmap = remote.oldbranchmap oldbm = phases.visiblebranchmap(remote) remote.branchmap = remote.newbranchmap remote.ignorevisiblebranchmap = True if oldbm is None: oldbm = remote.oldbranchmap() for branch, nodes in oldbm.iteritems(): nodes = list(nodes) new = set() while nodes: n = nodes.pop() if n in repo.obsoletestore.objects: markers = repo.obsoletestore.objects[n] for mark in markers: for newernode in mark['subjects']: if newernode is not None: nodes.append(newernode) else: new.add(n) if new: newbm[branch] = list(new) return newbm remote.ignorevisiblebranchmap = True remote.branchmap = branchmap remote.newbranchmap = branchmap return orig(repo, remote, outgoing, *args, **kwargs) finally: remote.__dict__.pop('branchmap', None) # restore class one remote.__dict__.pop('oldbranchmap', None) remote.__dict__.pop('newbranchmap', None) remote.__dict__.pop('ignorevisiblebranchmap', None) # eye are still burning def wrapvisiblebranchmap(orig, repo): ignore = getattr(repo, 'ignorevisiblebranchmap', None) if ignore is None: return orig(repo) elif ignore: return repo.branchmap() else: return None # break recursion def wrapclearcache(orig, repo, *args, **kwargs): try: return orig(repo, *args, **kwargs) finally: repo._clearobsoletecache() ### New commands ############################# cmdtable = {} command = cmdutil.command(cmdtable) @command('debugobsolete', [], _('SUBJECT OBJECT')) def cmddebugobsolete(ui, repo, subject, object): """add an obsolete relation between two nodes The subject is expected to be a newer version of the object. 
""" lock = repo.lock() try: sub = repo[subject] obj = repo[object] repo.addobsolete(sub.node(), obj.node()) finally: lock.release() return 0 @command('debugconvertobsolete', [], '') def cmddebugconvertobsolete(ui, repo): """import markers from an .hg/obsolete-relations file""" cnt = 0 l = repo.lock() try: repo._importoldobsolete = True store = repo.obsoletestore try: f = repo.opener('obsolete-relations') try: for line in f: subhex, objhex = line.split() sub = bin(subhex) obj = bin(objhex) newmarker = { 'subjects': (sub==nullid) and [] or [sub], 'object': obj, 'date': util.makedate(), 'user': ui.username(), 'reason': 'import from older format.', } store.new(newmarker) store._dirty = True cnt += 1 finally: f.close() util.unlink(repo.join('obsolete-relations')) except IOError: ui.warn('nothing to do\n') pass finally: del repo._importoldobsolete l.release() ui.status('%i obsolete marker converted\n' % cnt) @command('debugsuccessors', [], '') def cmddebugsuccessors(ui, repo): """dump obsolete changesets and their successors Each line matches an existing marker, the first identifier is the obsolete changeset identifier, followed by it successors. """ lock = repo.lock() try: allsuccessors = repo.obsoletestore.objects for old in sorted(allsuccessors): successors = [sorted(m['subjects']) for m in allsuccessors[old]] for i, group in enumerate(sorted(successors)): ui.write('%s' % short(old)) for new in group: ui.write(' %s' % short(new)) ui.write('\n') finally: lock.release() ### Altering existing command ############################# def wrapmayobsoletewc(origfn, ui, repo, *args, **opts): res = origfn(ui, repo, *args, **opts) if repo['.'].obsolete(): ui.warn(_('Working directory parent is obsolete\n')) return res def noextinctsvisibleheads(orig, repo): repo._turn_extinct_secret() return orig(repo) def uisetup(ui): extensions.wrapcommand(commands.table, "update", wrapmayobsoletewc) extensions.wrapcommand(commands.table, "pull", wrapmayobsoletewc) extensions.wrapfunction(discovery, 'findcommonoutgoing', wrapfindcommonoutgoing) extensions.wrapfunction(discovery, 'checkheads', wrapcheckheads) extensions.wrapfunction(phases, 'visibleheads', noextinctsvisibleheads) extensions.wrapfunction(phases, 'advanceboundary', wrapclearcache) if util.safehasattr(phases, 'visiblebranchmap'): extensions.wrapfunction(phases, 'visiblebranchmap', wrapvisiblebranchmap) ### serialisation ############################# def _obsserialise(obssubrels, flike): """serialise an obsolete relation mapping in a plain text one this is for subject -> [objects] mapping format is:: <subject-full-hex> <object-full-hex>\n""" for sub, objs in obssubrels.iteritems(): for obj in objs: if sub is None: sub = nullid flike.write('%s %s\n' % (hex(sub), hex(obj))) def _obsdeserialise(flike): """read a file like object serialised with _obsserialise this desierialize into a {subject -> objects} mapping""" rels = {} for line in flike: subhex, objhex = line.split() subnode = bin(subhex) if subnode == nullid: subnode = None rels.setdefault( subnode, set()).add(bin(objhex)) return rels ### diagnostique tools ############################# def unstables(repo): """Return all unstable changeset""" return scmutil.revrange(repo, ['obsolete():: and (not obsolete())']) def newerversion(repo, obs): """Return the newer version of an obsolete changeset""" toproceed = set([(obs,)]) # XXX known optimization available newer = set() objectrels = repo.obsoletestore.objects while toproceed: current = toproceed.pop() assert len(current) <= 1, 'splitting not handled yet. 

### diagnostic tools
#############################

def unstables(repo):
    """Return all unstable changesets"""
    return scmutil.revrange(repo, ['obsolete():: and (not obsolete())'])

def newerversion(repo, obs):
    """Return the newer versions of an obsolete changeset"""
    toproceed = set([(obs,)])
    # XXX known optimization available
    newer = set()
    objectrels = repo.obsoletestore.objects
    while toproceed:
        current = toproceed.pop()
        assert len(current) <= 1, 'splitting not handled yet. %r' % current
        if current:
            n, = current
            if n in objectrels:
                markers = objectrels[n]
                for mark in markers:
                    toproceed.add(tuple(mark['subjects']))
            else:
                newer.add(tuple(current))
        else:
            newer.add(())
    return sorted(newer)

### obsolete relation storage
#############################

def add2set(d, key, mark):
    """add <mark> to a `set` in <d>[<key>]"""
    d.setdefault(key, []).append(mark)

def markerid(marker):
    KEYS = ['subjects', "object", "date", "user", "reason"]
    for key in KEYS:
        assert key in marker
    keys = sorted(marker.keys())
    a = util.sha1()
    for key in keys:
        if key == 'subjects':
            for sub in sorted(marker[key]):
                a.update(sub)
        elif key == 'id':
            pass
        else:
            a.update(str(marker[key]))
        a.update('\0')
    return a.digest()

class obsoletestore(object):
    """Store obsolete relations

    Relations are stored in three mappings. All mappings have "obsolete
    markers" as values::

        {'id': "unique id of the obsolete marker"
         'subjects': "0-N newer versions of the changeset in 'object'
                      (as ordered list)"
         'object': "old and obsolete version"
         'date': "When was this marker created ?"
         'user': "Who did that ?"
         'reason': "Why was it done"
         }

    Three mappings exist:

    :self._markers: "id" -> marker
    :self.subjects: "subject" -> set(marker)
    :self.objects:  "object" -> set(marker)
    """

    def __init__(self):
        self._markers = {}
        self.subjects = {}
        self.objects = {}
        self._dirty = False  # should be on repo

    def new(self, marker):
        """Add a *new* marker to the store, computing its ID"""
        mid = marker['id'] = markerid(marker)
        self._insert(marker)
        self._dirty = True
        return mid

    def _insert(self, marker):
        if marker['id'] not in self._markers:
            self._markers[marker['id']] = marker
            add2set(self.objects, marker['object'], marker)
            for subj in marker['subjects']:
                add2set(self.subjects, subj, marker)

    def save(self, stream):
        markers = []
        for mark in self._markers.itervalues():
            jmark = mark.copy()
            jmark['id'] = hex(jmark['id'])
            jmark['subjects'] = [hex(n) for n in jmark['subjects']]
            jmark['object'] = hex(jmark['object'])
            markers.append(jmark)
        json.dump(markers, stream, indent=4)

    def load(self, stream):
        for mark in json.load(stream):
            mark['id'] = bin(mark['id'])
            mark['subjects'] = [bin(n) for n in mark['subjects']]
            mark['object'] = bin(mark['object'])
            self._insert(mark)

def writeobsolete(repo):
    """write obsolete data on disk"""
    f = repo.sopener('obsoletemarkers', 'w', atomictemp=True)
    try:
        repo.obsoletestore.save(f)
        repo._dirty = False
    finally:
        f.close()
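
# Illustrative sketch only, not used by the extension: creating a marker in
# the structure described by the ``obsoletestore`` docstring above. The
# ``newnode``/``oldnode`` binary node ids and the user name are hypothetical.
def _example_record_marker(newnode, oldnode):
    """record that ``newnode`` is a rewritten version of ``oldnode``"""
    store = obsoletestore()
    marker = {
        'subjects': [newnode],          # newer versions (empty if just killed)
        'object': oldnode,              # the obsolete changeset
        'date': util.makedate(),
        'user': 'example@example.com',
        'reason': 'illustration only',
        }
    mid = store.new(marker)             # computes and stores the marker id
    return store, mid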

### repo subclassing
#############################

def reposetup(ui, repo):
    if not repo.local():
        return

    opull = repo.pull
    opush = repo.push
    olock = repo.lock
    o_rollback = repo._rollback
    o_updatebranchcache = repo.updatebranchcache

    # /!\ api change in Hg 2.2 (97efd26eb9576f39590812ea9) /!\
    if util.safehasattr(repo, '_journalfiles'): # Hg 2.2
        o_journalfiles = repo._journalfiles
    o_writejournal = repo._writejournal

    class obsoletingrepo(repo.__class__):

        ### Public method
        def obsoletedby(self, node):
            """return the set of nodes that make <node> obsolete (obj)"""
            others = set()
            for marker in self.obsoletestore.objects.get(node, []):
                others.update(marker['subjects'])
            return others

        def obsolete(self, node):
            """return the set of nodes that <node> makes obsolete (sub)"""
            return set(marker['object'] for marker
                       in self.obsoletestore.subjects.get(node, []))

        @util.propertycache
        def obsoletestore(self):
            if not getattr(self, '_importoldobsolete', False):
                try:
                    f = self.opener('obsolete-relations')
                    f.close()
                    raise util.Abort('old format of obsolete marker detected!\n'
                                     'run `hg debugconvertobsolete` once.')
                except IOError:
                    pass
            store = obsoletestore()
            try:
                f = self.sopener('obsoletemarkers')
                store.load(f)
            except IOError:
                pass
            return store

        @util.propertycache
        def _obsoleteset(self):
            """the set of obsolete revisions"""
            obs = set()
            nm = self.changelog.nodemap
            for obj in self.obsoletestore.objects:
                try: # /!\api change in Hg 2.2 (e8d37b78acfb22ae2c1fb126c2)/!\
                    rev = nm.get(obj)
                except TypeError: # XXX to remove while breaking Hg 2.1 support
                    rev = nm.get(obj, None)
                if rev is not None:
                    obs.add(rev)
            return obs

        @util.propertycache
        def _unstableset(self):
            """the set of non obsolete revisions with obsolete ancestors"""
            return set(self.revs('(obsolete()::) - obsolete()'))

        @util.propertycache
        def _suspendedset(self):
            """the set of obsolete revisions with non obsolete descendants"""
            return set(self.revs('obsolete() and obsolete()::unstable()'))

        @util.propertycache
        def _extinctset(self):
            """the set of obsolete revisions without non obsolete descendants"""
            return set(self.revs('obsolete() - obsolete()::unstable()'))

        @util.propertycache
        def _latecomerset(self):
            """the set of revisions trying to obsolete public revisions"""
            return set(self.revs('allsuccessors(public()) - obsolete()'))

        def _clearobsoletecache(self):
            if '_obsoleteset' in vars(self):
                del self._obsoleteset
            self._clearunstablecache()

        def updatebranchcache(self):
            o_updatebranchcache()
            self._clearunstablecache()

        def _clearunstablecache(self):
            if '_unstableset' in vars(self):
                del self._unstableset
            if '_suspendedset' in vars(self):
                del self._suspendedset
            if '_extinctset' in vars(self):
                del self._extinctset
            if '_latecomerset' in vars(self):
                del self._latecomerset

        def addobsolete(self, sub, obj):
            """Add a relation marking that node <sub> is a new version of <obj>"""
            assert sub != obj
            if not repo[obj].phase():
                if sub is None:
                    self.ui.warn(
                        _("trying to kill immutable changeset %(obj)s\n")
                        % {'obj': short(obj)})
                if sub is not None:
                    self.ui.warn(
                        _("%(sub)s try to obsolete immutable changeset %(obj)s\n")
                        % {'sub': short(sub), 'obj': short(obj)})
            lock = self.lock()
            try:
                newmarker = {
                    'subjects': (sub == nullid) and [] or [sub],
                    'object': obj,
                    'date': util.makedate(),
                    'user': ui.username(),
                    'reason': 'unknown',
                    }
                mid = self.obsoletestore.new(newmarker)
                self._clearobsoletecache()
                self._turn_extinct_secret()
                return mid
            finally:
                lock.release()

        def addcollapsedobsolete(self, oldnodes, newnode):
            """Mark oldnodes as collapsed into newnode."""
            # Assume oldnodes are all descendants of a single rev
            rootrevs = self.revs('roots(%ln)', oldnodes)
            assert len(rootrevs) == 1, rootrevs
            rootnode = self[rootrevs[0]].node()
            for n in oldnodes:
                self.addobsolete(newnode, n)

        def _turn_extinct_secret(self):
            """ensure all extinct changesets are secret"""
            self._clearobsoletecache()
            # this is mainly for safety purpose
            # both pull and push
            query = '(obsolete() - obsolete()::(unstable() - secret())) - secret()'
            expobs = [c.node() for c in repo.set(query)]
            phases.retractboundary(repo, 2, expobs)

        ### Disk IO
        def lock(self, *args, **kwargs):
            l = olock(*args, **kwargs)
            if not getattr(l.releasefn, 'obspatched', False):
                oreleasefn = l.releasefn
                def releasefn(*args, **kwargs):
                    if self.obsoletestore._dirty:
                        writeobsolete(self)
                    oreleasefn(*args, **kwargs)
                releasefn.obspatched = True
                l.releasefn = releasefn
            return l

        def _readobsrels(self):
            """Read obsolete relations on disk"""
            # XXX handle lock
            try:
                f = self.opener('obsolete-relations')
                try:
                    return _obsdeserialise(f)
                finally:
                    f.close()
            except IOError:
                return {}

        ### pull // push support

        def pull(self, remote, *args, **kwargs):
            """wrapper around pull that pulls obsolete relations"""
            l = repo.lock()
            try:
                result = opull(remote, *args, **kwargs)
                if 'obsolete' in remote.listkeys('namespaces'):
                    tmp = StringIO()
                    rels = remote.listkeys('obsolete')['markers']
                    tmp.write(base64.b64decode(rels))
                    tmp.seek(0)
                    repo.obsoletestore.load(tmp)
                    repo.obsoletestore._dirty = True  # XXX meh
                    self._clearobsoletecache()
                self._turn_extinct_secret()
                return result
            finally:
                l.release()

        def push(self, remote, *args, **opts):
            """wrapper around push that pushes obsolete relations"""
            self._turn_extinct_secret()
            result = opush(remote, *args, **opts)
            if 'obsolete' in remote.listkeys('namespaces'):
                tmp = StringIO()
                self.obsoletestore.save(tmp)
                remote.pushkey('obsolete', 'markers', '', tmp.getvalue())
            self._turn_extinct_secret()
            return result

        ### rollback support

        # /!\ api change in Hg 2.2 (97efd26eb9576f39590812ea9) /!\
        if util.safehasattr(repo, '_journalfiles'): # Hg 2.2
            def _journalfiles(self):
                return (o_journalfiles()
                        + (self.sjoin('journal.obsoletemarkers'),))

            def _writejournal(self, desc):
                """wrapped version of _writejournal that saves obsolete data"""
                o_writejournal(desc)
                filename = 'obsoletemarkers'
                filepath = self.sjoin(filename)
                if os.path.exists(filepath):
                    journalname = 'journal.' + filename
                    journalpath = self.sjoin(journalname)
                    util.copyfile(filepath, journalpath)

        else: # XXX removing this block will break Hg 2.1 support
            def _writejournal(self, desc):
                """wrapped version of _writejournal that saves obsolete data"""
                entries = list(o_writejournal(desc))
                filename = 'obsoletemarkers'
                filepath = self.sjoin(filename)
                if os.path.exists(filepath):
                    journalname = 'journal.' + filename
                    journalpath = self.sjoin(journalname)
                    util.copyfile(filepath, journalpath)
                    entries.append(journalpath)
                return tuple(entries)

        def _rollback(self, dryrun, force):
            """wrapped version of _rollback that restores obsolete data"""
            ret = o_rollback(dryrun, force)
            if not (ret or dryrun):  # rollback did not fail
                src = self.sjoin('undo.obsoletemarkers')
                dst = self.sjoin('obsoletemarkers')
                if os.path.exists(src):
                    util.rename(src, dst)
                elif os.path.exists(dst):
                    # If no state was saved because the file did not exist
                    # before.
                    os.unlink(dst)
                # invalidate cache
                self.__dict__.pop('obsoletestore', None)
            return ret

        @storecache('00changelog.i')
        def changelog(self):
            # << copy pasted from mercurial source
            c = changelog.changelog(self.sopener)
            if 'HG_PENDING' in os.environ:
                p = os.environ['HG_PENDING']
                if p.startswith(self.root):
                    c.readpending('00changelog.i.a')
            # >> end of the copy paste
            old = c.__dict__.pop('hiddenrevs', ())
            if old:
                ui.warn("old wasn't empty ? %r" % old)
            def _sethidden(c, value):
                assert not value

            class hchangelog(c.__class__):
                @util.propertycache
                def hiddenrevs(c):
                    shown = ['not obsolete()', '.', 'bookmark()', 'tagged()',
                             'public()']
                    basicquery = 'obsolete() - (::(%s))' % (' or '.join(shown))
                    # !!! self is repo not changelog
                    result = set(scmutil.revrange(self, [basicquery]))
                    return result
            c.__class__ = hchangelog
            return c

    repo.__class__ = obsoletingrepo
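
# Illustrative sketch only, not part of the extension: once ``reposetup`` has
# run, the patched repository exposes ``addobsolete``. A hypothetical caller
# (another extension or a debug script) could record that node ``new``
# rewrites node ``old`` like this:
def _example_mark_rewritten(repo, new, old):
    """record an obsolete marker stating that node ``new`` replaces ``old``"""
    lock = repo.lock()
    try:
        return repo.addobsolete(new, old)
    finally:
        lock.release()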