comparison hgext/evolve.py @ 1070:527b12a59628
drop relevant marker compat
| author | Pierre-Yves David <pierre-yves.david@fb.com> |
| --- | --- |
| date | Wed, 27 Aug 2014 10:22:50 +0200 |
| parents | 356552e55489 |
| children | 3009e6eaea4c |
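
This changeset removes the compatibility layer shown in the left-hand column of the comparison below: a wrapper around `mercurial.obsolete.createmarkers` that recorded parent information on prune markers, a `pruneobsstore` subclass that rebuilt a `prunedchildren` map at load time, and a hand-rolled `relevantmarkers` method. All of it was only installed when core Mercurial's obsstore did not yet provide `relevantmarkers`; after this change, call sites use `obsolete.createmarkers` directly. A minimal sketch of the version guard the removed hunk relied on (assuming a Mercurial installation from this era; the comments only mark where the removed code plugged in, the helpers themselves are not reproduced):

```python
# Sketch of the feature-detection guard dropped by this changeset
# (illustration only, not the extension's exact code).
from mercurial import obsolete, util

if util.safehasattr(obsolete.obsstore, 'relevantmarkers'):
    # Recent core already computes relevant markers and understands prune
    # markers, so nothing needs patching; call sites can invoke
    # obsolete.createmarkers(repo, relations, metadata=metadata) directly.
    pass
else:
    # Older core: this is where the removed hunk wrapped createmarkers to
    # store p1/p2 metadata on prune markers, swapped obsolete.obsstore for
    # its pruneobsstore subclass, and attached a relevantmarkers method.
    pass
```
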
| 1069:356552e55489 | 1070:527b12a59628 |
| --- | --- |
345 if not opts.get('date') and opts.get('current_date'): | 345 if not opts.get('date') and opts.get('current_date'): |
346 opts['date'] = '%d %d' % util.makedate() | 346 opts['date'] = '%d %d' % util.makedate() |
347 if not opts.get('user') and opts.get('current_user'): | 347 if not opts.get('user') and opts.get('current_user'): |
348 opts['user'] = ui.username() | 348 opts['user'] = ui.username() |
349 | 349 |
350 | |
351 createmarkers = obsolete.createmarkers | |
352 if not util.safehasattr(obsolete.obsstore, 'relevantmarkers'): | |
353 | |
354 @eh.wrapfunction(mercurial.obsolete, 'createmarkers') | |
355 def _createmarkers(orig, repo, relations, *args, **kwargs): | |
356 """register parent information at prune time""" | |
357 # every time this test is run, a kitten is slain. | |
358 # Change it as soon as possible | |
359 if '[,{metadata}]' in orig.__doc__: | |
360 relations = list(relations) | |
361 for idx, rel in enumerate(relations): | |
362 prec = rel[0] | |
363 sucs = rel[1] | |
364 if not sucs: | |
365 meta = {} | |
366 if 2 < len(rel): | |
367 meta.update(rel[2]) | |
368 for i, p in enumerate(prec.parents(), 1): | |
369 meta['p%i' % i] = p.hex() | |
370 relations[idx] = (prec, sucs, meta) | |
371 return orig(repo, relations, *args, **kwargs) | |
372 | |
373 def createmarkers(*args, **kwargs): | |
374 return obsolete.createmarkers(*args, **kwargs) | |
375 | |
376 class pruneobsstore(obsolete.obsstore): | |
377 | |
378 def __init__(self, *args, **kwargs): | |
379 self.prunedchildren = {} | |
380 return super(pruneobsstore, self).__init__(*args, **kwargs) | |
381 | |
382 def _load(self, markers): | |
383 markers = self._prunedetectingmarkers(markers) | |
384 return super(pruneobsstore, self)._load(markers) | |
385 | |
386 | |
387 def _prunedetectingmarkers(self, markers): | |
388 for m in markers: | |
389 if not m[1]: # no successors | |
390 meta = obsolete.decodemeta(m[3]) | |
391 if 'p1' in meta: | |
392 p1 = node.bin(meta['p1']) | |
393 self.prunedchildren.setdefault(p1, set()).add(m) | |
394 if 'p2' in meta: | |
395 p2 = node.bin(meta['p2']) | |
396 self.prunedchildren.setdefault(p2, set()).add(m) | |
397 yield m | |
398 | |
399 obsolete.obsstore = pruneobsstore | |
400 | |
401 @eh.addattr(obsolete.obsstore, 'relevantmarkers') | |
402 def relevantmarkers(self, nodes): | |
403 """return a set of all obsolescence marker relevant to a set of node. | |
404 | |
405 "relevant" to a set of node mean: | |
406 | |
407 - marker that use this changeset as successors | |
408 - prune marker of direct children on this changeset. | |
409 - recursive application of the two rules on precursors of these markers | |
410 | |
411 It a set so you cannot rely on order""" | |
412 seennodes = set(nodes) | |
413 seenmarkers = set() | |
414 pendingnodes = set(nodes) | |
415 precursorsmarkers = self.precursors | |
416 prunedchildren = self.prunedchildren | |
417 while pendingnodes: | |
418 direct = set() | |
419 for current in pendingnodes: | |
420 direct.update(precursorsmarkers.get(current, ())) | |
421 direct.update(prunedchildren.get(current, ())) | |
422 direct -= seenmarkers | |
423 pendingnodes = set([m[0] for m in direct]) | |
424 seenmarkers |= direct | |
425 pendingnodes -= seennodes | |
426 seennodes |= pendingnodes | |
427 return seenmarkers | |
428 | 350 |
429 ##################################################################### | 351 ##################################################################### |
430 ### Critical fix ### | 352 ### Critical fix ### |
431 ##################################################################### | 353 ##################################################################### |
432 | 354 |
895 exc.__class__ = LocalMergeFailure | 817 exc.__class__ = LocalMergeFailure |
896 raise | 818 raise |
897 oldbookmarks = repo.nodebookmarks(nodesrc) | 819 oldbookmarks = repo.nodebookmarks(nodesrc) |
898 if nodenew is not None: | 820 if nodenew is not None: |
899 phases.retractboundary(repo, tr, destphase, [nodenew]) | 821 phases.retractboundary(repo, tr, destphase, [nodenew]) |
900 createmarkers(repo, [(repo[nodesrc], (repo[nodenew],))]) | 822 obsolete.createmarkers(repo, [(repo[nodesrc], (repo[nodenew],))]) |
901 for book in oldbookmarks: | 823 for book in oldbookmarks: |
902 repo._bookmarks[book] = nodenew | 824 repo._bookmarks[book] = nodenew |
903 else: | 825 else: |
904 createmarkers(repo, [(repo[nodesrc], ())]) | 826 obsolete.createmarkers(repo, [(repo[nodesrc], ())]) |
905 # Behave like rebase, move bookmarks to dest | 827 # Behave like rebase, move bookmarks to dest |
906 for book in oldbookmarks: | 828 for book in oldbookmarks: |
907 repo._bookmarks[book] = dest.node() | 829 repo._bookmarks[book] = dest.node() |
908 for book in destbookmarks: # restore bookmark that rebase move | 830 for book in destbookmarks: # restore bookmark that rebase move |
909 repo._bookmarks[book] = dest.node() | 831 repo._bookmarks[book] = dest.node() |
1454 _('rebasing to destination parent: %s\n') % prec.p1()) | 1376 _('rebasing to destination parent: %s\n') % prec.p1()) |
1455 try: | 1377 try: |
1456 tmpid = relocate(repo, bumped, prec.p1()) | 1378 tmpid = relocate(repo, bumped, prec.p1()) |
1457 if tmpid is not None: | 1379 if tmpid is not None: |
1458 tmpctx = repo[tmpid] | 1380 tmpctx = repo[tmpid] |
1459 createmarkers(repo, [(bumped, (tmpctx,))]) | 1381 obsolete.createmarkers(repo, [(bumped, (tmpctx,))]) |
1460 except MergeFailure: | 1382 except MergeFailure: |
1461 repo.opener.write('graftstate', bumped.hex() + '\n') | 1383 repo.opener.write('graftstate', bumped.hex() + '\n') |
1462 repo.ui.write_err(_('evolution failed!\n')) | 1384 repo.ui.write_err(_('evolution failed!\n')) |
1463 repo.ui.write_err( | 1385 repo.ui.write_err( |
1464 _('fix conflict and run "hg evolve --continue"\n')) | 1386 _('fix conflict and run "hg evolve --continue"\n')) |
1496 date=bumped.date(), | 1418 date=bumped.date(), |
1497 extra=bumped.extra()) | 1419 extra=bumped.extra()) |
1498 | 1420 |
1499 newid = repo.commitctx(new) | 1421 newid = repo.commitctx(new) |
1500 if newid is None: | 1422 if newid is None: |
1501 createmarkers(repo, [(tmpctx, ())]) | 1423 obsolete.createmarkers(repo, [(tmpctx, ())]) |
1502 newid = prec.node() | 1424 newid = prec.node() |
1503 else: | 1425 else: |
1504 phases.retractboundary(repo, tr, bumped.phase(), [newid]) | 1426 phases.retractboundary(repo, tr, bumped.phase(), [newid]) |
1505 createmarkers(repo, [(tmpctx, (repo[newid],))], | 1427 obsolete.createmarkers(repo, [(tmpctx, (repo[newid],))], |
1506 flag=obsolete.bumpedfix) | 1428 flag=obsolete.bumpedfix) |
1507 bmupdate(newid) | 1429 bmupdate(newid) |
1508 tr.close() | 1430 tr.close() |
1509 repo.ui.status(_('committed as %s\n') % node.short(newid)) | 1431 repo.ui.status(_('committed as %s\n') % node.short(newid)) |
1510 finally: | 1432 finally: |
1604 if oldlen == len(repo): | 1526 if oldlen == len(repo): |
1605 new = divergent | 1527 new = divergent |
1606 # no changes | 1528 # no changes |
1607 else: | 1529 else: |
1608 new = repo['.'] | 1530 new = repo['.'] |
1609 createmarkers(repo, [(other, (new,))]) | 1531 obsolete.createmarkers(repo, [(other, (new,))]) |
1610 phases.retractboundary(repo, tr, other.phase(), [new.node()]) | 1532 phases.retractboundary(repo, tr, other.phase(), [new.node()]) |
1611 tr.close() | 1533 tr.close() |
1612 finally: | 1534 finally: |
1613 tr.release() | 1535 tr.release() |
1614 | 1536 |
1809 relations = [(p, sucs) for p in precs] | 1731 relations = [(p, sucs) for p in precs] |
1810 if biject: | 1732 if biject: |
1811 relations = [(p, (s,)) for p, s in zip(precs, sucs)] | 1733 relations = [(p, (s,)) for p, s in zip(precs, sucs)] |
1812 | 1734 |
1813 # create markers | 1735 # create markers |
1814 createmarkers(repo, relations, metadata=metadata) | 1736 obsolete.createmarkers(repo, relations, metadata=metadata) |
1815 | 1737 |
1816 # informs that changeset have been pruned | 1738 # informs that changeset have been pruned |
1817 ui.status(_('%i changesets pruned\n') % len(precs)) | 1739 ui.status(_('%i changesets pruned\n') % len(precs)) |
1818 | 1740 |
1819 wdp = repo['.'] | 1741 wdp = repo['.'] |
2035 newid = _commitfiltered(repo, old, match, target=rev) | 1957 newid = _commitfiltered(repo, old, match, target=rev) |
2036 if newid is None: | 1958 if newid is None: |
2037 raise util.Abort(_('nothing to uncommit'), | 1959 raise util.Abort(_('nothing to uncommit'), |
2038 hint=_("use --all to uncommit all files")) | 1960 hint=_("use --all to uncommit all files")) |
2039 # Move local changes on filtered changeset | 1961 # Move local changes on filtered changeset |
2040 createmarkers(repo, [(old, (repo[newid],))]) | 1962 obsolete.createmarkers(repo, [(old, (repo[newid],))]) |
2041 phases.retractboundary(repo, tr, oldphase, [newid]) | 1963 phases.retractboundary(repo, tr, oldphase, [newid]) |
2042 repo.dirstate.setparents(newid, node.nullid) | 1964 repo.dirstate.setparents(newid, node.nullid) |
2043 _uncommitdirstate(repo, old, match) | 1965 _uncommitdirstate(repo, old, match) |
2044 updatebookmarks(newid) | 1966 updatebookmarks(newid) |
2045 if not repo[newid].files(): | 1967 if not repo[newid].files(): |
2066 markers = [] | 1988 markers = [] |
2067 for old in obsoleted: | 1989 for old in obsoleted: |
2068 oldbookmarks.extend(repo.nodebookmarks(old.node())) | 1990 oldbookmarks.extend(repo.nodebookmarks(old.node())) |
2069 markers.append((old, (new,))) | 1991 markers.append((old, (new,))) |
2070 if markers: | 1992 if markers: |
2071 createmarkers(repo, markers) | 1993 obsolete.createmarkers(repo, markers) |
2072 for book in oldbookmarks: | 1994 for book in oldbookmarks: |
2073 repo._bookmarks[book] = new.node() | 1995 repo._bookmarks[book] = new.node() |
2074 if oldbookmarks: | 1996 if oldbookmarks: |
2075 repo._bookmarks.write() | 1997 repo._bookmarks.write() |
2076 return result | 1998 return result |
2121 [p1, p2], | 2043 [p1, p2], |
2122 commitopts={'extra': extra}) | 2044 commitopts={'extra': extra}) |
2123 # store touched version to help potential children | 2045 # store touched version to help potential children |
2124 newmapping[ctx.node()] = new | 2046 newmapping[ctx.node()] = new |
2125 if not duplicate: | 2047 if not duplicate: |
2126 createmarkers(repo, [(ctx, (repo[new],))]) | 2048 obsolete.createmarkers(repo, [(ctx, (repo[new],))]) |
2127 phases.retractboundary(repo, tr, ctx.phase(), [new]) | 2049 phases.retractboundary(repo, tr, ctx.phase(), [new]) |
2128 if ctx in repo[None].parents(): | 2050 if ctx in repo[None].parents(): |
2129 repo.dirstate.setparents(new, node.nullid) | 2051 repo.dirstate.setparents(new, node.nullid) |
2130 tr.close() | 2052 tr.close() |
2131 finally: | 2053 finally: |
2226 | 2148 |
2227 newid, unusedvariable = rewrite(repo, root, allctx, head, | 2149 newid, unusedvariable = rewrite(repo, root, allctx, head, |
2228 [root.p1().node(), root.p2().node()], | 2150 [root.p1().node(), root.p2().node()], |
2229 commitopts=commitopts) | 2151 commitopts=commitopts) |
2230 phases.retractboundary(repo, tr, targetphase, [newid]) | 2152 phases.retractboundary(repo, tr, targetphase, [newid]) |
2231 createmarkers(repo, [(ctx, (repo[newid],)) | 2153 obsolete.createmarkers(repo, [(ctx, (repo[newid],)) |
2232 for ctx in allctx]) | 2154 for ctx in allctx]) |
2233 tr.close() | 2155 tr.close() |
2234 finally: | 2156 finally: |
2235 tr.release() | 2157 tr.release() |
2236 ui.status('%i changesets folded\n' % len(revs)) | 2158 ui.status('%i changesets folded\n' % len(revs)) |
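
For reference, the traversal implemented by the removed `relevantmarkers` method can be sketched with plain dictionaries, independent of Mercurial's obsstore internals (a simplified illustration with my own parameter names, not core Mercurial's implementation). Starting from a set of nodes, it collects the markers that list one of those nodes as a successor, plus the prune markers recorded for the node's direct children, then repeats the walk from the precursor node of every marker just found, until no new node turns up:

```python
def relevantmarkers(nodes, bysuccessor, prunedchildren):
    """Return every obsolescence marker relevant to `nodes`.

    bysuccessor:    node -> set of markers listing that node as a successor
    prunedchildren: node -> set of prune markers of that node's children
    A marker is modelled as a tuple whose first element is the precursor node.
    """
    seennodes = set(nodes)
    seenmarkers = set()
    pendingnodes = set(nodes)
    while pendingnodes:
        direct = set()
        for current in pendingnodes:
            direct.update(bysuccessor.get(current, ()))
            direct.update(prunedchildren.get(current, ()))
        direct -= seenmarkers                      # keep only unseen markers
        pendingnodes = set(m[0] for m in direct)   # walk back to their precursors
        seenmarkers |= direct
        pendingnodes -= seennodes
        seennodes |= pendingnodes
    return seenmarkers

# Tiny example: 'a' was rewritten into 'b', then 'b' into 'c'; asking about
# 'c' pulls in the whole chain of markers leading to it.
m1 = ('a', ('b',))
m2 = ('b', ('c',))
assert relevantmarkers(['c'], {'b': {m1}, 'c': {m2}}, {}) == {m1, m2}
```

The `prunedchildren` map is what the removed `pruneobsstore` rebuilt at load time from the `p1`/`p2` metadata that the `createmarkers` wrapper recorded at prune time; each loop iteration then steps one generation back through the obsolescence history.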