changeset 2123:cf7b4ab31f0c
split: move the debug command into a dedicated module
The code related to debugobsstorestat is fairly independent, so we move it into
its own module.
author      Pierre-Yves David <pierre-yves.david@ens-lyon.org>
date        Wed, 22 Mar 2017 03:13:15 +0100
parents     efc6633e78e1
children    6665aad2d41b
files       hgext3rd/evolve/__init__.py hgext3rd/evolve/debugcmd.py
diffstat    2 files changed, 131 insertions(+), 110 deletions(-)
--- a/hgext3rd/evolve/__init__.py Wed Mar 22 03:00:11 2017 +0100
+++ b/hgext3rd/evolve/__init__.py Wed Mar 22 03:13:15 2017 +0100
@@ -111,6 +111,7 @@
 from mercurial.node import nullid

 from . import (
+    debugcmd,
     obsdiscovery,
     obsexchange,
     exthelper,
@@ -143,6 +144,7 @@
 # - Older format compat

 eh = exthelper.exthelper()
+eh.merge(debugcmd.eh)
 eh.merge(obsdiscovery.eh)
 eh.merge(obsexchange.eh)
 uisetup = eh.final_uisetup
@@ -814,116 +816,6 @@
 _deprecatealias('gup', 'next')
 _deprecatealias('gdown', 'previous')

-@eh.command('debugobsstorestat', [], '')
-def cmddebugobsstorestat(ui, repo):
-    """print statistics about obsolescence markers in the repo"""
-    def _updateclustermap(nodes, mark, clustersmap):
-        c = (set(nodes), set([mark]))
-        toproceed = set(nodes)
-        while toproceed:
-            n = toproceed.pop()
-            other = clustersmap.get(n)
-            if (other is not None
-                and other is not c):
-                other[0].update(c[0])
-                other[1].update(c[1])
-                for on in c[0]:
-                    if on in toproceed:
-                        continue
-                    clustersmap[on] = other
-                c = other
-            clustersmap[n] = c
-
-    store = repo.obsstore
-    unfi = repo.unfiltered()
-    nm = unfi.changelog.nodemap
-    ui.write(_('markers total: %9i\n') % len(store._all))
-    sucscount = [0, 0, 0, 0]
-    known = 0
-    parentsdata = 0
-    metakeys = {}
-    # node -> cluster mapping
-    # a cluster is a (set(nodes), set(markers)) tuple
-    clustersmap = {}
-    # same data using parent information
-    pclustersmap = {}
-    for mark in store:
-        if mark[0] in nm:
-            known += 1
-        nbsucs = len(mark[1])
-        sucscount[min(nbsucs, 3)] += 1
-        meta = mark[3]
-        for key, value in meta:
-            metakeys.setdefault(key, 0)
-            metakeys[key] += 1
-        meta = dict(meta)
-        parents = [meta.get('p1'), meta.get('p2')]
-        parents = [node.bin(p) for p in parents if p is not None]
-        if parents:
-            parentsdata += 1
-        # cluster handling
-        nodes = set(mark[1])
-        nodes.add(mark[0])
-        _updateclustermap(nodes, mark, clustersmap)
-        # same with parent data
-        nodes.update(parents)
-        _updateclustermap(nodes, mark, pclustersmap)
-
-    # freezing the result
-    for c in clustersmap.values():
-        fc = (frozenset(c[0]), frozenset(c[1]))
-        for n in fc[0]:
-            clustersmap[n] = fc
-    # same with parent data
-    for c in pclustersmap.values():
-        fc = (frozenset(c[0]), frozenset(c[1]))
-        for n in fc[0]:
-            pclustersmap[n] = fc
-    ui.write((' for known precursors: %9i\n' % known))
-    ui.write((' with parents data: %9i\n' % parentsdata))
-    # successors data
-    ui.write(('markers with no successors: %9i\n' % sucscount[0]))
-    ui.write((' 1 successors: %9i\n' % sucscount[1]))
-    ui.write((' 2 successors: %9i\n' % sucscount[2]))
-    ui.write((' more than 2 successors: %9i\n' % sucscount[3]))
-    # meta data info
-    ui.write((' available keys:\n'))
-    for key in sorted(metakeys):
-        ui.write((' %15s: %9i\n' % (key, metakeys[key])))
-
-    allclusters = list(set(clustersmap.values()))
-    allclusters.sort(key=lambda x: len(x[1]))
-    ui.write(('disconnected clusters: %9i\n' % len(allclusters)))
-
-    ui.write(' any known node: %9i\n'
-             % len([c for c in allclusters
-                    if [n for n in c[0] if nm.get(n) is not None]]))
-    if allclusters:
-        nbcluster = len(allclusters)
-        ui.write((' smallest length: %9i\n' % len(allclusters[0][1])))
-        ui.write((' longer length: %9i\n'
-                 % len(allclusters[-1][1])))
-        median = len(allclusters[nbcluster // 2][1])
-        ui.write((' median length: %9i\n' % median))
-        mean = sum(len(x[1]) for x in allclusters) // nbcluster
-        ui.write((' mean length: %9i\n' % mean))
-    allpclusters = list(set(pclustersmap.values()))
-    allpclusters.sort(key=lambda x: len(x[1]))
-    ui.write((' using parents data: %9i\n' % len(allpclusters)))
-    ui.write(' any known node: %9i\n'
-             % len([c for c in allclusters
-                    if [n for n in c[0] if nm.get(n) is not None]]))
-    if allpclusters:
-        nbcluster = len(allpclusters)
-        ui.write((' smallest length: %9i\n'
-                 % len(allpclusters[0][1])))
-        ui.write((' longer length: %9i\n'
-                 % len(allpclusters[-1][1])))
-        median = len(allpclusters[nbcluster // 2][1])
-        ui.write((' median length: %9i\n' % median))
-        mean = sum(len(x[1]) for x in allpclusters) // nbcluster
-        ui.write((' mean length: %9i\n' % mean))
-
 def _solveone(ui, repo, ctx, dryrun, confirm, progresscb, category):
     """Resolve the troubles affecting one revision"""
     wlock = lock = tr = None
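
The wiring above shows the composition pattern the split relies on: each submodule
builds its own exthelper instance and registers commands on it, and the top-level
__init__.py merges those helpers before exposing the usual setup hooks. As a rough
sketch of how a further debug submodule could hook in the same way, assuming only
the exthelper API visible in this diff (exthelper(), eh.command, eh.merge); the
module name debugfoo and its command are hypothetical, not part of this changeset:

# hgext3rd/evolve/debugfoo.py (hypothetical submodule, modeled on debugcmd.py)
from . import exthelper

eh = exthelper.exthelper()          # per-module registrar, merged by __init__.py

@eh.command('debugfoo', [], '')     # hypothetical command name
def cmddebugfoo(ui, repo):
    """print a placeholder statistic (illustration only)"""
    ui.write('debugfoo: %d obsmarkers\n' % len(repo.obsstore._all))

__init__.py would then need the matching entry in its `from . import (...)` block
and an `eh.merge(debugfoo.eh)` call next to the existing ones, exactly as this
changeset does for debugcmd.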
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext3rd/evolve/debugcmd.py Wed Mar 22 03:13:15 2017 +0100
@@ -0,0 +1,129 @@
+# Code dedicated to debug commands around evolution
+#
+# Copyright 2017 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+# Status: Ready to Upstream
+#
+# * We could have the same code in core as `hg debugobsolete --stat`,
+# * We probably want a way for the extension to hook in for extra data.
+
+from mercurial import node
+
+from mercurial.i18n import _
+
+from . import exthelper
+
+eh = exthelper.exthelper()
+
+@eh.command('debugobsstorestat', [], '')
+def cmddebugobsstorestat(ui, repo):
+    """print statistics about obsolescence markers in the repo"""
+    def _updateclustermap(nodes, mark, clustersmap):
+        c = (set(nodes), set([mark]))
+        toproceed = set(nodes)
+        while toproceed:
+            n = toproceed.pop()
+            other = clustersmap.get(n)
+            if (other is not None
+                and other is not c):
+                other[0].update(c[0])
+                other[1].update(c[1])
+                for on in c[0]:
+                    if on in toproceed:
+                        continue
+                    clustersmap[on] = other
+                c = other
+            clustersmap[n] = c
+
+    store = repo.obsstore
+    unfi = repo.unfiltered()
+    nm = unfi.changelog.nodemap
+    ui.write(_('markers total: %9i\n') % len(store._all))
+    sucscount = [0, 0, 0, 0]
+    known = 0
+    parentsdata = 0
+    metakeys = {}
+    # node -> cluster mapping
+    # a cluster is a (set(nodes), set(markers)) tuple
+    clustersmap = {}
+    # same data using parent information
+    pclustersmap = {}
+    for mark in store:
+        if mark[0] in nm:
+            known += 1
+        nbsucs = len(mark[1])
+        sucscount[min(nbsucs, 3)] += 1
+        meta = mark[3]
+        for key, value in meta:
+            metakeys.setdefault(key, 0)
+            metakeys[key] += 1
+        meta = dict(meta)
+        parents = [meta.get('p1'), meta.get('p2')]
+        parents = [node.bin(p) for p in parents if p is not None]
+        if parents:
+            parentsdata += 1
+        # cluster handling
+        nodes = set(mark[1])
+        nodes.add(mark[0])
+        _updateclustermap(nodes, mark, clustersmap)
+        # same with parent data
+        nodes.update(parents)
+        _updateclustermap(nodes, mark, pclustersmap)
+
+    # freezing the result
+    for c in clustersmap.values():
+        fc = (frozenset(c[0]), frozenset(c[1]))
+        for n in fc[0]:
+            clustersmap[n] = fc
+    # same with parent data
+    for c in pclustersmap.values():
+        fc = (frozenset(c[0]), frozenset(c[1]))
+        for n in fc[0]:
+            pclustersmap[n] = fc
+    ui.write((' for known precursors: %9i\n' % known))
+    ui.write((' with parents data: %9i\n' % parentsdata))
+    # successors data
+    ui.write(('markers with no successors: %9i\n' % sucscount[0]))
+    ui.write((' 1 successors: %9i\n' % sucscount[1]))
+    ui.write((' 2 successors: %9i\n' % sucscount[2]))
+    ui.write((' more than 2 successors: %9i\n' % sucscount[3]))
+    # meta data info
+    ui.write((' available keys:\n'))
+    for key in sorted(metakeys):
+        ui.write((' %15s: %9i\n' % (key, metakeys[key])))
+
+    allclusters = list(set(clustersmap.values()))
+    allclusters.sort(key=lambda x: len(x[1]))
+    ui.write(('disconnected clusters: %9i\n' % len(allclusters)))
+
+    ui.write(' any known node: %9i\n'
+             % len([c for c in allclusters
+                    if [n for n in c[0] if nm.get(n) is not None]]))
+    if allclusters:
+        nbcluster = len(allclusters)
+        ui.write((' smallest length: %9i\n' % len(allclusters[0][1])))
+        ui.write((' longer length: %9i\n'
+                 % len(allclusters[-1][1])))
+        median = len(allclusters[nbcluster // 2][1])
+        ui.write((' median length: %9i\n' % median))
+        mean = sum(len(x[1]) for x in allclusters) // nbcluster
+        ui.write((' mean length: %9i\n' % mean))
+    allpclusters = list(set(pclustersmap.values()))
+    allpclusters.sort(key=lambda x: len(x[1]))
+    ui.write((' using parents data: %9i\n' % len(allpclusters)))
+    ui.write(' any known node: %9i\n'
+             % len([c for c in allclusters
+                    if [n for n in c[0] if nm.get(n) is not None]]))
+    if allpclusters:
+        nbcluster = len(allpclusters)
+        ui.write((' smallest length: %9i\n'
+                 % len(allpclusters[0][1])))
+        ui.write((' longer length: %9i\n'
+                 % len(allpclusters[-1][1])))
+        median = len(allpclusters[nbcluster // 2][1])
+        ui.write((' median length: %9i\n' % median))
+        mean = sum(len(x[1]) for x in allpclusters) // nbcluster
+        ui.write((' mean length: %9i\n' % mean))
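
For readers digging into the statistics themselves, the clustering step in
cmddebugobsstorestat is essentially a connected-components pass over the obsstore:
each marker links its precursor, its successors and, in the second pass, the p1/p2
parents recorded in its metadata, and any existing clusters that share a node are
merged. The following is a simplified standalone sketch of that merging logic; the
function name and the (precursor, successors) input shape are illustrative, not
part of the extension:

def cluster_markers(markers):
    """Group markers whose precursor/successor nodes overlap.

    ``markers`` is an iterable of (precursor, successors) pairs; the result
    is a list of (nodes, marker-indexes) clusters. Simplified illustration
    of _updateclustermap, not production code.
    """
    clusters = {}  # node -> (set of nodes, set of marker indexes)
    for i, (prec, sucs) in enumerate(markers):
        nodes = {prec} | set(sucs)
        cluster = (set(nodes), {i})
        for n in nodes:
            other = clusters.get(n)
            if other is not None and other is not cluster:
                # merge the current cluster into the one already registered
                # for this node, then repoint its members to the survivor
                other[0].update(cluster[0])
                other[1].update(cluster[1])
                for on in cluster[0]:
                    clusters[on] = other
                cluster = other
        for n in cluster[0]:
            clusters[n] = cluster
    # each cluster object is referenced once per member node; deduplicate
    return list({id(c): c for c in clusters.values()}.values())

The command runs this twice, once over successor relations only (clustersmap) and
once also following the recorded parents (pclustersmap), then prints size
statistics for each set of clusters; after this change a plain
`hg debugobsstorestat` invocation is all that is needed to see them.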