view tests/svnxml.py @ 39270:37e56607cbb9


# Read the output of a "svn log --xml" command on stdin, parse it and
# print a subset of attributes common to all svn versions tested by
# hg.
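#
# An illustrative sketch (the element contents below are made up; a real
# "svn log --xml -v" document also carries <date> and other elements that
# this script ignores):
#
#   <log>
#     <logentry revision="2">
#       <author>alice</author>
#       <msg>copy a file</msg>
#       <paths>
#         <path action="A"
#               copyfrom-path="/trunk/a"
#               copyfrom-rev="1">/trunk/b</path>
#       </paths>
#     </logentry>
#   </log>
#
# would be printed as:
#
#   revision: 2
#   author: alice
#   msg: copy a file
#    A /trunk/b (from /trunk/a@1)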
from __future__ import absolute_import
import sys
import xml.dom.minidom

def xmltext(e):
    # Concatenate the text nodes directly under an element.
    return ''.join(c.data for c in e.childNodes
                   if c.nodeType == c.TEXT_NODE)

def parseentry(entry):
    e = {}
    e['revision'] = entry.getAttribute('revision')
    e['author'] = xmltext(entry.getElementsByTagName('author')[0])
    e['msg'] = xmltext(entry.getElementsByTagName('msg')[0])
    e['paths'] = []
    paths = entry.getElementsByTagName('paths')
    if paths:
        paths = paths[0]
        for p in paths.getElementsByTagName('path'):
            action = p.getAttribute('action')
            path = xmltext(p)
            frompath = p.getAttribute('copyfrom-path')
            fromrev = p.getAttribute('copyfrom-rev')
            e['paths'].append((path, action, frompath, fromrev))
    return e

def parselog(data):
    entries = []
    doc = xml.dom.minidom.parseString(data)
    for e in doc.getElementsByTagName('logentry'):
        entries.append(parseentry(e))
    return entries

def printentries(entries):
    # Entries may contain non-ASCII author names and messages, so write
    # utf-8 encoded bytes; on Python 3 that requires the binary buffer.
    fp = getattr(sys.stdout, 'buffer', sys.stdout)
    for e in entries:
        for k in ('revision', 'author', 'msg'):
            fp.write(('%s: %s\n' % (k, e[k])).encode('utf-8'))
        for path, action, fpath, frev in sorted(e['paths']):
            frominfo = ''
            if frev:
                frominfo = ' (from %s@%s)' % (fpath, frev)
            p = ' %s %s%s\n' % (action, path, frominfo)
            fp.write(p.encode('utf-8'))

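# Illustrative usage (the repository URL below is a placeholder):
#
#   svn log --xml -v file:///tmp/repo | python svnxml.py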
if __name__ == '__main__':
    data = sys.stdin.read()
    entries = parselog(data)
    printentries(entries)