hgext/relink.py

# Mercurial extension to provide 'hg relink' command
#
# Copyright (C) 2007 Brendan Cully <brendan@kublai.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

"""recreates hardlinks between repository clones"""

from mercurial import cmdutil, hg, util
from mercurial.i18n import _
import os, stat

cmdtable = {}
command = cmdutil.command(cmdtable)
testedwith = 'internal'

@command('relink', [], _('[ORIGIN]'))
def relink(ui, repo, origin=None, **opts):
    """recreate hardlinks between two repositories

    When repositories are cloned locally, their data files will be
    hardlinked so that they only use the space of a single repository.

    Unfortunately, subsequent pulls into either repository will break
    hardlinks for any files touched by the new changesets, even if
    both repositories end up pulling the same changes.

    Similarly, passing --rev to "hg clone" will fail to use any
    hardlinks, falling back to a complete copy of the source
    repository.

    This command lets you recreate those hardlinks and reclaim that
    wasted space.

    This repository will be relinked to share space with ORIGIN, which
    must be on the same local disk. If ORIGIN is omitted, the
    "default-relink" path from [paths] is tried first, then "default".

    Do not attempt any read operations on this repository while the
    command is running. (Both repositories will be locked against
    writes.)
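
    For example, assuming the original clone lives in a sibling
    directory (an illustrative path)::

      hg relink ../original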
    """
    if (not util.safehasattr(util, 'samefile') or
        not util.safehasattr(util, 'samedevice')):
        raise util.Abort(_('hardlinks are not supported on this system'))
    src = hg.repository(repo.baseui,
                        ui.expandpath(origin or 'default-relink',
                                      origin or 'default'))
    ui.status(_('relinking %s to %s\n') % (src.store.path, repo.store.path))
    if repo.root == src.root:
        ui.status(_('there is nothing to relink\n'))
        return

    if not util.samedevice(src.store.path, repo.store.path):
        # No point in continuing
        raise util.Abort(_('source and destination are on different devices'))

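    # Lock both repositories so their stores cannot change while we
    # collect, prune and relink files.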
    locallock = repo.lock()
    try:
        remotelock = src.lock()
        try:
            candidates = sorted(collect(src, ui))
            targets = prune(candidates, src.store.path, repo.store.path, ui)
            do_relink(src.store.path, repo.store.path, targets, ui)
        finally:
            remotelock.release()
    finally:
        locallock.release()

def collect(src, ui):
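    """Gather candidate store files from the source repository.

    Walks the source store and returns a list of (relative path,
    stat result) pairs for every regular '.d' or '.i' file found.
    """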
    seplen = len(os.path.sep)
    candidates = []
    live = len(src['tip'].manifest())
    # Your average repository has some files which were deleted before
    # the tip revision. We account for that by assuming that there are
    # 3 tracked files for every 2 live files as of the tip version of
    # the repository.
    #
    # mozilla-central as of 2010-06-10 had a ratio of just over 7:5.
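    # For example, a tip manifest of 100,000 files yields an estimated
    # total of 150,000.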
    total = live * 3 // 2
    src = src.store.path
    pos = 0
    ui.status(_("tip has %d files, estimated total number of files: %s\n")
              % (live, total))
    for dirpath, dirnames, filenames in os.walk(src):
        dirnames.sort()
        relpath = dirpath[len(src) + seplen:]
        for filename in sorted(filenames):
            if filename[-2:] not in ('.d', '.i'):
                continue
            st = os.stat(os.path.join(dirpath, filename))
            if not stat.S_ISREG(st.st_mode):
                continue
            pos += 1
            candidates.append((os.path.join(relpath, filename), st))
            ui.progress(_('collecting'), pos, filename, _('files'), total)

    ui.progress(_('collecting'), None)
    ui.status(_('collected %d candidate storage files\n') % len(candidates))
    return candidates

def prune(candidates, src, dst, ui):
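    """Filter candidates down to files worth trying to relink.

    A candidate is kept only if the destination copy exists, is not
    already hardlinked to the source, sits on the same device and has
    the same size. Returns (filename, size) pairs.
    """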
    def linkfilter(src, dst, st):
        try:
            ts = os.stat(dst)
        except OSError:
            # Destination doesn't have this file?
            return False
        if util.samefile(src, dst):
            return False
        if not util.samedevice(src, dst):
            # No point in continuing
            raise util.Abort(
                _('source and destination are on different devices'))
        if st.st_size != ts.st_size:
            return False
        return st

    targets = []
    total = len(candidates)
    pos = 0
    for fn, st in candidates:
        pos += 1
        srcpath = os.path.join(src, fn)
        tgt = os.path.join(dst, fn)
        ts = linkfilter(srcpath, tgt, st)
        if not ts:
            ui.debug('not linkable: %s\n' % fn)
            continue
        targets.append((fn, ts.st_size))
        ui.progress(_('pruning'), pos, fn, _('files'), total)

    ui.progress(_('pruning'), None)
    ui.status(_('pruned down to %d probably relinkable files\n') % len(targets))
    return targets

def do_relink(src, dst, files, ui):
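    """Replace destination files with hardlinks to their source copies.

    Each pair is first compared chunk by chunk; only byte-identical
    files are relinked. The destination is renamed to a '.bak' file
    during the swap and restored if creating the hardlink fails.
    """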
    def relinkfile(src, dst):
        bak = dst + '.bak'
        os.rename(dst, bak)
        try:
            util.oslink(src, dst)
        except OSError:
            os.rename(bak, dst)
            raise
        os.remove(bak)

    CHUNKLEN = 65536
    relinked = 0
    savedbytes = 0

    pos = 0
    total = len(files)
    for f, sz in files:
        pos += 1
        source = os.path.join(src, f)
        tgt = os.path.join(dst, f)
        # Binary mode, so that read() works correctly, especially on Windows
        sfp = open(source, 'rb')
        dfp = open(tgt, 'rb')
        sin = sfp.read(CHUNKLEN)
        while sin:
            din = dfp.read(CHUNKLEN)
            if sin != din:
                break
            sin = sfp.read(CHUNKLEN)
        sfp.close()
        dfp.close()
        if sin:
            ui.debug('not linkable: %s\n' % f)
            continue
        try:
            relinkfile(source, tgt)
            ui.progress(_('relinking'), pos, f, _('files'), total)
            relinked += 1
            savedbytes += sz
        except OSError as inst:
            ui.warn('%s: %s\n' % (tgt, str(inst)))

    ui.progress(_('relinking'), None)

    ui.status(_('relinked %d files (%s reclaimed)\n') %
              (relinked, util.bytecount(savedbytes)))