view hgext/share.py @ 21883:87aa279f7073

largefiles: show also how many data entities are outgoing at "hg outgoing"

Before this patch, "hg outgoing --large" shows which largefiles are changed or added in outgoing revisions only from the point of view of filenames.

For example, judging from the list of outgoing largefiles in the "hg outgoing" output, users would expect the former case below to cost much more to upload than the latter:

- outgoing revisions add a hundred largefiles, but all of them refer to the same data entity: only one data entity is outgoing, even though "hg summary" says that a hundred largefiles are outgoing.

- a hundred outgoing revisions change only one largefile, each time with distinct data: a hundred data entities are outgoing, even though "hg summary" says that only one largefile is outgoing.

In fact, the latter costs much more than the former.

This patch also shows how many data entities are outgoing at "hg outgoing", by counting the number of unique hash values among the outgoing largefiles (a standalone sketch after the changeset fields below illustrates this counting). When "--debug" is specified, it also shows which entities (by hash) are outgoing for each listed largefile, for debugging purposes.

In the "ui.debugflag" path, "addfunc()" can always append the given "lfhash" to the list "toupload[fn]" without a duplication check, because de-duplication is already done in "_getoutgoings()".
author FUJIWARA Katsunori <foozy@lares.dti.ne.jp>
date Mon, 07 Jul 2014 18:45:46 +0900
parents 5a4d1a6c605f
children 141baca16059
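The counting described above can be illustrated with a short, standalone sketch. This is not the actual largefiles code: "summarize_outgoing" and the sample data are hypothetical, and only mimic how "addfunc()" fills "toupload[fn]" with hashes that "_getoutgoings()" has already de-duplicated::

    # Standalone sketch (not the real largefiles code): compare the number of
    # outgoing largefile names with the number of distinct data entities.
    def summarize_outgoing(outgoing_pairs):
        # outgoing_pairs: iterable of (filename, lfhash) pairs, assumed to be
        # de-duplicated already, as _getoutgoings() guarantees in the extension
        toupload = {}  # filename -> list of outgoing hashes
        for fn, lfhash in outgoing_pairs:
            toupload.setdefault(fn, []).append(lfhash)
        filenames = len(toupload)
        entities = len(set(h for hashes in toupload.values() for h in hashes))
        return filenames, entities

    # a hundred largefiles that all refer to the same data entity:
    many_names = [('large%d' % i, 'deadbeef') for i in range(100)]
    # one largefile changed in a hundred revisions, each time with new data:
    many_revs = [('large0', 'hash%03d' % i) for i in range(100)]

    print(summarize_outgoing(many_names))  # (100, 1): one entity to upload
    print(summarize_outgoing(many_revs))   # (1, 100): a hundred entities to upload

The second case is the expensive one even though only a single filename is involved, which is exactly what counting unique hash values makes visible.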
line source

# Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

'''share a common history between several working directories'''

from mercurial.i18n import _
from mercurial import cmdutil, hg, util

cmdtable = {}
command = cmdutil.command(cmdtable)
testedwith = 'internal'

@command('share',
    [('U', 'noupdate', None, _('do not create a working copy'))],
    _('[-U] SOURCE [DEST]'),
    norepo=True)
def share(ui, source, dest=None, noupdate=False):
    """create a new shared repository

    Initialize a new repository and working directory that shares its
    history with another repository.

    .. note::

       using rollback or extensions that destroy/modify history (mq,
       rebase, etc.) can cause considerable confusion with shared
       clones. In particular, if two shared clones are both updated to
       the same changeset, and one of them destroys that changeset
       with rollback, the other clone will suddenly stop working: all
       operations will fail with "abort: working directory has unknown
       parent". The only known workaround is to use debugsetparents on
       the broken clone to reset it to a changeset that still exists.
    """

    return hg.share(ui, source, dest, not noupdate)
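# Usage example (a sketch, not prescriptive; paths are hypothetical and the
# share extension must be enabled in the configuration first):
#
#   $ hg share ~/src/main-repo ~/src/extra-checkout
#   $ hg share -U ~/src/main-repo ~/src/no-workdir   # skip the working copy
#
# Both destinations reuse the store of ~/src/main-repo instead of copying it.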

@command('unshare', [], '')
def unshare(ui, repo):
    """convert a shared repository to a normal one

    Copy the store data to the repo and remove the sharedpath data.
    """

    if repo.sharedpath == repo.path:
        raise util.Abort(_("this is not a shared repo"))

    destlock = lock = None
    lock = repo.lock()
    try:
        # we use locks here because if we race with commit, we
        # can end up with extra data in the cloned revlogs that's
        # not pointed to by changesets, thus causing verify to
        # fail

        destlock = hg.copystore(ui, repo, repo.path)

        sharefile = repo.join('sharedpath')
        util.rename(sharefile, sharefile + '.old')

        repo.requirements.discard('sharedpath')
        repo._writerequirements()
    finally:
        destlock and destlock.release()
        lock and lock.release()

    # update store, spath, sopener and sjoin of repo
    repo.unfiltered().__init__(repo.baseui, repo.root)
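# Usage example (a sketch, not prescriptive; the path is hypothetical):
#
#   $ cd ~/src/extra-checkout    # a repository created with "hg share"
#   $ hg unshare                 # copy the store locally and drop sharedpath
#
# Afterwards the repository no longer shares its history with the original.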