hgext/infinitepush/__init__.py
changeset 37189 03ff17a4bf53
child 37190 de4c2f3af97f
       
     1 # Infinite push
       
     2 #
       
     3 # Copyright 2016 Facebook, Inc.
       
     4 #
       
     5 # This software may be used and distributed according to the terms of the
       
     6 # GNU General Public License version 2 or any later version.
       
     7 """ store some pushes in a remote blob store on the server (EXPERIMENTAL)
       
     8 
       
     9     [infinitepush]
       
    10     # Server-side and client-side option. Pattern of the infinitepush bookmark
       
    11     branchpattern = PATTERN
       
    12 
       
    13     # Server or client
       
    14     server = False
       
    15 
       
    16     # Server-side option. Possible values: 'disk' or 'sql'. Fails if not set
       
    17     indextype = disk
       
    18 
       
    19     # Server-side option. Used only if indextype=sql.
       
    20     # Format: 'IP:PORT:DB_NAME:USER:PASSWORD'
       
    21     sqlhost = IP:PORT:DB_NAME:USER:PASSWORD
       
    22 
       
    23     # Server-side option. Used only if indextype=disk.
       
    24     # Filesystem path to the index store
       
    25     indexpath = PATH
       
    26 
       
    27     # Server-side option. Possible values: 'disk' or 'external'
       
    28     # Fails if not set
       
    29     storetype = disk
       
    30 
       
    31     # Server-side option.
       
    32     # Path to the binary that will save bundle to the bundlestore
       
    33     # Formatted cmd line will be passed to it (see `put_args`)
       
    34     put_binary = put
       
    35 
       
     36     # Server-side option. Used only if storetype=external.
       
    37     # Format cmd-line string for put binary. Placeholder: {filename}
       
    38     put_args = {filename}
       
    39 
       
    40     # Server-side option.
       
     41     # Path to the binary that gets a bundle from the bundlestore.
       
    42     # Formatted cmd line will be passed to it (see `get_args`)
       
    43     get_binary = get
       
    44 
       
     45     # Server-side option. Used only if storetype=external.
       
    46     # Format cmd-line string for get binary. Placeholders: {filename} {handle}
       
    47     get_args = {filename} {handle}
       
    48 
       
    49     # Server-side option
       
     50     logfile = FILE
       
    51 
       
    52     # Server-side option
       
    53     loglevel = DEBUG
       
    54 
       
    55     # Server-side option. Used only if indextype=sql.
       
    56     # Sets mysql wait_timeout option.
       
    57     waittimeout = 300
       
    58 
       
    59     # Server-side option. Used only if indextype=sql.
       
    60     # Sets mysql innodb_lock_wait_timeout option.
       
    61     locktimeout = 120
       
    62 
       
    63     # Server-side option. Used only if indextype=sql.
       
    64     # Name of the repository
       
    65     reponame = ''
       
    66 
       
    67     # Client-side option. Used by --list-remote option. List of remote scratch
       
    68     # patterns to list if no patterns are specified.
       
    69     defaultremotepatterns = ['*']
       
    70 
       
     71     # Server-side option. If a pushed bookmark matches

     72     # `fillmetadatabranchpattern`, then a background

     73     # `hg debugfillinfinitepushmetadata` process will save metadata

     74     # in the infinitepush index for nodes that are ancestors of the bookmark.
       
    75     fillmetadatabranchpattern = ''
       
    76 
       
    77     # Instructs infinitepush to forward all received bundle2 parts to the
       
    78     # bundle for storage. Defaults to False.
       
    79     storeallparts = True
       
    80 
       
    81     [remotenames]
       
    82     # Client-side option
       
     83     # This option should be set only if the remotenames extension is enabled.

     84     # Whether remote bookmarks are tracked by the remotenames extension.
       
    85     bookmarks = True
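
    # A minimal illustrative configuration; the values below are placeholders
    # chosen for the example, not defaults.
    #
    # Server side:
    #   [infinitepush]
    #   server = True
    #   branchpattern = re:scratch/.+
    #   indextype = disk
    #   indexpath = /path/to/index
    #   storetype = disk
    #
    # Client side:
    #   [infinitepush]
    #   branchpattern = re:scratch/.+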
       
    86 """
       
    87 
       
    88 from __future__ import absolute_import
       
    89 
       
    90 import collections
       
    91 import contextlib
       
    92 import errno
       
    93 import functools
       
    94 import json
       
    95 import logging
       
    96 import os
       
    97 import random
       
    98 import re
       
    99 import socket
       
   100 import struct
       
   101 import subprocess
       
   102 import sys
       
   103 import tempfile
       
   104 import time
       
   105 
       
   106 from mercurial.node import (
       
   107     bin,
       
   108     hex,
       
   109 )
       
   110 
       
   111 from mercurial.i18n import _
       
   112 
       
   113 from mercurial import (
       
   114     bundle2,
       
   115     changegroup,
       
   116     commands,
       
   117     discovery,
       
   118     encoding,
       
   119     error,
       
   120     exchange,
       
   121     extensions,
       
   122     hg,
       
   123     localrepo,
       
   124     peer,
       
   125     phases,
       
   126     pushkey,
       
   127     registrar,
       
   128     util,
       
   129     wireproto,
       
   130 )
       
   131 
       
   132 from . import (
       
   133     backupcommands,
       
   134     bundleparts,
       
   135     common,
       
   136     infinitepushcommands,
       
   137 )
       
   138 
       
   139 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
       
   140 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
       
   141 # be specifying the version(s) of Mercurial they are tested with, or
       
   142 # leave the attribute unspecified.
       
   143 testedwith = 'ships-with-hg-core'
       
   144 
       
   145 configtable = {}
       
   146 configitem = registrar.configitem(configtable)
       
   147 
       
   148 configitem('infinitepush', 'server',
       
   149     default=False,
       
   150 )
       
   151 configitem('infinitepush', 'storetype',
       
   152     default='',
       
   153 )
       
   154 configitem('infinitepush', 'indextype',
       
   155     default='',
       
   156 )
       
   157 configitem('infinitepush', 'indexpath',
       
   158     default='',
       
   159 )
       
   160 configitem('infinitepush', 'fillmetadatabranchpattern',
       
   161     default='',
       
   162 )
       
   163 configitem('infinitepush', 'storeallparts',
       
   164     default=False,
       
   165 )
       
   166 configitem('infinitepush', 'reponame',
       
   167     default='',
       
   168 )
       
   169 configitem('infinitepush', 'bundle-stream',
       
   170     default=False,
       
   171 )
       
   172 configitem('scratchbranch', 'storepath',
       
   173     default='',
       
   174 )
       
   175 configitem('infinitepush', 'branchpattern',
       
   176     default='',
       
   177 )
       
   178 configitem('infinitepush', 'metadatafilelimit',
       
   179     default=100,
       
   180 )
       
   181 configitem('infinitepushbackup', 'autobackup',
       
   182     default=False,
       
   183 )
       
   184 configitem('experimental', 'server-bundlestore-bookmark',
       
   185     default='',
       
   186 )
       
   187 configitem('experimental', 'server-bundlestore-create',
       
   188     default='',
       
   189 )
       
   190 configitem('experimental', 'infinitepush-scratchpush',
       
   191     default=False,
       
   192 )
       
   193 configitem('experimental', 'non-forward-move',
       
   194     default=False,
       
   195 )
       
   196 
       
   197 pushrebaseparttype = 'b2x:rebase'
       
   198 experimental = 'experimental'
       
   199 configbookmark = 'server-bundlestore-bookmark'
       
   200 configcreate = 'server-bundlestore-create'
       
   201 configscratchpush = 'infinitepush-scratchpush'
       
   202 confignonforwardmove = 'non-forward-move'
       
   203 
       
   204 scratchbranchparttype = bundleparts.scratchbranchparttype
       
   205 cmdtable = infinitepushcommands.cmdtable
       
   206 revsetpredicate = backupcommands.revsetpredicate
       
   207 templatekeyword = backupcommands.templatekeyword
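# _scratchbranchmatcher below is replaced in commonsetup() once the
# infinitepush.branchpattern config is read; until then nothing is treated as
# a scratch branch.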
       
   208 _scratchbranchmatcher = lambda x: False
       
   209 _maybehash = re.compile(r'^[a-f0-9]+$').search
       
   210 
       
   211 def _buildexternalbundlestore(ui):
       
   212     put_args = ui.configlist('infinitepush', 'put_args', [])
       
   213     put_binary = ui.config('infinitepush', 'put_binary')
       
   214     if not put_binary:
       
   215         raise error.Abort('put binary is not specified')
       
   216     get_args = ui.configlist('infinitepush', 'get_args', [])
       
   217     get_binary = ui.config('infinitepush', 'get_binary')
       
   218     if not get_binary:
       
   219         raise error.Abort('get binary is not specified')
       
   220     from . import store
       
   221     return store.externalbundlestore(put_binary, put_args, get_binary, get_args)
       
   222 
       
   223 def _buildsqlindex(ui):
       
   224     sqlhost = ui.config('infinitepush', 'sqlhost')
       
   225     if not sqlhost:
       
   226         raise error.Abort(_('please set infinitepush.sqlhost'))
       
   227     host, port, db, user, password = sqlhost.split(':')
       
   228     reponame = ui.config('infinitepush', 'reponame')
       
   229     if not reponame:
       
   230         raise error.Abort(_('please set infinitepush.reponame'))
       
   231 
       
   232     logfile = ui.config('infinitepush', 'logfile', '')
       
   233     waittimeout = ui.configint('infinitepush', 'waittimeout', 300)
       
   234     locktimeout = ui.configint('infinitepush', 'locktimeout', 120)
       
   235     from . import sqlindexapi
       
   236     return sqlindexapi.sqlindexapi(
       
   237         reponame, host, port, db, user, password,
       
   238         logfile, _getloglevel(ui), waittimeout=waittimeout,
       
   239         locktimeout=locktimeout)
       
   240 
       
   241 def _getloglevel(ui):
       
   242     loglevel = ui.config('infinitepush', 'loglevel', 'DEBUG')
       
   243     numeric_loglevel = getattr(logging, loglevel.upper(), None)
       
   244     if not isinstance(numeric_loglevel, int):
       
   245         raise error.Abort(_('invalid log level %s') % loglevel)
       
   246     return numeric_loglevel
       
   247 
       
   248 def _tryhoist(ui, remotebookmark):
       
    249     '''returns a bookmark with the hoisted part removed

    250 

    251     The remotenames extension has a 'hoist' config that allows using remote

    252     bookmarks without specifying the remote path. For example, 'hg update master'

    253     works as well as 'hg update remote/master'. We want to allow the same in

    254     infinitepush.
       
   255     '''
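    # Illustrative example (the bookmark name is made up): with
    # remotenames.hoist set to 'default', _tryhoist(ui, 'default/scratch/book')
    # returns 'scratch/book'; anything else is returned unchanged.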
       
   256 
       
   257     if common.isremotebooksenabled(ui):
       
   258         hoist = ui.config('remotenames', 'hoist') + '/'
       
   259         if remotebookmark.startswith(hoist):
       
   260             return remotebookmark[len(hoist):]
       
   261     return remotebookmark
       
   262 
       
   263 class bundlestore(object):
       
   264     def __init__(self, repo):
       
   265         self._repo = repo
       
   266         storetype = self._repo.ui.config('infinitepush', 'storetype', '')
       
   267         if storetype == 'disk':
       
   268             from . import store
       
   269             self.store = store.filebundlestore(self._repo.ui, self._repo)
       
   270         elif storetype == 'external':
       
   271             self.store = _buildexternalbundlestore(self._repo.ui)
       
   272         else:
       
   273             raise error.Abort(
       
   274                 _('unknown infinitepush store type specified %s') % storetype)
       
   275 
       
   276         indextype = self._repo.ui.config('infinitepush', 'indextype', '')
       
   277         if indextype == 'disk':
       
   278             from . import fileindexapi
       
   279             self.index = fileindexapi.fileindexapi(self._repo)
       
   280         elif indextype == 'sql':
       
   281             self.index = _buildsqlindex(self._repo.ui)
       
   282         else:
       
   283             raise error.Abort(
       
   284                 _('unknown infinitepush index type specified %s') % indextype)
       
   285 
       
   286 def _isserver(ui):
       
   287     return ui.configbool('infinitepush', 'server')
       
   288 
       
   289 def reposetup(ui, repo):
       
   290     if _isserver(ui) and repo.local():
       
   291         repo.bundlestore = bundlestore(repo)
       
   292 
       
   293 def uisetup(ui):
       
   294     # remotenames circumvents the default push implementation entirely, so make
       
   295     # sure we load after it so that we wrap it.
       
   296     order = extensions._order
       
   297     order.remove('infinitepush')
       
   298     order.append('infinitepush')
       
   299     extensions._order = order
       
   300 
       
   301 def extsetup(ui):
       
   302     # Allow writing backup files outside the normal lock
       
   303     localrepo.localrepository._wlockfreeprefix.update([
       
   304         backupcommands._backupstatefile,
       
   305         backupcommands._backupgenerationfile,
       
   306         backupcommands._backuplatestinfofile,
       
   307     ])
       
   308 
       
   309     commonsetup(ui)
       
   310     if _isserver(ui):
       
   311         serverextsetup(ui)
       
   312     else:
       
   313         clientextsetup(ui)
       
   314 
       
   315 def commonsetup(ui):
       
   316     wireproto.commands['listkeyspatterns'] = (
       
   317         wireprotolistkeyspatterns, 'namespace patterns')
       
   318     scratchbranchpat = ui.config('infinitepush', 'branchpattern')
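    # The pattern uses util.stringmatcher syntax: plain strings are matched
    # literally, while 're:' patterns (e.g. 're:scratch/.+') are treated as
    # regular expressions.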
       
   319     if scratchbranchpat:
       
   320         global _scratchbranchmatcher
       
   321         kind, pat, _scratchbranchmatcher = util.stringmatcher(scratchbranchpat)
       
   322 
       
   323 def serverextsetup(ui):
       
   324     origpushkeyhandler = bundle2.parthandlermapping['pushkey']
       
   325 
       
   326     def newpushkeyhandler(*args, **kwargs):
       
   327         bundle2pushkey(origpushkeyhandler, *args, **kwargs)
       
   328     newpushkeyhandler.params = origpushkeyhandler.params
       
   329     bundle2.parthandlermapping['pushkey'] = newpushkeyhandler
       
   330 
       
   331     orighandlephasehandler = bundle2.parthandlermapping['phase-heads']
       
   332     newphaseheadshandler = lambda *args, **kwargs: \
       
   333         bundle2handlephases(orighandlephasehandler, *args, **kwargs)
       
   334     newphaseheadshandler.params = orighandlephasehandler.params
       
   335     bundle2.parthandlermapping['phase-heads'] = newphaseheadshandler
       
   336 
       
   337     extensions.wrapfunction(localrepo.localrepository, 'listkeys',
       
   338                             localrepolistkeys)
       
   339     wireproto.commands['lookup'] = (
       
   340         _lookupwrap(wireproto.commands['lookup'][0]), 'key')
       
   341     extensions.wrapfunction(exchange, 'getbundlechunks', getbundlechunks)
       
   342 
       
   343     extensions.wrapfunction(bundle2, 'processparts', processparts)
       
   344 
       
   345 def clientextsetup(ui):
       
   346     entry = extensions.wrapcommand(commands.table, 'push', _push)
       
   347     # Don't add the 'to' arg if it already exists
       
   348     if not any(a for a in entry[1] if a[1] == 'to'):
       
   349         entry[1].append(('', 'to', '', _('push revs to this bookmark')))
       
   350 
       
   351     if not any(a for a in entry[1] if a[1] == 'non-forward-move'):
       
   352         entry[1].append(('', 'non-forward-move', None,
       
   353                          _('allows moving a remote bookmark to an '
       
   354                            'arbitrary place')))
       
   355 
       
   356     if not any(a for a in entry[1] if a[1] == 'create'):
       
   357         entry[1].append(
       
   358             ('', 'create', None, _('create a new remote bookmark')))
       
   359 
       
   360     entry[1].append(
       
   361         ('', 'bundle-store', None,
       
   362          _('force push to go to bundle store (EXPERIMENTAL)')))
       
   363 
       
   364     bookcmd = extensions.wrapcommand(commands.table, 'bookmarks', exbookmarks)
       
   365     bookcmd[1].append(
       
   366         ('', 'list-remote', None,
       
   367          'list remote bookmarks. '
       
   368          'Positional arguments are interpreted as wildcard patterns. '
       
    369          'Only allowed wildcard is \'*\' at the end of the pattern. '
       
   370          'If no positional arguments are specified then it will list '
       
   371          'the most "important" remote bookmarks. '
       
   372          'Otherwise it will list remote bookmarks '
       
   373          'that match at least one pattern '
       
   374          ''))
       
   375     bookcmd[1].append(
       
   376         ('', 'remote-path', '',
       
   377          'name of the remote path to list the bookmarks'))
       
   378 
       
   379     extensions.wrapcommand(commands.table, 'pull', _pull)
       
   380     extensions.wrapcommand(commands.table, 'update', _update)
       
   381 
       
   382     extensions.wrapfunction(discovery, 'checkheads', _checkheads)
       
   383     extensions.wrapfunction(bundle2, '_addpartsfromopts', _addpartsfromopts)
       
   384 
       
   385     wireproto.wirepeer.listkeyspatterns = listkeyspatterns
       
   386 
       
   387     # Move infinitepush part before pushrebase part
       
   388     # to avoid generation of both parts.
       
   389     partorder = exchange.b2partsgenorder
       
   390     index = partorder.index('changeset')
       
   391     if pushrebaseparttype in partorder:
       
   392         index = min(index, partorder.index(pushrebaseparttype))
       
   393     partorder.insert(
       
   394         index, partorder.pop(partorder.index(scratchbranchparttype)))
       
   395 
       
   396     def wrapsmartlog(loaded):
       
   397         if not loaded:
       
   398             return
       
   399         smartlogmod = extensions.find('smartlog')
       
   400         extensions.wrapcommand(smartlogmod.cmdtable, 'smartlog', _smartlog)
       
   401     extensions.afterloaded('smartlog', wrapsmartlog)
       
   402     backupcommands.extsetup(ui)
       
   403 
       
   404 def _smartlog(orig, ui, repo, **opts):
       
   405     res = orig(ui, repo, **opts)
       
   406     backupcommands.smartlogsummary(ui, repo)
       
   407     return res
       
   408 
       
   409 def _showbookmarks(ui, bookmarks, **opts):
       
   410     # Copy-paste from commands.py
       
   411     fm = ui.formatter('bookmarks', opts)
       
   412     for bmark, n in sorted(bookmarks.iteritems()):
       
   413         fm.startitem()
       
   414         if not ui.quiet:
       
   415             fm.plain('   ')
       
   416         fm.write('bookmark', '%s', bmark)
       
   417         pad = ' ' * (25 - encoding.colwidth(bmark))
       
   418         fm.condwrite(not ui.quiet, 'node', pad + ' %s', n)
       
   419         fm.plain('\n')
       
   420     fm.end()
       
   421 
       
   422 def exbookmarks(orig, ui, repo, *names, **opts):
       
   423     pattern = opts.get('list_remote')
       
   424     delete = opts.get('delete')
       
   425     remotepath = opts.get('remote_path')
       
   426     path = ui.paths.getpath(remotepath or None, default=('default'))
       
   427     if pattern:
       
   428         destpath = path.pushloc or path.loc
       
   429         other = hg.peer(repo, opts, destpath)
       
   430         if not names:
       
   431             raise error.Abort(
       
   432                 '--list-remote requires a bookmark pattern',
       
   433                 hint='use "hg book" to get a list of your local bookmarks')
       
   434         else:
       
   435             fetchedbookmarks = other.listkeyspatterns('bookmarks',
       
   436                                                       patterns=names)
       
   437         _showbookmarks(ui, fetchedbookmarks, **opts)
       
   438         return
       
   439     elif delete and 'remotenames' in extensions._extensions:
       
   440         existing_local_bms = set(repo._bookmarks.keys())
       
   441         scratch_bms = []
       
   442         other_bms = []
       
   443         for name in names:
       
   444             if _scratchbranchmatcher(name) and name not in existing_local_bms:
       
   445                 scratch_bms.append(name)
       
   446             else:
       
   447                 other_bms.append(name)
       
   448 
       
   449         if len(scratch_bms) > 0:
       
   450             if remotepath == '':
       
   451                 remotepath = 'default'
       
   452             _deleteinfinitepushbookmarks(ui,
       
   453                                          repo,
       
   454                                          remotepath,
       
   455                                          scratch_bms)
       
   456 
       
   457         if len(other_bms) > 0 or len(scratch_bms) == 0:
       
   458             return orig(ui, repo, *other_bms, **opts)
       
   459     else:
       
   460         return orig(ui, repo, *names, **opts)
       
   461 
       
   462 def _checkheads(orig, pushop):
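    # Scratch pushes go to the bundlestore rather than adding new heads to the
    # server repo, so the usual new-heads check is skipped for them.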
       
   463     if pushop.ui.configbool(experimental, configscratchpush, False):
       
   464         return
       
   465     return orig(pushop)
       
   466 
       
   467 def _addpartsfromopts(orig, ui, repo, bundler, *args, **kwargs):
       
   468     """ adds a stream level part to bundle2 storing whether this is an
       
   469     infinitepush bundle or not
       
   470     This functionality is hidden behind a config option:
       
   471 
       
   472     [infinitepush]
       
   473     bundle-stream = True
       
   474     """
       
   475     if ui.configbool('infinitepush', 'bundle-stream', False):
       
   476         bundler.addparam('infinitepush', True)
       
   477     return orig(ui, repo, bundler, *args, **kwargs)
       
   478 
       
   479 def wireprotolistkeyspatterns(repo, proto, namespace, patterns):
       
   480     patterns = wireproto.decodelist(patterns)
       
   481     d = repo.listkeys(encoding.tolocal(namespace), patterns).iteritems()
       
   482     return pushkey.encodekeys(d)
       
   483 
       
   484 def localrepolistkeys(orig, self, namespace, patterns=None):
       
   485     if namespace == 'bookmarks' and patterns:
       
   486         index = self.bundlestore.index
       
   487         results = {}
       
   488         bookmarks = orig(self, namespace)
       
   489         for pattern in patterns:
       
   490             results.update(index.getbookmarks(pattern))
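            # A trailing '*' acts as a wildcard: a pattern such as
            # 'scratch/me/*' (made-up example) is rewritten to the regex
            # 're:^scratch/me/.*' before matching local bookmark names.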
       
   491             if pattern.endswith('*'):
       
   492                 pattern = 're:^' + pattern[:-1] + '.*'
       
   493             kind, pat, matcher = util.stringmatcher(pattern)
       
   494             for bookmark, node in bookmarks.iteritems():
       
   495                 if matcher(bookmark):
       
   496                     results[bookmark] = node
       
   497         return results
       
   498     else:
       
   499         return orig(self, namespace)
       
   500 
       
   501 @peer.batchable
       
   502 def listkeyspatterns(self, namespace, patterns):
       
   503     if not self.capable('pushkey'):
       
   504         yield {}, None
       
   505     f = peer.future()
       
   506     self.ui.debug('preparing listkeys for "%s" with pattern "%s"\n' %
       
   507                   (namespace, patterns))
       
   508     yield {
       
   509         'namespace': encoding.fromlocal(namespace),
       
   510         'patterns': wireproto.encodelist(patterns)
       
   511     }, f
       
   512     d = f.value
       
   513     self.ui.debug('received listkey for "%s": %i bytes\n'
       
   514                   % (namespace, len(d)))
       
   515     yield pushkey.decodekeys(d)
       
   516 
       
   517 def _readbundlerevs(bundlerepo):
       
   518     return list(bundlerepo.revs('bundle()'))
       
   519 
       
   520 def _includefilelogstobundle(bundlecaps, bundlerepo, bundlerevs, ui):
       
    521     '''Tells remotefilelog to include all changed files in the changegroup

    522 

    523     By default remotefilelog doesn't include file content in the changegroup.

    524     But we need to include it when we are fetching from the bundlestore.
       
   525     '''
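    # Sketch of the transformation (file names are made up): with changed
    # files {'a.txt', 'b.txt'}, an existing 'excludepattern=path:foo' cap
    # becomes 'excludepattern=path:foo\0a.txt\0b.txt'; if no such cap exists,
    # 'excludepattern=a.txt\0b.txt' is appended.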
       
   526     changedfiles = set()
       
   527     cl = bundlerepo.changelog
       
   528     for r in bundlerevs:
       
   529         # [3] means changed files
       
   530         changedfiles.update(cl.read(r)[3])
       
   531     if not changedfiles:
       
   532         return bundlecaps
       
   533 
       
   534     changedfiles = '\0'.join(changedfiles)
       
   535     newcaps = []
       
   536     appended = False
       
   537     for cap in (bundlecaps or []):
       
   538         if cap.startswith('excludepattern='):
       
   539             newcaps.append('\0'.join((cap, changedfiles)))
       
   540             appended = True
       
   541         else:
       
   542             newcaps.append(cap)
       
   543     if not appended:
       
    544         # No excludepattern cap was found. Just append it
       
   545         newcaps.append('excludepattern=' + changedfiles)
       
   546 
       
   547     return newcaps
       
   548 
       
   549 def _rebundle(bundlerepo, bundleroots, unknownhead):
       
   550     '''
       
    551     A bundle may include more revisions than the user requested. For example,

    552     the user may ask for one revision, but the bundle also contains its descendants.

    553     This function filters out all revisions that the user did not request.
       
   554     '''
       
   555     parts = []
       
   556 
       
   557     version = '02'
       
   558     outgoing = discovery.outgoing(bundlerepo, commonheads=bundleroots,
       
   559                                   missingheads=[unknownhead])
       
   560     cgstream = changegroup.makestream(bundlerepo, outgoing, version, 'pull')
       
   561     cgstream = util.chunkbuffer(cgstream).read()
       
   562     cgpart = bundle2.bundlepart('changegroup', data=cgstream)
       
   563     cgpart.addparam('version', version)
       
   564     parts.append(cgpart)
       
   565 
       
   566     try:
       
   567         treemod = extensions.find('treemanifest')
       
   568     except KeyError:
       
   569         pass
       
   570     else:
       
   571         if treemod._cansendtrees(bundlerepo, outgoing.missing):
       
   572             treepart = treemod.createtreepackpart(bundlerepo, outgoing,
       
   573                                                   treemod.TREEGROUP_PARTTYPE2)
       
   574             parts.append(treepart)
       
   575 
       
   576     return parts
       
   577 
       
   578 def _getbundleroots(oldrepo, bundlerepo, bundlerevs):
       
   579     cl = bundlerepo.changelog
       
   580     bundleroots = []
       
   581     for rev in bundlerevs:
       
   582         node = cl.node(rev)
       
   583         parents = cl.parents(node)
       
   584         for parent in parents:
       
   585             # include all revs that exist in the main repo
       
    586             # to make sure that the bundle can be applied client-side
       
   587             if parent in oldrepo:
       
   588                 bundleroots.append(parent)
       
   589     return bundleroots
       
   590 
       
   591 def _needsrebundling(head, bundlerepo):
       
   592     bundleheads = list(bundlerepo.revs('heads(bundle())'))
       
   593     return not (len(bundleheads) == 1 and
       
   594                 bundlerepo[bundleheads[0]].node() == head)
       
   595 
       
   596 def _generateoutputparts(head, bundlerepo, bundleroots, bundlefile):
       
    597     '''generates the bundle that will be sent to the user

    598 

    599     returns a list of bundle2 parts
       
   600     '''
       
   601     parts = []
       
   602     if not _needsrebundling(head, bundlerepo):
       
   603         with util.posixfile(bundlefile, "rb") as f:
       
   604             unbundler = exchange.readbundle(bundlerepo.ui, f, bundlefile)
       
   605             if isinstance(unbundler, changegroup.cg1unpacker):
       
   606                 part = bundle2.bundlepart('changegroup',
       
   607                                           data=unbundler._stream.read())
       
   608                 part.addparam('version', '01')
       
   609                 parts.append(part)
       
   610             elif isinstance(unbundler, bundle2.unbundle20):
       
   611                 haschangegroup = False
       
   612                 for part in unbundler.iterparts():
       
   613                     if part.type == 'changegroup':
       
   614                         haschangegroup = True
       
   615                     newpart = bundle2.bundlepart(part.type, data=part.read())
       
   616                     for key, value in part.params.iteritems():
       
   617                         newpart.addparam(key, value)
       
   618                     parts.append(newpart)
       
   619 
       
   620                 if not haschangegroup:
       
   621                     raise error.Abort(
       
   622                         'unexpected bundle without changegroup part, ' +
       
   623                         'head: %s' % hex(head),
       
   624                         hint='report to administrator')
       
   625             else:
       
   626                 raise error.Abort('unknown bundle type')
       
   627     else:
       
   628         parts = _rebundle(bundlerepo, bundleroots, head)
       
   629 
       
   630     return parts
       
   631 
       
   632 def getbundlechunks(orig, repo, source, heads=None, bundlecaps=None, **kwargs):
       
   633     heads = heads or []
       
   634     # newheads are parents of roots of scratch bundles that were requested
       
   635     newphases = {}
       
   636     scratchbundles = []
       
   637     newheads = []
       
   638     scratchheads = []
       
   639     nodestobundle = {}
       
   640     allbundlestocleanup = []
       
   641     try:
       
   642         for head in heads:
       
   643             if head not in repo.changelog.nodemap:
       
   644                 if head not in nodestobundle:
       
   645                     newbundlefile = common.downloadbundle(repo, head)
       
   646                     bundlepath = "bundle:%s+%s" % (repo.root, newbundlefile)
       
   647                     bundlerepo = hg.repository(repo.ui, bundlepath)
       
   648 
       
   649                     allbundlestocleanup.append((bundlerepo, newbundlefile))
       
   650                     bundlerevs = set(_readbundlerevs(bundlerepo))
       
   651                     bundlecaps = _includefilelogstobundle(
       
   652                         bundlecaps, bundlerepo, bundlerevs, repo.ui)
       
   653                     cl = bundlerepo.changelog
       
   654                     bundleroots = _getbundleroots(repo, bundlerepo, bundlerevs)
       
   655                     for rev in bundlerevs:
       
   656                         node = cl.node(rev)
       
   657                         newphases[hex(node)] = str(phases.draft)
       
   658                         nodestobundle[node] = (bundlerepo, bundleroots,
       
   659                                                newbundlefile)
       
   660 
       
   661                 scratchbundles.append(
       
   662                     _generateoutputparts(head, *nodestobundle[head]))
       
   663                 newheads.extend(bundleroots)
       
   664                 scratchheads.append(head)
       
   665     finally:
       
   666         for bundlerepo, bundlefile in allbundlestocleanup:
       
   667             bundlerepo.close()
       
   668             try:
       
   669                 os.unlink(bundlefile)
       
   670             except (IOError, OSError):
       
    671                 # if we can't clean up the file then just ignore the error,
       
   672                 # no need to fail
       
   673                 pass
       
   674 
       
   675     pullfrombundlestore = bool(scratchbundles)
       
   676     wrappedchangegrouppart = False
       
   677     wrappedlistkeys = False
       
   678     oldchangegrouppart = exchange.getbundle2partsmapping['changegroup']
       
   679     try:
       
   680         def _changegrouppart(bundler, *args, **kwargs):
       
   681             # Order is important here. First add non-scratch part
       
   682             # and only then add parts with scratch bundles because
       
   683             # non-scratch part contains parents of roots of scratch bundles.
       
   684             result = oldchangegrouppart(bundler, *args, **kwargs)
       
   685             for bundle in scratchbundles:
       
   686                 for part in bundle:
       
   687                     bundler.addpart(part)
       
   688             return result
       
   689 
       
   690         exchange.getbundle2partsmapping['changegroup'] = _changegrouppart
       
   691         wrappedchangegrouppart = True
       
   692 
       
   693         def _listkeys(orig, self, namespace):
       
   694             origvalues = orig(self, namespace)
       
   695             if namespace == 'phases' and pullfrombundlestore:
       
   696                 if origvalues.get('publishing') == 'True':
       
   697                     # Make repo non-publishing to preserve draft phase
       
   698                     del origvalues['publishing']
       
   699                 origvalues.update(newphases)
       
   700             return origvalues
       
   701 
       
   702         extensions.wrapfunction(localrepo.localrepository, 'listkeys',
       
   703                                 _listkeys)
       
   704         wrappedlistkeys = True
       
   705         heads = list((set(newheads) | set(heads)) - set(scratchheads))
       
   706         result = orig(repo, source, heads=heads,
       
   707                       bundlecaps=bundlecaps, **kwargs)
       
   708     finally:
       
   709         if wrappedchangegrouppart:
       
   710             exchange.getbundle2partsmapping['changegroup'] = oldchangegrouppart
       
   711         if wrappedlistkeys:
       
   712             extensions.unwrapfunction(localrepo.localrepository, 'listkeys',
       
   713                                       _listkeys)
       
   714     return result
       
   715 
       
   716 def _lookupwrap(orig):
       
   717     def _lookup(repo, proto, key):
       
   718         localkey = encoding.tolocal(key)
       
   719 
       
   720         if isinstance(localkey, str) and _scratchbranchmatcher(localkey):
       
   721             scratchnode = repo.bundlestore.index.getnode(localkey)
       
   722             if scratchnode:
       
   723                 return "%s %s\n" % (1, scratchnode)
       
   724             else:
       
   725                 return "%s %s\n" % (0, 'scratch branch %s not found' % localkey)
       
   726         else:
       
   727             try:
       
   728                 r = hex(repo.lookup(localkey))
       
   729                 return "%s %s\n" % (1, r)
       
   730             except Exception as inst:
       
   731                 if repo.bundlestore.index.getbundle(localkey):
       
   732                     return "%s %s\n" % (1, localkey)
       
   733                 else:
       
   734                     r = str(inst)
       
   735                     return "%s %s\n" % (0, r)
       
   736     return _lookup
       
   737 
       
   738 def _decodebookmarks(stream):
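    # The stream carries a big-endian 4-byte length (struct format '>i')
    # followed by that many bytes of JSON mapping bookmark names to hex nodes.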
       
   739     sizeofjsonsize = struct.calcsize('>i')
       
   740     size = struct.unpack('>i', stream.read(sizeofjsonsize))[0]
       
   741     unicodedict = json.loads(stream.read(size))
       
    742     # the python json module always returns unicode strings. We need to convert

    743     # them back to byte strings
       
   744     result = {}
       
   745     for bookmark, node in unicodedict.iteritems():
       
   746         bookmark = bookmark.encode('ascii')
       
   747         node = node.encode('ascii')
       
   748         result[bookmark] = node
       
   749     return result
       
   750 
       
   751 def _update(orig, ui, repo, node=None, rev=None, **opts):
       
   752     if rev and node:
       
   753         raise error.Abort(_("please specify just one revision"))
       
   754 
       
   755     if not opts.get('date') and (rev or node) not in repo:
       
   756         mayberemote = rev or node
       
   757         mayberemote = _tryhoist(ui, mayberemote)
       
   758         dopull = False
       
   759         kwargs = {}
       
   760         if _scratchbranchmatcher(mayberemote):
       
   761             dopull = True
       
   762             kwargs['bookmark'] = [mayberemote]
       
   763         elif len(mayberemote) == 40 and _maybehash(mayberemote):
       
   764             dopull = True
       
   765             kwargs['rev'] = [mayberemote]
       
   766 
       
   767         if dopull:
       
   768             ui.warn(
       
   769                 _("'%s' does not exist locally - looking for it " +
       
   770                   "remotely...\n") % mayberemote)
       
   771             # Try pulling node from remote repo
       
   772             try:
       
   773                 cmdname = '^pull'
       
   774                 pullcmd = commands.table[cmdname][0]
       
   775                 pullopts = dict(opt[1:3] for opt in commands.table[cmdname][1])
       
   776                 pullopts.update(kwargs)
       
   777                 pullcmd(ui, repo, **pullopts)
       
   778             except Exception:
       
   779                 ui.warn(_('pull failed: %s\n') % sys.exc_info()[1])
       
   780             else:
       
   781                 ui.warn(_("'%s' found remotely\n") % mayberemote)
       
   782     return orig(ui, repo, node, rev, **opts)
       
   783 
       
   784 def _pull(orig, ui, repo, source="default", **opts):
       
   785     # Copy paste from `pull` command
       
   786     source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
       
   787 
       
   788     scratchbookmarks = {}
       
   789     unfi = repo.unfiltered()
       
   790     unknownnodes = []
       
   791     for rev in opts.get('rev', []):
       
   792         if rev not in unfi:
       
   793             unknownnodes.append(rev)
       
   794     if opts.get('bookmark'):
       
   795         bookmarks = []
       
   796         revs = opts.get('rev') or []
       
   797         for bookmark in opts.get('bookmark'):
       
   798             if _scratchbranchmatcher(bookmark):
       
   799                 # rev is not known yet
       
   800                 # it will be fetched with listkeyspatterns next
       
   801                 scratchbookmarks[bookmark] = 'REVTOFETCH'
       
   802             else:
       
   803                 bookmarks.append(bookmark)
       
   804 
       
   805         if scratchbookmarks:
       
   806             other = hg.peer(repo, opts, source)
       
   807             fetchedbookmarks = other.listkeyspatterns(
       
   808                 'bookmarks', patterns=scratchbookmarks)
       
   809             for bookmark in scratchbookmarks:
       
   810                 if bookmark not in fetchedbookmarks:
       
   811                     raise error.Abort('remote bookmark %s not found!' %
       
   812                                       bookmark)
       
   813                 scratchbookmarks[bookmark] = fetchedbookmarks[bookmark]
       
   814                 revs.append(fetchedbookmarks[bookmark])
       
   815         opts['bookmark'] = bookmarks
       
   816         opts['rev'] = revs
       
   817 
       
   818     try:
       
   819         inhibitmod = extensions.find('inhibit')
       
   820     except KeyError:
       
   821         # Ignore if inhibit is not enabled
       
   822         pass
       
   823     else:
       
    824         # Pulling revisions that were filtered results in an error.
       
   825         # Let's inhibit them
       
   826         unfi = repo.unfiltered()
       
   827         for rev in opts.get('rev', []):
       
   828             try:
       
   829                 repo[rev]
       
   830             except error.FilteredRepoLookupError:
       
   831                 node = unfi[rev].node()
       
   832                 inhibitmod.revive([repo.unfiltered()[node]])
       
   833             except error.RepoLookupError:
       
   834                 pass
       
   835 
       
   836     if scratchbookmarks or unknownnodes:
       
   837         # Set anyincoming to True
       
   838         extensions.wrapfunction(discovery, 'findcommonincoming',
       
   839                                 _findcommonincoming)
       
   840     try:
       
   841         # Remote scratch bookmarks will be deleted because remotenames doesn't
       
    842         # know about them. Let's save them before the pull and restore them after
       
   843         remotescratchbookmarks = _readscratchremotebookmarks(ui, repo, source)
       
   844         result = orig(ui, repo, source, **opts)
       
   845         # TODO(stash): race condition is possible
       
    846         # if scratch bookmarks were updated right after orig.
       
   847         # But that's unlikely and shouldn't be harmful.
       
   848         if common.isremotebooksenabled(ui):
       
   849             remotescratchbookmarks.update(scratchbookmarks)
       
   850             _saveremotebookmarks(repo, remotescratchbookmarks, source)
       
   851         else:
       
   852             _savelocalbookmarks(repo, scratchbookmarks)
       
   853         return result
       
   854     finally:
       
   855         if scratchbookmarks:
       
   856             extensions.unwrapfunction(discovery, 'findcommonincoming')
       
   857 
       
   858 def _readscratchremotebookmarks(ui, repo, other):
       
   859     if common.isremotebooksenabled(ui):
       
   860         remotenamesext = extensions.find('remotenames')
       
   861         remotepath = remotenamesext.activepath(repo.ui, other)
       
   862         result = {}
       
    863         # Let's refresh remotenames to make sure they are up to date

    864         # It seems that `repo.names['remotebookmarks']` may return stale bookmarks,

    865         # which results in deleting scratch bookmarks. Our best guess at how to

    866         # fix it is to use `clearnames()`
       
   867         repo._remotenames.clearnames()
       
   868         for remotebookmark in repo.names['remotebookmarks'].listnames(repo):
       
   869             path, bookname = remotenamesext.splitremotename(remotebookmark)
       
   870             if path == remotepath and _scratchbranchmatcher(bookname):
       
   871                 nodes = repo.names['remotebookmarks'].nodes(repo,
       
   872                                                             remotebookmark)
       
   873                 if nodes:
       
   874                     result[bookname] = hex(nodes[0])
       
   875         return result
       
   876     else:
       
   877         return {}
       
   878 
       
   879 def _saveremotebookmarks(repo, newbookmarks, remote):
       
   880     remotenamesext = extensions.find('remotenames')
       
   881     remotepath = remotenamesext.activepath(repo.ui, remote)
       
   882     branches = collections.defaultdict(list)
       
   883     bookmarks = {}
       
   884     remotenames = remotenamesext.readremotenames(repo)
       
   885     for hexnode, nametype, remote, rname in remotenames:
       
   886         if remote != remotepath:
       
   887             continue
       
   888         if nametype == 'bookmarks':
       
   889             if rname in newbookmarks:
       
    890                 # This can happen if we have a normal bookmark that matches the

    891                 # scratch branch pattern. In this case just use the current
       
   892                 # bookmark node
       
   893                 del newbookmarks[rname]
       
   894             bookmarks[rname] = hexnode
       
   895         elif nametype == 'branches':
       
   896             # saveremotenames expects 20 byte binary nodes for branches
       
   897             branches[rname].append(bin(hexnode))
       
   898 
       
   899     for bookmark, hexnode in newbookmarks.iteritems():
       
   900         bookmarks[bookmark] = hexnode
       
   901     remotenamesext.saveremotenames(repo, remotepath, branches, bookmarks)
       
   902 
       
   903 def _savelocalbookmarks(repo, bookmarks):
       
   904     if not bookmarks:
       
   905         return
       
   906     with repo.wlock(), repo.lock(), repo.transaction('bookmark') as tr:
       
   907         changes = []
       
   908         for scratchbook, node in bookmarks.iteritems():
       
   909             changectx = repo[node]
       
   910             changes.append((scratchbook, changectx.node()))
       
   911         repo._bookmarks.applychanges(repo, tr, changes)
       
   912 
       
   913 def _findcommonincoming(orig, *args, **kwargs):
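    # Pretend that something is incoming even when discovery finds nothing new:
    # scratch heads and bookmarks live only in the bundlestore, so normal
    # discovery against the server may not see them.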
       
   914     common, inc, remoteheads = orig(*args, **kwargs)
       
   915     return common, True, remoteheads
       
   916 
       
   917 def _push(orig, ui, repo, dest=None, *args, **opts):
       
   918     bookmark = opts.get('to') or ''
       
   919     create = opts.get('create') or False
       
   920 
       
   921     oldphasemove = None
       
   922     overrides = {(experimental, configbookmark): bookmark,
       
   923                  (experimental, configcreate): create}
       
   924 
       
   925     with ui.configoverride(overrides, 'infinitepush'):
       
   926         scratchpush = opts.get('bundle_store')
       
   927         if _scratchbranchmatcher(bookmark):
       
    928             # Hack to fix interaction with remotenames. Remotenames pushes the

    929             # '--to' bookmark to the server, but we don't want to push a scratch

    930             # bookmark to the server. Let's delete '--to' and '--create' and

    931             # also set allow_anon to True (because if --to is not set,

    932             # remotenames will think that we are pushing an anonymous head)
       
   933             if 'to' in opts:
       
   934                 del opts['to']
       
   935             if 'create' in opts:
       
   936                 del opts['create']
       
   937             opts['allow_anon'] = True
       
   938             scratchpush = True
       
   939             # bundle2 can be sent back after push (for example, bundle2
       
   940             # containing `pushkey` part to update bookmarks)
       
   941             ui.setconfig(experimental, 'bundle2.pushback', True)
       
   942 
       
   943         ui.setconfig(experimental, confignonforwardmove,
       
   944                      opts.get('non_forward_move'), '--non-forward-move')
       
   945         if scratchpush:
       
   946             ui.setconfig(experimental, configscratchpush, True)
       
   947             oldphasemove = extensions.wrapfunction(exchange,
       
   948                                                    '_localphasemove',
       
   949                                                    _phasemove)
       
   950         # Copy-paste from `push` command
       
   951         path = ui.paths.getpath(dest, default=('default-push', 'default'))
       
   952         if not path:
       
   953             raise error.Abort(_('default repository not configured!'),
       
   954                              hint=_("see 'hg help config.paths'"))
       
   955         destpath = path.pushloc or path.loc
       
   956         if destpath.startswith('svn+') and scratchpush:
       
   957             raise error.Abort('infinite push does not work with svn repo',
       
   958                               hint='did you forget to `hg push default`?')
       
   959         # Remote scratch bookmarks will be deleted because remotenames doesn't
       
    960         # know about them. Let's save them before the push and restore them after
       
   961         remotescratchbookmarks = _readscratchremotebookmarks(ui, repo, destpath)
       
   962         result = orig(ui, repo, dest, *args, **opts)
       
   963         if common.isremotebooksenabled(ui):
       
   964             if bookmark and scratchpush:
       
   965                 other = hg.peer(repo, opts, destpath)
       
   966                 fetchedbookmarks = other.listkeyspatterns('bookmarks',
       
   967                                                           patterns=[bookmark])
       
   968                 remotescratchbookmarks.update(fetchedbookmarks)
       
   969             _saveremotebookmarks(repo, remotescratchbookmarks, destpath)
       
   970     if oldphasemove:
       
   971         exchange._localphasemove = oldphasemove
       
   972     return result
       
   973 
       
   974 def _deleteinfinitepushbookmarks(ui, repo, path, names):
       
   975     """Prune remote names by removing the bookmarks we don't want anymore,
       
   976     then writing the result back to disk
       
   977     """
       
   978     remotenamesext = extensions.find('remotenames')
       
   979 
       
   980     # remotename format is:
       
   981     # (node, nametype ("branches" or "bookmarks"), remote, name)
       
   982     nametype_idx = 1
       
   983     remote_idx = 2
       
   984     name_idx = 3
       
   985     remotenames = [remotename for remotename in \
       
   986                    remotenamesext.readremotenames(repo) \
       
   987                    if remotename[remote_idx] == path]
       
   988     remote_bm_names = [remotename[name_idx] for remotename in \
       
   989                        remotenames if remotename[nametype_idx] == "bookmarks"]
       
   990 
       
   991     for name in names:
       
   992         if name not in remote_bm_names:
       
   993             raise error.Abort(_("infinitepush bookmark '{}' does not exist "
       
   994                                 "in path '{}'").format(name, path))
       
   995 
       
   996     bookmarks = {}
       
   997     branches = collections.defaultdict(list)
       
   998     for node, nametype, remote, name in remotenames:
       
   999         if nametype == "bookmarks" and name not in names:
       
  1000             bookmarks[name] = node
       
  1001         elif nametype == "branches":
       
  1002             # saveremotenames wants binary nodes for branches
       
  1003             branches[name].append(bin(node))
       
  1004 
       
  1005     remotenamesext.saveremotenames(repo, path, branches, bookmarks)
       
  1006 
       
  1007 def _phasemove(orig, pushop, nodes, phase=phases.public):
       
  1008     """prevent commits from being marked public
       
  1009 
       
  1010     Since these are going to a scratch branch, they aren't really being
       
  1011     published."""
       
  1012 
       
  1013     if phase != phases.public:
       
  1014         orig(pushop, nodes, phase)
       
  1015 
       
  1016 @exchange.b2partsgenerator(scratchbranchparttype)
       
  1017 def partgen(pushop, bundler):
       
  1018     bookmark = pushop.ui.config(experimental, configbookmark)
       
  1019     create = pushop.ui.configbool(experimental, configcreate)
       
  1020     scratchpush = pushop.ui.configbool(experimental, configscratchpush)
       
  1021     if 'changesets' in pushop.stepsdone or not scratchpush:
       
  1022         return
       
  1023 
       
  1024     if scratchbranchparttype not in bundle2.bundle2caps(pushop.remote):
       
  1025         return
       
  1026 
       
  1027     pushop.stepsdone.add('changesets')
       
  1028     pushop.stepsdone.add('treepack')
       
  1029     if not pushop.outgoing.missing:
       
  1030         pushop.ui.status(_('no changes found\n'))
       
  1031         pushop.cgresult = 0
       
  1032         return
       
  1033 
       
   1034     # This parameter tells the server that the following bundle is an

   1035     # infinitepush bundle. This lets it switch the part processing to our

   1036     # infinitepush code path.
       
  1037     bundler.addparam("infinitepush", "True")
       
  1038 
       
  1039     nonforwardmove = pushop.force or pushop.ui.configbool(experimental,
       
  1040                                                           confignonforwardmove)
       
  1041     scratchparts = bundleparts.getscratchbranchparts(pushop.repo,
       
  1042                                                      pushop.remote,
       
  1043                                                      pushop.outgoing,
       
  1044                                                      nonforwardmove,
       
  1045                                                      pushop.ui,
       
  1046                                                      bookmark,
       
  1047                                                      create)
       
  1048 
       
  1049     for scratchpart in scratchparts:
       
  1050         bundler.addpart(scratchpart)
       
  1051 
       
  1052     def handlereply(op):
       
  1053         # server either succeeds or aborts; no code to read
       
  1054         pushop.cgresult = 1
       
  1055 
       
  1056     return handlereply
       
  1057 
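# Advertise support for the scratch branch and scratch bookmark part types so
# that pushing clients generate them (partgen above checks this capability).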
       
  1058 bundle2.capabilities[bundleparts.scratchbranchparttype] = ()
       
  1059 bundle2.capabilities[bundleparts.scratchbookmarksparttype] = ()
       
  1060 
       
  1061 def _getrevs(bundle, oldnode, force, bookmark):
       
  1062     'extracts and validates the revs to be imported'
       
  1063     revs = [bundle[r] for r in bundle.revs('sort(bundle())')]
       
  1064 
       
  1065     # new bookmark
       
  1066     if oldnode is None:
       
  1067         return revs
       
  1068 
       
  1069     # Fast forward update
       
  1070     if oldnode in bundle and list(bundle.set('bundle() & %s::', oldnode)):
       
  1071         return revs
       
  1072 
       
  1073     # Forced non-fast forward update
       
  1074     if force:
       
  1075         return revs
       
  1076     else:
       
  1077         raise error.Abort(_('non-forward push'),
       
  1078                           hint=_('use --non-forward-move to override'))
       
  1079 
       
  1080 @contextlib.contextmanager
       
  1081 def logservicecall(logger, service, **kwargs):
       
  1082     start = time.time()
       
  1083     logger(service, eventtype='start', **kwargs)
       
  1084     try:
       
  1085         yield
       
  1086         logger(service, eventtype='success',
       
  1087                elapsedms=(time.time() - start) * 1000, **kwargs)
       
  1088     except Exception as e:
       
  1089         logger(service, eventtype='failure',
       
  1090                elapsedms=(time.time() - start) * 1000, errormsg=str(e),
       
  1091                **kwargs)
       
  1092         raise
       
  1093 
       
  1094 def _getorcreateinfinitepushlogger(op):
       
  1095     logger = op.records['infinitepushlogger']
       
  1096     if not logger:
       
  1097         ui = op.repo.ui
       
  1098         try:
       
  1099             username = util.getuser()
       
  1100         except Exception:
       
  1101             username = 'unknown'
       
  1102         # Generate random request id to be able to find all logged entries
       
  1103         # for the same request. Since requestid is pseudo-generated it may
       
  1104         # not be unique, but we assume that (hostname, username, requestid)
       
  1105         # is unique.
       
  1106         random.seed()
       
  1107         requestid = random.randint(0, 2000000000)
       
  1108         hostname = socket.gethostname()
       
  1109         logger = functools.partial(ui.log, 'infinitepush', user=username,
       
  1110                                    requestid=requestid, hostname=hostname,
       
  1111                                    reponame=ui.config('infinitepush',
       
  1112                                                       'reponame'))
       
  1113         op.records.add('infinitepushlogger', logger)
       
  1114     else:
       
  1115         logger = logger[0]
       
  1116     return logger
       
  1117 
       
  1118 def processparts(orig, repo, op, unbundler):
       
  1119     if unbundler.params.get('infinitepush') != 'True':
       
  1120         return orig(repo, op, unbundler)
       
  1121 
       
  1122     handleallparts = repo.ui.configbool('infinitepush', 'storeallparts')
       
  1123 
       
  1124     partforwardingwhitelist = []
       
  1125     try:
       
  1126         treemfmod = extensions.find('treemanifest')
       
  1127         partforwardingwhitelist.append(treemfmod.TREEGROUP_PARTTYPE2)
       
  1128     except KeyError:
       
  1129         pass
       
  1130 
       
  1131     bundler = bundle2.bundle20(repo.ui)
       
  1132     cgparams = None
       
  1133     scratchbookpart = None
       
  1134     with bundle2.partiterator(repo, op, unbundler) as parts:
       
  1135         for part in parts:
       
  1136             bundlepart = None
       
  1137             if part.type == 'replycaps':
       
  1138                 # This configures the current operation to allow reply parts.
       
  1139                 bundle2._processpart(op, part)
       
  1140             elif part.type == bundleparts.scratchbranchparttype:
       
  1141                 # Scratch branch parts need to be converted to normal
       
  1142                 # changegroup parts, and the extra parameters stored for later
       
  1143                 # when we upload to the store. Eventually those parameters will
       
  1144                 # be put on the actual bundle instead of this part, then we can
       
  1145                 # send a vanilla changegroup instead of the scratchbranch part.
       
  1146                 cgversion = part.params.get('cgversion', '01')
       
  1147                 bundlepart = bundle2.bundlepart('changegroup', data=part.read())
       
  1148                 bundlepart.addparam('version', cgversion)
       
  1149                 cgparams = part.params
       
  1150 
       
  1151                 # If we're not dumping all parts into the new bundle, we need to
       
  1152                 # alert the future pushkey and phase-heads handler to skip
       
  1153                 # the part.
       
  1154                 if not handleallparts:
       
  1155                     op.records.add(scratchbranchparttype + '_skippushkey', True)
       
  1156                     op.records.add(scratchbranchparttype + '_skipphaseheads',
       
  1157                                    True)
       
  1158             elif part.type == bundleparts.scratchbookmarksparttype:
       
  1159                 # Save this for later processing. Details below.
       
  1160                 #
       
  1161                 # Upstream https://phab.mercurial-scm.org/D1389 and its
       
  1162                 # follow-ups stop part.seek support to reduce memory usage
       
  1163                 # (https://bz.mercurial-scm.org/5691). So we need to copy
       
  1164                 # the part so it can be consumed later.
       
  1165                 scratchbookpart = bundleparts.copiedpart(part)
       
  1166             else:
       
  1167                 if handleallparts or part.type in partforwardingwhitelist:
       
  1168                     # Ideally we would not process any parts, and instead just
       
  1169                     # forward them to the bundle for storage, but since this
       
  1170                     # differs from previous behavior, we need to put it behind a
       
  1171                     # config flag for incremental rollout.
       
  1172                     bundlepart = bundle2.bundlepart(part.type, data=part.read())
       
  1173                     for key, value in part.params.iteritems():
       
  1174                         bundlepart.addparam(key, value)
       
  1175 
       
  1176                     # Certain parts require a response
       
  1177                     if part.type == 'pushkey':
       
  1178                         if op.reply is not None:
       
  1179                             rpart = op.reply.newpart('reply:pushkey')
       
  1180                             rpart.addparam('in-reply-to', str(part.id),
       
  1181                                            mandatory=False)
       
  1182                             rpart.addparam('return', '1', mandatory=False)
       
  1183                 else:
       
  1184                     bundle2._processpart(op, part)
       
  1185 
       
  1186             if handleallparts:
       
  1187                 op.records.add(part.type, {
       
  1188                     'return': 1,
       
  1189                 })
       
  1190             if bundlepart:
       
  1191                 bundler.addpart(bundlepart)
       
  1192 
       
  1193     # If commits were sent, store them
       
  1194     if cgparams:
       
  1195         buf = util.chunkbuffer(bundler.getchunks())
       
  1196         fd, bundlefile = tempfile.mkstemp()
       
  1197         try:
       
  1198             try:
       
  1199                 fp = os.fdopen(fd, 'wb')
       
  1200                 fp.write(buf.read())
       
  1201             finally:
       
  1202                 fp.close()
       
  1203             storebundle(op, cgparams, bundlefile)
       
  1204         finally:
       
  1205             try:
       
  1206                 os.unlink(bundlefile)
       
  1207             except Exception:
       
  1208                 # we would rather see the original exception
       
  1209                 pass
       
  1210 
       
  1211     # The scratch bookmark part is sent as part of a push backup. It needs to be
       
  1212     # processed after the main bundle has been stored, so that any commits it
       
  1213     # references are available in the store.
       
  1214     if scratchbookpart:
       
  1215         bundle2._processpart(op, scratchbookpart)
       
  1216 
       
  1217 def storebundle(op, params, bundlefile):
       
  1218     log = _getorcreateinfinitepushlogger(op)
       
  1219     parthandlerstart = time.time()
       
  1220     log(scratchbranchparttype, eventtype='start')
       
  1221     index = op.repo.bundlestore.index
       
  1222     store = op.repo.bundlestore.store
       
  1223     op.records.add(scratchbranchparttype + '_skippushkey', True)
       
  1224 
       
  1225     bundle = None
       
  1226     try:  # guards bundle
       
  1227         bundlepath = "bundle:%s+%s" % (op.repo.root, bundlefile)
       
  1228         bundle = hg.repository(op.repo.ui, bundlepath)
       
  1229 
       
  1230         bookmark = params.get('bookmark')
       
  1231         bookprevnode = params.get('bookprevnode', '')
       
  1232         create = params.get('create')
       
  1233         force = params.get('force')
       
  1234 
       
  1235         if bookmark:
       
  1236             oldnode = index.getnode(bookmark)
       
  1237 
       
  1238             if not oldnode and not create:
       
  1239                 raise error.Abort("unknown bookmark %s" % bookmark,
       
  1240                                   hint="use --create if you want to create one")
       
  1241         else:
       
  1242             oldnode = None
       
  1243         bundleheads = bundle.revs('heads(bundle())')
       
  1244         if bookmark and len(bundleheads) > 1:
       
  1245             raise error.Abort(
       
  1246                 _('cannot push more than one head to a scratch branch'))
       
  1247 
       
  1248         revs = _getrevs(bundle, oldnode, force, bookmark)
       
  1249 
       
  1250         # Notify the user of what is being pushed
       
  1251         plural = 's' if len(revs) > 1 else ''
       
  1252         op.repo.ui.warn(_("pushing %s commit%s:\n") % (len(revs), plural))
       
  1253         maxoutput = 10
       
  1254         for i in range(0, min(len(revs), maxoutput)):
       
  1255             firstline = bundle[revs[i]].description().split('\n')[0][:50]
       
  1256             op.repo.ui.warn(("    %s  %s\n") % (revs[i], firstline))
       
  1257 
       
  1258         if len(revs) > maxoutput + 1:
       
  1259             op.repo.ui.warn(("    ...\n"))
       
  1260             firstline = bundle[revs[-1]].description().split('\n')[0][:50]
       
  1261             op.repo.ui.warn(("    %s  %s\n") % (revs[-1], firstline))
       
  1262 
       
  1263         nodesctx = [bundle[rev] for rev in revs]
       
  1264         inindex = lambda rev: bool(index.getbundle(bundle[rev].hex()))
       
  1265         if bundleheads:
       
  1266             newheadscount = sum(not inindex(rev) for rev in bundleheads)
       
  1267         else:
       
  1268             newheadscount = 0
       
  1269         # If there's a bookmark specified, there should be only one head,
       
  1270         # so we choose the last node, which will be that head.
       
  1271         # If a bug or malicious client allows there to be a bookmark
       
  1272         # with multiple heads, we will place the bookmark on the last head.
       
  1273         bookmarknode = nodesctx[-1].hex() if nodesctx else None
       
  1274         key = None
       
  1275         if newheadscount:
       
  1276             with open(bundlefile, 'r') as f:
       
  1277                 bundledata = f.read()
       
  1278                 with logservicecall(log, 'bundlestore',
       
  1279                                     bundlesize=len(bundledata)):
       
  1280                     bundlesizelimit = 100 * 1024 * 1024  # 100 MB
       
  1281                     if len(bundledata) > bundlesizelimit:
       
  1282                         error_msg = ('bundle is too big: %d bytes. ' +
       
  1283                                      'max allowed size is 100 MB')
       
  1284                         raise error.Abort(error_msg % (len(bundledata),))
       
  1285                     key = store.write(bundledata)
       
  1286 
       
  1287         with logservicecall(log, 'index', newheadscount=newheadscount), index:
       
  1288             if key:
       
  1289                 index.addbundle(key, nodesctx)
       
  1290             if bookmark:
       
  1291                 index.addbookmark(bookmark, bookmarknode)
       
  1292                 _maybeaddpushbackpart(op, bookmark, bookmarknode,
       
  1293                                       bookprevnode, params)
       
  1294         log(scratchbranchparttype, eventtype='success',
       
  1295             elapsedms=(time.time() - parthandlerstart) * 1000)
       
  1296 
       
  1297         fillmetadatabranchpattern = op.repo.ui.config(
       
  1298             'infinitepush', 'fillmetadatabranchpattern', '')
       
  1299         if bookmark and fillmetadatabranchpattern:
       
  1300             __, __, matcher = util.stringmatcher(fillmetadatabranchpattern)
       
  1301             if matcher(bookmark):
       
  1302                 _asyncsavemetadata(op.repo.root,
       
  1303                                    [ctx.hex() for ctx in nodesctx])
       
  1304     except Exception as e:
       
  1305         log(scratchbranchparttype, eventtype='failure',
       
  1306             elapsedms=(time.time() - parthandlerstart) * 1000,
       
  1307             errormsg=str(e))
       
  1308         raise
       
  1309     finally:
       
  1310         if bundle:
       
  1311             bundle.close()
       
  1312 
       
  1313 @bundle2.b2streamparamhandler('infinitepush')
       
  1314 def processinfinitepush(unbundler, param, value):
       
  1315     """ process the bundle2 stream level parameter containing whether this push
       
  1316     is an infinitepush or not. """
       
  1317     if value and unbundler.ui.configbool('infinitepush',
       
  1318                                          'bundle-stream', False):
       
  1319         pass
       
  1320 
       
  1321 @bundle2.parthandler(scratchbranchparttype,
       
  1322                      ('bookmark', 'bookprevnode' 'create', 'force',
       
  1323                       'pushbackbookmarks', 'cgversion'))
       
  1324 def bundle2scratchbranch(op, part):
       
  1325     '''unbundle a bundle2 part containing a changegroup to store'''
       
  1326 
       
  1327     bundler = bundle2.bundle20(op.repo.ui)
       
  1328     cgversion = part.params.get('cgversion', '01')
       
  1329     cgpart = bundle2.bundlepart('changegroup', data=part.read())
       
  1330     cgpart.addparam('version', cgversion)
       
  1331     bundler.addpart(cgpart)
       
  1332     buf = util.chunkbuffer(bundler.getchunks())
       
  1333 
       
  1334     fd, bundlefile = tempfile.mkstemp()
       
  1335     try:
       
  1336         try:
       
  1337             fp = os.fdopen(fd, 'wb')
       
  1338             fp.write(buf.read())
       
  1339         finally:
       
  1340             fp.close()
       
  1341         storebundle(op, part.params, bundlefile)
       
  1342     finally:
       
  1343         try:
       
  1344             os.unlink(bundlefile)
       
  1345         except OSError as e:
       
  1346             if e.errno != errno.ENOENT:
       
  1347                 raise
       
  1348 
       
  1349     return 1
       
  1350 
       
  1351 @bundle2.parthandler(bundleparts.scratchbookmarksparttype)
       
  1352 def bundle2scratchbookmarks(op, part):
       
  1353     '''Handler deletes bookmarks first then adds new bookmarks.
       
  1354     '''
       
  1355     index = op.repo.bundlestore.index
       
  1356     decodedbookmarks = _decodebookmarks(part)
       
  1357     toinsert = {}
       
  1358     todelete = []
       
  1359     for bookmark, node in decodedbookmarks.iteritems():
       
  1360         if node:
       
  1361             toinsert[bookmark] = node
       
  1362         else:
       
  1363             todelete.append(bookmark)
       
  1364     log = _getorcreateinfinitepushlogger(op)
       
  1365     with logservicecall(log, bundleparts.scratchbookmarksparttype), index:
       
  1366         if todelete:
       
  1367             index.deletebookmarks(todelete)
       
  1368         if toinsert:
       
  1369             index.addmanybookmarks(toinsert)
       
  1370 
       
  1371 def _maybeaddpushbackpart(op, bookmark, newnode, oldnode, params):
       
  1372     if params.get('pushbackbookmarks'):
       
  1373         if op.reply and 'pushback' in op.reply.capabilities:
       
  1374             params = {
       
  1375                 'namespace': 'bookmarks',
       
  1376                 'key': bookmark,
       
  1377                 'new': newnode,
       
  1378                 'old': oldnode,
       
  1379             }
       
  1380             op.reply.newpart('pushkey', mandatoryparams=params.iteritems())
       
  1381 
       
  1382 def bundle2pushkey(orig, op, part):
       
  1383     '''Wrapper of bundle2.handlepushkey()
       
  1384 
       
  1385     The only goal is to skip calling the original function if flag is set.
       
  1386     It's set if infinitepush push is happening.
       
  1387     '''
       
  1388     if op.records[scratchbranchparttype + '_skippushkey']:
       
  1389         if op.reply is not None:
       
  1390             rpart = op.reply.newpart('reply:pushkey')
       
  1391             rpart.addparam('in-reply-to', str(part.id), mandatory=False)
       
  1392             rpart.addparam('return', '1', mandatory=False)
       
  1393         return 1
       
  1394 
       
  1395     return orig(op, part)
       
  1396 
       
  1397 def bundle2handlephases(orig, op, part):
       
  1398     '''Wrapper of bundle2.handlephases()
       
  1399 
       
  1400     The only goal is to skip calling the original function if flag is set.
       
  1401     It's set if infinitepush push is happening.
       
  1402     '''
       
  1403 
       
  1404     if op.records[scratchbranchparttype + '_skipphaseheads']:
       
  1405         return
       
  1406 
       
  1407     return orig(op, part)
       
  1408 
       
  1409 def _asyncsavemetadata(root, nodes):
       
  1410     '''starts a separate process that fills metadata for the nodes
       
  1411 
       
  1412     This function creates a separate process and doesn't wait for it's
       
  1413     completion. This was done to avoid slowing down pushes
       
  1414     '''
       
  1415 
       
  1416     maxnodes = 50
       
  1417     if len(nodes) > maxnodes:
       
  1418         return
       
  1419     nodesargs = []
       
  1420     for node in nodes:
       
  1421         nodesargs.append('--node')
       
  1422         nodesargs.append(node)
       
  1423     with open(os.devnull, 'w+b') as devnull:
       
  1424         cmdline = [util.hgexecutable(), 'debugfillinfinitepushmetadata',
       
  1425                    '-R', root] + nodesargs
       
  1426         # Process will run in background. We don't care about the return code
       
  1427         subprocess.Popen(cmdline, close_fds=True, shell=False,
       
  1428                          stdin=devnull, stdout=devnull, stderr=devnull)