mercurial/discovery.py
changeset 43076 2372284d9457
parent 43049 6e8582ccf76d
child 43077 687b865b95ad
comparison legend: equal / deleted / inserted / replaced
comparing 43075:57875cf423c9 with 43076:2372284d9457
    25     setdiscovery,
    25     setdiscovery,
    26     treediscovery,
    26     treediscovery,
    27     util,
    27     util,
    28 )
    28 )
    29 
    29 
       
    30 
    30 def findcommonincoming(repo, remote, heads=None, force=False, ancestorsof=None):
    31 def findcommonincoming(repo, remote, heads=None, force=False, ancestorsof=None):
    31     """Return a tuple (common, anyincoming, heads) used to identify the common
    32     """Return a tuple (common, anyincoming, heads) used to identify the common
    32     subset of nodes between repo and remote.
    33     subset of nodes between repo and remote.
    33 
    34 
    34     "common" is a list of (at least) the heads of the common subset.
    35     "common" is a list of (at least) the heads of the common subset.
    51 
    52 
    52     if not remote.capable('getbundle'):
    53     if not remote.capable('getbundle'):
    53         return treediscovery.findcommonincoming(repo, remote, heads, force)
    54         return treediscovery.findcommonincoming(repo, remote, heads, force)
    54 
    55 
    55     if heads:
    56     if heads:
    56         knownnode = repo.changelog.hasnode # no nodemap until it is filtered
    57         knownnode = repo.changelog.hasnode  # no nodemap until it is filtered
    57         if all(knownnode(h) for h in heads):
    58         if all(knownnode(h) for h in heads):
    58             return (heads, False, heads)
    59             return (heads, False, heads)
    59 
    60 
    60     res = setdiscovery.findcommonheads(repo.ui, repo, remote,
    61                                        abortwhenunrelated=not force,
    62                                        ancestorsof=ancestorsof)
    61     res = setdiscovery.findcommonheads(
    62         repo.ui,
    63         repo,
    64         remote,
    65         abortwhenunrelated=not force,
    66         ancestorsof=ancestorsof,
    67     )
    63     common, anyinc, srvheads = res
    68     common, anyinc, srvheads = res
    64     return (list(common), anyinc, heads or list(srvheads))
    69     return (list(common), anyinc, heads or list(srvheads))
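For reference, a minimal sketch of how a caller might consume the (common, anyincoming, heads) tuple documented above; 'repo' and 'peer' are assumed names for a local repository and a remote peer object, not part of this change.

# Illustrative sketch only; 'repo' and 'peer' are assumed objects.
common, anyincoming, rheads = findcommonincoming(repo, peer, force=False)
if anyincoming:
    # 'common' holds (at least) the heads of the subset shared with the remote
    repo.ui.status('remote has changesets not present locally\n')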
       
    70 
    65 
    71 
    66 class outgoing(object):
    72 class outgoing(object):
    67     '''Represents the set of nodes present in a local repo but not in a
    73     '''Represents the set of nodes present in a local repo but not in a
    68     (possibly) remote one.
    74     (possibly) remote one.
    69 
    75 
    76       commonheads is the list of heads of common.
    82       commonheads is the list of heads of common.
    77 
    83 
    78     The sets are computed on demand from the heads, unless provided upfront
    84     The sets are computed on demand from the heads, unless provided upfront
    79     by discovery.'''
    85     by discovery.'''
    80 
    86 
    81     def __init__(self, repo, commonheads=None, missingheads=None,
    82                  missingroots=None):
    87     def __init__(
    88         self, repo, commonheads=None, missingheads=None, missingroots=None
    89     ):
    83         # at least one of them must not be set
    90         # at least one of them must not be set
    84         assert None in (commonheads, missingroots)
    91         assert None in (commonheads, missingroots)
    85         cl = repo.changelog
    92         cl = repo.changelog
    86         if missingheads is None:
    93         if missingheads is None:
    87             missingheads = cl.heads()
    94             missingheads = cl.heads()
   104         self._common = None
   111         self._common = None
   105         self._missing = None
   112         self._missing = None
   106         self.excluded = []
   113         self.excluded = []
   107 
   114 
   108     def _computecommonmissing(self):
   115     def _computecommonmissing(self):
   109         sets = self._revlog.findcommonmissing(self.commonheads,
   110                                               self.missingheads)
   116         sets = self._revlog.findcommonmissing(
   117             self.commonheads, self.missingheads
   118         )
   111         self._common, self._missing = sets
   119         self._common, self._missing = sets
   112 
   120 
   113     @util.propertycache
   121     @util.propertycache
   114     def common(self):
   122     def common(self):
   115         if self._common is None:
   123         if self._common is None:
   120     def missing(self):
   128     def missing(self):
   121         if self._missing is None:
   129         if self._missing is None:
   122             self._computecommonmissing()
   130             self._computecommonmissing()
   123         return self._missing
   131         return self._missing
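A small, hedged illustration of the lazy behaviour the class docstring describes: an outgoing object built from commonheads only computes its missing set on first access ('commonheads' is assumed to come from a prior discovery run).

# Illustrative sketch only; 'repo' and 'commonheads' are assumed objects.
og = outgoing(repo, commonheads=commonheads, missingheads=None)
nodes_to_push = og.missing  # computed on demand via _computecommonmissing()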
   124 
   132 
   125 def findcommonoutgoing(repo, other, onlyheads=None, force=False,
   126                        commoninc=None, portable=False):
   133 
   134 def findcommonoutgoing(
   135     repo, other, onlyheads=None, force=False, commoninc=None, portable=False
   136 ):
   127     '''Return an outgoing instance to identify the nodes present in repo but
   137     '''Return an outgoing instance to identify the nodes present in repo but
   128     not in other.
   138     not in other.
   129 
   139 
   130     If onlyheads is given, only nodes ancestral to nodes in onlyheads
   140     If onlyheads is given, only nodes ancestral to nodes in onlyheads
   131     (inclusive) are included. If you already know the local repo's heads,
   141     (inclusive) are included. If you already know the local repo's heads,
   139     # declare an empty outgoing object to be filled later
   149     # declare an empty outgoing object to be filled later
   140     og = outgoing(repo, None, None)
   150     og = outgoing(repo, None, None)
   141 
   151 
   142     # get common set if not provided
   152     # get common set if not provided
   143     if commoninc is None:
   153     if commoninc is None:
   144         commoninc = findcommonincoming(repo, other, force=force,
   145                                        ancestorsof=onlyheads)
   154         commoninc = findcommonincoming(
   155             repo, other, force=force, ancestorsof=onlyheads
   156         )
   146     og.commonheads, _any, _hds = commoninc
   157     og.commonheads, _any, _hds = commoninc
   147 
   158 
   148     # compute outgoing
   159     # compute outgoing
   149     mayexclude = (repo._phasecache.phaseroots[phases.secret] or repo.obsstore)
   160     mayexclude = repo._phasecache.phaseroots[phases.secret] or repo.obsstore
   150     if not mayexclude:
   161     if not mayexclude:
   151         og.missingheads = onlyheads or repo.heads()
   162         og.missingheads = onlyheads or repo.heads()
   152     elif onlyheads is None:
   163     elif onlyheads is None:
   153         # use visible heads as it should be cached
   164         # use visible heads as it should be cached
   154         og.missingheads = repo.filtered("served").heads()
   165         og.missingheads = repo.filtered("served").heads()
   165                 excluded.append(node)
   176                 excluded.append(node)
   166             else:
   177             else:
   167                 missing.append(node)
   178                 missing.append(node)
   168         if len(missing) == len(allmissing):
   179         if len(missing) == len(allmissing):
   169             missingheads = onlyheads
   180             missingheads = onlyheads
   170         else: # update missing heads
   181         else:  # update missing heads
   171             missingheads = phases.newheads(repo, onlyheads, excluded)
   182             missingheads = phases.newheads(repo, onlyheads, excluded)
   172         og.missingheads = missingheads
   183         og.missingheads = missingheads
   173     if portable:
   184     if portable:
   174         # recompute common and missingheads as if -r<rev> had been given for
   185         # recompute common and missingheads as if -r<rev> had been given for
   175         # each head of missing, and --base <rev> for each head of the proper
   186         # each head of missing, and --base <rev> for each head of the proper
   180         og._common = set(cl.ancestors(missingrevs)) - missingrevs
   191         og._common = set(cl.ancestors(missingrevs)) - missingrevs
   181         commonheads = set(og.commonheads)
   192         commonheads = set(og.commonheads)
   182         og.missingheads = [h for h in og.missingheads if h not in commonheads]
   193         og.missingheads = [h for h in og.missingheads if h not in commonheads]
   183 
   194 
   184     return og
   195     return og
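A hedged sketch of the usual call pattern for findcommonoutgoing(), reusing an already computed commoninc tuple so discovery against the remote is not run twice ('peer' is an assumed remote peer object).

# Illustrative sketch only; 'repo' and 'peer' are assumed objects.
commoninc = findcommonincoming(repo, peer, force=False)
og = findcommonoutgoing(repo, peer, commoninc=commoninc)
for n in og.missing:
    repo.ui.debug('outgoing: %s\n' % short(n))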
       
   196 
   185 
   197 
   186 def _headssummary(pushop):
   198 def _headssummary(pushop):
   187     """compute a summary of branch and heads status before and after push
   199     """compute a summary of branch and heads status before and after push
   188 
   200 
   189     return {'branch': ([remoteheads], [newheads],
   201     return {'branch': ([remoteheads], [newheads],
   210         branches.add(ctx.branch())
   222         branches.add(ctx.branch())
   211 
   223 
   212     with remote.commandexecutor() as e:
   224     with remote.commandexecutor() as e:
   213         remotemap = e.callcommand('branchmap', {}).result()
   225         remotemap = e.callcommand('branchmap', {}).result()
   214 
   226 
   215     knownnode = cl.hasnode # do not use nodemap until it is filtered
   227     knownnode = cl.hasnode  # do not use nodemap until it is filtered
   216     # A. register remote heads of branches which are in outgoing set
   228     # A. register remote heads of branches which are in outgoing set
   217     for branch, heads in remotemap.iteritems():
   229     for branch, heads in remotemap.iteritems():
   218         # don't add head info about branches which we don't have locally
   230         # don't add head info about branches which we don't have locally
   219         if branch not in branches:
   231         if branch not in branches:
   220             continue
   232             continue
   232         if branch not in headssum:
   244         if branch not in headssum:
   233             headssum[branch] = (None, [], [])
   245             headssum[branch] = (None, [], [])
   234 
   246 
   235     # C. Update newmap with outgoing changes.
   247     # C. Update newmap with outgoing changes.
   236     # This will possibly add new heads and remove existing ones.
   248     # This will possibly add new heads and remove existing ones.
   237     newmap = branchmap.remotebranchcache((branch, heads[1])
   238                                  for branch, heads in headssum.iteritems()
   239                                  if heads[0] is not None)
   249     newmap = branchmap.remotebranchcache(
   250         (branch, heads[1])
   251         for branch, heads in headssum.iteritems()
   252         if heads[0] is not None
   253     )
   240     newmap.update(repo, (ctx.rev() for ctx in missingctx))
   254     newmap.update(repo, (ctx.rev() for ctx in missingctx))
   241     for branch, newheads in newmap.iteritems():
   255     for branch, newheads in newmap.iteritems():
   242         headssum[branch][1][:] = newheads
   256         headssum[branch][1][:] = newheads
   243     for branch, items in headssum.iteritems():
   257     for branch, items in headssum.iteritems():
   244         for l in items:
   258         for l in items:
   253         futureheads |= set(torev(h) for h in outgoing.commonheads)
   267         futureheads |= set(torev(h) for h in outgoing.commonheads)
   254         allfuturecommon = repo.changelog.ancestors(futureheads, inclusive=True)
   268         allfuturecommon = repo.changelog.ancestors(futureheads, inclusive=True)
   255         for branch, heads in sorted(headssum.iteritems()):
   269         for branch, heads in sorted(headssum.iteritems()):
   256             remoteheads, newheads, unsyncedheads, placeholder = heads
   270             remoteheads, newheads, unsyncedheads, placeholder = heads
   257             result = _postprocessobsolete(pushop, allfuturecommon, newheads)
   271             result = _postprocessobsolete(pushop, allfuturecommon, newheads)
   258             headssum[branch] = (remoteheads, sorted(result[0]), unsyncedheads,
   259                                 sorted(result[1]))
   272             headssum[branch] = (
   273                 remoteheads,
   274                 sorted(result[0]),
   275                 unsyncedheads,
   276                 sorted(result[1]),
   277             )
   260     return headssum
   278     return headssum
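The returned mapping is keyed by branch name; a hedged sketch of walking it, following the four-element tuple layout unpacked in the loop above ('headssum' and 'repo' are assumed objects).

# Illustrative sketch only; 'headssum' is assumed to be _headssummary(pushop).
for branch, heads in sorted(headssum.iteritems()):
    remoteheads, newheads, unsyncedheads, discarded = heads
    if remoteheads is None:
        continue  # branch does not yet exist on the remote
    if len(newheads) > len(remoteheads):
        repo.ui.note('push would add heads on branch %s\n' % branch)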
       
   279 
   261 
   280 
   262 def _oldheadssummary(repo, remoteheads, outgoing, inc=False):
   281 def _oldheadssummary(repo, remoteheads, outgoing, inc=False):
   263     """Compute branchmapsummary for repo without branchmap support"""
   282     """Compute branchmapsummary for repo without branchmap support"""
   264 
   283 
   265     # 1-4b. old servers: Check for new topological heads.
   284     # 1-4b. old servers: Check for new topological heads.
   266     # Construct {old,new}map with branch = None (topological branch).
   285     # Construct {old,new}map with branch = None (topological branch).
   267     # (code based on update)
   286     # (code based on update)
   268     knownnode = repo.changelog.hasnode # no nodemap until it is filtered
   287     knownnode = repo.changelog.hasnode  # no nodemap until it is filtered
   269     oldheads = sorted(h for h in remoteheads if knownnode(h))
   288     oldheads = sorted(h for h in remoteheads if knownnode(h))
   270     # all nodes in outgoing.missing are children of either:
   289     # all nodes in outgoing.missing are children of either:
   271     # - an element of oldheads
   290     # - an element of oldheads
   272     # - another element of outgoing.missing
   291     # - another element of outgoing.missing
   273     # - nullrev
   292     # - nullrev
   279         unsynced = [None]
   298         unsynced = [None]
   280     else:
   299     else:
   281         unsynced = []
   300         unsynced = []
   282     return {None: (oldheads, newheads, unsynced, [])}
   301     return {None: (oldheads, newheads, unsynced, [])}
   283 
   302 
       
   303 
   284 def _nowarnheads(pushop):
   304 def _nowarnheads(pushop):
   285     # Compute newly pushed bookmarks. We don't warn about bookmarked heads.
   305     # Compute newly pushed bookmarks. We don't warn about bookmarked heads.
   286     repo = pushop.repo.unfiltered()
   306     repo = pushop.repo.unfiltered()
   287     remote = pushop.remote
   307     remote = pushop.remote
   288     localbookmarks = repo._bookmarks
   308     localbookmarks = repo._bookmarks
   289 
   309 
   290     with remote.commandexecutor() as e:
   310     with remote.commandexecutor() as e:
   291         remotebookmarks = e.callcommand('listkeys', {
   292             'namespace': 'bookmarks',
   293         }).result()
   311         remotebookmarks = e.callcommand(
   312             'listkeys', {'namespace': 'bookmarks',}
   313         ).result()
   294 
   314 
   295     bookmarkedheads = set()
   315     bookmarkedheads = set()
   296 
   316 
   297     # internal config: bookmarks.pushing
   317     # internal config: bookmarks.pushing
   298     newbookmarks = [localbookmarks.expandname(b)
   299                     for b in pushop.ui.configlist('bookmarks', 'pushing')]
   318     newbookmarks = [
   319         localbookmarks.expandname(b)
   320         for b in pushop.ui.configlist('bookmarks', 'pushing')
   321     ]
   300 
   322 
   301     for bm in localbookmarks:
   323     for bm in localbookmarks:
   302         rnode = remotebookmarks.get(bm)
   324         rnode = remotebookmarks.get(bm)
   303         if rnode and rnode in repo:
   325         if rnode and rnode in repo:
   304             lctx, rctx = repo[localbookmarks[bm]], repo[rnode]
   326             lctx, rctx = repo[localbookmarks[bm]], repo[rnode]
   308             if bm in newbookmarks and bm not in remotebookmarks:
   330             if bm in newbookmarks and bm not in remotebookmarks:
   309                 bookmarkedheads.add(localbookmarks[bm])
   331                 bookmarkedheads.add(localbookmarks[bm])
   310 
   332 
   311     return bookmarkedheads
   333     return bookmarkedheads
   312 
   334 
       
   335 
   313 def checkheads(pushop):
   336 def checkheads(pushop):
   314     """Check that a push won't add any outgoing head
   337     """Check that a push won't add any outgoing head
   315 
   338 
   316     raise Abort error and display ui message as needed.
   339     raise Abort error and display ui message as needed.
   317     """
   340     """
   336     if remote.capable('branchmap'):
   359     if remote.capable('branchmap'):
   337         headssum = _headssummary(pushop)
   360         headssum = _headssummary(pushop)
   338     else:
   361     else:
   339         headssum = _oldheadssummary(repo, remoteheads, outgoing, inc)
   362         headssum = _oldheadssummary(repo, remoteheads, outgoing, inc)
   340     pushop.pushbranchmap = headssum
   363     pushop.pushbranchmap = headssum
   341     newbranches = [branch for branch, heads in headssum.iteritems()
   342                    if heads[0] is None]
   364     newbranches = [
   365         branch for branch, heads in headssum.iteritems() if heads[0] is None
   366     ]
   343     # 1. Check for new branches on the remote.
   367     # 1. Check for new branches on the remote.
   344     if newbranches and not newbranch:  # new branch requires --new-branch
   368     if newbranches and not newbranch:  # new branch requires --new-branch
   345         branchnames = ', '.join(sorted(newbranches))
   369         branchnames = ', '.join(sorted(newbranches))
   346         # Calculate how many of the new branches are closed branches
   370         # Calculate how many of the new branches are closed branches
   347         closedbranches = set()
   371         closedbranches = set()
   348         for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
   372         for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
   349             if isclosed:
   373             if isclosed:
   350                 closedbranches.add(tag)
   374                 closedbranches.add(tag)
   351         closedbranches = (closedbranches & set(newbranches))
   375         closedbranches = closedbranches & set(newbranches)
   352         if closedbranches:
   376         if closedbranches:
   353             errmsg = (_("push creates new remote branches: %s (%d closed)!")
   354                         % (branchnames, len(closedbranches)))
   377             errmsg = _("push creates new remote branches: %s (%d closed)!") % (
   378                 branchnames,
   379                 len(closedbranches),
   380             )
   355         else:
   381         else:
   356             errmsg = (_("push creates new remote branches: %s!")% branchnames)
   382             errmsg = _("push creates new remote branches: %s!") % branchnames
   357         hint=_("use 'hg push --new-branch' to create new remote branches")
   383         hint = _("use 'hg push --new-branch' to create new remote branches")
   358         raise error.Abort(errmsg, hint=hint)
   384         raise error.Abort(errmsg, hint=hint)
   359 
   385 
   360     # 2. Find heads that we need not warn about
   386     # 2. Find heads that we need not warn about
   361     nowarnheads = _nowarnheads(pushop)
   387     nowarnheads = _nowarnheads(pushop)
   362 
   388 
   370         if remoteheads is None:
   396         if remoteheads is None:
   371             oldhs = set()
   397             oldhs = set()
   372         else:
   398         else:
   373             oldhs = set(remoteheads)
   399             oldhs = set(remoteheads)
   374         oldhs.update(unsyncedheads)
   400         oldhs.update(unsyncedheads)
   375         dhs = None # delta heads, the new heads on branch
   401         dhs = None  # delta heads, the new heads on branch
   376         newhs = set(newheads)
   402         newhs = set(newheads)
   377         newhs.update(unsyncedheads)
   403         newhs.update(unsyncedheads)
   378         if unsyncedheads:
   404         if unsyncedheads:
   379             if None in unsyncedheads:
   405             if None in unsyncedheads:
   380                 # old remote, no heads data
   406                 # old remote, no heads data
   381                 heads = None
   407                 heads = None
   382             else:
   408             else:
   383                 heads = scmutil.nodesummaries(repo, unsyncedheads)
   409                 heads = scmutil.nodesummaries(repo, unsyncedheads)
   384             if heads is None:
   410             if heads is None:
   385                 repo.ui.status(_("remote has heads that are "
   386                                  "not known locally\n"))
   411                 repo.ui.status(
   412                     _("remote has heads that are " "not known locally\n")
   413                 )
   387             elif branch is None:
   414             elif branch is None:
   388                 repo.ui.status(_("remote has heads that are "
   389                                  "not known locally: %s\n") % heads)
   415                 repo.ui.status(
   416                     _("remote has heads that are " "not known locally: %s\n")
   417                     % heads
   418                 )
   390             else:
   419             else:
   391                 repo.ui.status(_("remote has heads on branch '%s' that are "
   392                                  "not known locally: %s\n") % (branch, heads))
   420                 repo.ui.status(
   421                     _(
   422                         "remote has heads on branch '%s' that are "
   423                         "not known locally: %s\n"
   424                     )
   425                     % (branch, heads)
   426                 )
   393         if remoteheads is None:
   427         if remoteheads is None:
   394             if len(newhs) > 1:
   428             if len(newhs) > 1:
   395                 dhs = list(newhs)
   429                 dhs = list(newhs)
   396                 if errormsg is None:
   430                 if errormsg is None:
   397                     errormsg = (
   431                     errormsg = (
   398                         _("push creates new branch '%s' with multiple heads") %
   399                         branch
   432                         _("push creates new branch '%s' with multiple heads")
   433                         % branch
   400                     )
   434                     )
   401                     hint = _("merge or"
   402                              " see 'hg help push' for details about"
   403                              " pushing new heads")
   435                     hint = _(
   436                         "merge or"
   437                         " see 'hg help push' for details about"
   438                         " pushing new heads"
   439                     )
   404         elif len(newhs) > len(oldhs):
   440         elif len(newhs) > len(oldhs):
   405             # remove bookmarked or existing remote heads from the new heads list
   441             # remove bookmarked or existing remote heads from the new heads list
   406             dhs = sorted(newhs - nowarnheads - oldhs)
   442             dhs = sorted(newhs - nowarnheads - oldhs)
   407         if dhs:
   443         if dhs:
   408             if errormsg is None:
   444             if errormsg is None:
   409                 if branch not in ('default', None):
   445                 if branch not in ('default', None):
   410                     errormsg = _("push creates new remote head %s "
   411                                  "on branch '%s'!") % (short(dhs[0]), branch)
   446                     errormsg = _(
   447                         "push creates new remote head %s " "on branch '%s'!"
   448                     ) % (short(dhs[0]), branch)
   412                 elif repo[dhs[0]].bookmarks():
   449                 elif repo[dhs[0]].bookmarks():
   413                     errormsg = _("push creates new remote head %s "
   414                                  "with bookmark '%s'!") % (
   415                                  short(dhs[0]), repo[dhs[0]].bookmarks()[0])
   450                     errormsg = _(
   451                         "push creates new remote head %s " "with bookmark '%s'!"
   452                     ) % (short(dhs[0]), repo[dhs[0]].bookmarks()[0])
   416                 else:
   453                 else:
   417                     errormsg = _("push creates new remote head %s!"
   418                                  ) % short(dhs[0])
   454                     errormsg = _("push creates new remote head %s!") % short(
   455                         dhs[0]
   456                     )
   419                 if unsyncedheads:
   457                 if unsyncedheads:
   420                     hint = _("pull and merge or"
   421                              " see 'hg help push' for details about"
   422                              " pushing new heads")
   458                     hint = _(
   459                         "pull and merge or"
   460                         " see 'hg help push' for details about"
   461                         " pushing new heads"
   462                     )
   423                 else:
   463                 else:
   424                     hint = _("merge or"
   425                              " see 'hg help push' for details about"
   426                              " pushing new heads")
   464                     hint = _(
   465                         "merge or"
   466                         " see 'hg help push' for details about"
   467                         " pushing new heads"
   468                     )
   427             if branch is None:
   469             if branch is None:
   428                 repo.ui.note(_("new remote heads:\n"))
   470                 repo.ui.note(_("new remote heads:\n"))
   429             else:
   471             else:
   430                 repo.ui.note(_("new remote heads on branch '%s':\n") % branch)
   472                 repo.ui.note(_("new remote heads on branch '%s':\n") % branch)
   431             for h in dhs:
   473             for h in dhs:
   432                 repo.ui.note((" %s\n") % short(h))
   474                 repo.ui.note(" %s\n" % short(h))
   433     if errormsg:
   475     if errormsg:
   434         raise error.Abort(errormsg, hint=hint)
   476         raise error.Abort(errormsg, hint=hint)
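checkheads() is driven by a push operation object; a hedged sketch of the call-site shape ('pushop' is assumed to be an exchange push operation whose discovery fields are already populated).

# Illustrative sketch only; 'pushop' is an assumed object.
if not pushop.force:
    checkheads(pushop)  # raises error.Abort with a hint if new heads would be created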
       
   477 
   435 
   478 
   436 def _postprocessobsolete(pushop, futurecommon, candidate_newhs):
   479 def _postprocessobsolete(pushop, futurecommon, candidate_newhs):
   437     """post process the list of new heads with obsolescence information
   480     """post process the list of new heads with obsolescence information
   438 
   481 
   439     Exists as a sub-function to contain the complexity and allow extensions to
   482     Exists as a sub-function to contain the complexity and allow extensions to
   453     unfi = repo.unfiltered()
   496     unfi = repo.unfiltered()
   454     tonode = unfi.changelog.node
   497     tonode = unfi.changelog.node
   455     torev = unfi.changelog.nodemap.get
   498     torev = unfi.changelog.nodemap.get
   456     public = phases.public
   499     public = phases.public
   457     getphase = unfi._phasecache.phase
   500     getphase = unfi._phasecache.phase
   458     ispublic = (lambda r: getphase(unfi, r) == public)
   501     ispublic = lambda r: getphase(unfi, r) == public
   459     ispushed = (lambda n: torev(n) in futurecommon)
   502     ispushed = lambda n: torev(n) in futurecommon
   460     hasoutmarker = functools.partial(pushingmarkerfor, unfi.obsstore, ispushed)
   503     hasoutmarker = functools.partial(pushingmarkerfor, unfi.obsstore, ispushed)
   461     successorsmarkers = unfi.obsstore.successors
   504     successorsmarkers = unfi.obsstore.successors
   462     newhs = set() # final set of new heads
   505     newhs = set()  # final set of new heads
   463     discarded = set() # new head of fully replaced branch
   506     discarded = set()  # new head of fully replaced branch
   464 
   507 
   465     localcandidate = set() # candidate heads known locally
   508     localcandidate = set()  # candidate heads known locally
   466     unknownheads = set() # candidate heads unknown locally
   509     unknownheads = set()  # candidate heads unknown locally
   467     for h in candidate_newhs:
   510     for h in candidate_newhs:
   468         if h in unfi:
   511         if h in unfi:
   469             localcandidate.add(h)
   512             localcandidate.add(h)
   470         else:
   513         else:
   471             if successorsmarkers.get(h) is not None:
   514             if successorsmarkers.get(h) is not None:
   472                 msg = ('checkheads: remote head unknown locally has'
   473                        ' local marker: %s\n')
   515                 msg = (
   516                     'checkheads: remote head unknown locally has'
   517                     ' local marker: %s\n'
   518                 )
   474                 repo.ui.debug(msg % hex(h))
   519                 repo.ui.debug(msg % hex(h))
   475             unknownheads.add(h)
   520             unknownheads.add(h)
   476 
   521 
   477     # fast path the simple case
   522     # fast path the simple case
   478     if len(localcandidate) == 1:
   523     if len(localcandidate) == 1:
   480 
   525 
   481     # actually process branch replacement
   526     # actually process branch replacement
   482     while localcandidate:
   527     while localcandidate:
   483         nh = localcandidate.pop()
   528         nh = localcandidate.pop()
   484         # run this check early to skip the evaluation of the whole branch
   529         # run this check early to skip the evaluation of the whole branch
   485         if (torev(nh) in futurecommon or ispublic(torev(nh))):
   530         if torev(nh) in futurecommon or ispublic(torev(nh)):
   486             newhs.add(nh)
   531             newhs.add(nh)
   487             continue
   532             continue
   488 
   533 
   489         # Get all revs/nodes on the branch exclusive to this head
   534         # Get all revs/nodes on the branch exclusive to this head
   490         # (already filtered heads are "ignored"))
   535         # (already filtered heads are "ignored"))
   491         branchrevs = unfi.revs('only(%n, (%ln+%ln))',
   492                                nh, localcandidate, newhs)
   536         branchrevs = unfi.revs('only(%n, (%ln+%ln))', nh, localcandidate, newhs)
   493         branchnodes = [tonode(r) for r in branchrevs]
   537         branchnodes = [tonode(r) for r in branchrevs]
   494 
   538 
   495         # The branch won't be hidden on the remote if
   539         # The branch won't be hidden on the remote if
   496         # * any part of it is public,
   540         # * any part of it is public,
   497         # * any part of it is considered part of the result by previous logic,
   541         # * any part of it is considered part of the result by previous logic,
   498         # * if we have no markers to push to obsolete it.
   542         # * if we have no markers to push to obsolete it.
   499         if (any(ispublic(r) for r in branchrevs)
   500                 or any(torev(n) in futurecommon for n in branchnodes)
   501                 or any(not hasoutmarker(n) for n in branchnodes)):
   543         if (
   544             any(ispublic(r) for r in branchrevs)
   545             or any(torev(n) in futurecommon for n in branchnodes)
   546             or any(not hasoutmarker(n) for n in branchnodes)
   547         ):
   502             newhs.add(nh)
   548             newhs.add(nh)
   503         else:
   549         else:
   504             # note: there is a corner case if there is a merge in the branch.
   550             # note: there is a corner case if there is a merge in the branch.
   505             # we might end up with -more- heads.  However, these heads are not
   551             # we might end up with -more- heads.  However, these heads are not
   506             # "added" by the push, but more by the "removal" on the remote so I
   552             # "added" by the push, but more by the "removal" on the remote so I
   507             # think it is okay to ignore them,
   553             # think it is okay to ignore them,
   508             discarded.add(nh)
   554             discarded.add(nh)
   509     newhs |= unknownheads
   555     newhs |= unknownheads
   510     return newhs, discarded
   556     return newhs, discarded
       
   557 
   511 
   558 
   512 def pushingmarkerfor(obsstore, ispushed, node):
   559 def pushingmarkerfor(obsstore, ispushed, node):
   513     """true if some markers are to be pushed for node
   560     """true if some markers are to be pushed for node
   514 
   561 
   515     We cannot just look in to the pushed obsmarkers from the pushop because
   562     We cannot just look in to the pushed obsmarkers from the pushop because
   528         if ispushed(current):
   575         if ispushed(current):
   529             return True
   576             return True
   530         markers = successorsmarkers.get(current, ())
   577         markers = successorsmarkers.get(current, ())
   531         # markers fields = ('prec', 'succs', 'flag', 'meta', 'date', 'parents')
   578         # markers fields = ('prec', 'succs', 'flag', 'meta', 'date', 'parents')
   532         for m in markers:
   579         for m in markers:
   533             nexts = m[1] # successors
   580             nexts = m[1]  # successors
   534             if not nexts: # this is a prune marker
   581             if not nexts:  # this is a prune marker
   535                 nexts = m[5] or () # parents
   582                 nexts = m[5] or ()  # parents
   536             for n in nexts:
   583             for n in nexts:
   537                 if n not in seen:
   584                 if n not in seen:
   538                     seen.add(n)
   585                     seen.add(n)
   539                     stack.append(n)
   586                     stack.append(n)
   540     return False
   587     return False