comparison mercurial/discovery.py @ 11301:3d0591a66118

move discovery methods from localrepo into new discovery module
author Dirkjan Ochtman <dirkjan@ochtman.nl>
date Mon, 07 Jun 2010 18:35:54 +0200
parents mercurial/localrepo.py@5116a077c3da
children 0bb14798cd07
# discovery.py - protocol changeset discovery functions
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from node import nullid, short
from i18n import _
import util, error

def findincoming(repo, remote, base=None, heads=None, force=False):
    """Return list of roots of the subsets of missing nodes from remote

    If base dict is specified, assume that these nodes and their parents
    exist on the remote side and that no child of a node of base exists
    in both remote and repo.
    Furthermore, base will be updated to include the nodes that exist in
    both repo and remote but whose children do not exist in both.
    If a list of heads is specified, return only nodes which are heads
    or ancestors of these heads.

    All the ancestors of base are in repo and in remote.
    All the descendants of the list returned are missing in repo.
    (and so we know that the rest of the nodes are missing in remote, see
    outgoing)
    """
    return findcommonincoming(repo, remote, base, heads, force)[1]

def findcommonincoming(repo, remote, base=None, heads=None, force=False):
    """Return a tuple (common, missing roots, heads) used to identify
    missing nodes from remote.

    If base dict is specified, assume that these nodes and their parents
    exist on the remote side and that no child of a node of base exists
    in both remote and repo.
    Furthermore, base will be updated to include the nodes that exist in
    both repo and remote but whose children do not exist in both.
    If a list of heads is specified, return only nodes which are heads
    or ancestors of these heads.

    All the ancestors of base are in repo and in remote.
    """
    m = repo.changelog.nodemap
    search = []
    fetch = set()
    seen = set()
    seenbranch = set()
    if base is None:
        base = {}

    if not heads:
        heads = remote.heads()

    if repo.changelog.tip() == nullid:
        base[nullid] = 1
        if heads != [nullid]:
            return [nullid], [nullid], list(heads)
        return [nullid], [], []

    # assume we're closer to the tip than the root
    # and start by examining the heads
    repo.ui.status(_("searching for changes\n"))

    unknown = []
    for h in heads:
        if h not in m:
            unknown.append(h)
        else:
            base[h] = 1

    heads = unknown
    if not unknown:
        return base.keys(), [], []

    req = set(unknown)
    reqcnt = 0

    # search through remote branches
    # a 'branch' here is a linear segment of history, with four parts:
    # head, root, first parent, second parent
    # (a branch always has two parents (or none) by definition)
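    # For example (hypothetical nodes): for a linear run of changesets
    # a -> b -> c, remote.branches([c]) yields the 4-tuple
    # (c, a, p1(a), p2(a)): the segment's head, its root, and the
    # root's two parents.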
    unknown = remote.branches(unknown)
    while unknown:
        r = []
        while unknown:
            n = unknown.pop(0)
            if n[0] in seen:
                continue

            repo.ui.debug("examining %s:%s\n"
                          % (short(n[0]), short(n[1])))
            if n[0] == nullid: # found the end of the branch
                pass
            elif n in seenbranch:
                repo.ui.debug("branch already found\n")
                continue
            elif n[1] and n[1] in m: # do we know the base?
                repo.ui.debug("found incomplete branch %s:%s\n"
                              % (short(n[0]), short(n[1])))
                search.append(n[0:2]) # schedule branch range for scanning
                seenbranch.add(n)
            else:
                if n[1] not in seen and n[1] not in fetch:
                    if n[2] in m and n[3] in m:
                        repo.ui.debug("found new changeset %s\n" %
                                      short(n[1]))
                        fetch.add(n[1]) # earliest unknown
                    for p in n[2:4]:
                        if p in m:
                            base[p] = 1 # latest known

                for p in n[2:4]:
                    if p not in req and p not in m:
                        r.append(p)
                        req.add(p)
            seen.add(n[0])

        if r:
            reqcnt += 1
            repo.ui.progress(_('searching'), reqcnt, unit=_('queries'))
            repo.ui.debug("request %d: %s\n" %
                          (reqcnt, " ".join(map(short, r))))
            for p in xrange(0, len(r), 10):
                for b in remote.branches(r[p:p + 10]):
                    repo.ui.debug("received %s:%s\n" %
                                  (short(b[0]), short(b[1])))
                    unknown.append(b)

    # do binary search on the branches we found
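    # remote.between() takes (head, root) pairs and returns, for each
    # pair, sample nodes at exponentially growing distances from the
    # head (1, 2, 4, ...); that is why f doubles below. Each round
    # narrows a segment to the span between the last locally unknown
    # sample and the first locally known one.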
    while search:
        newsearch = []
        reqcnt += 1
        repo.ui.progress(_('searching'), reqcnt, unit=_('queries'))
        for n, l in zip(search, remote.between(search)):
            l.append(n[1])
            p = n[0]
            f = 1
            for i in l:
                repo.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
                if i in m:
                    if f <= 2:
                        repo.ui.debug("found new branch changeset %s\n" %
                                      short(p))
                        fetch.add(p)
                        base[i] = 1
                    else:
                        repo.ui.debug("narrowed branch search to %s:%s\n"
                                      % (short(p), short(i)))
                        newsearch.append((p, i))
                    break
                p, f = i, f * 2
        search = newsearch

    # sanity check our fetch list
    for f in fetch:
        if f in m:
            raise error.RepoError(_("already have changeset ")
                                  + short(f[:4]))

    if base.keys() == [nullid]:
        if force:
            repo.ui.warn(_("warning: repository is unrelated\n"))
        else:
            raise util.Abort(_("repository is unrelated"))

    repo.ui.debug("found new changesets starting at " +
                  " ".join([short(f) for f in fetch]) + "\n")

    repo.ui.progress(_('searching'), None)
    repo.ui.debug("%d total queries\n" % reqcnt)

    return base.keys(), list(fetch), heads

def findoutgoing(repo, remote, base=None, heads=None, force=False):
    """Return list of nodes that are roots of subsets not in remote

    If base dict is specified, assume that these nodes and their parents
    exist on the remote side.
    If a list of heads is specified, return only nodes which are heads
    or ancestors of these heads, and return a second element which
    contains all remote heads which get new children.
    """
    if base is None:
        base = {}
        findincoming(repo, remote, base, heads, force=force)

    repo.ui.debug("common changesets up to "
                  + " ".join(map(short, base.keys())) + "\n")

    remain = set(repo.changelog.nodemap)

    # prune everything remote has from the tree
    remain.remove(nullid)
    remove = base.keys()
    while remove:
        n = remove.pop(0)
        if n in remain:
            remain.remove(n)
            for p in repo.changelog.parents(n):
                remove.append(p)

    # find every node whose parents have been pruned
    subset = []
    # find every remote head that will get new children
    updated_heads = set()
    for n in remain:
        p1, p2 = repo.changelog.parents(n)
        if p1 not in remain and p2 not in remain:
            subset.append(n)
        if heads:
            if p1 in heads:
                updated_heads.add(p1)
            if p2 in heads:
                updated_heads.add(p2)

    # this is the set of all roots we have to push
    if heads:
        return subset, list(updated_heads)
    else:
        return subset

def prepush(repo, remote, force, revs, newbranch):
    '''Analyze the local and remote repositories and determine which
    changesets need to be pushed to the remote. Return value depends
    on circumstances:

    If we are not going to push anything, return a tuple (None,
    outgoing) where outgoing is 1 if there are no outgoing
    changesets and 0 if there are, but we refuse to push them
    (e.g. because the push would create new remote heads).

    Otherwise, return a tuple (changegroup, remoteheads), where
    changegroup is a readable file-like object whose read() returns
    successive changegroup chunks ready to be sent over the wire and
    remoteheads is the list of remote heads.'''
    common = {}
    remote_heads = remote.heads()
    inc = findincoming(repo, remote, common, remote_heads, force=force)

    cl = repo.changelog
    update, updated_heads = findoutgoing(repo, remote, common, remote_heads)
    outg, bases, heads = cl.nodesbetween(update, revs)

    if not bases:
        repo.ui.status(_("no changes found\n"))
        return None, 1

    if not force and remote_heads != [nullid]:

        def fail_multiple_heads(unsynced, branch=None):
            if branch:
                msg = _("abort: push creates new remote heads"
                        " on branch '%s'!\n") % branch
            else:
                msg = _("abort: push creates new remote heads!\n")
            repo.ui.warn(msg)
            if unsynced:
                repo.ui.status(_("(you should pull and merge or"
                                 " use push -f to force)\n"))
            else:
                repo.ui.status(_("(did you forget to merge?"
                                 " use push -f to force)\n"))
            return None, 0

        if remote.capable('branchmap'):
            # Check for each named branch if we're creating new remote heads.
            # To be a remote head after push, node must be either:
            # - unknown locally
            # - a local outgoing head descended from update
            # - a remote head that's known locally and not
            #   ancestral to an outgoing head
            #
            # New named branches cannot be created without --force.

            # 1. Create set of branches involved in the push.
            branches = set(repo[n].branch() for n in outg)

            # 2. Check for new branches on the remote.
            remotemap = remote.branchmap()
            newbranches = branches - set(remotemap)
            if newbranches and not newbranch: # new branch requires --new-branch
                branchnames = ', '.join("%s" % b for b in newbranches)
                repo.ui.warn(_("abort: push creates "
                               "new remote branches: %s!\n")
                             % branchnames)
                repo.ui.status(_("(use 'hg push --new-branch' to create new "
                                 "remote branches)\n"))
                return None, 0
            branches.difference_update(newbranches)

            # 3. Construct the initial oldmap and newmap dicts.
            # They contain information about the remote heads before and
            # after the push, respectively.
            # Heads not found locally are not included in either dict,
            # since they won't be affected by the push.
            # unsynced contains all branches with incoming changesets.
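            # Illustrative shape (hypothetical nodes): oldmap and newmap
            # both map a branch name to a list of binary head nodes,
            # e.g. {'default': [node1, node2], 'stable': [node3]}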
            oldmap = {}
            newmap = {}
            unsynced = set()
            for branch in branches:
                remoteheads = remotemap[branch]
                prunedheads = [h for h in remoteheads if h in cl.nodemap]
                oldmap[branch] = prunedheads
                newmap[branch] = list(prunedheads)
                if len(remoteheads) > len(prunedheads):
                    unsynced.add(branch)

            # 4. Update newmap with outgoing changes.
            # This will possibly add new heads and remove existing ones.
            ctxgen = (repo[n] for n in outg)
            repo._updatebranchcache(newmap, ctxgen)

            # 5. Check for new heads.
            # If there are more heads after the push than before, a suitable
            # warning, depending on unsynced status, is displayed.
            for branch in branches:
                if len(newmap[branch]) > len(oldmap[branch]):
                    return fail_multiple_heads(branch in unsynced, branch)

            # 6. Check for unsynced changes on involved branches.
            if unsynced:
                repo.ui.warn(_("note: unsynced remote changes!\n"))

        else:
            # Old servers: Check for new topological heads.
            # Code based on _updatebranchcache.
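            # The pruning below mirrors the branch cache logic: walk the
            # outgoing changesets newest-first and discard every node
            # that is an ancestor of a surviving candidate, so that only
            # true topological heads remain in newheads.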
            newheads = set(h for h in remote_heads if h in cl.nodemap)
            oldheadcnt = len(newheads)
            newheads.update(outg)
            if len(newheads) > 1:
                for latest in reversed(outg):
                    if latest not in newheads:
                        continue
                    minhrev = min(cl.rev(h) for h in newheads)
                    reachable = cl.reachable(latest, cl.node(minhrev))
                    reachable.remove(latest)
                    newheads.difference_update(reachable)
            if len(newheads) > oldheadcnt:
                return fail_multiple_heads(inc)
            if inc:
                repo.ui.warn(_("note: unsynced remote changes!\n"))

    if revs is None:
        # use the fast path, no race possible on push
        nodes = repo.changelog.findmissing(common.keys())
        cg = repo._changegroup(nodes, 'push')
    else:
        cg = repo.changegroupsubset(update, revs, 'push')
    return cg, remote_heads
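
# A hedged usage sketch, not part of this module: a localrepo-style
# caller would feed prepush's result to the peer's unbundle command
# (the peer method names are assumed from the wire protocol of this era):
#
#   cg, remote_heads = prepush(repo, remote, force=False, revs=None,
#                              newbranch=False)
#   if cg is not None:
#       remote.unbundle(cg, remote_heads, 'push')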