author       Robin Farine <robin.farine@terminus.org>
date         Wed, 28 Sep 2005 19:42:03 +0200
changeset    1371:68e84563c540
parent       1351:0e2be889ccd7
child        1393:67779d34cb52
child        1457:518da3c3b6ce
permissions  -rw-r--r--

"""
revlog.py - storage back-end for mercurial

This provides efficient delta storage with O(1) retrieve and append
and O(changes) merge between branches

Copyright 2005 Matt Mackall <mpm@selenic.com>

This software may be used and distributed according to the terms
of the GNU General Public License, incorporated herein by reference.
"""

from node import *
from demandload import demandload
demandload(globals(), "binascii errno heapq mdiff sha struct zlib")

def hash(text, p1, p2):
    """generate a hash from the given text and its parent hashes

    This hash combines both the current file contents and its history
    in a manner that makes it easy to distinguish nodes with the same
    content in the revision graph.
    """
    l = [p1, p2]
    l.sort()
    s = sha.new(l[0])
    s.update(l[1])
    s.update(text)
    return s.digest()

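# For illustration: with hash() as defined above, a revision's node id is the
# SHA-1 of its sorted parent ids followed by its text, roughly
#   sha.new(min(p1, p2) + max(p1, p2) + text).digest()
# so two revisions with identical text but different ancestry get distinct ids.
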
def compress(text):
    """ generate a possibly-compressed representation of text """
    if not text: return text
    if len(text) < 44:
        if text[0] == '\0': return text
        return 'u' + text
    bin = zlib.compress(text)
    if len(bin) > len(text):
        if text[0] == '\0': return text
        return 'u' + text
    return bin

def decompress(bin):
    """ decompress the given input """
    if not bin: return bin
    t = bin[0]
    if t == '\0': return bin
    if t == 'x': return zlib.decompress(bin)
    if t == 'u': return bin[1:]
    raise RevlogError("unknown compression type %s" % t)

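# For illustration: the first byte of a stored chunk doubles as a type tag.
# zlib output begins with 'x', text that already starts with '\0' is kept
# verbatim, and any other uncompressed text is prefixed with 'u' -- exactly
# the three cases decompress() dispatches on.
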
indexformat = ">4l20s20s20s"

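# For illustration: each index record is struct.calcsize(">4l20s20s20s") == 76
# bytes, unpacked as (offset, size, base, linkrev, p1, p2, nodeid) -- four
# big-endian longs followed by three 20-byte binary hashes. That tuple order
# is what self.index[rev] is assumed to hold throughout this module.
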
class lazyparser:
    """
    this class avoids the need to parse the entirety of large indices

    By default we parse and load 1000 entries at a time.

    If no position is specified, we load the whole index, and replace
    the lazy objects in revlog with the underlying objects for
    efficiency in cases where we look at most of the nodes.
    """
    def __init__(self, data, revlog):
        self.data = data
        self.s = struct.calcsize(indexformat)
        self.l = len(data)/self.s
        self.index = [None] * self.l
        self.map = {nullid: -1}
        self.all = 0
        self.revlog = revlog

    def load(self, pos=None):
        if self.all: return
        if pos is not None:
            block = pos / 1000
            i = block * 1000
            end = min(self.l, i + 1000)
        else:
            self.all = 1
            i = 0
            end = self.l
            self.revlog.index = self.index
            self.revlog.nodemap = self.map

        while i < end:
            d = self.data[i * self.s: (i + 1) * self.s]
            e = struct.unpack(indexformat, d)
            self.index[i] = e
            self.map[e[6]] = i
            i += 1

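# For illustration: lazyparser.load(pos) parses only the 1000-entry block that
# contains pos, while load() with no argument parses everything and hands the
# plain index list and node map back to the owning revlog, so later lookups
# skip the lazy wrappers entirely.
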
class lazyindex:
    """a lazy version of the index array"""
    def __init__(self, parser):
        self.p = parser
    def __len__(self):
        return len(self.p.index)
    def load(self, pos):
        self.p.load(pos)
        return self.p.index[pos]
    def __getitem__(self, pos):
        return self.p.index[pos] or self.load(pos)
    def append(self, e):
        self.p.index.append(e)

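# For illustration: entries the parser has not decoded yet are left as None,
# so lazyindex.__getitem__ relies on "self.p.index[pos] or self.load(pos)" to
# fault in the missing block on first access.
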
class lazymap:
    """a lazy version of the node map"""
    def __init__(self, parser):
        self.p = parser
    def load(self, key):
        if self.p.all: return
        n = self.p.data.find(key)
        if n < 0:
            raise KeyError(key)
        pos = n / self.p.s
        self.p.load(pos)
    def __contains__(self, key):
        self.p.load()
        return key in self.p.map
    def __iter__(self):
        yield nullid
        for i in xrange(self.p.l):
            try:
                yield self.p.index[i][6]
            except:
                self.p.load(i)
                yield self.p.index[i][6]
    def __getitem__(self, key):
        try:
            return self.p.map[key]
        except KeyError:
            try:
                self.load(key)
                return self.p.map[key]
            except KeyError:
                raise KeyError("node " + hex(key))
    def __setitem__(self, key, val):
        self.p.map[key] = val

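# For illustration: lazymap.load(key) avoids a full parse by searching for the
# raw 20-byte node id in the unparsed index data with find() and loading only
# the block around that offset; __getitem__ falls back to it before giving up
# with a KeyError.
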
class RevlogError(Exception): pass

class revlog:
    """
    the underlying revision storage object

    A revlog consists of two parts, an index and the revision data.

    The index is a file with a fixed record size containing
    information on each revision, including its nodeid (hash), the
    nodeids of its parents, the position and offset of its data within
    the data file, and the revision it's based on. Finally, each entry
    contains a linkrev entry that can serve as a pointer to external
    data.

    The revision data itself is a linear collection of data chunks.
    Each chunk represents a revision and is usually represented as a
    delta against the previous chunk. To bound lookup time, runs of
    deltas are limited to about 2 times the length of the original
    version data. This makes retrieval of a version proportional to
    its size, or O(1) relative to the number of revisions.

    Both pieces of the revlog are written to in an append-only
    fashion, which means we never need to rewrite a file to insert or
    remove data, and can use some simple techniques to avoid the need
    for locking while reading.
    """
    def __init__(self, opener, indexfile, datafile):
        """
        create a revlog object

        opener is a function that abstracts the file opening operation
        and can be used to implement COW semantics or the like.
        """
        self.indexfile = indexfile
        self.datafile = datafile
        self.opener = opener
        self.cache = None

        try:
            i = self.opener(self.indexfile).read()
        except IOError, inst:
            if inst.errno != errno.ENOENT:
                raise
            i = ""

        if len(i) > 10000:
            # big index, let's parse it on demand
            parser = lazyparser(i, self)
            self.index = lazyindex(parser)
            self.nodemap = lazymap(parser)
        else:
            s = struct.calcsize(indexformat)
            l = len(i) / s
            self.index = [None] * l
            m = [None] * l

            n = 0
            for f in xrange(0, len(i), s):
                # offset, size, base, linkrev, p1, p2, nodeid
                e = struct.unpack(indexformat, i[f:f + s])
                m[n] = (e[6], n)
                self.index[n] = e
                n += 1

            self.nodemap = dict(m)
            self.nodemap[nullid] = -1

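    # Usage sketch (the opener, file names and revision shown here are
    # hypothetical, not taken from this file):
    #
    #   r = revlog(opener, "data/foo.i", "data/foo.d")
    #   text = r.revision(r.tip())      # reconstruct the newest revision
    #   node = r.lookup("0")            # by rev number or hex nodeid prefix
    #
    # where opener(name[, mode]) returns an open file object, as described in
    # the constructor docstring above.
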
    def tip(self): return self.node(len(self.index) - 1)
    def count(self): return len(self.index)
    def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
    def rev(self, node):
        try:
            return self.nodemap[node]
        except KeyError:
            raise RevlogError('%s: no node %s' % (self.indexfile, hex(node)))
    def linkrev(self, node): return self.index[self.rev(node)][3]
    def parents(self, node):
        if node == nullid: return (nullid, nullid)
        return self.index[self.rev(node)][4:6]

    def start(self, rev): return self.index[rev][0]
    def length(self, rev): return self.index[rev][1]
    def end(self, rev): return self.start(rev) + self.length(rev)
    def base(self, rev): return self.index[rev][2]

    def reachable(self, rev, stop=None):
        reachable = {}
        visit = [rev]
        reachable[rev] = 1
        if stop:
            stopn = self.rev(stop)
        else:
            stopn = 0
        while visit:
            n = visit.pop(0)
            if n == stop:
                continue
            if n == nullid:
                continue
            for p in self.parents(n):
                if self.rev(p) < stopn:
                    continue
                if p not in reachable:
                    reachable[p] = 1
                    visit.append(p)
        return reachable

    def heads(self, stop=None):
        """return the list of all nodes that have no children"""
        p = {}
        h = []
        stoprev = 0
        if stop and stop in self.nodemap:
            stoprev = self.rev(stop)

        for r in range(self.count() - 1, -1, -1):
            n = self.node(r)
            if n not in p:
                h.append(n)
            if n == stop:
                break
            if r < stoprev:
                break
            for pn in self.parents(n):
                p[pn] = 1
        return h

    def children(self, node):
        """find the children of a given node"""
        c = []
        p = self.rev(node)
        for r in range(p + 1, self.count()):
            n = self.node(r)
            for pn in self.parents(n):
                if pn == node:
                    c.append(n)
                    continue
                elif pn == nullid:
                    continue
        return c

    def lookup(self, id):
        """locate a node based on revision number or subset of hex nodeid"""
        try:
            rev = int(id)
            if str(rev) != id: raise ValueError
            if rev < 0: rev = self.count() + rev
            if rev < 0 or rev >= self.count(): raise ValueError
            return self.node(rev)
        except (ValueError, OverflowError):
            c = []
            for n in self.nodemap:
                if hex(n).startswith(id):
                    c.append(n)
            if len(c) > 1: raise KeyError("Ambiguous identifier")
            if len(c) < 1: raise KeyError("No match found")
            return c[0]

        return None

    def diff(self, a, b):
        """return a delta between two revisions"""
        return mdiff.textdiff(a, b)

    def patches(self, t, pl):
        """apply a list of patches to a string"""
        return mdiff.patches(t, pl)

    def delta(self, node):
        """return or calculate a delta between a node and its predecessor"""
        r = self.rev(node)
        b = self.base(r)
        if r == b:
            return self.diff(self.revision(self.node(r - 1)),
                             self.revision(node))
        else:
            f = self.opener(self.datafile)
            f.seek(self.start(r))
            data = f.read(self.length(r))
            return decompress(data)

    def revision(self, node):
        """return an uncompressed revision of a given node"""
        if node == nullid: return ""
        if self.cache and self.cache[0] == node: return self.cache[2]

        # look up what we need to read
        text = None
        rev = self.rev(node)
        start, length, base, link, p1, p2, node = self.index[rev]
        end = start + length
        if base != rev: start = self.start(base)

        # do we have useful data cached?
        if self.cache and self.cache[1] >= base and self.cache[1] < rev:
            base = self.cache[1]
            start = self.start(base + 1)
            text = self.cache[2]
            last = 0

        f = self.opener(self.datafile)
        f.seek(start)
        data = f.read(end - start)

        if text is None:
            last = self.length(base)
            text = decompress(data[:last])

        bins = []
        for r in xrange(base + 1, rev + 1):
            s = self.length(r)
            bins.append(decompress(data[last:last + s]))
            last = last + s

        text = mdiff.patches(text, bins)

        if node != hash(text, p1, p2):
            raise RevlogError("integrity check failed on %s:%d"
                              % (self.datafile, rev))

        self.cache = (node, rev, text)
        return text

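    # For illustration: revision() reads a single contiguous slice of the data
    # file, decompresses the full text stored at the chain base (or starts from
    # the cached text when possible), applies the following deltas in order
    # with mdiff.patches(), and checks the result against the stored nodeid.
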
    def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
        """add a revision to the log

        text - the revision data to add
        transaction - the transaction object used for rollback
        link - the linkrev data to add
        p1, p2 - the parent nodeids of the revision
        d - an optional precomputed delta
        """
        if text is None: text = ""
        if p1 is None: p1 = self.tip()
        if p2 is None: p2 = nullid

        node = hash(text, p1, p2)

        if node in self.nodemap:
            return node

        n = self.count()
        t = n - 1

        if n:
            base = self.base(t)
            start = self.start(base)
            end = self.end(t)
            if not d:
                prev = self.revision(self.tip())
                d = self.diff(prev, text)
            data = compress(d)
            dist = end - start + len(data)

        # full versions are inserted when the needed deltas
        # become comparable to the uncompressed text
        if not n or dist > len(text) * 2:
            data = compress(text)
            base = n
        else:
            base = self.base(t)

        offset = 0
        if t >= 0:
            offset = self.end(t)

        e = (offset, len(data), base, link, p1, p2, node)

        self.index.append(e)
        self.nodemap[node] = n
        entry = struct.pack(indexformat, *e)

        transaction.add(self.datafile, e[0])
        self.opener(self.datafile, "a").write(data)
        transaction.add(self.indexfile, n * len(entry))
        self.opener(self.indexfile, "a").write(entry)

        self.cache = (node, n, text)
        return node

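    # For illustration of the "dist > len(text) * 2" cutoff above: if the delta
    # chain for a 10,000-byte text already occupies more than 20,000 bytes of
    # the data file, the new revision is written as a full compressed snapshot
    # and becomes the base of the next chain (the figures are hypothetical).
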
    def ancestor(self, a, b):
        """calculate the least common ancestor of nodes a and b"""
        # calculate the distance of every node from root
        dist = {nullid: 0}
        for i in xrange(self.count()):
            n = self.node(i)
            p1, p2 = self.parents(n)
            dist[n] = max(dist[p1], dist[p2]) + 1

        # traverse ancestors in order of decreasing distance from root
        def ancestors(node):
            # we store negative distances because heap returns smallest member
            h = [(-dist[node], node)]
            seen = {}
            earliest = self.count()
            while h:
                d, n = heapq.heappop(h)
                if n not in seen:
                    seen[n] = 1
                    r = self.rev(n)
                    yield (-d, n)
                    for p in self.parents(n):
                        heapq.heappush(h, (-dist[p], p))

        def generations(node):
            sg, s = None, {}
            for g,n in ancestors(node):
                if g != sg:
                    if sg:
                        yield sg, s
                    sg, s = g, {n:1}
                else:
                    s[n] = 1
            yield sg, s

        x = generations(a)
        y = generations(b)
        gx = x.next()
        gy = y.next()

        # increment each ancestor list until it is closer to root than
        # the other, or they match
        while 1:
            #print "ancestor gen %s %s" % (gx[0], gy[0])
            if gx[0] == gy[0]:
                # find the intersection
                i = [ n for n in gx[1] if n in gy[1] ]
                if i:
                    return i[0]
                else:
                    #print "next"
                    gy = y.next()
                    gx = x.next()
            elif gx[0] < gy[0]:
                #print "next y"
                gy = y.next()
            else:
                #print "next x"
                gx = x.next()

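    # For illustration: ancestor() walks the two ancestor sets one generation
    # (distance from root) at a time, always advancing whichever side is
    # currently farther from the root, and returns a node from the first
    # generation the two walks have in common.
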
    def group(self, linkmap):
        """calculate a delta group

        Given a list of changeset revs, return a set of deltas and
        metadata corresponding to nodes. the first delta is
        parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
        have this parent as it has all history before these
        changesets. parent is parent[0]
        """
        revs = []
        needed = {}

        # find file nodes/revs that match changeset revs
        for i in xrange(0, self.count()):
            if self.index[i][3] in linkmap:
                revs.append(i)
                needed[i] = 1

        # if we don't have any revisions touched by these changesets, bail
        if not revs:
            yield struct.pack(">l", 0)
            return

        # add the parent of the first rev
        p = self.parents(self.node(revs[0]))[0]
        revs.insert(0, self.rev(p))

        # for each delta that isn't contiguous in the log, we need to
        # reconstruct the base, reconstruct the result, and then
        # calculate the delta. We also need to do this where we've
        # stored a full version and not a delta
        for i in xrange(0, len(revs) - 1):
            a, b = revs[i], revs[i + 1]
            if a + 1 != b or self.base(b) == b:
                for j in xrange(self.base(a), a + 1):
                    needed[j] = 1
                for j in xrange(self.base(b), b + 1):
                    needed[j] = 1

        # calculate spans to retrieve from datafile
        needed = needed.keys()
        needed.sort()
        spans = []
        oo = -1
        ol = 0
        for n in needed:
            if n < 0: continue
            o = self.start(n)
            l = self.length(n)
            if oo + ol == o: # can we merge with the previous?
                nl = spans[-1][2]
                nl.append((n, l))
                ol += l
                spans[-1] = (oo, ol, nl)
            else:
                oo = o
                ol = l
                spans.append((oo, ol, [(n, l)]))

        # read spans in, divide up chunks
        chunks = {}
        for span in spans:
            # we reopen the file for each span to make http happy for now
            f = self.opener(self.datafile)
            f.seek(span[0])
            data = f.read(span[1])

            # divide up the span
            pos = 0
            for r, l in span[2]:
                chunks[r] = decompress(data[pos: pos + l])
                pos += l

        # helper to reconstruct intermediate versions
        def construct(text, base, rev):
            bins = [chunks[r] for r in xrange(base + 1, rev + 1)]
            return mdiff.patches(text, bins)

        # build deltas
        deltas = []
        for d in xrange(0, len(revs) - 1):
            a, b = revs[d], revs[d + 1]
            n = self.node(b)

            # do we need to construct a new delta?
            if a + 1 != b or self.base(b) == b:
                if a >= 0:
                    base = self.base(a)
                    ta = chunks[self.base(a)]
                    ta = construct(ta, base, a)
                else:
                    ta = ""

                base = self.base(b)
                if a > base:
                    base = a
                    tb = ta
                else:
                    tb = chunks[self.base(b)]
                    tb = construct(tb, base, b)
                d = self.diff(ta, tb)
            else:
                d = chunks[b]

            p = self.parents(n)
            meta = n + p[0] + p[1] + linkmap[self.linkrev(n)]
            l = struct.pack(">l", len(meta) + len(d) + 4)
            yield l
            yield meta
            yield d

        yield struct.pack(">l", 0)

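    # For illustration: each delta that group() emits is framed as a 4-byte
    # big-endian length (which counts itself), 80 bytes of metadata
    # (node, p1, p2, changeset id), and the delta body, with a zero length
    # terminating the stream; addgroup() below consumes the reassembled
    # chunks and unpacks the same 80-byte header with
    # struct.unpack("20s20s20s20s", chunk[:80]).
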
    def addgroup(self, revs, linkmapper, transaction, unique=0):
        """
        add a delta group

        given a set of deltas, add them to the revision log. the
        first delta is against its parent, which should be in our
        log, the rest are against the previous delta.
        """

        # track the base of the current delta log
        r = self.count()
        t = r - 1
        node = nullid

        base = prev = -1
        start = end = measure = 0
        if r:
            start = self.start(self.base(t))
            end = self.end(t)
            measure = self.length(self.base(t))
            base = self.base(t)
            prev = self.tip()

        transaction.add(self.datafile, end)
        transaction.add(self.indexfile, r * struct.calcsize(indexformat))
        dfh = self.opener(self.datafile, "a")
        ifh = self.opener(self.indexfile, "a")

        # loop through our set of deltas
        chain = None
        for chunk in revs:
            node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
            link = linkmapper(cs)
            if node in self.nodemap:
                # this can happen if two branches make the same change
                # if unique:
                #    raise RevlogError("already have %s" % hex(node[:4]))
                chain = node
                continue
            delta = chunk[80:]

            if not chain:
                # retrieve the parent revision of the delta chain
                chain = p1
                if not chain in self.nodemap:
                    raise RevlogError("unknown base %s" % short(chain[:4]))

            # full versions are inserted when the needed deltas become
            # comparable to the uncompressed text or when the previous
            # version is not the one we have a delta against. We use
            # the size of the previous full rev as a proxy for the
            # current size.

            if chain == prev:
                cdelta = compress(delta)

            if chain != prev or (end - start + len(cdelta)) > measure * 2:
                # flush our writes here so we can read it in revision
                dfh.flush()
                ifh.flush()
                text = self.revision(chain)
                text = self.patches(text, [delta])
                chk = self.addrevision(text, transaction, link, p1, p2)
                if chk != node:
                    raise RevlogError("consistency error adding group")
                measure = len(text)
            else:
                e = (end, len(cdelta), self.base(t), link, p1, p2, node)
                self.index.append(e)
                self.nodemap[node] = r
                dfh.write(cdelta)
                ifh.write(struct.pack(indexformat, *e))

            t, r, chain, prev = r, r + 1, node, node
            start = self.start(self.base(t))
            end = self.end(t)

        dfh.close()
        ifh.close()
        return node