author       mpm@selenic.com
date         Wed, 11 May 2005 14:06:34 -0800
changeset 50 0263ce8be574
parent 46    93e868fa0db8
child 64     b3e2ddff0159
permissions  -rw-r--r--
# revlog.py - storage back-end for mercurial
#
# This provides efficient delta storage with O(1) retrieve and append
# and O(changes) merge between branches
#
# Copyright 2005 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

import zlib, struct, sha, os, tempfile, binascii
from mercurial import mdiff

def hex(node): return binascii.hexlify(node)
def bin(node): return binascii.unhexlify(node)

def compress(text):
    return zlib.compress(text)

def decompress(bin):
    return zlib.decompress(bin)

def hash(text, p1, p2):
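    # node id: sha1 of the two parent ids (sorted) followed by the revision text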
    l = [p1, p2]
    l.sort()
    return sha.sha(l[0] + l[1] + text).digest()

nullid = "\0" * 20
indexformat = ">4l20s20s20s"
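# each index entry packs offset, size, base and linkrev as big-endian 32-bit
# integers, followed by the two parent ids and the node id (20 bytes each)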

class revlog:
    def __init__(self, opener, indexfile, datafile):
        self.indexfile = indexfile
        self.datafile = datafile
        self.index = []
        self.opener = opener
        self.cache = None
        self.nodemap = {nullid: -1}
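        # nodemap maps node ids to revision numbers; the null id maps to rev -1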
        # read the whole index for now, handle on-demand later
        try:
            n = 0
            i = self.opener(self.indexfile).read()
            s = struct.calcsize(indexformat)
            for f in range(0, len(i), s):
                # offset, size, base, linkrev, p1, p2, nodeid
                e = struct.unpack(indexformat, i[f:f + s])
                self.nodemap[e[6]] = n
                self.index.append(e)
                n += 1
        except IOError: pass

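    # accessors over the in-memory index tuples
    # (offset, size, base, linkrev, p1, p2, nodeid)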
    def tip(self): return self.node(len(self.index) - 1)
    def count(self): return len(self.index)
    def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
    def rev(self, node): return self.nodemap[node]
    def linkrev(self, node): return self.index[self.nodemap[node]][3]
    def parents(self, node):
        if node == nullid: return (nullid, nullid)
        return self.index[self.nodemap[node]][4:6]

    def start(self, rev): return self.index[rev][0]
    def length(self, rev): return self.index[rev][1]
    def end(self, rev): return self.start(rev) + self.length(rev)
    def base(self, rev): return self.index[rev][2]

    def lookup(self, id):
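        # id may be a revision number or any substring of a hex node id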
        try:
            rev = int(id)
            return self.node(rev)
        except ValueError:
            c = []
            for n in self.nodemap:
                if id in hex(n):
                    c.append(n)
            if len(c) > 1: raise KeyError("Ambiguous identifier")
            if len(c) < 1: raise KeyError
            return c[0]

        return None

    def revisions(self, list):
        # this can be optimized to do spans, etc
        # be stupid for now
        for node in list:
            yield self.revision(node)

    def diff(self, a, b):
        return mdiff.textdiff(a, b)

    def patch(self, text, patch):
        return mdiff.patch(text, patch)

    def revision(self, node):
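        # reconstruct the text of a revision: start from the full text at the
        # base of its delta chain and apply each stored delta in order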
        if node == nullid: return ""
        if self.cache and self.cache[0] == node: return self.cache[2]

        text = None
        rev = self.rev(node)
        base = self.base(rev)
        start = self.start(base)
        end = self.end(rev)

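        # if the cached revision lies on this delta chain, start patching
        # from it rather than from the full text at the base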
        if self.cache and self.cache[1] >= base and self.cache[1] < rev:
            base = self.cache[1]
            start = self.start(base + 1)
            text = self.cache[2]
            last = 0

        f = self.opener(self.datafile)
        f.seek(start)
        data = f.read(end - start)
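        # a single contiguous read covers every chunk needed from start through rev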

        if not text:
            last = self.length(base)
            text = decompress(data[:last])

        for r in range(base + 1, rev + 1):
            s = self.length(r)
            b = decompress(data[last:last + s])
            text = self.patch(text, b)
            last = last + s

        (p1, p2) = self.parents(node)
        if node != hash(text, p1, p2):
            raise "integrity check failed on %s:%d" % (self.datafile, rev)

        self.cache = (node, rev, text)
        return text

    def addrevision(self, text, transaction, link, p1=None, p2=None):
        if text is None: text = ""
        if p1 is None: p1 = self.tip()
        if p2 is None: p2 = nullid

        node = hash(text, p1, p2)

        n = self.count()
        t = n - 1

        if n:
            start = self.start(self.base(t))
            end = self.end(t)
            prev = self.revision(self.tip())
            data = compress(self.diff(prev, text))

        # full versions are inserted when the needed deltas
        # become comparable to the uncompressed text
        if not n or (end + len(data) - start) > len(text) * 2:
            data = compress(text)
            base = n
        else:
            base = self.base(t)

        offset = 0
        if t >= 0:
            offset = self.end(t)

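        # index entry: (offset into data file, compressed length, chain base
        # rev, linkrev, parent ids, node id)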
        e = (offset, len(data), base, link, p1, p2, node)

        self.index.append(e)
        self.nodemap[node] = n
        entry = struct.pack(indexformat, *e)

        transaction.add(self.datafile, e[0])
        self.opener(self.datafile, "a").write(data)
        transaction.add(self.indexfile, n * len(entry))
        self.opener(self.indexfile, "a").write(entry)

        self.cache = (node, n, text)
        return node

    def ancestor(self, a, b):
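        # walk the ancestors of a and b breadth-first in lockstep; the first
        # node one walk reaches that the other has already seen is a common
        # ancestor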
        def expand(list, map):
            a = []
            while list:
                n = list.pop(0)
                map[n] = 1
                yield n
                for p in self.parents(n):
                    if p != nullid and p not in map:
                        list.append(p)
            yield nullid

        amap = {}
        bmap = {}
        ag = expand([a], amap)
        bg = expand([b], bmap)
        adone = bdone = 0

        while not adone or not bdone:
            if not adone:
                an = ag.next()
                if an == nullid:
                    adone = 1
                elif an in bmap:
                    return an
            if not bdone:
                bn = bg.next()
                if bn == nullid:
                    bdone = 1
                elif bn in amap:
                    return bn

        return nullid

    def mergedag(self, other, transaction, linkseq, accumulate = None):
        """combine the nodes from other's DAG into ours"""
        old = self.tip()
        i = self.count()
        l = []

        # merge the other revision log into our DAG
        for r in range(other.count()):
            id = other.node(r)
            if id not in self.nodemap:
                (xn, yn) = other.parents(id)
                l.append((id, xn, yn))
                self.nodemap[id] = i
                i += 1

        # merge node data for new nodes
        r = other.revisions([e[0] for e in l])
        for e in l:
            t = r.next()
            if accumulate: accumulate(t)
            self.addrevision(t, transaction, linkseq.next(), e[1], e[2])

        # return the unmerged heads for later resolving
        return (old, self.tip())

    def group(self, linkmap):
        # given a list of changeset revs, return a set of deltas and
        # metadata corresponding to nodes. the first delta is
        # parent(nodes[0]) -> nodes[0]; the receiver is guaranteed to
        # have this parent as it has all history before these
        # changesets. parent is parent[0]

        revs = []
        needed = {}

        # find file nodes/revs that match changeset revs
        for i in xrange(0, self.count()):
            if self.index[i][3] in linkmap:
                revs.append(i)
                needed[i] = 1

        # if we don't have any revisions touched by these changesets, bail
        if not revs: return struct.pack(">l", 0)

        # add the parent of the first rev
        p = self.parents(self.node(revs[0]))[0]
        revs.insert(0, self.rev(p))

        # for each delta that isn't contiguous in the log, we need to
        # reconstruct the base, reconstruct the result, and then
        # calculate the delta. We also need to do this where we've
        # stored a full version and not a delta
        for i in xrange(0, len(revs) - 1):
            a, b = revs[i], revs[i + 1]
            if a + 1 != b or self.base(b) == b:
                for j in xrange(self.base(a), a + 1):
                    needed[j] = 1
                for j in xrange(self.base(b), b + 1):
                    needed[j] = 1

        # calculate spans to retrieve from datafile
        needed = needed.keys()
        needed.sort()
        spans = []
        for n in needed:
            if n < 0: continue
            o = self.start(n)
            l = self.length(n)
            spans.append((o, l, [(n, l)]))

        # merge spans
        merge = [spans.pop(0)]
        while spans:
            e = spans.pop(0)
            f = merge[-1]
            if e[0] == f[0] + f[1]:
                merge[-1] = (f[0], f[1] + e[1], f[2] + e[2])
            else:
                merge.append(e)

        # read spans in, divide up chunks
        chunks = {}
        for span in merge:
            # we reopen the file for each span to make http happy for now
            f = self.opener(self.datafile)
            f.seek(span[0])
            data = f.read(span[1])

            # divide up the span
            pos = 0
            for r, l in span[2]:
                chunks[r] = data[pos: pos + l]
                pos += l

        # helper to reconstruct intermediate versions
        def construct(text, base, rev):
            for r in range(base + 1, rev + 1):
                b = decompress(chunks[r])
                text = self.patch(text, b)
            return text

        # build deltas
        deltas = []
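        # each emitted chunk: a 4-byte big-endian total length, then this
        # node, its two parents and the changeset node it links to (20 bytes
        # each), then the delta itself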
        for d in range(0, len(revs) - 1):
            a, b = revs[d], revs[d + 1]
            n = self.node(b)

            if a + 1 != b or self.base(b) == b:
                if a >= 0:
                    base = self.base(a)
                    ta = decompress(chunks[self.base(a)])
                    ta = construct(ta, base, a)
                else:
                    ta = ""

                base = self.base(b)
                if a > base:
                    base = a
                    tb = ta
                else:
                    tb = decompress(chunks[self.base(b)])
                    tb = construct(tb, base, b)
                d = self.diff(ta, tb)
            else:
                d = decompress(chunks[b])

            p = self.parents(n)
            meta = n + p[0] + p[1] + linkmap[self.linkrev(n)]
            l = struct.pack(">l", len(meta) + len(d) + 4)
            deltas.append(l + meta + d)

        l = struct.pack(">l", sum(map(len, deltas)) + 4)
        deltas.insert(0, l)
        return "".join(deltas)

    def addgroup(self, data, linkmapper, transaction):
        # given a set of deltas, add them to the revision log. the
        # first delta is against its parent, which should be in our
        # log, the rest are against the previous delta.

        if len(data) <= 4: return

        # retrieve the parent revision of the delta chain
        chain = data[28:48]
        text = self.revision(chain)

        # track the base of the current delta log
        r = self.count()
        t = r - 1

        base = prev = -1
        start = end = 0
        if r:
            start = self.start(self.base(t))
            end = self.end(t)
            measure = self.length(self.base(t))
            base = self.base(t)
            prev = self.tip()

        transaction.add(self.datafile, end)
        transaction.add(self.indexfile, r * struct.calcsize(indexformat))
        dfh = self.opener(self.datafile, "a")
        ifh = self.opener(self.indexfile, "a")

        # loop through our set of deltas
        pos = 4
        while pos < len(data):
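            # chunk header: 4-byte big-endian length, then this node, its two
            # parents and the changeset node it links to (20 bytes each)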
            l, node, p1, p2, cs = struct.unpack(">l20s20s20s20s",
                                                data[pos:pos+84])
            link = linkmapper(cs)
            delta = data[pos + 84:pos + l]
            pos += l

            # full versions are inserted when the needed deltas become
            # comparable to the uncompressed text or when the previous
            # version is not the one we have a delta against. We use
            # the size of the previous full rev as a proxy for the
            # current size.

            if chain == prev:
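                # the delta applies to a version we already have; compress it
                # here, though the size check below may still force a full
                # version instead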
                cdelta = compress(delta)

            if chain != prev or (end - start + len(cdelta)) > measure * 2:
                # flush our writes here so we can read it in revision
                dfh.flush()
                ifh.flush()
                text = self.revision(self.node(t))
                text = self.patch(text, delta)
                chk = self.addrevision(text, transaction, link, p1, p2)
                if chk != node:
                    raise "consistency error adding group"
                measure = len(text)
            else:
                e = (end, len(cdelta), self.base(t), link, p1, p2, node)
                self.index.append(e)
                self.nodemap[node] = r
                dfh.write(cdelta)
                ifh.write(struct.pack(indexformat, *e))

            t, r = r, r + 1
            chain = prev
            start = self.start(self.base(t))
            end = self.end(t)

        dfh.close()
        ifh.close()
        return node