#!/usr/bin/env python
from __future__ import absolute_import

import hashlib
import os
import random
import shutil
import stat
import struct
import sys
import tempfile
import unittest

import silenttestrunner

from mercurial.node import nullid
from mercurial import (
    ui as uimod,
)
# Load the local remotefilelog, not the system one
sys.path[0:0] = [os.path.join(os.path.dirname(__file__), '..')]
from hgext.remotefilelog import (
    basepack,
    historypack,
)

class histpacktests(unittest.TestCase):
    def setUp(self):
        self.tempdirs = []

    def tearDown(self):
        for d in self.tempdirs:
            shutil.rmtree(d)

    def makeTempDir(self):
        tempdir = tempfile.mkdtemp()
        self.tempdirs.append(tempdir)
        return tempdir

    def getHash(self, content):
        return hashlib.sha1(content).digest()

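    # Return 20 random bytes to stand in for a real sha1 node hash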
    def getFakeHash(self):
        return ''.join(chr(random.randint(0, 255)) for _ in range(20))

    def createPack(self, revisions=None):
        """Creates and returns a historypack containing the specified revisions.

        `revisions` is a list of tuples, where each tuple contains a filename,
        node, p1node, p2node, linknode, and copyfrom.
        """
        if revisions is None:
            revisions = [("filename", self.getFakeHash(), nullid, nullid,
                          self.getFakeHash(), None)]

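        # Write the revisions into a fresh mutable pack in a temp directory,
        # then reopen the finished pack file with the read-only class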
        packdir = self.makeTempDir()
        packer = historypack.mutablehistorypack(uimod.ui(), packdir,
                                                version=1)

        for filename, node, p1, p2, linknode, copyfrom in revisions:
            packer.add(filename, node, p1, p2, linknode, copyfrom)

        path = packer.close()
        return historypack.historypack(path)

    def testAddSingle(self):
        """Test putting a single entry into a pack and reading it out.
        """
        filename = "foo"
        node = self.getFakeHash()
        p1 = self.getFakeHash()
        p2 = self.getFakeHash()
        linknode = self.getFakeHash()

        revisions = [(filename, node, p1, p2, linknode, None)]
        pack = self.createPack(revisions)

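        # getancestors() returns {node: (p1, p2, linknode, copyfrom)}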
        actual = pack.getancestors(filename, node)[node]
        self.assertEquals(p1, actual[0])
        self.assertEquals(p2, actual[1])
        self.assertEquals(linknode, actual[2])

    def testAddMultiple(self):
        """Test putting multiple unrelated revisions into a pack and reading
        them out.
        """
        revisions = []
        for i in range(10):
            filename = "foo-%s" % i
            node = self.getFakeHash()
            p1 = self.getFakeHash()
            p2 = self.getFakeHash()
            linknode = self.getFakeHash()
            revisions.append((filename, node, p1, p2, linknode, None))

        pack = self.createPack(revisions)

        for filename, node, p1, p2, linknode, copyfrom in revisions:
            actual = pack.getancestors(filename, node)[node]
            self.assertEquals(p1, actual[0])
            self.assertEquals(p2, actual[1])
            self.assertEquals(linknode, actual[2])
            self.assertEquals(copyfrom, actual[3])

    def testAddAncestorChain(self):
        """Test putting multiple revisions into a pack and reading the
        ancestor chain.
        """
        revisions = []
        filename = "foo"
        lastnode = nullid
        for i in range(10):
            node = self.getFakeHash()
            revisions.append((filename, node, lastnode, nullid, nullid, None))
            lastnode = node

        # revisions must be added in topological order, newest first
        revisions = list(reversed(revisions))
        pack = self.createPack(revisions)

        # Test that the chain has all the entries
        ancestors = pack.getancestors(revisions[0][0], revisions[0][1])
        for filename, node, p1, p2, linknode, copyfrom in revisions:
            ap1, ap2, alinknode, acopyfrom = ancestors[node]
            self.assertEquals(ap1, p1)
            self.assertEquals(ap2, p2)
            self.assertEquals(alinknode, linknode)
            self.assertEquals(acopyfrom, copyfrom)

    def testPackMany(self):
        """Pack many related and unrelated ancestors.
        """
        # Build a random pack file
        allentries = {}
        ancestorcounts = {}
        revisions = []
        random.seed(0)
        for i in range(100):
            filename = "filename-%s" % i
            entries = []
            p2 = nullid
            linknode = nullid
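            # Pick a random earlier node in this file as p1, and track the
            # expected length of each node's ancestor chain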
            for j in range(random.randint(1, 100)):
                node = self.getFakeHash()
                p1 = nullid
                if len(entries) > 0:
                    p1 = entries[random.randint(0, len(entries) - 1)]
                entries.append(node)
                revisions.append((filename, node, p1, p2, linknode, None))
                allentries[(filename, node)] = (p1, p2, linknode)
                if p1 == nullid:
                    ancestorcounts[(filename, node)] = 1
                else:
                    newcount = ancestorcounts[(filename, p1)] + 1
                    ancestorcounts[(filename, node)] = newcount

        # Must add file entries in reverse topological order
        revisions = list(reversed(revisions))
        pack = self.createPack(revisions)

        # Verify the pack contents
        for (filename, node), (p1, p2, lastnode) in allentries.iteritems():
            ancestors = pack.getancestors(filename, node)
            self.assertEquals(ancestorcounts[(filename, node)],
                              len(ancestors))
            for anode, (ap1, ap2, alinknode, copyfrom) in ancestors.iteritems():
                ep1, ep2, elinknode = allentries[(filename, anode)]
                self.assertEquals(ap1, ep1)
                self.assertEquals(ap2, ep2)
                self.assertEquals(alinknode, elinknode)
                self.assertEquals(copyfrom, None)

    def testGetNodeInfo(self):
        revisions = []
        filename = "foo"
        lastnode = nullid
        for i in range(10):
            node = self.getFakeHash()
            revisions.append((filename, node, lastnode, nullid, nullid, None))
            lastnode = node

        pack = self.createPack(revisions)

        # Test that getnodeinfo returns the expected results
        for filename, node, p1, p2, linknode, copyfrom in revisions:
            ap1, ap2, alinknode, acopyfrom = pack.getnodeinfo(filename, node)
            self.assertEquals(ap1, p1)
            self.assertEquals(ap2, p2)
            self.assertEquals(alinknode, linknode)
            self.assertEquals(acopyfrom, copyfrom)

    def testGetMissing(self):
        """Test the getmissing() api.
        """
        revisions = []
        filename = "foo"
        for i in range(10):
            node = self.getFakeHash()
            p1 = self.getFakeHash()
            p2 = self.getFakeHash()
            linknode = self.getFakeHash()
            revisions.append((filename, node, p1, p2, linknode, None))

        pack = self.createPack(revisions)

        missing = pack.getmissing([(filename, revisions[0][1])])
        self.assertFalse(missing)

        missing = pack.getmissing([(filename, revisions[0][1]),
                                   (filename, revisions[1][1])])
        self.assertFalse(missing)

        fakenode = self.getFakeHash()
        missing = pack.getmissing([(filename, revisions[0][1]),
                                   (filename, fakenode)])
        self.assertEquals(missing, [(filename, fakenode)])

        # Test getmissing on a non-existent filename
        missing = pack.getmissing([("bar", fakenode)])
        self.assertEquals(missing, [("bar", fakenode)])

    def testAddThrows(self):
        pack = self.createPack()

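        # The pack returned by createPack is the read-only on-disk class,
        # so add() should raise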
        try:
            pack.add('filename', nullid, nullid, nullid, nullid, None)
            self.assertTrue(False, "historypack.add should throw")
        except RuntimeError:
            pass

    def testBadVersionThrows(self):
        pack = self.createPack()
        path = pack.path + '.histpack'
        with open(path) as f:
            raw = f.read()
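        # Overwrite the first byte (the version field) with an invalid value
        # and write the corrupted pack back out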
        raw = struct.pack('!B', 255) + raw[1:]
        os.chmod(path, os.stat(path).st_mode | stat.S_IWRITE)
        with open(path, 'w+') as f:
            f.write(raw)

        try:
            pack = historypack.historypack(pack.path)
            self.assertTrue(False, "bad version number should have thrown")
        except RuntimeError:
            pass

    def testLargePack(self):
        """Test creating and reading from a pack with more than
        SMALLFANOUTCUTOFF entries. This causes it to use a 2^16 fanout table
        instead."""
        total = basepack.SMALLFANOUTCUTOFF + 1
        revisions = []
        for i in xrange(total):
            filename = "foo-%s" % i
            node = self.getFakeHash()
            p1 = self.getFakeHash()
            p2 = self.getFakeHash()
            linknode = self.getFakeHash()
            revisions.append((filename, node, p1, p2, linknode, None))

        pack = self.createPack(revisions)
        self.assertEquals(pack.params.fanoutprefix, basepack.LARGEFANOUTPREFIX)

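        # All entries should still be readable through the large fanout table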
        for filename, node, p1, p2, linknode, copyfrom in revisions:
            actual = pack.getancestors(filename, node)[node]
            self.assertEquals(p1, actual[0])
            self.assertEquals(p2, actual[1])
            self.assertEquals(linknode, actual[2])
            self.assertEquals(copyfrom, actual[3])

# TODO:
# histpack store:
# - repack two packs into one

if __name__ == '__main__':
    silenttestrunner.main(__name__)