comparison hgext/remotefilelog/debugcommands.py @ 43076:2372284d9457

formatting: blacken the codebase

This is using my patch to black (https://github.com/psf/black/pull/826) so we
don't un-wrap collection literals.

Done with:

  hg files 'set:**.py - mercurial/thirdparty/** - "contrib/python-zstandard/**"' | xargs black -S

# skip-blame mass-reformatting only
# no-check-commit reformats foo_bar functions

Differential Revision: https://phab.mercurial-scm.org/D6971
author Augie Fackler <augie@google.com>
date Sun, 06 Oct 2019 09:45:02 -0400
parents 5fadf6103790
children 687b865b95ad
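Because the change is purely mechanical, the behavior worth understanding is black's rather than the module's. As a minimal illustration taken directly from the first hunk below: black explodes an over-long call one operand per line, moves the binary operator to the head of the continuation line, and leaves a trailing comma so the layout survives future runs; -S (skip string normalization) keeps the existing single-quote style. The patched black from the pull request above additionally avoids re-collapsing collection literals that are already exploded.

    # before: manual continuation aligned under the opening paren
    ui.status(_("%s => %s %s %s %s\n") %
              (short(node), short(p1), short(p2), short(linknode), copyfrom))

    # after black -S: indentation-based wrapping, operator-first
    # continuation lines, quote style left untouched
    ui.status(
        _("%s => %s %s %s %s\n")
        % (short(node), short(p1), short(p2), short(linknode), copyfrom)
    )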
diff -r 57875cf423c9 -r 2372284d9457 hgext/remotefilelog/debugcommands.py
--- a/hgext/remotefilelog/debugcommands.py
+++ b/hgext/remotefilelog/debugcommands.py
@@ -27,32 +27,38 @@
     historypack,
     repack,
     shallowutil,
 )
 
+
 def debugremotefilelog(ui, path, **opts):
     decompress = opts.get(r'decompress')
 
     size, firstnode, mapping = parsefileblob(path, decompress)
 
-    ui.status(_("size: %d bytes\n") % (size))
-    ui.status(_("path: %s \n") % (path))
+    ui.status(_("size: %d bytes\n") % size)
+    ui.status(_("path: %s \n") % path)
     ui.status(_("key: %s \n") % (short(firstnode)))
     ui.status(_("\n"))
-    ui.status(_("%12s => %12s %13s %13s %12s\n") %
-              ("node", "p1", "p2", "linknode", "copyfrom"))
+    ui.status(
+        _("%12s => %12s %13s %13s %12s\n")
+        % ("node", "p1", "p2", "linknode", "copyfrom")
+    )
 
     queue = [firstnode]
     while queue:
         node = queue.pop(0)
         p1, p2, linknode, copyfrom = mapping[node]
-        ui.status(_("%s => %s %s %s %s\n") %
-                  (short(node), short(p1), short(p2), short(linknode), copyfrom))
+        ui.status(
+            _("%s => %s %s %s %s\n")
+            % (short(node), short(p1), short(p2), short(linknode), copyfrom)
+        )
         if p1 != nullid:
             queue.append(p1)
         if p2 != nullid:
             queue.append(p2)
+
 
 def buildtemprevlog(repo, file):
     # get filename key
     filekey = nodemod.hex(hashlib.sha1(file).digest())
     filedir = os.path.join(repo.path, 'store/data', filekey)
@@ -72,10 +78,11 @@
     r = filelog.filelog(repo.svfs, 'temprevlog')
 
     class faket(object):
         def add(self, a, b, c):
             pass
+
     t = faket()
     for fctx in fctxs:
         if fctx.node() not in repo:
             continue
 
@@ -87,17 +94,20 @@
 
         r.add(fctx.data(), meta, t, fctx.linkrev(), p[0], p[1])
 
     return r
 
+
 def debugindex(orig, ui, repo, file_=None, **opts):
     """dump the contents of an index file"""
-    if (opts.get(r'changelog') or
-        opts.get(r'manifest') or
-        opts.get(r'dir') or
-        not shallowutil.isenabled(repo) or
-        not repo.shallowmatch(file_)):
+    if (
+        opts.get(r'changelog')
+        or opts.get(r'manifest')
+        or opts.get(r'dir')
+        or not shallowutil.isenabled(repo)
+        or not repo.shallowmatch(file_)
+    ):
         return orig(ui, repo, file_, **opts)
 
     r = buildtemprevlog(repo, file_)
 
     # debugindex like normal
@@ -110,16 +120,24 @@
         basehdr = ' delta'
     else:
         basehdr = ' base'
 
     if format == 0:
-        ui.write((" rev offset length " + basehdr + " linkrev"
-                  " nodeid p1 p2\n"))
+        ui.write(
+            (
+                " rev offset length " + basehdr + " linkrev"
+                " nodeid p1 p2\n"
+            )
+        )
     elif format == 1:
-        ui.write((" rev flag offset length"
-                  " size " + basehdr + " link p1 p2"
-                  " nodeid\n"))
+        ui.write(
+            (
+                " rev flag offset length"
+                " size " + basehdr + " link p1 p2"
+                " nodeid\n"
+            )
+        )
 
     for i in r:
         node = r.node(i)
         if generaldelta:
             base = r.deltaparent(i)
@@ -128,34 +146,58 @@
         if format == 0:
             try:
                 pp = r.parents(node)
             except Exception:
                 pp = [nullid, nullid]
-            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
-                i, r.start(i), r.length(i), base, r.linkrev(i),
-                short(node), short(pp[0]), short(pp[1])))
+            ui.write(
+                "% 6d % 9d % 7d % 6d % 7d %s %s %s\n"
+                % (
+                    i,
+                    r.start(i),
+                    r.length(i),
+                    base,
+                    r.linkrev(i),
+                    short(node),
+                    short(pp[0]),
+                    short(pp[1]),
+                )
+            )
         elif format == 1:
             pr = r.parentrevs(i)
-            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
-                i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
-                base, r.linkrev(i), pr[0], pr[1], short(node)))
+            ui.write(
+                "% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n"
+                % (
+                    i,
+                    r.flags(i),
+                    r.start(i),
+                    r.length(i),
+                    r.rawsize(i),
+                    base,
+                    r.linkrev(i),
+                    pr[0],
+                    pr[1],
+                    short(node),
+                )
+            )
+
 
 def debugindexdot(orig, ui, repo, file_):
     """dump an index DAG as a graphviz dot file"""
     if not shallowutil.isenabled(repo):
         return orig(ui, repo, file_)
 
     r = buildtemprevlog(repo, os.path.basename(file_)[:-2])
 
-    ui.write(("digraph G {\n"))
+    ui.write("digraph G {\n")
     for i in r:
         node = r.node(i)
         pp = r.parents(node)
         ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
         if pp[1] != nullid:
             ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
     ui.write("}\n")
+
 
 def verifyremotefilelog(ui, path, **opts):
     decompress = opts.get(r'decompress')
 
     for root, dirs, files in os.walk(path):
@@ -165,17 +207,21 @@
             filepath = os.path.join(root, file)
             size, firstnode, mapping = parsefileblob(filepath, decompress)
             for p1, p2, linknode, copyfrom in mapping.itervalues():
                 if linknode == nullid:
                     actualpath = os.path.relpath(root, path)
-                    key = fileserverclient.getcachekey("reponame", actualpath,
-                                                       file)
-                    ui.status("%s %s\n" % (key, os.path.relpath(filepath,
-                                                                path)))
+                    key = fileserverclient.getcachekey(
+                        "reponame", actualpath, file
+                    )
+                    ui.status(
+                        "%s %s\n" % (key, os.path.relpath(filepath, path))
+                    )
+
 
 def _decompressblob(raw):
     return zlib.decompress(raw)
+
 
 def parsefileblob(path, decompress):
     f = open(path, "rb")
     try:
         raw = f.read()
@@ -192,28 +238,29 @@
 
     mapping = {}
     while start < len(raw):
         divider = raw.index('\0', start + 80)
 
-        currentnode = raw[start:(start + 20)]
+        currentnode = raw[start : (start + 20)]
         if not firstnode:
             firstnode = currentnode
 
-        p1 = raw[(start + 20):(start + 40)]
-        p2 = raw[(start + 40):(start + 60)]
-        linknode = raw[(start + 60):(start + 80)]
-        copyfrom = raw[(start + 80):divider]
+        p1 = raw[(start + 20) : (start + 40)]
+        p2 = raw[(start + 40) : (start + 60)]
+        linknode = raw[(start + 60) : (start + 80)]
+        copyfrom = raw[(start + 80) : divider]
 
         mapping[currentnode] = (p1, p2, linknode, copyfrom)
         start = divider + 1
 
     return size, firstnode, mapping
+
 
 def debugdatapack(ui, *paths, **opts):
     for path in paths:
         if '.data' in path:
-            path = path[:path.index('.data')]
+            path = path[: path.index('.data')]
         ui.write("%s:\n" % path)
         dpack = datapack.datapack(path)
         node = opts.get(r'node')
         if node:
             deltachain = dpack.getdeltachain('', bin(node))
@@ -228,26 +275,31 @@
             hashlen = 14
 
         lastfilename = None
         totaldeltasize = 0
         totalblobsize = 0
+
        def printtotals():
             if lastfilename is not None:
                 ui.write("\n")
             if not totaldeltasize or not totalblobsize:
                 return
             difference = totalblobsize - totaldeltasize
             deltastr = "%0.1f%% %s" % (
                 (100.0 * abs(difference) / totalblobsize),
-                ("smaller" if difference > 0 else "bigger"))
-
-            ui.write(("Total:%s%s %s (%s)\n") % (
-                "".ljust(2 * hashlen - len("Total:")),
-                ('%d' % totaldeltasize).ljust(12),
-                ('%d' % totalblobsize).ljust(9),
-                deltastr
-            ))
+                ("smaller" if difference > 0 else "bigger"),
+            )
+
+            ui.write(
+                "Total:%s%s %s (%s)\n"
+                % (
+                    "".ljust(2 * hashlen - len("Total:")),
+                    ('%d' % totaldeltasize).ljust(12),
+                    ('%d' % totalblobsize).ljust(9),
+                    deltastr,
+                )
+            )
 
         bases = {}
         nodes = set()
         failures = 0
         for filename, node, deltabase, deltalen in dpack.iterentries():
@@ -258,15 +310,19 @@
             nodes.add(node)
             if filename != lastfilename:
                 printtotals()
                 name = '(empty name)' if filename == '' else filename
                 ui.write("%s:\n" % name)
-                ui.write("%s%s%s%s\n" % (
-                    "Node".ljust(hashlen),
-                    "Delta Base".ljust(hashlen),
-                    "Delta Length".ljust(14),
-                    "Blob Size".ljust(9)))
+                ui.write(
+                    "%s%s%s%s\n"
+                    % (
+                        "Node".ljust(hashlen),
+                        "Delta Base".ljust(hashlen),
+                        "Delta Length".ljust(14),
+                        "Blob Size".ljust(9),
+                    )
+                )
                 lastfilename = filename
                 totalblobsize = 0
                 totaldeltasize = 0
 
             # Metadata could be missing, in which case it will be an empty dict.
@@ -275,23 +331,28 @@
                 blobsize = meta[constants.METAKEYSIZE]
                 totaldeltasize += deltalen
                 totalblobsize += blobsize
             else:
                 blobsize = "(missing)"
-            ui.write("%s %s %s%s\n" % (
-                hashformatter(node),
-                hashformatter(deltabase),
-                ('%d' % deltalen).ljust(14),
-                pycompat.bytestr(blobsize)))
+            ui.write(
+                "%s %s %s%s\n"
+                % (
+                    hashformatter(node),
+                    hashformatter(deltabase),
+                    ('%d' % deltalen).ljust(14),
+                    pycompat.bytestr(blobsize),
+                )
+            )
 
         if filename is not None:
             printtotals()
 
         failures += _sanitycheck(ui, set(nodes), bases)
         if failures > 1:
             ui.warn(("%d failures\n" % failures))
             return 1
+
 
 def _sanitycheck(ui, nodes, bases):
     """
     Does some basic sanity checking on a packfiles with ``nodes`` ``bases`` (a
     mapping of node->base):
@@ -305,18 +366,26 @@
         current = node
         deltabase = bases[current]
 
         while deltabase != nullid:
             if deltabase not in nodes:
-                ui.warn(("Bad entry: %s has an unknown deltabase (%s)\n" %
-                         (short(node), short(deltabase))))
+                ui.warn(
+                    (
+                        "Bad entry: %s has an unknown deltabase (%s)\n"
+                        % (short(node), short(deltabase))
+                    )
+                )
                 failures += 1
                 break
 
             if deltabase in seen:
-                ui.warn(("Bad entry: %s has a cycle (at %s)\n" %
-                         (short(node), short(deltabase))))
+                ui.warn(
+                    (
+                        "Bad entry: %s has a cycle (at %s)\n"
+                        % (short(node), short(deltabase))
+                    )
+                )
                 failures += 1
                 break
 
             current = deltabase
             seen.add(current)
@@ -324,55 +393,84 @@
         # Since ``node`` begins a valid chain, reset/memoize its base to nullid
         # so we don't traverse it again.
         bases[node] = nullid
     return failures
 
+
 def dumpdeltachain(ui, deltachain, **opts):
     hashformatter = hex
     hashlen = 40
 
     lastfilename = None
     for filename, node, filename, deltabasenode, delta in deltachain:
         if filename != lastfilename:
             ui.write("\n%s\n" % filename)
             lastfilename = filename
-            ui.write("%s %s %s %s\n" % (
-                "Node".ljust(hashlen),
-                "Delta Base".ljust(hashlen),
-                "Delta SHA1".ljust(hashlen),
-                "Delta Length".ljust(6),
-            ))
-
-        ui.write("%s %s %s %d\n" % (
-            hashformatter(node),
-            hashformatter(deltabasenode),
-            nodemod.hex(hashlib.sha1(delta).digest()),
-            len(delta)))
+            ui.write(
+                "%s %s %s %s\n"
+                % (
+                    "Node".ljust(hashlen),
+                    "Delta Base".ljust(hashlen),
+                    "Delta SHA1".ljust(hashlen),
+                    "Delta Length".ljust(6),
+                )
+            )
+
+        ui.write(
+            "%s %s %s %d\n"
+            % (
+                hashformatter(node),
+                hashformatter(deltabasenode),
+                nodemod.hex(hashlib.sha1(delta).digest()),
+                len(delta),
+            )
+        )
+
 
 def debughistorypack(ui, path):
     if '.hist' in path:
-        path = path[:path.index('.hist')]
+        path = path[: path.index('.hist')]
     hpack = historypack.historypack(path)
 
     lastfilename = None
     for entry in hpack.iterentries():
         filename, node, p1node, p2node, linknode, copyfrom = entry
         if filename != lastfilename:
             ui.write("\n%s\n" % filename)
-            ui.write("%s%s%s%s%s\n" % (
-                "Node".ljust(14),
-                "P1 Node".ljust(14),
-                "P2 Node".ljust(14),
-                "Link Node".ljust(14),
-                "Copy From"))
+            ui.write(
+                "%s%s%s%s%s\n"
+                % (
+                    "Node".ljust(14),
+                    "P1 Node".ljust(14),
+                    "P2 Node".ljust(14),
+                    "Link Node".ljust(14),
+                    "Copy From",
+                )
+            )
             lastfilename = filename
-        ui.write("%s %s %s %s %s\n" % (short(node), short(p1node),
-                                       short(p2node), short(linknode), copyfrom))
+        ui.write(
+            "%s %s %s %s %s\n"
+            % (
+                short(node),
+                short(p1node),
+                short(p2node),
+                short(linknode),
+                copyfrom,
+            )
+        )
+
 
 def debugwaitonrepack(repo):
     with lockmod.lock(repack.repacklockvfs(repo), "repacklock", timeout=-1):
         return
 
+
 def debugwaitonprefetch(repo):
-    with repo._lock(repo.svfs, "prefetchlock", True, None,
-                    None, _('prefetching in %s') % repo.origroot):
+    with repo._lock(
+        repo.svfs,
+        "prefetchlock",
+        True,
+        None,
+        None,
+        _('prefetching in %s') % repo.origroot,
+    ):
         pass
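A reformat-only change like this one can be re-verified mechanically: run the same file set through black in check mode and confirm it is a no-op. A hedged sketch, assuming the patched black build referenced in the commit message is the one on PATH:

  $ hg files 'set:**.py - mercurial/thirdparty/** - "contrib/python-zstandard/**"' \
        | xargs black -S --check
  # --check rewrites nothing; it exits 0 only if every file already
  # matches black's output, i.e. the reformat was a fixed point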