comparison hgext/remotefilelog/shallowutil.py @ 43076:2372284d9457

formatting: blacken the codebase

This is using my patch to black (https://github.com/psf/black/pull/826) so
we don't un-wrap collection literals.

Done with:

  hg files 'set:**.py - mercurial/thirdparty/** - "contrib/python-zstandard/**"' | xargs black -S

# skip-blame mass-reformatting only
# no-check-commit reformats foo_bar functions

Differential Revision: https://phab.mercurial-scm.org/D6971
author Augie Fackler <augie@google.com>
date Sun, 06 Oct 2019 09:45:02 -0400
parents 1a4a41d39dfc
children 687b865b95ad
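The change below is purely mechanical. With -S, black leaves string quoting alone (and, with the pull/826 patch, keeps adjacent string literals and collection literals as written); what it does do here is re-wrap long calls with one argument per indented block, insert the PEP 8 two blank lines between top-level definitions, space out slices whose bounds are expressions (x[a + 1 :]), and upper-case hex digits. A minimal before/after sketch of those rewrites; the function and names are made up, not from this file:

    # before
    def f(buf, start):
        return some_call('too long to fit', 'on one line',
                         buf[start:start + 2], 0xfffe)

    # after black -S
    def f(buf, start):
        return some_call(
            'too long to fit', 'on one line', buf[start : start + 2], 0xFFFE
        )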
--- a/hgext/remotefilelog/shallowutil.py	43075:57875cf423c9
+++ b/hgext/remotefilelog/shallowutil.py	43076:2372284d9457
@@ -29,41 +29,49 @@
 from . import constants
 
 if not pycompat.iswindows:
     import grp
 
+
 def isenabled(repo):
     """returns whether the repository is remotefilelog enabled or not"""
     return constants.SHALLOWREPO_REQUIREMENT in repo.requirements
 
+
 def getcachekey(reponame, file, id):
     pathhash = node.hex(hashlib.sha1(file).digest())
     return os.path.join(reponame, pathhash[:2], pathhash[2:], id)
 
+
 def getlocalkey(file, id):
     pathhash = node.hex(hashlib.sha1(file).digest())
     return os.path.join(pathhash, id)
+
 
 def getcachepath(ui, allowempty=False):
     cachepath = ui.config("remotefilelog", "cachepath")
     if not cachepath:
         if allowempty:
             return None
         else:
-            raise error.Abort(_("could not find config option "
-                                "remotefilelog.cachepath"))
+            raise error.Abort(
+                _("could not find config option " "remotefilelog.cachepath")
+            )
     return util.expandpath(cachepath)
+
 
 def getcachepackpath(repo, category):
     cachepath = getcachepath(repo.ui)
     if category != constants.FILEPACK_CATEGORY:
         return os.path.join(cachepath, repo.name, 'packs', category)
     else:
         return os.path.join(cachepath, repo.name, 'packs')
 
+
 def getlocalpackpath(base, category):
     return os.path.join(base, 'packs', category)
+
 
 def createrevlogtext(text, copyfrom=None, copyrev=None):
     """returns a string that matches the revlog contents in a
     traditional revlog
     """
@@ -74,17 +82,19 @@
             meta['copyrev'] = copyrev
         text = storageutil.packmeta(meta, text)
 
     return text
 
+
 def parsemeta(text):
     """parse mercurial filelog metadata"""
     meta, size = storageutil.parsemeta(text)
     if text.startswith('\1\n'):
         s = text.index('\1\n', 2)
-        text = text[s + 2:]
+        text = text[s + 2 :]
     return meta or {}, text
+
 
 def sumdicts(*dicts):
     """Adds all the values of *dicts together into one dictionary. This assumes
     the values in *dicts are all summable.
 
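sumdicts (continued in the next hunk) is a plain merge-with-addition over any number of dicts. A minimal standalone sketch of the documented behavior, using dict.items() where the Python 2 code in this file uses iteritems():

    import collections

    def sumdicts_sketch(*dicts):
        result = collections.defaultdict(lambda: 0)  # missing keys start at 0
        for d in dicts:
            for k, v in d.items():
                result[k] += v
        return result

    assert sumdicts_sketch({'a': 4, 'b': 2}, {'b': 3, 'c': 1}) == {'a': 4, 'b': 5, 'c': 1}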
@@ -93,23 +103,26 @@
     result = collections.defaultdict(lambda: 0)
     for dict in dicts:
         for k, v in dict.iteritems():
             result[k] += v
     return result
+
 
 def prefixkeys(dict, prefix):
     """Returns ``dict`` with ``prefix`` prepended to all its keys."""
     result = {}
     for k, v in dict.iteritems():
         result[prefix + k] = v
     return result
 
+
 def reportpackmetrics(ui, prefix, *stores):
     dicts = [s.getmetrics() for s in stores]
     dict = prefixkeys(sumdicts(*dicts), prefix + '_')
     ui.log(prefix + "_packsizes", "\n", **pycompat.strkwargs(dict))
 
+
 def _parsepackmeta(metabuf):
     """parse datapack meta, bytes (<metadata-list>) -> dict
 
     The dict contains raw content - both keys and values are strings.
     Upper-level business may want to convert some of them to other types like
119 """ 132 """
120 metadict = {} 133 metadict = {}
121 offset = 0 134 offset = 0
122 buflen = len(metabuf) 135 buflen = len(metabuf)
123 while buflen - offset >= 3: 136 while buflen - offset >= 3:
124 key = metabuf[offset:offset + 1] 137 key = metabuf[offset : offset + 1]
125 offset += 1 138 offset += 1
126 metalen = struct.unpack_from('!H', metabuf, offset)[0] 139 metalen = struct.unpack_from('!H', metabuf, offset)[0]
127 offset += 2 140 offset += 2
128 if offset + metalen > buflen: 141 if offset + metalen > buflen:
129 raise ValueError('corrupted metadata: incomplete buffer') 142 raise ValueError('corrupted metadata: incomplete buffer')
130 value = metabuf[offset:offset + metalen] 143 value = metabuf[offset : offset + metalen]
131 metadict[key] = value 144 metadict[key] = value
132 offset += metalen 145 offset += metalen
133 if offset != buflen: 146 if offset != buflen:
134 raise ValueError('corrupted metadata: redundant data') 147 raise ValueError('corrupted metadata: redundant data')
135 return metadict 148 return metadict
136 149
150
137 def _buildpackmeta(metadict): 151 def _buildpackmeta(metadict):
138 """reverse of _parsepackmeta, dict -> bytes (<metadata-list>) 152 """reverse of _parsepackmeta, dict -> bytes (<metadata-list>)
139 153
140 The dict contains raw content - both keys and values are strings. 154 The dict contains raw content - both keys and values are strings.
141 Upper-level business may want to serialize some of other types (like 155 Upper-level business may want to serialize some of other types (like
146 """ 160 """
147 metabuf = '' 161 metabuf = ''
148 for k, v in sorted((metadict or {}).iteritems()): 162 for k, v in sorted((metadict or {}).iteritems()):
149 if len(k) != 1: 163 if len(k) != 1:
150 raise error.ProgrammingError('packmeta: illegal key: %s' % k) 164 raise error.ProgrammingError('packmeta: illegal key: %s' % k)
151 if len(v) > 0xfffe: 165 if len(v) > 0xFFFE:
152 raise ValueError('metadata value is too long: 0x%x > 0xfffe' 166 raise ValueError(
153 % len(v)) 167 'metadata value is too long: 0x%x > 0xfffe' % len(v)
168 )
154 metabuf += k 169 metabuf += k
155 metabuf += struct.pack('!H', len(v)) 170 metabuf += struct.pack('!H', len(v))
156 metabuf += v 171 metabuf += v
157 # len(metabuf) is guaranteed representable in 4 bytes, because there are 172 # len(metabuf) is guaranteed representable in 4 bytes, because there are
158 # only 256 keys, and for each value, len(value) <= 0xfffe. 173 # only 256 keys, and for each value, len(value) <= 0xfffe.
159 return metabuf 174 return metabuf
160 175
176
161 _metaitemtypes = { 177 _metaitemtypes = {
162 constants.METAKEYFLAG: (int, pycompat.long), 178 constants.METAKEYFLAG: (int, pycompat.long),
163 constants.METAKEYSIZE: (int, pycompat.long), 179 constants.METAKEYSIZE: (int, pycompat.long),
164 } 180 }
181
165 182
166 def buildpackmeta(metadict): 183 def buildpackmeta(metadict):
167 """like _buildpackmeta, but typechecks metadict and normalize it. 184 """like _buildpackmeta, but typechecks metadict and normalize it.
168 185
169 This means, METAKEYSIZE and METAKEYSIZE should have integers as values, 186 This means, METAKEYSIZE and METAKEYSIZE should have integers as values,
@@ -181,10 +198,11 @@
                 continue
             v = int2bin(v)
         newmeta[k] = v
     return _buildpackmeta(newmeta)
 
+
 def parsepackmeta(metabuf):
     """like _parsepackmeta, but converts fields to desired types automatically.
 
     This means, METAKEYFLAG and METAKEYSIZE fields will be converted to
     integers.
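For reference, the <metadata-list> encoding that _buildpackmeta and _parsepackmeta walk is a flat sequence of (key: 1 byte, length: big-endian uint16, value: <length> bytes) records. A minimal round-trip sketch of that framing, independent of the helpers above (Python 3 bytes, invented keys):

    import struct

    def pack(meta):
        buf = b''
        for k, v in sorted(meta.items()):
            buf += k + struct.pack('!H', len(v)) + v  # key, length, value
        return buf

    def unpack(buf):
        meta, offset = {}, 0
        while offset < len(buf):
            key = buf[offset:offset + 1]
            (vlen,) = struct.unpack_from('!H', buf, offset + 1)
            meta[key] = buf[offset + 3:offset + 3 + vlen]
            offset += 3 + vlen
        return meta

    assert unpack(pack({b's': b'123', b'f': b'\x01'})) == {b's': b'123', b'f': b'\x01'}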
@@ -193,26 +211,29 @@
     for k, v in metadict.iteritems():
         if k in _metaitemtypes and int in _metaitemtypes[k]:
             metadict[k] = bin2int(v)
     return metadict
 
+
 def int2bin(n):
     """convert a non-negative integer to raw binary buffer"""
     buf = bytearray()
     while n > 0:
-        buf.insert(0, n & 0xff)
+        buf.insert(0, n & 0xFF)
         n >>= 8
     return bytes(buf)
+
 
 def bin2int(buf):
     """the reverse of int2bin, convert a binary buffer to an integer"""
     x = 0
     for b in bytearray(buf):
         x <<= 8
         x |= b
     return x
 
+
 def parsesizeflags(raw):
     """given a remotefilelog blob, return (headersize, rawtextsize, flags)
 
     see remotefilelogserver.createfileblob for the format.
     raise RuntimeError if the content is ill-formed.
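int2bin and bin2int above are a minimal big-endian integer codec with no padding, so 0 encodes to the empty buffer. A quick sketch of the round trip, noting that Python 3's int.to_bytes/from_bytes do the same job:

    def int2bin(n):
        buf = bytearray()
        while n > 0:
            buf.insert(0, n & 0xFF)  # prepend the low byte
            n >>= 8
        return bytes(buf)

    assert int2bin(0xFFFE) == b'\xff\xfe'
    assert int2bin(0) == b''
    assert int.from_bytes(int2bin(1000), 'big') == 1000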
@@ -225,25 +246,27 @@
         if header.startswith('v'):
             # v1 and above, header starts with 'v'
             if header.startswith('v1\n'):
                 for s in header.split('\n'):
                     if s.startswith(constants.METAKEYSIZE):
-                        size = int(s[len(constants.METAKEYSIZE):])
+                        size = int(s[len(constants.METAKEYSIZE) :])
                     elif s.startswith(constants.METAKEYFLAG):
-                        flags = int(s[len(constants.METAKEYFLAG):])
+                        flags = int(s[len(constants.METAKEYFLAG) :])
             else:
-                raise RuntimeError('unsupported remotefilelog header: %s'
-                                   % header)
+                raise RuntimeError(
+                    'unsupported remotefilelog header: %s' % header
+                )
         else:
             # v0, str(int(size)) is the header
             size = int(header)
     except ValueError:
         raise RuntimeError(r"unexpected remotefilelog header: illegal format")
     if size is None:
         raise RuntimeError(r"unexpected remotefilelog header: no size found")
     return index + 1, size, flags
 
+
 def buildfileblobheader(size, flags, version=None):
     """return the header of a remotefilelog blob.
 
     see remotefilelogserver.createfileblob for the format.
     approximately the reverse of parsesizeflags.
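Between them, buildfileblobheader and parsesizeflags define the blob header: v0 is just str(size), v1 is 'v1' followed by one '<key><int>' line per metadata item, and the header is terminated by a NUL in the full blob. A hedged round-trip sketch, assuming the one-byte keys 's' (size) and 'f' (flags) and a zero default flag; the real values live in the constants module:

    def build_header(size, flags):
        if not flags:
            return b'%d' % size                    # v0
        return b'v1\ns%d\nf%d' % (size, flags)     # v1

    def parse_blob(raw):
        index = raw.index(b'\0')
        header, size, flags = raw[:index], None, 0
        if header.startswith(b'v1\n'):
            for line in header.split(b'\n')[1:]:
                if line[:1] == b's':
                    size = int(line[1:])
                elif line[:1] == b'f':
                    flags = int(line[1:])
        else:
            size = int(header)                     # v0
        return index + 1, size, flags

    assert parse_blob(b'v1\ns5\nf0\0hello...') == (9, 5, 0)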
252 """ 275 """
253 # choose v0 if flags is empty, otherwise v1 276 # choose v0 if flags is empty, otherwise v1
254 if version is None: 277 if version is None:
255 version = int(bool(flags)) 278 version = int(bool(flags))
256 if version == 1: 279 if version == 1:
257 header = ('v1\n%s%d\n%s%d' 280 header = 'v1\n%s%d\n%s%d' % (
258 % (constants.METAKEYSIZE, size, 281 constants.METAKEYSIZE,
259 constants.METAKEYFLAG, flags)) 282 size,
283 constants.METAKEYFLAG,
284 flags,
285 )
260 elif version == 0: 286 elif version == 0:
261 if flags: 287 if flags:
262 raise error.ProgrammingError('fileblob v0 does not support flag') 288 raise error.ProgrammingError('fileblob v0 does not support flag')
263 header = '%d' % size 289 header = '%d' % size
264 else: 290 else:
265 raise error.ProgrammingError('unknown fileblob version %d' % version) 291 raise error.ProgrammingError('unknown fileblob version %d' % version)
266 return header 292 return header
267 293
294
268 def ancestormap(raw): 295 def ancestormap(raw):
269 offset, size, flags = parsesizeflags(raw) 296 offset, size, flags = parsesizeflags(raw)
270 start = offset + size 297 start = offset + size
271 298
272 mapping = {} 299 mapping = {}
273 while start < len(raw): 300 while start < len(raw):
274 divider = raw.index('\0', start + 80) 301 divider = raw.index('\0', start + 80)
275 302
276 currentnode = raw[start:(start + 20)] 303 currentnode = raw[start : (start + 20)]
277 p1 = raw[(start + 20):(start + 40)] 304 p1 = raw[(start + 20) : (start + 40)]
278 p2 = raw[(start + 40):(start + 60)] 305 p2 = raw[(start + 40) : (start + 60)]
279 linknode = raw[(start + 60):(start + 80)] 306 linknode = raw[(start + 60) : (start + 80)]
280 copyfrom = raw[(start + 80):divider] 307 copyfrom = raw[(start + 80) : divider]
281 308
282 mapping[currentnode] = (p1, p2, linknode, copyfrom) 309 mapping[currentnode] = (p1, p2, linknode, copyfrom)
283 start = divider + 1 310 start = divider + 1
284 311
285 return mapping 312 return mapping
313
286 314
287 def readfile(path): 315 def readfile(path):
288 f = open(path, 'rb') 316 f = open(path, 'rb')
289 try: 317 try:
290 result = f.read() 318 result = f.read()
@@ -295,16 +323,18 @@
             raise IOError("empty file: %s" % path)
 
         return result
     finally:
         f.close()
+
 
 def unlinkfile(filepath):
     if pycompat.iswindows:
         # On Windows, os.unlink cannot delete readonly files
         os.chmod(filepath, stat.S_IWUSR)
     os.unlink(filepath)
+
 
 def renamefile(source, destination):
     if pycompat.iswindows:
         # On Windows, os.rename cannot rename readonly files
         # and cannot overwrite destination if it exists
@@ -312,10 +342,11 @@
         if os.path.isfile(destination):
             os.chmod(destination, stat.S_IWUSR)
             os.unlink(destination)
 
     os.rename(source, destination)
+
 
 def writefile(path, content, readonly=False):
     dirname, filename = os.path.split(path)
     if not os.path.exists(dirname):
         try:
@@ -349,10 +380,11 @@
         try:
             unlinkfile(temp)
         except OSError:
             pass
         raise
+
 
 def sortnodes(nodes, parentfunc):
     """Topologically sorts the nodes, using the parentfunc to find
     the parents of nodes."""
     nodes = set(nodes)
@@ -386,56 +418,65 @@
                     # parents. This gives better compression results.
                     roots.insert(0, c)
 
     return results
 
+
 def readexactly(stream, n):
     '''read n bytes from stream.read and abort if less was available'''
     s = stream.read(n)
     if len(s) < n:
-        raise error.Abort(_("stream ended unexpectedly"
-                            " (got %d bytes, expected %d)")
-                          % (len(s), n))
+        raise error.Abort(
+            _("stream ended unexpectedly" " (got %d bytes, expected %d)")
+            % (len(s), n)
+        )
     return s
+
 
 def readunpack(stream, fmt):
     data = readexactly(stream, struct.calcsize(fmt))
     return struct.unpack(fmt, data)
+
 
 def readpath(stream):
     rawlen = readexactly(stream, constants.FILENAMESIZE)
     pathlen = struct.unpack(constants.FILENAMESTRUCT, rawlen)[0]
     return readexactly(stream, pathlen)
+
 
 def readnodelist(stream):
     rawlen = readexactly(stream, constants.NODECOUNTSIZE)
     nodecount = struct.unpack(constants.NODECOUNTSTRUCT, rawlen)[0]
     for i in pycompat.xrange(nodecount):
         yield readexactly(stream, constants.NODESIZE)
 
+
 def readpathlist(stream):
     rawlen = readexactly(stream, constants.PATHCOUNTSIZE)
     pathcount = struct.unpack(constants.PATHCOUNTSTRUCT, rawlen)[0]
     for i in pycompat.xrange(pathcount):
         yield readpath(stream)
 
+
 def getgid(groupname):
     try:
         gid = grp.getgrnam(pycompat.fsdecode(groupname)).gr_gid
         return gid
     except KeyError:
         return None
+
 
 def setstickygroupdir(path, gid, warn=None):
     if gid is None:
         return
     try:
         os.chown(path, -1, gid)
         os.chmod(path, 0o2775)
     except (IOError, OSError) as ex:
         if warn:
             warn(_('unable to chown/chmod on %s: %s\n') % (path, ex))
+
 
 def mkstickygroupdir(ui, path):
     """Creates the given directory (if it doesn't exist) and gives it a
     particular group with setgid enabled."""
     gid = None
@@ -477,15 +518,17 @@
         for path in missingdirs:
             setstickygroupdir(path, gid, ui.warn)
     finally:
         os.umask(oldumask)
 
+
 def getusername(ui):
     try:
         return stringutil.shortuser(ui.username())
     except Exception:
         return 'unknown'
+
 
 def getreponame(ui):
     reponame = ui.config('paths', 'default')
     if reponame:
         return os.path.basename(reponame)
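The read* helpers near the end implement a simple length-prefixed wire format: a fixed-size big-endian count or length, then that many raw bytes (fixed-size nodes, or length-prefixed paths). A self-contained sketch of reading one node list from a byte stream, assuming the 4-byte '!I' count and 20-byte nodes that the constants module appears to define:

    import io
    import struct

    def readexactly(stream, n):
        s = stream.read(n)
        if len(s) < n:
            raise IOError('stream ended unexpectedly')
        return s

    def readnodelist(stream, nodesize=20):
        (count,) = struct.unpack('!I', readexactly(stream, 4))  # node count
        for _ in range(count):
            yield readexactly(stream, nodesize)                 # raw node

    stream = io.BytesIO(struct.pack('!I', 2) + b'\xaa' * 20 + b'\xbb' * 20)
    assert list(readnodelist(stream)) == [b'\xaa' * 20, b'\xbb' * 20]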