comparison mercurial/util.py @ 43076:2372284d9457

formatting: blacken the codebase This is using my patch to black (https://github.com/psf/black/pull/826) so we don't un-wrap collection literals. Done with: hg files 'set:**.py - mercurial/thirdparty/** - "contrib/python-zstandard/**"' | xargs black -S # skip-blame mass-reformatting only # no-check-commit reformats foo_bar functions Differential Revision: https://phab.mercurial-scm.org/D6971
author Augie Fackler <augie@google.com>
date Sun, 06 Oct 2019 09:45:02 -0400
parents e94c8f584ee2
children 687b865b95ad
comparison
equal deleted inserted replaced
43075:57875cf423c9 43076:2372284d9457
32 import sys 32 import sys
33 import time 33 import time
34 import traceback 34 import traceback
35 import warnings 35 import warnings
36 36
37 from .thirdparty import ( 37 from .thirdparty import attr
38 attr,
39 )
40 from hgdemandimport import tracing 38 from hgdemandimport import tracing
41 from . import ( 39 from . import (
42 encoding, 40 encoding,
43 error, 41 error,
44 i18n, 42 i18n,
140 138
141 # Python compatibility 139 # Python compatibility
142 140
143 _notset = object() 141 _notset = object()
144 142
143
145 def bitsfrom(container): 144 def bitsfrom(container):
146 bits = 0 145 bits = 0
147 for bit in container: 146 for bit in container:
148 bits |= bit 147 bits |= bit
149 return bits 148 return bits
149
150 150
151 # python 2.6 still have deprecation warning enabled by default. We do not want 151 # python 2.6 still have deprecation warning enabled by default. We do not want
152 # to display anything to standard user so detect if we are running test and 152 # to display anything to standard user so detect if we are running test and
153 # only use python deprecation warning in this case. 153 # only use python deprecation warning in this case.
154 _dowarn = bool(encoding.environ.get('HGEMITWARNINGS')) 154 _dowarn = bool(encoding.environ.get('HGEMITWARNINGS'))
162 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'mercurial') 162 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'mercurial')
163 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'hgext') 163 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'hgext')
164 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'hgext3rd') 164 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'hgext3rd')
165 if _dowarn and pycompat.ispy3: 165 if _dowarn and pycompat.ispy3:
166 # silence warning emitted by passing user string to re.sub() 166 # silence warning emitted by passing user string to re.sub()
167 warnings.filterwarnings(r'ignore', r'bad escape', DeprecationWarning, 167 warnings.filterwarnings(
168 r'mercurial') 168 r'ignore', r'bad escape', DeprecationWarning, r'mercurial'
169 warnings.filterwarnings(r'ignore', r'invalid escape sequence', 169 )
170 DeprecationWarning, r'mercurial') 170 warnings.filterwarnings(
171 r'ignore', r'invalid escape sequence', DeprecationWarning, r'mercurial'
172 )
171 # TODO: reinvent imp.is_frozen() 173 # TODO: reinvent imp.is_frozen()
172 warnings.filterwarnings(r'ignore', r'the imp module is deprecated', 174 warnings.filterwarnings(
173 DeprecationWarning, r'mercurial') 175 r'ignore',
176 r'the imp module is deprecated',
177 DeprecationWarning,
178 r'mercurial',
179 )
180
174 181
175 def nouideprecwarn(msg, version, stacklevel=1): 182 def nouideprecwarn(msg, version, stacklevel=1):
176 """Issue an python native deprecation warning 183 """Issue an python native deprecation warning
177 184
178 This is a noop outside of tests, use 'ui.deprecwarn' when possible. 185 This is a noop outside of tests, use 'ui.deprecwarn' when possible.
179 """ 186 """
180 if _dowarn: 187 if _dowarn:
181 msg += ("\n(compatibility will be dropped after Mercurial-%s," 188 msg += (
182 " update your code.)") % version 189 "\n(compatibility will be dropped after Mercurial-%s,"
190 " update your code.)"
191 ) % version
183 warnings.warn(pycompat.sysstr(msg), DeprecationWarning, stacklevel + 1) 192 warnings.warn(pycompat.sysstr(msg), DeprecationWarning, stacklevel + 1)
193
184 194
185 DIGESTS = { 195 DIGESTS = {
186 'md5': hashlib.md5, 196 'md5': hashlib.md5,
187 'sha1': hashlib.sha1, 197 'sha1': hashlib.sha1,
188 'sha512': hashlib.sha512, 198 'sha512': hashlib.sha512,
190 # List of digest types from strongest to weakest 200 # List of digest types from strongest to weakest
191 DIGESTS_BY_STRENGTH = ['sha512', 'sha1', 'md5'] 201 DIGESTS_BY_STRENGTH = ['sha512', 'sha1', 'md5']
192 202
193 for k in DIGESTS_BY_STRENGTH: 203 for k in DIGESTS_BY_STRENGTH:
194 assert k in DIGESTS 204 assert k in DIGESTS
205
195 206
196 class digester(object): 207 class digester(object):
197 """helper to compute digests. 208 """helper to compute digests.
198 209
199 This helper can be used to compute one or more digests given their name. 210 This helper can be used to compute one or more digests given their name.
238 for k in DIGESTS_BY_STRENGTH: 249 for k in DIGESTS_BY_STRENGTH:
239 if k in supported: 250 if k in supported:
240 return k 251 return k
241 return None 252 return None
242 253
254
243 class digestchecker(object): 255 class digestchecker(object):
244 """file handle wrapper that additionally checks content against a given 256 """file handle wrapper that additionally checks content against a given
245 size and digests. 257 size and digests.
246 258
247 d = digestchecker(fh, size, {'md5': '...'}) 259 d = digestchecker(fh, size, {'md5': '...'})
262 self._got += len(content) 274 self._got += len(content)
263 return content 275 return content
264 276
265 def validate(self): 277 def validate(self):
266 if self._size != self._got: 278 if self._size != self._got:
267 raise error.Abort(_('size mismatch: expected %d, got %d') % 279 raise error.Abort(
268 (self._size, self._got)) 280 _('size mismatch: expected %d, got %d')
281 % (self._size, self._got)
282 )
269 for k, v in self._digests.items(): 283 for k, v in self._digests.items():
270 if v != self._digester[k]: 284 if v != self._digester[k]:
271 # i18n: first parameter is a digest name 285 # i18n: first parameter is a digest name
272 raise error.Abort(_('%s mismatch: expected %s, got %s') % 286 raise error.Abort(
273 (k, v, self._digester[k])) 287 _('%s mismatch: expected %s, got %s')
288 % (k, v, self._digester[k])
289 )
290
274 291
275 try: 292 try:
276 buffer = buffer 293 buffer = buffer
277 except NameError: 294 except NameError:
295
278 def buffer(sliceable, offset=0, length=None): 296 def buffer(sliceable, offset=0, length=None):
279 if length is not None: 297 if length is not None:
280 return memoryview(sliceable)[offset:offset + length] 298 return memoryview(sliceable)[offset : offset + length]
281 return memoryview(sliceable)[offset:] 299 return memoryview(sliceable)[offset:]
282 300
301
283 _chunksize = 4096 302 _chunksize = 4096
303
284 304
285 class bufferedinputpipe(object): 305 class bufferedinputpipe(object):
286 """a manually buffered input pipe 306 """a manually buffered input pipe
287 307
288 Python will not let us use buffered IO and lazy reading with 'polling' at 308 Python will not let us use buffered IO and lazy reading with 'polling' at
294 empty from the output (allowing collaboration of the buffer with polling). 314 empty from the output (allowing collaboration of the buffer with polling).
295 315
296 This class lives in the 'util' module because it makes use of the 'os' 316 This class lives in the 'util' module because it makes use of the 'os'
297 module from the python stdlib. 317 module from the python stdlib.
298 """ 318 """
319
299 def __new__(cls, fh): 320 def __new__(cls, fh):
300 # If we receive a fileobjectproxy, we need to use a variation of this 321 # If we receive a fileobjectproxy, we need to use a variation of this
301 # class that notifies observers about activity. 322 # class that notifies observers about activity.
302 if isinstance(fh, fileobjectproxy): 323 if isinstance(fh, fileobjectproxy):
303 cls = observedbufferedinputpipe 324 cls = observedbufferedinputpipe
350 while (not self._eof) and lfi < 0: 371 while (not self._eof) and lfi < 0:
351 self._fillbuffer() 372 self._fillbuffer()
352 if self._buffer: 373 if self._buffer:
353 lfi = self._buffer[-1].find('\n') 374 lfi = self._buffer[-1].find('\n')
354 size = lfi + 1 375 size = lfi + 1
355 if lfi < 0: # end of file 376 if lfi < 0: # end of file
356 size = self._lenbuf 377 size = self._lenbuf
357 elif len(self._buffer) > 1: 378 elif len(self._buffer) > 1:
358 # we need to take previous chunks into account 379 # we need to take previous chunks into account
359 size += self._lenbuf - len(self._buffer[-1]) 380 size += self._lenbuf - len(self._buffer[-1])
360 return self._frombuffer(size) 381 return self._frombuffer(size)
368 buf = self._buffer[0] 389 buf = self._buffer[0]
369 if len(self._buffer) > 1: 390 if len(self._buffer) > 1:
370 buf = ''.join(self._buffer) 391 buf = ''.join(self._buffer)
371 392
372 data = buf[:size] 393 data = buf[:size]
373 buf = buf[len(data):] 394 buf = buf[len(data) :]
374 if buf: 395 if buf:
375 self._buffer = [buf] 396 self._buffer = [buf]
376 self._lenbuf = len(buf) 397 self._lenbuf = len(buf)
377 else: 398 else:
378 self._buffer = [] 399 self._buffer = []
387 else: 408 else:
388 self._lenbuf += len(data) 409 self._lenbuf += len(data)
389 self._buffer.append(data) 410 self._buffer.append(data)
390 411
391 return data 412 return data
413
392 414
393 def mmapread(fp): 415 def mmapread(fp):
394 try: 416 try:
395 fd = getattr(fp, 'fileno', lambda: fp)() 417 fd = getattr(fp, 'fileno', lambda: fp)()
396 return mmap.mmap(fd, 0, access=mmap.ACCESS_READ) 418 return mmap.mmap(fd, 0, access=mmap.ACCESS_READ)
399 # if the file is empty, and if so, return an empty buffer. 421 # if the file is empty, and if so, return an empty buffer.
400 if os.fstat(fd).st_size == 0: 422 if os.fstat(fd).st_size == 0:
401 return '' 423 return ''
402 raise 424 raise
403 425
426
404 class fileobjectproxy(object): 427 class fileobjectproxy(object):
405 """A proxy around file objects that tells a watcher when events occur. 428 """A proxy around file objects that tells a watcher when events occur.
406 429
407 This type is intended to only be used for testing purposes. Think hard 430 This type is intended to only be used for testing purposes. Think hard
408 before using it in important code. 431 before using it in important code.
409 """ 432 """
433
410 __slots__ = ( 434 __slots__ = (
411 r'_orig', 435 r'_orig',
412 r'_observer', 436 r'_observer',
413 ) 437 )
414 438
417 object.__setattr__(self, r'_observer', observer) 441 object.__setattr__(self, r'_observer', observer)
418 442
419 def __getattribute__(self, name): 443 def __getattribute__(self, name):
420 ours = { 444 ours = {
421 r'_observer', 445 r'_observer',
422
423 # IOBase 446 # IOBase
424 r'close', 447 r'close',
425 # closed if a property 448 # closed if a property
426 r'fileno', 449 r'fileno',
427 r'flush', 450 r'flush',
483 506
484 return res 507 return res
485 508
486 def close(self, *args, **kwargs): 509 def close(self, *args, **kwargs):
487 return object.__getattribute__(self, r'_observedcall')( 510 return object.__getattribute__(self, r'_observedcall')(
488 r'close', *args, **kwargs) 511 r'close', *args, **kwargs
512 )
489 513
490 def fileno(self, *args, **kwargs): 514 def fileno(self, *args, **kwargs):
491 return object.__getattribute__(self, r'_observedcall')( 515 return object.__getattribute__(self, r'_observedcall')(
492 r'fileno', *args, **kwargs) 516 r'fileno', *args, **kwargs
517 )
493 518
494 def flush(self, *args, **kwargs): 519 def flush(self, *args, **kwargs):
495 return object.__getattribute__(self, r'_observedcall')( 520 return object.__getattribute__(self, r'_observedcall')(
496 r'flush', *args, **kwargs) 521 r'flush', *args, **kwargs
522 )
497 523
498 def isatty(self, *args, **kwargs): 524 def isatty(self, *args, **kwargs):
499 return object.__getattribute__(self, r'_observedcall')( 525 return object.__getattribute__(self, r'_observedcall')(
500 r'isatty', *args, **kwargs) 526 r'isatty', *args, **kwargs
527 )
501 528
502 def readable(self, *args, **kwargs): 529 def readable(self, *args, **kwargs):
503 return object.__getattribute__(self, r'_observedcall')( 530 return object.__getattribute__(self, r'_observedcall')(
504 r'readable', *args, **kwargs) 531 r'readable', *args, **kwargs
532 )
505 533
506 def readline(self, *args, **kwargs): 534 def readline(self, *args, **kwargs):
507 return object.__getattribute__(self, r'_observedcall')( 535 return object.__getattribute__(self, r'_observedcall')(
508 r'readline', *args, **kwargs) 536 r'readline', *args, **kwargs
537 )
509 538
510 def readlines(self, *args, **kwargs): 539 def readlines(self, *args, **kwargs):
511 return object.__getattribute__(self, r'_observedcall')( 540 return object.__getattribute__(self, r'_observedcall')(
512 r'readlines', *args, **kwargs) 541 r'readlines', *args, **kwargs
542 )
513 543
514 def seek(self, *args, **kwargs): 544 def seek(self, *args, **kwargs):
515 return object.__getattribute__(self, r'_observedcall')( 545 return object.__getattribute__(self, r'_observedcall')(
516 r'seek', *args, **kwargs) 546 r'seek', *args, **kwargs
547 )
517 548
518 def seekable(self, *args, **kwargs): 549 def seekable(self, *args, **kwargs):
519 return object.__getattribute__(self, r'_observedcall')( 550 return object.__getattribute__(self, r'_observedcall')(
520 r'seekable', *args, **kwargs) 551 r'seekable', *args, **kwargs
552 )
521 553
522 def tell(self, *args, **kwargs): 554 def tell(self, *args, **kwargs):
523 return object.__getattribute__(self, r'_observedcall')( 555 return object.__getattribute__(self, r'_observedcall')(
524 r'tell', *args, **kwargs) 556 r'tell', *args, **kwargs
557 )
525 558
526 def truncate(self, *args, **kwargs): 559 def truncate(self, *args, **kwargs):
527 return object.__getattribute__(self, r'_observedcall')( 560 return object.__getattribute__(self, r'_observedcall')(
528 r'truncate', *args, **kwargs) 561 r'truncate', *args, **kwargs
562 )
529 563
530 def writable(self, *args, **kwargs): 564 def writable(self, *args, **kwargs):
531 return object.__getattribute__(self, r'_observedcall')( 565 return object.__getattribute__(self, r'_observedcall')(
532 r'writable', *args, **kwargs) 566 r'writable', *args, **kwargs
567 )
533 568
534 def writelines(self, *args, **kwargs): 569 def writelines(self, *args, **kwargs):
535 return object.__getattribute__(self, r'_observedcall')( 570 return object.__getattribute__(self, r'_observedcall')(
536 r'writelines', *args, **kwargs) 571 r'writelines', *args, **kwargs
572 )
537 573
538 def read(self, *args, **kwargs): 574 def read(self, *args, **kwargs):
539 return object.__getattribute__(self, r'_observedcall')( 575 return object.__getattribute__(self, r'_observedcall')(
540 r'read', *args, **kwargs) 576 r'read', *args, **kwargs
577 )
541 578
542 def readall(self, *args, **kwargs): 579 def readall(self, *args, **kwargs):
543 return object.__getattribute__(self, r'_observedcall')( 580 return object.__getattribute__(self, r'_observedcall')(
544 r'readall', *args, **kwargs) 581 r'readall', *args, **kwargs
582 )
545 583
546 def readinto(self, *args, **kwargs): 584 def readinto(self, *args, **kwargs):
547 return object.__getattribute__(self, r'_observedcall')( 585 return object.__getattribute__(self, r'_observedcall')(
548 r'readinto', *args, **kwargs) 586 r'readinto', *args, **kwargs
587 )
549 588
550 def write(self, *args, **kwargs): 589 def write(self, *args, **kwargs):
551 return object.__getattribute__(self, r'_observedcall')( 590 return object.__getattribute__(self, r'_observedcall')(
552 r'write', *args, **kwargs) 591 r'write', *args, **kwargs
592 )
553 593
554 def detach(self, *args, **kwargs): 594 def detach(self, *args, **kwargs):
555 return object.__getattribute__(self, r'_observedcall')( 595 return object.__getattribute__(self, r'_observedcall')(
556 r'detach', *args, **kwargs) 596 r'detach', *args, **kwargs
597 )
557 598
558 def read1(self, *args, **kwargs): 599 def read1(self, *args, **kwargs):
559 return object.__getattribute__(self, r'_observedcall')( 600 return object.__getattribute__(self, r'_observedcall')(
560 r'read1', *args, **kwargs) 601 r'read1', *args, **kwargs
602 )
603
561 604
562 class observedbufferedinputpipe(bufferedinputpipe): 605 class observedbufferedinputpipe(bufferedinputpipe):
563 """A variation of bufferedinputpipe that is aware of fileobjectproxy. 606 """A variation of bufferedinputpipe that is aware of fileobjectproxy.
564 607
565 ``bufferedinputpipe`` makes low-level calls to ``os.read()`` that 608 ``bufferedinputpipe`` makes low-level calls to ``os.read()`` that
568 611
569 This variation of ``bufferedinputpipe`` can notify observers about 612 This variation of ``bufferedinputpipe`` can notify observers about
570 ``os.read()`` events. It also re-publishes other events, such as 613 ``os.read()`` events. It also re-publishes other events, such as
571 ``read()`` and ``readline()``. 614 ``read()`` and ``readline()``.
572 """ 615 """
616
573 def _fillbuffer(self): 617 def _fillbuffer(self):
574 res = super(observedbufferedinputpipe, self)._fillbuffer() 618 res = super(observedbufferedinputpipe, self)._fillbuffer()
575 619
576 fn = getattr(self._input._observer, r'osread', None) 620 fn = getattr(self._input._observer, r'osread', None)
577 if fn: 621 if fn:
596 fn = getattr(self._input._observer, r'bufferedreadline', None) 640 fn = getattr(self._input._observer, r'bufferedreadline', None)
597 if fn: 641 if fn:
598 fn(res) 642 fn(res)
599 643
600 return res 644 return res
645
601 646
602 PROXIED_SOCKET_METHODS = { 647 PROXIED_SOCKET_METHODS = {
603 r'makefile', 648 r'makefile',
604 r'recv', 649 r'recv',
605 r'recvfrom', 650 r'recvfrom',
612 r'settimeout', 657 r'settimeout',
613 r'gettimeout', 658 r'gettimeout',
614 r'setsockopt', 659 r'setsockopt',
615 } 660 }
616 661
662
617 class socketproxy(object): 663 class socketproxy(object):
618 """A proxy around a socket that tells a watcher when events occur. 664 """A proxy around a socket that tells a watcher when events occur.
619 665
620 This is like ``fileobjectproxy`` except for sockets. 666 This is like ``fileobjectproxy`` except for sockets.
621 667
622 This type is intended to only be used for testing purposes. Think hard 668 This type is intended to only be used for testing purposes. Think hard
623 before using it in important code. 669 before using it in important code.
624 """ 670 """
671
625 __slots__ = ( 672 __slots__ = (
626 r'_orig', 673 r'_orig',
627 r'_observer', 674 r'_observer',
628 ) 675 )
629 676
662 709
663 return res 710 return res
664 711
665 def makefile(self, *args, **kwargs): 712 def makefile(self, *args, **kwargs):
666 res = object.__getattribute__(self, r'_observedcall')( 713 res = object.__getattribute__(self, r'_observedcall')(
667 r'makefile', *args, **kwargs) 714 r'makefile', *args, **kwargs
715 )
668 716
669 # The file object may be used for I/O. So we turn it into a 717 # The file object may be used for I/O. So we turn it into a
670 # proxy using our observer. 718 # proxy using our observer.
671 observer = object.__getattribute__(self, r'_observer') 719 observer = object.__getattribute__(self, r'_observer')
672 return makeloggingfileobject(observer.fh, res, observer.name, 720 return makeloggingfileobject(
673 reads=observer.reads, 721 observer.fh,
674 writes=observer.writes, 722 res,
675 logdata=observer.logdata, 723 observer.name,
676 logdataapis=observer.logdataapis) 724 reads=observer.reads,
725 writes=observer.writes,
726 logdata=observer.logdata,
727 logdataapis=observer.logdataapis,
728 )
677 729
678 def recv(self, *args, **kwargs): 730 def recv(self, *args, **kwargs):
679 return object.__getattribute__(self, r'_observedcall')( 731 return object.__getattribute__(self, r'_observedcall')(
680 r'recv', *args, **kwargs) 732 r'recv', *args, **kwargs
733 )
681 734
682 def recvfrom(self, *args, **kwargs): 735 def recvfrom(self, *args, **kwargs):
683 return object.__getattribute__(self, r'_observedcall')( 736 return object.__getattribute__(self, r'_observedcall')(
684 r'recvfrom', *args, **kwargs) 737 r'recvfrom', *args, **kwargs
738 )
685 739
686 def recvfrom_into(self, *args, **kwargs): 740 def recvfrom_into(self, *args, **kwargs):
687 return object.__getattribute__(self, r'_observedcall')( 741 return object.__getattribute__(self, r'_observedcall')(
688 r'recvfrom_into', *args, **kwargs) 742 r'recvfrom_into', *args, **kwargs
743 )
689 744
690 def recv_into(self, *args, **kwargs): 745 def recv_into(self, *args, **kwargs):
691 return object.__getattribute__(self, r'_observedcall')( 746 return object.__getattribute__(self, r'_observedcall')(
692 r'recv_info', *args, **kwargs) 747 r'recv_info', *args, **kwargs
748 )
693 749
694 def send(self, *args, **kwargs): 750 def send(self, *args, **kwargs):
695 return object.__getattribute__(self, r'_observedcall')( 751 return object.__getattribute__(self, r'_observedcall')(
696 r'send', *args, **kwargs) 752 r'send', *args, **kwargs
753 )
697 754
698 def sendall(self, *args, **kwargs): 755 def sendall(self, *args, **kwargs):
699 return object.__getattribute__(self, r'_observedcall')( 756 return object.__getattribute__(self, r'_observedcall')(
700 r'sendall', *args, **kwargs) 757 r'sendall', *args, **kwargs
758 )
701 759
702 def sendto(self, *args, **kwargs): 760 def sendto(self, *args, **kwargs):
703 return object.__getattribute__(self, r'_observedcall')( 761 return object.__getattribute__(self, r'_observedcall')(
704 r'sendto', *args, **kwargs) 762 r'sendto', *args, **kwargs
763 )
705 764
706 def setblocking(self, *args, **kwargs): 765 def setblocking(self, *args, **kwargs):
707 return object.__getattribute__(self, r'_observedcall')( 766 return object.__getattribute__(self, r'_observedcall')(
708 r'setblocking', *args, **kwargs) 767 r'setblocking', *args, **kwargs
768 )
709 769
710 def settimeout(self, *args, **kwargs): 770 def settimeout(self, *args, **kwargs):
711 return object.__getattribute__(self, r'_observedcall')( 771 return object.__getattribute__(self, r'_observedcall')(
712 r'settimeout', *args, **kwargs) 772 r'settimeout', *args, **kwargs
773 )
713 774
714 def gettimeout(self, *args, **kwargs): 775 def gettimeout(self, *args, **kwargs):
715 return object.__getattribute__(self, r'_observedcall')( 776 return object.__getattribute__(self, r'_observedcall')(
716 r'gettimeout', *args, **kwargs) 777 r'gettimeout', *args, **kwargs
778 )
717 779
718 def setsockopt(self, *args, **kwargs): 780 def setsockopt(self, *args, **kwargs):
719 return object.__getattribute__(self, r'_observedcall')( 781 return object.__getattribute__(self, r'_observedcall')(
720 r'setsockopt', *args, **kwargs) 782 r'setsockopt', *args, **kwargs
783 )
784
721 785
722 class baseproxyobserver(object): 786 class baseproxyobserver(object):
723 def _writedata(self, data): 787 def _writedata(self, data):
724 if not self.logdata: 788 if not self.logdata:
725 if self.logdataapis: 789 if self.logdataapis:
730 # Simple case writes all data on a single line. 794 # Simple case writes all data on a single line.
731 if b'\n' not in data: 795 if b'\n' not in data:
732 if self.logdataapis: 796 if self.logdataapis:
733 self.fh.write(': %s\n' % stringutil.escapestr(data)) 797 self.fh.write(': %s\n' % stringutil.escapestr(data))
734 else: 798 else:
735 self.fh.write('%s> %s\n' 799 self.fh.write(
736 % (self.name, stringutil.escapestr(data))) 800 '%s> %s\n' % (self.name, stringutil.escapestr(data))
801 )
737 self.fh.flush() 802 self.fh.flush()
738 return 803 return
739 804
740 # Data with newlines is written to multiple lines. 805 # Data with newlines is written to multiple lines.
741 if self.logdataapis: 806 if self.logdataapis:
742 self.fh.write(':\n') 807 self.fh.write(':\n')
743 808
744 lines = data.splitlines(True) 809 lines = data.splitlines(True)
745 for line in lines: 810 for line in lines:
746 self.fh.write('%s> %s\n' 811 self.fh.write(
747 % (self.name, stringutil.escapestr(line))) 812 '%s> %s\n' % (self.name, stringutil.escapestr(line))
813 )
748 self.fh.flush() 814 self.fh.flush()
815
749 816
750 class fileobjectobserver(baseproxyobserver): 817 class fileobjectobserver(baseproxyobserver):
751 """Logs file object activity.""" 818 """Logs file object activity."""
752 def __init__(self, fh, name, reads=True, writes=True, logdata=False, 819
753 logdataapis=True): 820 def __init__(
821 self, fh, name, reads=True, writes=True, logdata=False, logdataapis=True
822 ):
754 self.fh = fh 823 self.fh = fh
755 self.name = name 824 self.name = name
756 self.logdata = logdata 825 self.logdata = logdata
757 self.logdataapis = logdataapis 826 self.logdataapis = logdataapis
758 self.reads = reads 827 self.reads = reads
789 def readinto(self, res, dest): 858 def readinto(self, res, dest):
790 if not self.reads: 859 if not self.reads:
791 return 860 return
792 861
793 if self.logdataapis: 862 if self.logdataapis:
794 self.fh.write('%s> readinto(%d) -> %r' % (self.name, len(dest), 863 self.fh.write(
795 res)) 864 '%s> readinto(%d) -> %r' % (self.name, len(dest), res)
865 )
796 866
797 data = dest[0:res] if res is not None else b'' 867 data = dest[0:res] if res is not None else b''
798 868
799 # _writedata() uses "in" operator and is confused by memoryview because 869 # _writedata() uses "in" operator and is confused by memoryview because
800 # characters are ints on Python 3. 870 # characters are ints on Python 3.
827 def bufferedread(self, res, size): 897 def bufferedread(self, res, size):
828 if not self.reads: 898 if not self.reads:
829 return 899 return
830 900
831 if self.logdataapis: 901 if self.logdataapis:
832 self.fh.write('%s> bufferedread(%d) -> %d' % ( 902 self.fh.write(
833 self.name, size, len(res))) 903 '%s> bufferedread(%d) -> %d' % (self.name, size, len(res))
904 )
834 905
835 self._writedata(res) 906 self._writedata(res)
836 907
837 def bufferedreadline(self, res): 908 def bufferedreadline(self, res):
838 if not self.reads: 909 if not self.reads:
839 return 910 return
840 911
841 if self.logdataapis: 912 if self.logdataapis:
842 self.fh.write('%s> bufferedreadline() -> %d' % ( 913 self.fh.write(
843 self.name, len(res))) 914 '%s> bufferedreadline() -> %d' % (self.name, len(res))
915 )
844 916
845 self._writedata(res) 917 self._writedata(res)
846 918
847 def makeloggingfileobject(logh, fh, name, reads=True, writes=True, 919
848 logdata=False, logdataapis=True): 920 def makeloggingfileobject(
921 logh, fh, name, reads=True, writes=True, logdata=False, logdataapis=True
922 ):
849 """Turn a file object into a logging file object.""" 923 """Turn a file object into a logging file object."""
850 924
851 observer = fileobjectobserver(logh, name, reads=reads, writes=writes, 925 observer = fileobjectobserver(
852 logdata=logdata, logdataapis=logdataapis) 926 logh,
927 name,
928 reads=reads,
929 writes=writes,
930 logdata=logdata,
931 logdataapis=logdataapis,
932 )
853 return fileobjectproxy(fh, observer) 933 return fileobjectproxy(fh, observer)
934
854 935
855 class socketobserver(baseproxyobserver): 936 class socketobserver(baseproxyobserver):
856 """Logs socket activity.""" 937 """Logs socket activity."""
857 def __init__(self, fh, name, reads=True, writes=True, states=True, 938
858 logdata=False, logdataapis=True): 939 def __init__(
940 self,
941 fh,
942 name,
943 reads=True,
944 writes=True,
945 states=True,
946 logdata=False,
947 logdataapis=True,
948 ):
859 self.fh = fh 949 self.fh = fh
860 self.name = name 950 self.name = name
861 self.reads = reads 951 self.reads = reads
862 self.writes = writes 952 self.writes = writes
863 self.states = states 953 self.states = states
866 956
867 def makefile(self, res, mode=None, bufsize=None): 957 def makefile(self, res, mode=None, bufsize=None):
868 if not self.states: 958 if not self.states:
869 return 959 return
870 960
871 self.fh.write('%s> makefile(%r, %r)\n' % ( 961 self.fh.write('%s> makefile(%r, %r)\n' % (self.name, mode, bufsize))
872 self.name, mode, bufsize))
873 962
874 def recv(self, res, size, flags=0): 963 def recv(self, res, size, flags=0):
875 if not self.reads: 964 if not self.reads:
876 return 965 return
877 966
878 if self.logdataapis: 967 if self.logdataapis:
879 self.fh.write('%s> recv(%d, %d) -> %d' % ( 968 self.fh.write(
880 self.name, size, flags, len(res))) 969 '%s> recv(%d, %d) -> %d' % (self.name, size, flags, len(res))
970 )
881 self._writedata(res) 971 self._writedata(res)
882 972
883 def recvfrom(self, res, size, flags=0): 973 def recvfrom(self, res, size, flags=0):
884 if not self.reads: 974 if not self.reads:
885 return 975 return
886 976
887 if self.logdataapis: 977 if self.logdataapis:
888 self.fh.write('%s> recvfrom(%d, %d) -> %d' % ( 978 self.fh.write(
889 self.name, size, flags, len(res[0]))) 979 '%s> recvfrom(%d, %d) -> %d'
980 % (self.name, size, flags, len(res[0]))
981 )
890 982
891 self._writedata(res[0]) 983 self._writedata(res[0])
892 984
893 def recvfrom_into(self, res, buf, size, flags=0): 985 def recvfrom_into(self, res, buf, size, flags=0):
894 if not self.reads: 986 if not self.reads:
895 return 987 return
896 988
897 if self.logdataapis: 989 if self.logdataapis:
898 self.fh.write('%s> recvfrom_into(%d, %d) -> %d' % ( 990 self.fh.write(
899 self.name, size, flags, res[0])) 991 '%s> recvfrom_into(%d, %d) -> %d'
900 992 % (self.name, size, flags, res[0])
901 self._writedata(buf[0:res[0]]) 993 )
994
995 self._writedata(buf[0 : res[0]])
902 996
903 def recv_into(self, res, buf, size=0, flags=0): 997 def recv_into(self, res, buf, size=0, flags=0):
904 if not self.reads: 998 if not self.reads:
905 return 999 return
906 1000
907 if self.logdataapis: 1001 if self.logdataapis:
908 self.fh.write('%s> recv_into(%d, %d) -> %d' % ( 1002 self.fh.write(
909 self.name, size, flags, res)) 1003 '%s> recv_into(%d, %d) -> %d' % (self.name, size, flags, res)
1004 )
910 1005
911 self._writedata(buf[0:res]) 1006 self._writedata(buf[0:res])
912 1007
913 def send(self, res, data, flags=0): 1008 def send(self, res, data, flags=0):
914 if not self.writes: 1009 if not self.writes:
915 return 1010 return
916 1011
917 self.fh.write('%s> send(%d, %d) -> %d' % ( 1012 self.fh.write(
918 self.name, len(data), flags, len(res))) 1013 '%s> send(%d, %d) -> %d' % (self.name, len(data), flags, len(res))
1014 )
919 self._writedata(data) 1015 self._writedata(data)
920 1016
921 def sendall(self, res, data, flags=0): 1017 def sendall(self, res, data, flags=0):
922 if not self.writes: 1018 if not self.writes:
923 return 1019 return
924 1020
925 if self.logdataapis: 1021 if self.logdataapis:
926 # Returns None on success. So don't bother reporting return value. 1022 # Returns None on success. So don't bother reporting return value.
927 self.fh.write('%s> sendall(%d, %d)' % ( 1023 self.fh.write('%s> sendall(%d, %d)' % (self.name, len(data), flags))
928 self.name, len(data), flags))
929 1024
930 self._writedata(data) 1025 self._writedata(data)
931 1026
932 def sendto(self, res, data, flagsoraddress, address=None): 1027 def sendto(self, res, data, flagsoraddress, address=None):
933 if not self.writes: 1028 if not self.writes:
937 flags = flagsoraddress 1032 flags = flagsoraddress
938 else: 1033 else:
939 flags = 0 1034 flags = 0
940 1035
941 if self.logdataapis: 1036 if self.logdataapis:
942 self.fh.write('%s> sendto(%d, %d, %r) -> %d' % ( 1037 self.fh.write(
943 self.name, len(data), flags, address, res)) 1038 '%s> sendto(%d, %d, %r) -> %d'
1039 % (self.name, len(data), flags, address, res)
1040 )
944 1041
945 self._writedata(data) 1042 self._writedata(data)
946 1043
947 def setblocking(self, res, flag): 1044 def setblocking(self, res, flag):
948 if not self.states: 1045 if not self.states:
964 1061
def setsockopt(self, res, level, optname, value):
    """Log a setsockopt() call together with its result."""
    if not self.states:
        return

    line = '%s> setsockopt(%r, %r, %r) -> %r\n' % (
        self.name,
        level,
        optname,
        value,
        res,
    )
    self.fh.write(line)
def makeloggingsocket(
    logh,
    fh,
    name,
    reads=True,
    writes=True,
    states=True,
    logdata=False,
    logdataapis=True,
):
    """Turn a socket into a logging socket.

    Wraps ``fh`` in a ``socketproxy`` whose activity is recorded by a
    ``socketobserver`` writing to ``logh``. The keyword flags select which
    classes of events (reads, writes, state changes, raw data, API calls)
    get logged.
    """
    return socketproxy(
        fh,
        socketobserver(
            logh,
            name,
            reads=reads,
            writes=writes,
            states=states,
            logdata=logdata,
            logdataapis=logdataapis,
        ),
    )
1094
980 1095
def version():
    """Return version information if available."""
    try:
        # __version__ is generated at build time and may be absent in a
        # source checkout.
        from . import __version__ as _version
    except ImportError:
        return 'unknown'
    return _version.version
1104
988 1105
989 def versiontuple(v=None, n=4): 1106 def versiontuple(v=None, n=4):
990 """Parses a Mercurial version string into an N-tuple. 1107 """Parses a Mercurial version string into an N-tuple.
991 1108
992 The version string to be parsed is specified with the ``v`` argument. 1109 The version string to be parsed is specified with the ``v`` argument.
1066 if n == 3: 1183 if n == 3:
1067 return (vints[0], vints[1], vints[2]) 1184 return (vints[0], vints[1], vints[2])
1068 if n == 4: 1185 if n == 4:
1069 return (vints[0], vints[1], vints[2], extra) 1186 return (vints[0], vints[1], vints[2], extra)
1070 1187
1188
def cachefunc(func):
    '''cache the result of function calls'''
    # XXX doesn't handle keywords args
    argcount = func.__code__.co_argcount
    if argcount == 0:
        # Zero-argument functions: memoize the single result in a
        # one-slot list.
        memo = []

        def f():
            if not memo:
                memo.append(func())
            return memo[0]

        return f
    memo = {}
    if argcount == 1:
        # Single-argument fast path: key directly on the argument so we
        # avoid packing/unpacking a tuple on every call.
        def f(arg):
            if arg not in memo:
                memo[arg] = func(arg)
            return memo[arg]

    else:

        def f(*args):
            if args not in memo:
                memo[args] = func(*args)
            return memo[args]

    return f
1218
1096 1219
1097 class cow(object): 1220 class cow(object):
1098 """helper class to make copy-on-write easier 1221 """helper class to make copy-on-write easier
1099 1222
1100 Call preparewrite before doing any writes. 1223 Call preparewrite before doing any writes.
1109 1232
1110 def copy(self): 1233 def copy(self):
1111 """always do a cheap copy""" 1234 """always do a cheap copy"""
1112 self._copied = getattr(self, '_copied', 0) + 1 1235 self._copied = getattr(self, '_copied', 0) + 1
1113 return self 1236 return self
1237
1114 1238
1115 class sortdict(collections.OrderedDict): 1239 class sortdict(collections.OrderedDict):
1116 '''a simple sorted dictionary 1240 '''a simple sorted dictionary
1117 1241
1118 >>> d1 = sortdict([(b'a', 0), (b'b', 1)]) 1242 >>> d1 = sortdict([(b'a', 0), (b'b', 1)])
def update(self, src):
    """Copy items from ``src`` (a dict or an iterable of pairs) into self
    one assignment at a time, so __setitem__ semantics apply to each key."""
    if isinstance(src, dict):
        src = src.iteritems()
    for key, value in src:
        self[key] = value
1263
1139 1264
1140 class cowdict(cow, dict): 1265 class cowdict(cow, dict):
1141 """copy-on-write dict 1266 """copy-on-write dict
1142 1267
1143 Be sure to call d = d.preparewrite() before writing to d. 1268 Be sure to call d = d.preparewrite() before writing to d.
1161 False 1286 False
1162 >>> b is b.preparewrite() 1287 >>> b is b.preparewrite()
1163 True 1288 True
1164 """ 1289 """
1165 1290
1291
class cowsortdict(cow, sortdict):
    """A sortdict with copy-on-write semantics.

    Call ``d = d.preparewrite()`` before mutating ``d``.
    """
1171 1297
1298
1172 class transactional(object): 1299 class transactional(object):
1173 """Base class for making a transactional type into a context manager.""" 1300 """Base class for making a transactional type into a context manager."""
1301
1174 __metaclass__ = abc.ABCMeta 1302 __metaclass__ = abc.ABCMeta
1175 1303
1176 @abc.abstractmethod 1304 @abc.abstractmethod
1177 def close(self): 1305 def close(self):
1178 """Successfully closes the transaction.""" 1306 """Successfully closes the transaction."""
1191 try: 1319 try:
1192 if exc_type is None: 1320 if exc_type is None:
1193 self.close() 1321 self.close()
1194 finally: 1322 finally:
1195 self.release() 1323 self.release()
1324
1196 1325
1197 @contextlib.contextmanager 1326 @contextlib.contextmanager
1198 def acceptintervention(tr=None): 1327 def acceptintervention(tr=None):
1199 """A context manager that closes the transaction on InterventionRequired 1328 """A context manager that closes the transaction on InterventionRequired
1200 1329
1210 tr.close() 1339 tr.close()
1211 raise 1340 raise
1212 finally: 1341 finally:
1213 tr.release() 1342 tr.release()
1214 1343
1344
@contextlib.contextmanager
def nullcontextmanager():
    """A context manager that does nothing (yields None, no cleanup)."""
    yield
1218 1348
1349
1219 class _lrucachenode(object): 1350 class _lrucachenode(object):
1220 """A node in a doubly linked list. 1351 """A node in a doubly linked list.
1221 1352
1222 Holds a reference to nodes on either side as well as a key-value 1353 Holds a reference to nodes on either side as well as a key-value
1223 pair for the dictionary entry. 1354 pair for the dictionary entry.
1224 """ 1355 """
1356
1225 __slots__ = (r'next', r'prev', r'key', r'value', r'cost') 1357 __slots__ = (r'next', r'prev', r'key', r'value', r'cost')
1226 1358
1227 def __init__(self): 1359 def __init__(self):
1228 self.next = None 1360 self.next = None
1229 self.prev = None 1361 self.prev = None
1235 def markempty(self): 1367 def markempty(self):
1236 """Mark the node as emptied.""" 1368 """Mark the node as emptied."""
1237 self.key = _notset 1369 self.key = _notset
1238 self.value = None 1370 self.value = None
1239 self.cost = 0 1371 self.cost = 0
1372
1240 1373
1241 class lrucachedict(object): 1374 class lrucachedict(object):
1242 """Dict that caches most recent accesses and sets. 1375 """Dict that caches most recent accesses and sets.
1243 1376
1244 The dict consists of an actual backing dict - indexed by original 1377 The dict consists of an actual backing dict - indexed by original
1258 cause the total cost of the cache to go beyond the maximum cost limit, 1391 cause the total cost of the cache to go beyond the maximum cost limit,
1259 nodes will be evicted to make room for the new code. This can be used 1392 nodes will be evicted to make room for the new code. This can be used
1260 to e.g. set a max memory limit and associate an estimated bytes size 1393 to e.g. set a max memory limit and associate an estimated bytes size
1261 cost to each item in the cache. By default, no maximum cost is enforced. 1394 cost to each item in the cache. By default, no maximum cost is enforced.
1262 """ 1395 """
1396
1263 def __init__(self, max, maxcost=0): 1397 def __init__(self, max, maxcost=0):
1264 self._cache = {} 1398 self._cache = {}
1265 1399
1266 self._head = head = _lrucachenode() 1400 self._head = head = _lrucachenode()
1267 head.prev = head 1401 head.prev = head
1528 del self._cache[n.key] 1662 del self._cache[n.key]
1529 self.totalcost -= n.cost 1663 self.totalcost -= n.cost
1530 n.markempty() 1664 n.markempty()
1531 n = n.prev 1665 n = n.prev
1532 1666
1667
1533 def lrucachefunc(func): 1668 def lrucachefunc(func):
1534 '''cache most recent results of function calls''' 1669 '''cache most recent results of function calls'''
1535 cache = {} 1670 cache = {}
1536 order = collections.deque() 1671 order = collections.deque()
1537 if func.__code__.co_argcount == 1: 1672 if func.__code__.co_argcount == 1:
1673
1538 def f(arg): 1674 def f(arg):
1539 if arg not in cache: 1675 if arg not in cache:
1540 if len(cache) > 20: 1676 if len(cache) > 20:
1541 del cache[order.popleft()] 1677 del cache[order.popleft()]
1542 cache[arg] = func(arg) 1678 cache[arg] = func(arg)
1543 else: 1679 else:
1544 order.remove(arg) 1680 order.remove(arg)
1545 order.append(arg) 1681 order.append(arg)
1546 return cache[arg] 1682 return cache[arg]
1683
1547 else: 1684 else:
1685
1548 def f(*args): 1686 def f(*args):
1549 if args not in cache: 1687 if args not in cache:
1550 if len(cache) > 20: 1688 if len(cache) > 20:
1551 del cache[order.popleft()] 1689 del cache[order.popleft()]
1552 cache[args] = func(*args) 1690 cache[args] = func(*args)
1555 order.append(args) 1693 order.append(args)
1556 return cache[args] 1694 return cache[args]
1557 1695
1558 return f 1696 return f
1559 1697
1698
class propertycache(object):
    """Non-data descriptor that computes a value once and caches it.

    On first attribute access the wrapped function is invoked and its
    result is stored in the instance ``__dict__`` under the same name,
    so later lookups never reach the descriptor again.
    """

    def __init__(self, func):
        self.func = func
        self.name = func.__name__

    def __get__(self, obj, type=None):
        value = self.func(obj)
        self.cachevalue(obj, value)
        return value

    def cachevalue(self, obj, value):
        # __dict__ assignment required to bypass __setattr__ (eg: repoview)
        obj.__dict__[self.name] = value
1712
1572 1713
def clearcachedproperty(obj, prop):
    '''clear a cached property value, if one has been set'''
    # Instance attribute names are native str; normalize before probing.
    obj.__dict__.pop(pycompat.sysstr(prop), None)
1578 1719
1720
1579 def increasingchunks(source, min=1024, max=65536): 1721 def increasingchunks(source, min=1024, max=65536):
1580 '''return no less than min bytes per chunk while data remains, 1722 '''return no less than min bytes per chunk while data remains,
1581 doubling min after each chunk until it reaches max''' 1723 doubling min after each chunk until it reaches max'''
1724
1582 def log2(x): 1725 def log2(x):
1583 if not x: 1726 if not x:
1584 return 0 1727 return 0
1585 i = 0 1728 i = 0
1586 while x: 1729 while x:
1605 blen = 0 1748 blen = 0
1606 buf = [] 1749 buf = []
1607 if buf: 1750 if buf:
1608 yield ''.join(buf) 1751 yield ''.join(buf)
1609 1752
1753
def always(fn):
    """Matcher predicate that accepts every input."""
    return True
1612 1756
1757
def never(fn):
    """Matcher predicate that rejects every input."""
    return False
1760
1615 1761
def nogc(func):
    """Decorator running ``func`` with the garbage collector disabled.

    CPython triggers a collection each time a certain number of container
    objects has been allocated, which hurts when building large containers.
    The underlying issue was fixed in 2.7, but disabling the GC still
    helps CPython performance.
    """

    def inner(*args, **kwargs):
        wasenabled = gc.isenabled()
        gc.disable()
        try:
            return func(*args, **kwargs)
        finally:
            # Only re-enable if the caller had GC on to begin with.
            if wasenabled:
                gc.enable()

    return inner
1786
1638 1787
if pycompat.ispypy:
    # Disabling the GC is a pessimization on PyPy, so make nogc a no-op
    # pass-through there.
    nogc = lambda x: x
1791
1642 1792
1643 def pathto(root, n1, n2): 1793 def pathto(root, n1, n2):
1644 '''return the relative path from one place to another. 1794 '''return the relative path from one place to another.
1645 root should use os.sep to separate directories 1795 root should use os.sep to separate directories
1646 n1 should use os.sep to separate directories 1796 n1 should use os.sep to separate directories
1664 a.pop() 1814 a.pop()
1665 b.pop() 1815 b.pop()
1666 b.reverse() 1816 b.reverse()
1667 return pycompat.ossep.join((['..'] * len(a)) + b) or '.' 1817 return pycompat.ossep.join((['..'] * len(a)) + b) or '.'
1668 1818
1819
# the location of data files matching the source code
if procutil.mainfrozen() and getattr(sys, 'frozen', None) != 'macosx_app':
    # frozen executables (py2exe) don't support __file__; locate data
    # relative to the interpreter binary instead
    datapath = os.path.dirname(pycompat.sysexecutable)
else:
    datapath = os.path.dirname(pycompat.fsencode(__file__))

i18n.setdatapath(datapath)
1677 1828
1829
def checksignature(func):
    '''wrap a function with code to check for calling errors'''

    def checked(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except TypeError:
            # A traceback depth of 1 means the TypeError came from the call
            # itself (bad signature) rather than from inside func's body.
            if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
                raise error.SignatureError
            raise

    return checked
1842
1689 1843
1690 # a whilelist of known filesystems where hardlink works reliably 1844 # a whilelist of known filesystems where hardlink works reliably
1691 _hardlinkfswhitelist = { 1845 _hardlinkfswhitelist = {
1692 'apfs', 1846 'apfs',
1693 'btrfs', 1847 'btrfs',
1701 'tmpfs', 1855 'tmpfs',
1702 'ufs', 1856 'ufs',
1703 'xfs', 1857 'xfs',
1704 'zfs', 1858 'zfs',
1705 } 1859 }
1860
1706 1861
1707 def copyfile(src, dest, hardlink=False, copystat=False, checkambig=False): 1862 def copyfile(src, dest, hardlink=False, copystat=False, checkambig=False):
1708 '''copy a file, preserving mode and optionally other stat info like 1863 '''copy a file, preserving mode and optionally other stat info like
1709 atime/mtime 1864 atime/mtime
1710 1865
1732 if hardlink: 1887 if hardlink:
1733 try: 1888 try:
1734 oslink(src, dest) 1889 oslink(src, dest)
1735 return 1890 return
1736 except (IOError, OSError): 1891 except (IOError, OSError):
1737 pass # fall back to normal copy 1892 pass # fall back to normal copy
1738 if os.path.islink(src): 1893 if os.path.islink(src):
1739 os.symlink(os.readlink(src), dest) 1894 os.symlink(os.readlink(src), dest)
1740 # copytime is ignored for symlinks, but in general copytime isn't needed 1895 # copytime is ignored for symlinks, but in general copytime isn't needed
1741 # for them anyway 1896 # for them anyway
1742 else: 1897 else:
1750 if oldstat and oldstat.stat: 1905 if oldstat and oldstat.stat:
1751 newstat = filestat.frompath(dest) 1906 newstat = filestat.frompath(dest)
1752 if newstat.isambig(oldstat): 1907 if newstat.isambig(oldstat):
1753 # stat of copied file is ambiguous to original one 1908 # stat of copied file is ambiguous to original one
1754 advanced = ( 1909 advanced = (
1755 oldstat.stat[stat.ST_MTIME] + 1) & 0x7fffffff 1910 oldstat.stat[stat.ST_MTIME] + 1
1911 ) & 0x7FFFFFFF
1756 os.utime(dest, (advanced, advanced)) 1912 os.utime(dest, (advanced, advanced))
1757 except shutil.Error as inst: 1913 except shutil.Error as inst:
1758 raise error.Abort(str(inst)) 1914 raise error.Abort(str(inst))
1759 1915
1916
1760 def copyfiles(src, dst, hardlink=None, progress=None): 1917 def copyfiles(src, dst, hardlink=None, progress=None):
1761 """Copy a directory tree using hardlinks if possible.""" 1918 """Copy a directory tree using hardlinks if possible."""
1762 num = 0 1919 num = 0
1763 1920
1764 def settopic(): 1921 def settopic():
1765 if progress: 1922 if progress:
1766 progress.topic = _('linking') if hardlink else _('copying') 1923 progress.topic = _('linking') if hardlink else _('copying')
1767 1924
1768 if os.path.isdir(src): 1925 if os.path.isdir(src):
1769 if hardlink is None: 1926 if hardlink is None:
1770 hardlink = (os.stat(src).st_dev == 1927 hardlink = (
1771 os.stat(os.path.dirname(dst)).st_dev) 1928 os.stat(src).st_dev == os.stat(os.path.dirname(dst)).st_dev
1929 )
1772 settopic() 1930 settopic()
1773 os.mkdir(dst) 1931 os.mkdir(dst)
1774 for name, kind in listdir(src): 1932 for name, kind in listdir(src):
1775 srcname = os.path.join(src, name) 1933 srcname = os.path.join(src, name)
1776 dstname = os.path.join(dst, name) 1934 dstname = os.path.join(dst, name)
1777 hardlink, n = copyfiles(srcname, dstname, hardlink, progress) 1935 hardlink, n = copyfiles(srcname, dstname, hardlink, progress)
1778 num += n 1936 num += n
1779 else: 1937 else:
1780 if hardlink is None: 1938 if hardlink is None:
1781 hardlink = (os.stat(os.path.dirname(src)).st_dev == 1939 hardlink = (
1782 os.stat(os.path.dirname(dst)).st_dev) 1940 os.stat(os.path.dirname(src)).st_dev
1941 == os.stat(os.path.dirname(dst)).st_dev
1942 )
1783 settopic() 1943 settopic()
1784 1944
1785 if hardlink: 1945 if hardlink:
1786 try: 1946 try:
1787 oslink(src, dst) 1947 oslink(src, dst)
1794 if progress: 1954 if progress:
1795 progress.increment() 1955 progress.increment()
1796 1956
1797 return hardlink, num 1957 return hardlink, num
1798 1958
1959
# MS-DOS device names reserved on Windows: con/prn/aux/nul plus the nine
# numbered com and lpt ports.
_winreservednames = {'con', 'prn', 'aux', 'nul'} | {
    '%s%d' % (dev, i) for dev in ('com', 'lpt') for i in range(1, 10)
}
# Characters that may not appear in a Windows file name component.
_winreservedchars = ':*?"<>|'
1985
1986
1805 def checkwinfilename(path): 1987 def checkwinfilename(path):
1806 r'''Check that the base-relative path is a valid filename on Windows. 1988 r'''Check that the base-relative path is a valid filename on Windows.
1807 Returns None if the path is ok, or a UI string describing the problem. 1989 Returns None if the path is ok, or a UI string describing the problem.
1808 1990
1809 >>> checkwinfilename(b"just/a/normal/path") 1991 >>> checkwinfilename(b"just/a/normal/path")
1833 for n in path.replace('\\', '/').split('/'): 2015 for n in path.replace('\\', '/').split('/'):
1834 if not n: 2016 if not n:
1835 continue 2017 continue
1836 for c in _filenamebytestr(n): 2018 for c in _filenamebytestr(n):
1837 if c in _winreservedchars: 2019 if c in _winreservedchars:
1838 return _("filename contains '%s', which is reserved " 2020 return (
1839 "on Windows") % c 2021 _("filename contains '%s', which is reserved " "on Windows")
2022 % c
2023 )
1840 if ord(c) <= 31: 2024 if ord(c) <= 31:
1841 return _("filename contains '%s', which is invalid " 2025 return _(
1842 "on Windows") % stringutil.escapestr(c) 2026 "filename contains '%s', which is invalid " "on Windows"
2027 ) % stringutil.escapestr(c)
1843 base = n.split('.')[0] 2028 base = n.split('.')[0]
1844 if base and base.lower() in _winreservednames: 2029 if base and base.lower() in _winreservednames:
1845 return _("filename contains '%s', which is reserved " 2030 return (
1846 "on Windows") % base 2031 _("filename contains '%s', which is reserved " "on Windows")
2032 % base
2033 )
1847 t = n[-1:] 2034 t = n[-1:]
1848 if t in '. ' and n not in '..': 2035 if t in '. ' and n not in '..':
1849 return _("filename ends with '%s', which is not allowed " 2036 return (
1850 "on Windows") % t 2037 _("filename ends with '%s', which is not allowed " "on Windows")
2038 % t
2039 )
2040
1851 2041
1852 if pycompat.iswindows: 2042 if pycompat.iswindows:
1853 checkosfilename = checkwinfilename 2043 checkosfilename = checkwinfilename
1854 timer = time.clock 2044 timer = time.clock
1855 else: 2045 else:
1856 checkosfilename = platform.checkosfilename 2046 checkosfilename = platform.checkosfilename
1857 timer = time.time 2047 timer = time.time
1858 2048
1859 if safehasattr(time, "perf_counter"): 2049 if safehasattr(time, "perf_counter"):
1860 timer = time.perf_counter 2050 timer = time.perf_counter
2051
1861 2052
1862 def makelock(info, pathname): 2053 def makelock(info, pathname):
1863 """Create a lock file atomically if possible 2054 """Create a lock file atomically if possible
1864 2055
1865 This may leave a stale lock file if symlink isn't supported and signal 2056 This may leave a stale lock file if symlink isn't supported and signal
1868 try: 2059 try:
1869 return os.symlink(info, pathname) 2060 return os.symlink(info, pathname)
1870 except OSError as why: 2061 except OSError as why:
1871 if why.errno == errno.EEXIST: 2062 if why.errno == errno.EEXIST:
1872 raise 2063 raise
1873 except AttributeError: # no symlink in os 2064 except AttributeError: # no symlink in os
1874 pass 2065 pass
1875 2066
1876 flags = os.O_CREAT | os.O_WRONLY | os.O_EXCL | getattr(os, 'O_BINARY', 0) 2067 flags = os.O_CREAT | os.O_WRONLY | os.O_EXCL | getattr(os, 'O_BINARY', 0)
1877 ld = os.open(pathname, flags) 2068 ld = os.open(pathname, flags)
1878 os.write(ld, info) 2069 os.write(ld, info)
1879 os.close(ld) 2070 os.close(ld)
2071
1880 2072
def readlock(pathname):
    """Read lock contents at ``pathname`` (a symlink, or a plain file)."""
    try:
        return readlink(pathname)
    except OSError as err:
        # EINVAL/ENOSYS: not a symlink, or symlinks unsupported here;
        # fall through to reading the file body.
        if err.errno not in (errno.EINVAL, errno.ENOSYS):
            raise
    except AttributeError:  # no symlink in os
        pass
    with posixfile(pathname, 'rb') as fp:
        return fp.read()
2083
1891 2084
def fstat(fp):
    '''stat file object that may not have fileno method.'''
    try:
        return os.fstat(fp.fileno())
    except AttributeError:
        # Not backed by a real descriptor; stat by the object's name.
        return os.stat(fp.name)
1898 2091
2092
1899 # File system features 2093 # File system features
2094
1900 2095
1901 def fscasesensitive(path): 2096 def fscasesensitive(path):
1902 """ 2097 """
1903 Return true if the given path is on a case-sensitive filesystem 2098 Return true if the given path is on a case-sensitive filesystem
1904 2099
1909 d, b = os.path.split(path) 2104 d, b = os.path.split(path)
1910 b2 = b.upper() 2105 b2 = b.upper()
1911 if b == b2: 2106 if b == b2:
1912 b2 = b.lower() 2107 b2 = b.lower()
1913 if b == b2: 2108 if b == b2:
1914 return True # no evidence against case sensitivity 2109 return True # no evidence against case sensitivity
1915 p2 = os.path.join(d, b2) 2110 p2 = os.path.join(d, b2)
1916 try: 2111 try:
1917 s2 = os.lstat(p2) 2112 s2 = os.lstat(p2)
1918 if s2 == s1: 2113 if s2 == s1:
1919 return False 2114 return False
1920 return True 2115 return True
1921 except OSError: 2116 except OSError:
1922 return True 2117 return True
1923 2118
2119
try:
    import re2

    # re2 importable: leave _re2 as None so it gets probed lazily later.
    _re2 = None
except ImportError:
    _re2 = False
2126
1929 2127
1930 class _re(object): 2128 class _re(object):
1931 def _checkre2(self): 2129 def _checkre2(self):
1932 global _re2 2130 global _re2
1933 try: 2131 try:
1968 if _re2: 2166 if _re2:
1969 return re2.escape 2167 return re2.escape
1970 else: 2168 else:
1971 return remod.escape 2169 return remod.escape
1972 2170
2171
1973 re = _re() 2172 re = _re()
1974 2173
1975 _fspathcache = {} 2174 _fspathcache = {}
2175
2176
1976 def fspath(name, root): 2177 def fspath(name, root):
1977 '''Get name in the case stored in the filesystem 2178 '''Get name in the case stored in the filesystem
1978 2179
1979 The name should be relative to root, and be normcase-ed for efficiency. 2180 The name should be relative to root, and be normcase-ed for efficiency.
1980 2181
1981 Note that this function is unnecessary, and should not be 2182 Note that this function is unnecessary, and should not be
1982 called, for case-sensitive filesystems (simply because it's expensive). 2183 called, for case-sensitive filesystems (simply because it's expensive).
1983 2184
1984 The root should be normcase-ed, too. 2185 The root should be normcase-ed, too.
1985 ''' 2186 '''
2187
1986 def _makefspathcacheentry(dir): 2188 def _makefspathcacheentry(dir):
1987 return dict((normcase(n), n) for n in os.listdir(dir)) 2189 return dict((normcase(n), n) for n in os.listdir(dir))
1988 2190
1989 seps = pycompat.ossep 2191 seps = pycompat.ossep
1990 if pycompat.osaltsep: 2192 if pycompat.osaltsep:
1991 seps = seps + pycompat.osaltsep 2193 seps = seps + pycompat.osaltsep
1992 # Protect backslashes. This gets silly very quickly. 2194 # Protect backslashes. This gets silly very quickly.
1993 seps.replace('\\','\\\\') 2195 seps.replace('\\', '\\\\')
1994 pattern = remod.compile(br'([^%s]+)|([%s]+)' % (seps, seps)) 2196 pattern = remod.compile(br'([^%s]+)|([%s]+)' % (seps, seps))
1995 dir = os.path.normpath(root) 2197 dir = os.path.normpath(root)
1996 result = [] 2198 result = []
1997 for part, sep in pattern.findall(name): 2199 for part, sep in pattern.findall(name):
1998 if sep: 2200 if sep:
2013 result.append(found or part) 2215 result.append(found or part)
2014 dir = os.path.join(dir, part) 2216 dir = os.path.join(dir, part)
2015 2217
2016 return ''.join(result) 2218 return ''.join(result)
2017 2219
2220
2018 def checknlink(testfile): 2221 def checknlink(testfile):
2019 '''check whether hardlink count reporting works properly''' 2222 '''check whether hardlink count reporting works properly'''
2020 2223
2021 # testfile may be open, so we need a separate file for checking to 2224 # testfile may be open, so we need a separate file for checking to
2022 # work around issue2543 (or testfile may get lost on Samba shares) 2225 # work around issue2543 (or testfile may get lost on Samba shares)
2023 f1, f2, fp = None, None, None 2226 f1, f2, fp = None, None, None
2024 try: 2227 try:
2025 fd, f1 = pycompat.mkstemp(prefix='.%s-' % os.path.basename(testfile), 2228 fd, f1 = pycompat.mkstemp(
2026 suffix='1~', dir=os.path.dirname(testfile)) 2229 prefix='.%s-' % os.path.basename(testfile),
2230 suffix='1~',
2231 dir=os.path.dirname(testfile),
2232 )
2027 os.close(fd) 2233 os.close(fd)
2028 f2 = '%s2~' % f1[:-2] 2234 f2 = '%s2~' % f1[:-2]
2029 2235
2030 oslink(f1, f2) 2236 oslink(f1, f2)
2031 # nlinks() may behave differently for files on Windows shares if 2237 # nlinks() may behave differently for files on Windows shares if
2042 if f is not None: 2248 if f is not None:
2043 os.unlink(f) 2249 os.unlink(f)
2044 except OSError: 2250 except OSError:
2045 pass 2251 pass
2046 2252
2253
def endswithsep(path):
    '''Check path ends with os.sep or os.altsep.'''
    if path.endswith(pycompat.ossep):
        return True
    # On platforms without an altsep this short-circuits to a falsy value,
    # matching the original or-expression exactly.
    return pycompat.osaltsep and path.endswith(pycompat.osaltsep)
2261
2051 2262
def splitpath(path):
    '''Split ``path`` on os.sep only.

    os.altsep is deliberately ignored: this is just a named alternative
    to ``path.split(os.sep)``. Run os.path.normpath() beforehand if
    normalization is needed.'''
    return path.split(pycompat.ossep)
2270
2059 2271
2060 def mktempcopy(name, emptyok=False, createmode=None, enforcewritable=False): 2272 def mktempcopy(name, emptyok=False, createmode=None, enforcewritable=False):
2061 """Create a temporary file with the same contents from name 2273 """Create a temporary file with the same contents from name
2062 2274
2063 The permission bits are copied from the original file. 2275 The permission bits are copied from the original file.
2089 ofp = posixfile(temp, "wb") 2301 ofp = posixfile(temp, "wb")
2090 for chunk in filechunkiter(ifp): 2302 for chunk in filechunkiter(ifp):
2091 ofp.write(chunk) 2303 ofp.write(chunk)
2092 ifp.close() 2304 ifp.close()
2093 ofp.close() 2305 ofp.close()
2094 except: # re-raises 2306 except: # re-raises
2095 try: 2307 try:
2096 os.unlink(temp) 2308 os.unlink(temp)
2097 except OSError: 2309 except OSError:
2098 pass 2310 pass
2099 raise 2311 raise
2100 return temp 2312 return temp
2101 2313
2314
2102 class filestat(object): 2315 class filestat(object):
2103 """help to exactly detect change of a file 2316 """help to exactly detect change of a file
2104 2317
2105 'stat' attribute is result of 'os.stat()' if specified 'path' 2318 'stat' attribute is result of 'os.stat()' if specified 'path'
2106 exists. Otherwise, it is None. This can avoid preparative 2319 exists. Otherwise, it is None. This can avoid preparative
2107 'exists()' examination on client side of this class. 2320 'exists()' examination on client side of this class.
2108 """ 2321 """
2322
2109 def __init__(self, stat): 2323 def __init__(self, stat):
2110 self.stat = stat 2324 self.stat = stat
2111 2325
2112 @classmethod 2326 @classmethod
2113 def frompath(cls, path): 2327 def frompath(cls, path):
2129 def __eq__(self, old): 2343 def __eq__(self, old):
2130 try: 2344 try:
2131 # if ambiguity between stat of new and old file is 2345 # if ambiguity between stat of new and old file is
2132 # avoided, comparison of size, ctime and mtime is enough 2346 # avoided, comparison of size, ctime and mtime is enough
2133 # to exactly detect change of a file regardless of platform 2347 # to exactly detect change of a file regardless of platform
2134 return (self.stat.st_size == old.stat.st_size and 2348 return (
2135 self.stat[stat.ST_CTIME] == old.stat[stat.ST_CTIME] and 2349 self.stat.st_size == old.stat.st_size
2136 self.stat[stat.ST_MTIME] == old.stat[stat.ST_MTIME]) 2350 and self.stat[stat.ST_CTIME] == old.stat[stat.ST_CTIME]
2351 and self.stat[stat.ST_MTIME] == old.stat[stat.ST_MTIME]
2352 )
2137 except AttributeError: 2353 except AttributeError:
2138 pass 2354 pass
2139 try: 2355 try:
2140 return self.stat is None and old.stat is None 2356 return self.stat is None and old.stat is None
2141 except AttributeError: 2357 except AttributeError:
2170 2386
2171 Advancing mtime "if isambig(oldstat)" ensures "S[n-1].mtime != 2387 Advancing mtime "if isambig(oldstat)" ensures "S[n-1].mtime !=
2172 S[n].mtime", even if size of a file isn't changed. 2388 S[n].mtime", even if size of a file isn't changed.
2173 """ 2389 """
2174 try: 2390 try:
2175 return (self.stat[stat.ST_CTIME] == old.stat[stat.ST_CTIME]) 2391 return self.stat[stat.ST_CTIME] == old.stat[stat.ST_CTIME]
2176 except AttributeError: 2392 except AttributeError:
2177 return False 2393 return False
2178 2394
2179 def avoidambig(self, path, old): 2395 def avoidambig(self, path, old):
2180 """Change file stat of specified path to avoid ambiguity 2396 """Change file stat of specified path to avoid ambiguity
2185 appropriate privileges for 'path'. This returns False in this 2401 appropriate privileges for 'path'. This returns False in this
2186 case. 2402 case.
2187 2403
2188 Otherwise, this returns True, as "ambiguity is avoided". 2404 Otherwise, this returns True, as "ambiguity is avoided".
2189 """ 2405 """
2190 advanced = (old.stat[stat.ST_MTIME] + 1) & 0x7fffffff 2406 advanced = (old.stat[stat.ST_MTIME] + 1) & 0x7FFFFFFF
2191 try: 2407 try:
2192 os.utime(path, (advanced, advanced)) 2408 os.utime(path, (advanced, advanced))
2193 except OSError as inst: 2409 except OSError as inst:
2194 if inst.errno == errno.EPERM: 2410 if inst.errno == errno.EPERM:
2195 # utime() on the file created by another user causes EPERM, 2411 # utime() on the file created by another user causes EPERM,
2199 return True 2415 return True
2200 2416
    def __ne__(self, other):
        # Python 2 does not derive __ne__ from __eq__, so spell it out
        # explicitly to keep '!=' consistent with '=='.
        return not self == other
2203 2419
2420
2204 class atomictempfile(object): 2421 class atomictempfile(object):
2205 '''writable file object that atomically updates a file 2422 '''writable file object that atomically updates a file
2206 2423
2207 All writes will go to a temporary copy of the original file. Call 2424 All writes will go to a temporary copy of the original file. Call
2208 close() when you are done writing, and atomictempfile will rename 2425 close() when you are done writing, and atomictempfile will rename
2212 2429
2213 checkambig argument of constructor is used with filestat, and is 2430 checkambig argument of constructor is used with filestat, and is
2214 useful only if target file is guarded by any lock (e.g. repo.lock 2431 useful only if target file is guarded by any lock (e.g. repo.lock
2215 or repo.wlock). 2432 or repo.wlock).
2216 ''' 2433 '''
2434
2217 def __init__(self, name, mode='w+b', createmode=None, checkambig=False): 2435 def __init__(self, name, mode='w+b', createmode=None, checkambig=False):
2218 self.__name = name # permanent name 2436 self.__name = name # permanent name
2219 self._tempname = mktempcopy(name, emptyok=('w' in mode), 2437 self._tempname = mktempcopy(
2220 createmode=createmode, 2438 name,
2221 enforcewritable=('w' in mode)) 2439 emptyok=('w' in mode),
2440 createmode=createmode,
2441 enforcewritable=('w' in mode),
2442 )
2222 2443
2223 self._fp = posixfile(self._tempname, mode) 2444 self._fp = posixfile(self._tempname, mode)
2224 self._checkambig = checkambig 2445 self._checkambig = checkambig
2225 2446
2226 # delegated methods 2447 # delegated methods
2238 if oldstat and oldstat.stat: 2459 if oldstat and oldstat.stat:
2239 rename(self._tempname, filename) 2460 rename(self._tempname, filename)
2240 newstat = filestat.frompath(filename) 2461 newstat = filestat.frompath(filename)
2241 if newstat.isambig(oldstat): 2462 if newstat.isambig(oldstat):
2242 # stat of changed file is ambiguous to original one 2463 # stat of changed file is ambiguous to original one
2243 advanced = (oldstat.stat[stat.ST_MTIME] + 1) & 0x7fffffff 2464 advanced = (oldstat.stat[stat.ST_MTIME] + 1) & 0x7FFFFFFF
2244 os.utime(filename, (advanced, advanced)) 2465 os.utime(filename, (advanced, advanced))
2245 else: 2466 else:
2246 rename(self._tempname, filename) 2467 rename(self._tempname, filename)
2247 2468
2248 def discard(self): 2469 def discard(self):
2252 except OSError: 2473 except OSError:
2253 pass 2474 pass
2254 self._fp.close() 2475 self._fp.close()
2255 2476
    def __del__(self):
        # Drop the temporary file on garbage collection; the guard keeps
        # this safe when __init__ failed before creating '_fp'.
        if safehasattr(self, '_fp'):  # constructor actually did something
            self.discard()
2259 2480
    def __enter__(self):
        # context-manager support: "with atomictempfile(...) as fp"
        return self
2262 2483
2263 def __exit__(self, exctype, excvalue, traceback): 2484 def __exit__(self, exctype, excvalue, traceback):
2264 if exctype is not None: 2485 if exctype is not None:
2265 self.discard() 2486 self.discard()
2266 else: 2487 else:
2267 self.close() 2488 self.close()
2489
2268 2490
2269 def unlinkpath(f, ignoremissing=False, rmdir=True): 2491 def unlinkpath(f, ignoremissing=False, rmdir=True):
2270 """unlink and remove the directory if it is empty""" 2492 """unlink and remove the directory if it is empty"""
2271 if ignoremissing: 2493 if ignoremissing:
2272 tryunlink(f) 2494 tryunlink(f)
2277 try: 2499 try:
2278 removedirs(os.path.dirname(f)) 2500 removedirs(os.path.dirname(f))
2279 except OSError: 2501 except OSError:
2280 pass 2502 pass
2281 2503
2504
def tryunlink(f):
    """Remove *f*, silently ignoring a missing file (ENOENT)."""
    try:
        unlink(f)
    except OSError as err:
        # the file already being gone is the desired end state; anything
        # else (EACCES, EISDIR, ...) is still an error
        if err.errno == errno.ENOENT:
            return
        raise
2512
2289 2513
2290 def makedirs(name, mode=None, notindexed=False): 2514 def makedirs(name, mode=None, notindexed=False):
2291 """recursive directory creation with parent mode inheritance 2515 """recursive directory creation with parent mode inheritance
2292 2516
2293 Newly created directories are marked as "not to be indexed by 2517 Newly created directories are marked as "not to be indexed by
2313 return 2537 return
2314 raise 2538 raise
2315 if mode is not None: 2539 if mode is not None:
2316 os.chmod(name, mode) 2540 os.chmod(name, mode)
2317 2541
2542
def readfile(path):
    """Return the entire content of the file at *path* as bytes."""
    fp = open(path, 'rb')
    try:
        return fp.read()
    finally:
        fp.close()
2321 2546
2547
def writefile(path, text):
    """Replace the content of the file at *path* with the bytes *text*."""
    fp = open(path, 'wb')
    try:
        fp.write(text)
    finally:
        fp.close()
2325 2551
2552
def appendfile(path, text):
    """Append the bytes *text* to the file at *path*."""
    fp = open(path, 'ab')
    try:
        fp.write(text)
    finally:
        fp.close()
2329 2556
2557
2330 class chunkbuffer(object): 2558 class chunkbuffer(object):
2331 """Allow arbitrary sized chunks of data to be efficiently read from an 2559 """Allow arbitrary sized chunks of data to be efficiently read from an
2332 iterator over chunks of arbitrary size.""" 2560 iterator over chunks of arbitrary size."""
2333 2561
2334 def __init__(self, in_iter): 2562 def __init__(self, in_iter):
2335 """in_iter is the iterator that's iterating over the input chunks.""" 2563 """in_iter is the iterator that's iterating over the input chunks."""
2564
2336 def splitbig(chunks): 2565 def splitbig(chunks):
2337 for chunk in chunks: 2566 for chunk in chunks:
2338 if len(chunk) > 2**20: 2567 if len(chunk) > 2 ** 20:
2339 pos = 0 2568 pos = 0
2340 while pos < len(chunk): 2569 while pos < len(chunk):
2341 end = pos + 2 ** 18 2570 end = pos + 2 ** 18
2342 yield chunk[pos:end] 2571 yield chunk[pos:end]
2343 pos = end 2572 pos = end
2344 else: 2573 else:
2345 yield chunk 2574 yield chunk
2575
2346 self.iter = splitbig(in_iter) 2576 self.iter = splitbig(in_iter)
2347 self._queue = collections.deque() 2577 self._queue = collections.deque()
2348 self._chunkoffset = 0 2578 self._chunkoffset = 0
2349 2579
2350 def read(self, l=None): 2580 def read(self, l=None):
2359 buf = [] 2589 buf = []
2360 queue = self._queue 2590 queue = self._queue
2361 while left > 0: 2591 while left > 0:
2362 # refill the queue 2592 # refill the queue
2363 if not queue: 2593 if not queue:
2364 target = 2**18 2594 target = 2 ** 18
2365 for chunk in self.iter: 2595 for chunk in self.iter:
2366 queue.append(chunk) 2596 queue.append(chunk)
2367 target -= len(chunk) 2597 target -= len(chunk)
2368 if target <= 0: 2598 if target <= 0:
2369 break 2599 break
2399 buf.append(chunk[offset:]) 2629 buf.append(chunk[offset:])
2400 self._chunkoffset = 0 2630 self._chunkoffset = 0
2401 2631
2402 # Partial chunk needed. 2632 # Partial chunk needed.
2403 else: 2633 else:
2404 buf.append(chunk[offset:offset + left]) 2634 buf.append(chunk[offset : offset + left])
2405 self._chunkoffset += left 2635 self._chunkoffset += left
2406 left -= chunkremaining 2636 left -= chunkremaining
2407 2637
2408 return ''.join(buf) 2638 return ''.join(buf)
2639
2409 2640
2410 def filechunkiter(f, size=131072, limit=None): 2641 def filechunkiter(f, size=131072, limit=None):
2411 """Create a generator that produces the data in the file size 2642 """Create a generator that produces the data in the file size
2412 (default 131072) bytes at a time, up to optional limit (default is 2643 (default 131072) bytes at a time, up to optional limit (default is
2413 to read all data). Chunks may be less than size bytes if the 2644 to read all data). Chunks may be less than size bytes if the
2426 break 2657 break
2427 if limit: 2658 if limit:
2428 limit -= len(s) 2659 limit -= len(s)
2429 yield s 2660 yield s
2430 2661
2662
2431 class cappedreader(object): 2663 class cappedreader(object):
2432 """A file object proxy that allows reading up to N bytes. 2664 """A file object proxy that allows reading up to N bytes.
2433 2665
2434 Given a source file object, instances of this type allow reading up to 2666 Given a source file object, instances of this type allow reading up to
2435 N bytes from that source file object. Attempts to read past the allowed 2667 N bytes from that source file object. Attempts to read past the allowed
2437 2669
2438 It is assumed that I/O is not performed on the original file object 2670 It is assumed that I/O is not performed on the original file object
2439 in addition to I/O that is performed by this instance. If there is, 2671 in addition to I/O that is performed by this instance. If there is,
2440 state tracking will get out of sync and unexpected results will ensue. 2672 state tracking will get out of sync and unexpected results will ensue.
2441 """ 2673 """
2674
2442 def __init__(self, fh, limit): 2675 def __init__(self, fh, limit):
2443 """Allow reading up to <limit> bytes from <fh>.""" 2676 """Allow reading up to <limit> bytes from <fh>."""
2444 self._fh = fh 2677 self._fh = fh
2445 self._left = limit 2678 self._left = limit
2446 2679
2460 def readinto(self, b): 2693 def readinto(self, b):
2461 res = self.read(len(b)) 2694 res = self.read(len(b))
2462 if res is None: 2695 if res is None:
2463 return None 2696 return None
2464 2697
2465 b[0:len(res)] = res 2698 b[0 : len(res)] = res
2466 return len(res) 2699 return len(res)
2700
2467 2701
2468 def unitcountfn(*unittable): 2702 def unitcountfn(*unittable):
2469 '''return a function that renders a readable count of some quantity''' 2703 '''return a function that renders a readable count of some quantity'''
2470 2704
2471 def go(count): 2705 def go(count):
2473 if abs(count) >= divisor * multiplier: 2707 if abs(count) >= divisor * multiplier:
2474 return format % (count / float(divisor)) 2708 return format % (count / float(divisor))
2475 return unittable[-1][2] % count 2709 return unittable[-1][2] % count
2476 2710
2477 return go 2711 return go
2712
2478 2713
2479 def processlinerange(fromline, toline): 2714 def processlinerange(fromline, toline):
2480 """Check that linerange <fromline>:<toline> makes sense and return a 2715 """Check that linerange <fromline>:<toline> makes sense and return a
2481 0-based range. 2716 0-based range.
2482 2717
2495 raise error.ParseError(_("line range must be positive")) 2730 raise error.ParseError(_("line range must be positive"))
2496 if fromline < 1: 2731 if fromline < 1:
2497 raise error.ParseError(_("fromline must be strictly positive")) 2732 raise error.ParseError(_("fromline must be strictly positive"))
2498 return fromline - 1, toline 2733 return fromline - 1, toline
2499 2734
2735
2500 bytecount = unitcountfn( 2736 bytecount = unitcountfn(
2501 (100, 1 << 30, _('%.0f GB')), 2737 (100, 1 << 30, _('%.0f GB')),
2502 (10, 1 << 30, _('%.1f GB')), 2738 (10, 1 << 30, _('%.1f GB')),
2503 (1, 1 << 30, _('%.2f GB')), 2739 (1, 1 << 30, _('%.2f GB')),
2504 (100, 1 << 20, _('%.0f MB')), 2740 (100, 1 << 20, _('%.0f MB')),
2506 (1, 1 << 20, _('%.2f MB')), 2742 (1, 1 << 20, _('%.2f MB')),
2507 (100, 1 << 10, _('%.0f KB')), 2743 (100, 1 << 10, _('%.0f KB')),
2508 (10, 1 << 10, _('%.1f KB')), 2744 (10, 1 << 10, _('%.1f KB')),
2509 (1, 1 << 10, _('%.2f KB')), 2745 (1, 1 << 10, _('%.2f KB')),
2510 (1, 1, _('%.0f bytes')), 2746 (1, 1, _('%.0f bytes')),
2511 ) 2747 )
2748
2512 2749
2513 class transformingwriter(object): 2750 class transformingwriter(object):
2514 """Writable file wrapper to transform data by function""" 2751 """Writable file wrapper to transform data by function"""
2515 2752
2516 def __init__(self, fp, encode): 2753 def __init__(self, fp, encode):
    def flush(self):
        # no transformation involved; just pass through to the wrapped fp
        self._fp.flush()
2525 2762
    def write(self, data):
        # apply the encode transform, then delegate to the wrapped file
        return self._fp.write(self._encode(data))
2765
2528 2766
2529 # Matches a single EOL which can either be a CRLF where repeated CR 2767 # Matches a single EOL which can either be a CRLF where repeated CR
2530 # are removed or a LF. We do not care about old Macintosh files, so a 2768 # are removed or a LF. We do not care about old Macintosh files, so a
2531 # stray CR is an error. 2769 # stray CR is an error.
2532 _eolre = remod.compile(br'\r*\n') 2770 _eolre = remod.compile(br'\r*\n')
2533 2771
2772
def tolf(s):
    """Normalize all EOLs in *s* to LF (runs of CR before LF are dropped)."""
    return _eolre.sub('\n', s)
2536 2775
2776
def tocrlf(s):
    """Normalize all EOLs in *s* to CRLF."""
    return _eolre.sub('\r\n', s)
2539 2779
2780
def _crlfwriter(fp):
    """Wrap *fp* so that written data has its EOLs converted to CRLF."""
    return transformingwriter(fp, tocrlf)
2783
2542 2784
# Select EOL conversion helpers for the host platform: when the OS line
# separator is CRLF (Windows) native text needs real conversion,
# otherwise all three helpers are no-ops.
if pycompat.oslinesep == '\r\n':
    tonativeeol = tocrlf
    fromnativeeol = tolf
    nativeeolwriter = _crlfwriter
else:
    tonativeeol = pycompat.identity
    fromnativeeol = pycompat.identity
    nativeeolwriter = pycompat.identity
2551 2793
2552 if (pyplatform.python_implementation() == 'CPython' and 2794 if pyplatform.python_implementation() == 'CPython' and sys.version_info < (
2553 sys.version_info < (3, 0)): 2795 3,
2796 0,
2797 ):
2554 # There is an issue in CPython that some IO methods do not handle EINTR 2798 # There is an issue in CPython that some IO methods do not handle EINTR
2555 # correctly. The following table shows what CPython version (and functions) 2799 # correctly. The following table shows what CPython version (and functions)
2556 # are affected (buggy: has the EINTR bug, okay: otherwise): 2800 # are affected (buggy: has the EINTR bug, okay: otherwise):
2557 # 2801 #
2558 # | < 2.7.4 | 2.7.4 to 2.7.12 | >= 3.0 2802 # | < 2.7.4 | 2.7.4 to 2.7.12 | >= 3.0
2577 # to minimize the performance impact. 2821 # to minimize the performance impact.
2578 if sys.version_info >= (2, 7, 4): 2822 if sys.version_info >= (2, 7, 4):
2579 # fp.readline deals with EINTR correctly, use it as a workaround. 2823 # fp.readline deals with EINTR correctly, use it as a workaround.
2580 def _safeiterfile(fp): 2824 def _safeiterfile(fp):
2581 return iter(fp.readline, '') 2825 return iter(fp.readline, '')
2826
2582 else: 2827 else:
2583 # fp.read* are broken too, manually deal with EINTR in a stupid way. 2828 # fp.read* are broken too, manually deal with EINTR in a stupid way.
2584 # note: this may block longer than necessary because of bufsize. 2829 # note: this may block longer than necessary because of bufsize.
2585 def _safeiterfile(fp, bufsize=4096): 2830 def _safeiterfile(fp, bufsize=4096):
2586 fd = fp.fileno() 2831 fd = fp.fileno()
2614 fastpath = stat.S_ISREG(os.fstat(fp.fileno()).st_mode) 2859 fastpath = stat.S_ISREG(os.fstat(fp.fileno()).st_mode)
2615 if fastpath: 2860 if fastpath:
2616 return fp 2861 return fp
2617 else: 2862 else:
2618 return _safeiterfile(fp) 2863 return _safeiterfile(fp)
2864
2865
2619 else: 2866 else:
2620 # PyPy and CPython 3 do not have the EINTR issue thus no workaround needed. 2867 # PyPy and CPython 3 do not have the EINTR issue thus no workaround needed.
2621 def iterfile(fp): 2868 def iterfile(fp):
2622 return fp 2869 return fp
2623 2870
2871
def iterlines(iterator):
    """Yield every individual line contained in an iterable of chunks."""
    for block in iterator:
        for ln in block.splitlines():
            yield ln
2628 2876
2877
def expandpath(path):
    """Expand environment variables, then '~' user shortcuts, in *path*."""
    withvars = os.path.expandvars(path)
    return os.path.expanduser(withvars)
2880
2631 2881
2632 def interpolate(prefix, mapping, s, fn=None, escape_prefix=False): 2882 def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
2633 """Return the result of interpolating items in the mapping into string s. 2883 """Return the result of interpolating items in the mapping into string s.
2634 2884
2635 prefix is a single character string, or a two character string with 2885 prefix is a single character string, or a two character string with
2652 prefix_char = prefix 2902 prefix_char = prefix
2653 mapping[prefix_char] = prefix_char 2903 mapping[prefix_char] = prefix_char
2654 r = remod.compile(br'%s(%s)' % (prefix, patterns)) 2904 r = remod.compile(br'%s(%s)' % (prefix, patterns))
2655 return r.sub(lambda x: fn(mapping[x.group()[1:]]), s) 2905 return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
2656 2906
2907
2657 def getport(port): 2908 def getport(port):
2658 """Return the port for a given network service. 2909 """Return the port for a given network service.
2659 2910
2660 If port is an integer, it's returned as is. If it's a string, it's 2911 If port is an integer, it's returned as is. If it's a string, it's
2661 looked up using socket.getservbyname(). If there's no matching 2912 looked up using socket.getservbyname(). If there's no matching
2667 pass 2918 pass
2668 2919
2669 try: 2920 try:
2670 return socket.getservbyname(pycompat.sysstr(port)) 2921 return socket.getservbyname(pycompat.sysstr(port))
2671 except socket.error: 2922 except socket.error:
2672 raise error.Abort(_("no port number associated with service '%s'") 2923 raise error.Abort(
2673 % port) 2924 _("no port number associated with service '%s'") % port
2925 )
2926
2674 2927
2675 class url(object): 2928 class url(object):
2676 r"""Reliable URL parser. 2929 r"""Reliable URL parser.
2677 2930
2678 This parses URLs and provides attributes for the following 2931 This parses URLs and provides attributes for the following
2820 self.user, self.passwd = self.user.split(':', 1) 3073 self.user, self.passwd = self.user.split(':', 1)
2821 if not self.host: 3074 if not self.host:
2822 self.host = None 3075 self.host = None
2823 3076
2824 # Don't split on colons in IPv6 addresses without ports 3077 # Don't split on colons in IPv6 addresses without ports
2825 if (self.host and ':' in self.host and 3078 if (
2826 not (self.host.startswith('[') and self.host.endswith(']'))): 3079 self.host
3080 and ':' in self.host
3081 and not (self.host.startswith('[') and self.host.endswith(']'))
3082 ):
2827 self._hostport = self.host 3083 self._hostport = self.host
2828 self.host, self.port = self.host.rsplit(':', 1) 3084 self.host, self.port = self.host.rsplit(':', 1)
2829 if not self.host: 3085 if not self.host:
2830 self.host = None 3086 self.host = None
2831 3087
2832 if (self.host and self.scheme == 'file' and 3088 if (
2833 self.host not in ('localhost', '127.0.0.1', '[::1]')): 3089 self.host
3090 and self.scheme == 'file'
3091 and self.host not in ('localhost', '127.0.0.1', '[::1]')
3092 ):
2834 raise error.Abort(_('file:// URLs can only refer to localhost')) 3093 raise error.Abort(_('file:// URLs can only refer to localhost'))
2835 3094
2836 self.path = path 3095 self.path = path
2837 3096
2838 # leave the query string escaped 3097 # leave the query string escaped
2839 for a in ('user', 'passwd', 'host', 'port', 3098 for a in ('user', 'passwd', 'host', 'port', 'path', 'fragment'):
2840 'path', 'fragment'):
2841 v = getattr(self, a) 3099 v = getattr(self, a)
2842 if v is not None: 3100 if v is not None:
2843 setattr(self, a, urlreq.unquote(v)) 3101 setattr(self, a, urlreq.unquote(v))
2844 3102
2845 @encoding.strmethod 3103 @encoding.strmethod
2846 def __repr__(self): 3104 def __repr__(self):
2847 attrs = [] 3105 attrs = []
2848 for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path', 3106 for a in (
2849 'query', 'fragment'): 3107 'scheme',
3108 'user',
3109 'passwd',
3110 'host',
3111 'port',
3112 'path',
3113 'query',
3114 'fragment',
3115 ):
2850 v = getattr(self, a) 3116 v = getattr(self, a)
2851 if v is not None: 3117 if v is not None:
2852 attrs.append('%s: %r' % (a, pycompat.bytestr(v))) 3118 attrs.append('%s: %r' % (a, pycompat.bytestr(v)))
2853 return '<url %s>' % ', '.join(attrs) 3119 return '<url %s>' % ', '.join(attrs)
2854 3120
2895 return s 3161 return s
2896 3162
2897 s = self.scheme + ':' 3163 s = self.scheme + ':'
2898 if self.user or self.passwd or self.host: 3164 if self.user or self.passwd or self.host:
2899 s += '//' 3165 s += '//'
2900 elif self.scheme and (not self.path or self.path.startswith('/') 3166 elif self.scheme and (
2901 or hasdriveletter(self.path)): 3167 not self.path
3168 or self.path.startswith('/')
3169 or hasdriveletter(self.path)
3170 ):
2902 s += '//' 3171 s += '//'
2903 if hasdriveletter(self.path): 3172 if hasdriveletter(self.path):
2904 s += '/' 3173 s += '/'
2905 if self.user: 3174 if self.user:
2906 s += urlreq.quote(self.user, safe=self._safechars) 3175 s += urlreq.quote(self.user, safe=self._safechars)
2942 return (s, None) 3211 return (s, None)
2943 # authinfo[1] is passed to urllib2 password manager, and its 3212 # authinfo[1] is passed to urllib2 password manager, and its
2944 # URIs must not contain credentials. The host is passed in the 3213 # URIs must not contain credentials. The host is passed in the
2945 # URIs list because Python < 2.4.3 uses only that to search for 3214 # URIs list because Python < 2.4.3 uses only that to search for
2946 # a password. 3215 # a password.
2947 return (s, (None, (s, self.host), 3216 return (s, (None, (s, self.host), self.user, self.passwd or ''))
2948 self.user, self.passwd or ''))
2949 3217
2950 def isabs(self): 3218 def isabs(self):
2951 if self.scheme and self.scheme != 'file': 3219 if self.scheme and self.scheme != 'file':
2952 return True # remote URL 3220 return True # remote URL
2953 if hasdriveletter(self.path): 3221 if hasdriveletter(self.path):
2954 return True # absolute for our purposes - can't be joined() 3222 return True # absolute for our purposes - can't be joined()
2955 if self.path.startswith(br'\\'): 3223 if self.path.startswith(br'\\'):
2956 return True # Windows UNC path 3224 return True # Windows UNC path
2957 if self.path.startswith('/'): 3225 if self.path.startswith('/'):
2958 return True # POSIX-style 3226 return True # POSIX-style
2959 return False 3227 return False
2960 3228
2961 def localpath(self): 3229 def localpath(self):
2962 if self.scheme == 'file' or self.scheme == 'bundle': 3230 if self.scheme == 'file' or self.scheme == 'bundle':
2963 path = self.path or '/' 3231 path = self.path or '/'
2964 # For Windows, we need to promote hosts containing drive 3232 # For Windows, we need to promote hosts containing drive
2965 # letters to paths with drive letters. 3233 # letters to paths with drive letters.
2966 if hasdriveletter(self._hostport): 3234 if hasdriveletter(self._hostport):
2967 path = self._hostport + '/' + self.path 3235 path = self._hostport + '/' + self.path
2968 elif (self.host is not None and self.path 3236 elif (
2969 and not hasdriveletter(path)): 3237 self.host is not None and self.path and not hasdriveletter(path)
3238 ):
2970 path = '/' + path 3239 path = '/' + path
2971 return path 3240 return path
2972 return self._origpath 3241 return self._origpath
2973 3242
2974 def islocal(self): 3243 def islocal(self):
2975 '''whether localpath will return something that posixfile can open''' 3244 '''whether localpath will return something that posixfile can open'''
2976 return (not self.scheme or self.scheme == 'file' 3245 return (
2977 or self.scheme == 'bundle') 3246 not self.scheme or self.scheme == 'file' or self.scheme == 'bundle'
3247 )
3248
2978 3249
def hasscheme(path):
    """Report whether *path* parses as a URL with an explicit scheme."""
    parsed = url(path)
    return bool(parsed.scheme)
2981 3252
3253
def hasdriveletter(path):
    """Check for a leading Windows drive letter ("c:"-style prefix)."""
    if not path:
        # preserve the falsy input (None, empty) for boolean callers
        return path
    return path[1:2] == ':' and path[0:1].isalpha()
2984 3256
3257
def urllocalpath(path):
    """Return the local filesystem path of *path* parsed as a URL.

    Query and fragment parsing are disabled, so '?' and '#' are treated
    as part of the path rather than as URL syntax.
    """
    u = url(path, parsequery=False, parsefragment=False)
    return u.localpath()
3260
2987 3261
2988 def checksafessh(path): 3262 def checksafessh(path):
2989 """check if a path / url is a potentially unsafe ssh exploit (SEC) 3263 """check if a path / url is a potentially unsafe ssh exploit (SEC)
2990 3264
2991 This is a sanity check for ssh urls. ssh will parse the first item as 3265 This is a sanity check for ssh urls. ssh will parse the first item as
2995 3269
2996 Raises an error.Abort when the url is unsafe. 3270 Raises an error.Abort when the url is unsafe.
2997 """ 3271 """
2998 path = urlreq.unquote(path) 3272 path = urlreq.unquote(path)
2999 if path.startswith('ssh://-') or path.startswith('svn+ssh://-'): 3273 if path.startswith('ssh://-') or path.startswith('svn+ssh://-'):
3000 raise error.Abort(_('potentially unsafe url: %r') % 3274 raise error.Abort(
3001 (pycompat.bytestr(path),)) 3275 _('potentially unsafe url: %r') % (pycompat.bytestr(path),)
3276 )
3277
3002 3278
def hidepassword(u):
    """Return the URL string *u* with any password masked as '***'."""
    parsed = url(u)
    if parsed.passwd:
        parsed.passwd = '***'
    return bytes(parsed)
3009 3285
3286
def removeauth(u):
    """Return the URL string *u* stripped of user and password."""
    parsed = url(u)
    parsed.user = None
    parsed.passwd = None
    return bytes(parsed)
3292
3015 3293
3016 timecount = unitcountfn( 3294 timecount = unitcountfn(
3017 (1, 1e3, _('%.0f s')), 3295 (1, 1e3, _('%.0f s')),
3018 (100, 1, _('%.1f s')), 3296 (100, 1, _('%.1f s')),
3019 (10, 1, _('%.2f s')), 3297 (10, 1, _('%.2f s')),
3025 (10, 0.000001, _('%.2f us')), 3303 (10, 0.000001, _('%.2f us')),
3026 (1, 0.000001, _('%.3f us')), 3304 (1, 0.000001, _('%.3f us')),
3027 (100, 0.000000001, _('%.1f ns')), 3305 (100, 0.000000001, _('%.1f ns')),
3028 (10, 0.000000001, _('%.2f ns')), 3306 (10, 0.000000001, _('%.2f ns')),
3029 (1, 0.000000001, _('%.3f ns')), 3307 (1, 0.000000001, _('%.3f ns')),
3030 ) 3308 )
3309
3031 3310
3032 @attr.s 3311 @attr.s
3033 class timedcmstats(object): 3312 class timedcmstats(object):
3034 """Stats information produced by the timedcm context manager on entering.""" 3313 """Stats information produced by the timedcm context manager on entering."""
3035 3314
3044 3323
3045 def __bytes__(self): 3324 def __bytes__(self):
3046 return timecount(self.elapsed) if self.elapsed else '<unknown>' 3325 return timecount(self.elapsed) if self.elapsed else '<unknown>'
3047 3326
3048 __str__ = encoding.strmethod(__bytes__) 3327 __str__ = encoding.strmethod(__bytes__)
3328
3049 3329
3050 @contextlib.contextmanager 3330 @contextlib.contextmanager
3051 def timedcm(whencefmt, *whenceargs): 3331 def timedcm(whencefmt, *whenceargs):
3052 """A context manager that produces timing information for a given context. 3332 """A context manager that produces timing information for a given context.
3053 3333
3064 yield timing_stats 3344 yield timing_stats
3065 finally: 3345 finally:
3066 timing_stats.elapsed = timer() - timing_stats.start 3346 timing_stats.elapsed = timer() - timing_stats.start
3067 timedcm._nested -= 1 3347 timedcm._nested -= 1
3068 3348
3349
3069 timedcm._nested = 0 3350 timedcm._nested = 0
3351
3070 3352
3071 def timed(func): 3353 def timed(func):
3072 '''Report the execution time of a function call to stderr. 3354 '''Report the execution time of a function call to stderr.
3073 3355
3074 During development, use as a decorator when you need to measure 3356 During development, use as a decorator when you need to measure
3081 3363
3082 def wrapper(*args, **kwargs): 3364 def wrapper(*args, **kwargs):
3083 with timedcm(pycompat.bytestr(func.__name__)) as time_stats: 3365 with timedcm(pycompat.bytestr(func.__name__)) as time_stats:
3084 result = func(*args, **kwargs) 3366 result = func(*args, **kwargs)
3085 stderr = procutil.stderr 3367 stderr = procutil.stderr
3086 stderr.write('%s%s: %s\n' % ( 3368 stderr.write(
3087 ' ' * time_stats.level * 2, pycompat.bytestr(func.__name__), 3369 '%s%s: %s\n'
3088 time_stats)) 3370 % (
3371 ' ' * time_stats.level * 2,
3372 pycompat.bytestr(func.__name__),
3373 time_stats,
3374 )
3375 )
3089 return result 3376 return result
3377
3090 return wrapper 3378 return wrapper
3091 3379
# (suffix, multiplier) pairs consulted in order by sizetoint(); matching
# uses endswith(), so the two-letter forms ('kb', 'mb', 'gb') must be
# listed before the bare 'b' suffix.
_sizeunits = (
    ('m', 2 ** 20),
    ('k', 2 ** 10),
    ('g', 2 ** 30),
    ('kb', 2 ** 10),
    ('mb', 2 ** 20),
    ('gb', 2 ** 30),
    ('b', 1),
)
3094 3391
3095 def sizetoint(s): 3392 def sizetoint(s):
3096 '''Convert a space specifier to a byte count. 3393 '''Convert a space specifier to a byte count.
3097 3394
3098 >>> sizetoint(b'30') 3395 >>> sizetoint(b'30')
3104 ''' 3401 '''
3105 t = s.strip().lower() 3402 t = s.strip().lower()
3106 try: 3403 try:
3107 for k, u in _sizeunits: 3404 for k, u in _sizeunits:
3108 if t.endswith(k): 3405 if t.endswith(k):
3109 return int(float(t[:-len(k)]) * u) 3406 return int(float(t[: -len(k)]) * u)
3110 return int(t) 3407 return int(t)
3111 except ValueError: 3408 except ValueError:
3112 raise error.ParseError(_("couldn't parse size: %s") % s) 3409 raise error.ParseError(_("couldn't parse size: %s") % s)
3410
3113 3411
3114 class hooks(object): 3412 class hooks(object):
3115 '''A collection of hook functions that can be used to extend a 3413 '''A collection of hook functions that can be used to extend a
3116 function's behavior. Hooks are called in lexicographic order, 3414 function's behavior. Hooks are called in lexicographic order,
3117 based on the names of their sources.''' 3415 based on the names of their sources.'''
3126 self._hooks.sort(key=lambda x: x[0]) 3424 self._hooks.sort(key=lambda x: x[0])
3127 results = [] 3425 results = []
3128 for source, hook in self._hooks: 3426 for source, hook in self._hooks:
3129 results.append(hook(*args)) 3427 results.append(hook(*args))
3130 return results 3428 return results
3429
3131 3430
def getstackframes(skip=0, line=' %-*s in %s\n', fileline='%s:%d', depth=0):
    '''Yield lines of a nicely formatted stacktrace.

    Skips the 'skip' innermost entries, then keeps only the last 'depth'
    entries.  Each frame's file and line number are rendered with
    'fileline'; when 'line' is None, raw (width, location, function)
    tuples are yielded instead of formatted lines.

    Not to be used in production code, but very convenient while
    developing.
    '''
    frames = traceback.extract_stack()[: -skip - 1]
    entries = []
    for fn, ln, func, _text in frames:
        location = fileline % (pycompat.sysbytes(fn), ln)
        entries.append((location, pycompat.sysbytes(func)))
    entries = entries[-depth:]
    if not entries:
        return
    # Pad every location to the width of the longest one so the
    # function names line up in a column.
    width = max(len(location) for location, _func in entries)
    for location, func in entries:
        if line is None:
            yield (width, location, func)
        else:
            yield line % (width, location, func)
3154 3454
3155 def debugstacktrace(msg='stacktrace', skip=0, 3455
3156 f=procutil.stderr, otherf=procutil.stdout, depth=0): 3456 def debugstacktrace(
3457 msg='stacktrace', skip=0, f=procutil.stderr, otherf=procutil.stdout, depth=0
3458 ):
3157 '''Writes a message to f (stderr) with a nicely formatted stacktrace. 3459 '''Writes a message to f (stderr) with a nicely formatted stacktrace.
3158 Skips the 'skip' entries closest to the call, then show 'depth' entries. 3460 Skips the 'skip' entries closest to the call, then show 'depth' entries.
3159 By default it will flush stdout first. 3461 By default it will flush stdout first.
3160 It can be used everywhere and intentionally does not require an ui object. 3462 It can be used everywhere and intentionally does not require an ui object.
3161 Not be used in production code but very convenient while developing. 3463 Not be used in production code but very convenient while developing.
3165 f.write('%s at:\n' % msg.rstrip()) 3467 f.write('%s at:\n' % msg.rstrip())
3166 for line in getstackframes(skip + 1, depth=depth): 3468 for line in getstackframes(skip + 1, depth=depth):
3167 f.write(line) 3469 f.write(line)
3168 f.flush() 3470 f.flush()
3169 3471
3472
3170 class dirs(object): 3473 class dirs(object):
3171 '''a multiset of directory names from a dirstate or manifest''' 3474 '''a multiset of directory names from a dirstate or manifest'''
3172 3475
3173 def __init__(self, map, skip=None): 3476 def __init__(self, map, skip=None):
3174 self._dirs = {} 3477 self._dirs = {}
3176 if isinstance(map, dict) and skip is not None: 3479 if isinstance(map, dict) and skip is not None:
3177 for f, s in map.iteritems(): 3480 for f, s in map.iteritems():
3178 if s[0] != skip: 3481 if s[0] != skip:
3179 addpath(f) 3482 addpath(f)
3180 elif skip is not None: 3483 elif skip is not None:
3181 raise error.ProgrammingError("skip character is only supported " 3484 raise error.ProgrammingError(
3182 "with a dict source") 3485 "skip character is only supported " "with a dict source"
3486 )
3183 else: 3487 else:
3184 for f in map: 3488 for f in map:
3185 addpath(f) 3489 addpath(f)
3186 3490
3187 def addpath(self, path): 3491 def addpath(self, path):
3204 return iter(self._dirs) 3508 return iter(self._dirs)
3205 3509
3206 def __contains__(self, d): 3510 def __contains__(self, d):
3207 return d in self._dirs 3511 return d in self._dirs
3208 3512
3513
# Use the C implementation of the dirs multiset when the compiled
# parsers module provides one; it replaces the pure Python class above.
if safehasattr(parsers, 'dirs'):
    dirs = parsers.dirs

# The Rust implementation, when available, takes precedence over both
# (this assignment runs last, so it wins).
if rustdirs is not None:
    dirs = rustdirs
3519
3214 3520
def finddirs(path):
    '''Yield each ancestor directory of path, deepest first, then ''.

    The final item is always the empty string (the root), even for a
    path with no '/' separators.
    '''
    sep = path.rfind('/')
    while sep != -1:
        yield path[:sep]
        # Continue the search strictly to the left of the last match.
        sep = path.rfind('/', 0, sep)
    yield ''
3221 3527
3222 3528
# Convenient shortcut: 'dst' aliases debugstacktrace for quick use while
# debugging interactively.
dst = debugstacktrace
3531
3225 3532
3226 def safename(f, tag, ctx, others=None): 3533 def safename(f, tag, ctx, others=None):
3227 """ 3534 """
3228 Generate a name that it is safe to rename f to in the given context. 3535 Generate a name that it is safe to rename f to in the given context.
3229 3536
3244 for n in itertools.count(1): 3551 for n in itertools.count(1):
3245 fn = '%s~%s~%s' % (f, tag, n) 3552 fn = '%s~%s~%s' % (f, tag, n)
3246 if fn not in ctx and fn not in others: 3553 if fn not in ctx and fn not in others:
3247 return fn 3554 return fn
3248 3555
3556
def readexactly(stream, n):
    '''Read exactly n bytes via stream.read and abort on a short read.'''
    data = stream.read(n)
    if len(data) >= n:
        return data
    raise error.Abort(
        _("stream ended unexpectedly" " (got %d bytes, expected %d)")
        % (len(data), n)
    )
3566
3257 3567
def uvarintencode(value):
    """Encode an unsigned integer value to a varint.

    A varint is a variable length integer of 1 or more bytes.  Each byte
    carries 7 bits of the value in its low bits; the high bit is set on
    every byte except the last, marking that more bytes follow.  The
    least significant 7-bit group is emitted first.

    Raises ProgrammingError on negative values.
    """
    if value < 0:
        raise error.ProgrammingError('negative value for uvarint: %d' % value)
    # Peel off 7 bits at a time, least significant group first.
    bits = value & 0x7F
    value >>= 7
    chunks = []  # renamed from 'bytes' to stop shadowing the builtin
    while value:
        # More groups follow: set the continuation (high) bit.
        chunks.append(pycompat.bytechr(0x80 | bits))
        bits = value & 0x7F
        value >>= 7
    chunks.append(pycompat.bytechr(bits))

    return ''.join(chunks)
3603
3294 3604
def uvarintdecodestream(fh):
    """Decode an unsigned variable length integer from a stream.

    The passed argument is anything that has a ``.read(N)`` method.
    Bytes are consumed one at a time: the low 7 bits of each byte are
    accumulated into the result (least significant group first) and a
    clear high bit marks the final byte.

    Raises Abort (via readexactly) if the stream ends mid-varint.
    """
    value = 0
    offset = 0
    while True:
        cur = ord(readexactly(fh, 1))
        value |= (cur & 0x7F) << offset
        # A clear continuation bit means this was the last byte.
        if cur < 0x80:
            return value
        offset += 7