Mercurial > hg
changeset 43495:daade078f1f0
merge with stable
author | Yuya Nishihara <yuya@tcha.org> |
---|---|
date | Fri, 08 Nov 2019 23:26:50 +0900 |
parents | 5d40317d42b7 (diff) be0f77fd274d (current diff) |
children | 2ade00f3b03b |
files | contrib/perf.py mercurial/bundle2.py |
diffstat | 50 files changed, 790 insertions(+), 466 deletions(-) [+] |
line wrap: on
line diff
--- a/.hgignore Wed Nov 06 16:54:34 2019 +0100 +++ b/.hgignore Fri Nov 08 23:26:50 2019 +0900 @@ -51,6 +51,7 @@ cscope.* .idea/* .asv/* +.pytype/* i18n/hg.pot locale/*/LC_MESSAGES/hg.mo hgext/__index__.py
--- a/contrib/perf.py Wed Nov 06 16:54:34 2019 +0100 +++ b/contrib/perf.py Fri Nov 08 23:26:50 2019 +0900 @@ -760,7 +760,10 @@ @command( b'perfstatus', - [(b'u', b'unknown', False, b'ask status to look for unknown files')] + [ + (b'u', b'unknown', False, b'ask status to look for unknown files'), + (b'', b'dirstate', False, b'benchmark the internal dirstate call'), + ] + formatteropts, ) def perfstatus(ui, repo, **opts): @@ -776,7 +779,20 @@ # timer(lambda: sum(map(len, repo.dirstate.status(m, [], False, False, # False)))) timer, fm = gettimer(ui, opts) - timer(lambda: sum(map(len, repo.status(unknown=opts[b'unknown'])))) + if opts[b'dirstate']: + dirstate = repo.dirstate + m = scmutil.matchall(repo) + unknown = opts[b'unknown'] + + def status_dirstate(): + s = dirstate.status( + m, subrepos=[], ignored=False, clean=False, unknown=unknown + ) + sum(map(len, s)) + + timer(status_dirstate) + else: + timer(lambda: sum(map(len, repo.status(unknown=opts[b'unknown'])))) fm.end()
--- a/contrib/testparseutil.py Wed Nov 06 16:54:34 2019 +0100 +++ b/contrib/testparseutil.py Fri Nov 08 23:26:50 2019 +0900 @@ -80,7 +80,7 @@ #################### -class embeddedmatcher(object): +class embeddedmatcher(object): # pytype: disable=ignored-metaclass """Base class to detect embedded code fragments in *.t test script """
--- a/hgext/censor.py Wed Nov 06 16:54:34 2019 +0100 +++ b/hgext/censor.py Fri Nov 08 23:26:50 2019 +0900 @@ -23,6 +23,9 @@ ``hg update``, must be capable of tolerating censored data to continue to function in a meaningful way. Such commands only tolerate censored file revisions if they are allowed by the "censor.policy=ignore" config option. + +A few informative commands such as ``hg grep`` will unconditionally +ignore censored data and merely report that it was encountered. """ from __future__ import absolute_import
--- a/hgext/infinitepush/store.py Wed Nov 06 16:54:34 2019 +0100 +++ b/hgext/infinitepush/store.py Fri Nov 08 23:26:50 2019 +0900 @@ -29,7 +29,7 @@ pass -class abstractbundlestore(object): +class abstractbundlestore(object): # pytype: disable=ignored-metaclass """Defines the interface for bundle stores. A bundle store is an entity that stores raw bundle data. It is a simple
--- a/hgext/largefiles/reposetup.py Wed Nov 06 16:54:34 2019 +0100 +++ b/hgext/largefiles/reposetup.py Fri Nov 08 23:26:50 2019 +0900 @@ -360,20 +360,6 @@ ) return result - def push(self, remote, force=False, revs=None, newbranch=False): - if remote.local(): - missing = set(self.requirements) - remote.local().supported - if missing: - msg = _( - b"required features are not" - b" supported in the destination:" - b" %s" - ) % (b', '.join(sorted(missing))) - raise error.Abort(msg) - return super(lfilesrepo, self).push( - remote, force=force, revs=revs, newbranch=newbranch - ) - # TODO: _subdirlfs should be moved into "lfutil.py", because # it is referred only from "lfutil.updatestandinsbymatch" def _subdirlfs(self, files, lfiles):
--- a/mercurial/archival.py Wed Nov 06 16:54:34 2019 +0100 +++ b/mercurial/archival.py Fri Nov 08 23:26:50 2019 +0900 @@ -154,9 +154,11 @@ fname = fname[:-3] flags = 0 if fname: - flags = gzip.FNAME + flags = gzip.FNAME # pytype: disable=module-attr self.fileobj.write(pycompat.bytechr(flags)) - gzip.write32u(self.fileobj, int(self.timestamp)) + gzip.write32u( # pytype: disable=module-attr + self.fileobj, int(self.timestamp) + ) self.fileobj.write(b'\002') self.fileobj.write(b'\377') if fname: @@ -179,7 +181,7 @@ timestamp=mtime, ) self.fileobj = gzfileobj - return tarfile.TarFile.taropen( + return tarfile.TarFile.taropen( # pytype: disable=attribute-error name, pycompat.sysstr(mode), gzfileobj ) else: @@ -234,7 +236,7 @@ def addfile(self, name, mode, islink, data): i = zipfile.ZipInfo(pycompat.fsdecode(name), self.date_time) - i.compress_type = self.z.compression + i.compress_type = self.z.compression # pytype: disable=attribute-error # unzip will not honor unix file modes unless file creator is # set to unix (id 3). i.create_system = 3
--- a/mercurial/bundle2.py Wed Nov 06 16:54:34 2019 +0100 +++ b/mercurial/bundle2.py Fri Nov 08 23:26:50 2019 +0900 @@ -653,7 +653,9 @@ """add a stream level parameter""" if not name: raise error.ProgrammingError(b'empty parameter name') - if name[0:1] not in pycompat.bytestr(string.ascii_letters): + if name[0:1] not in pycompat.bytestr( + string.ascii_letters # pytype: disable=wrong-arg-types + ): raise error.ProgrammingError( b'non letter first character: %s' % name ) @@ -836,7 +838,9 @@ """ if not name: raise ValueError(r'empty parameter name') - if name[0:1] not in pycompat.bytestr(string.ascii_letters): + if name[0:1] not in pycompat.bytestr( + string.ascii_letters # pytype: disable=wrong-arg-types + ): raise ValueError(r'non letter first character: %s' % name) try: handler = b2streamparamsmap[name.lower()]
--- a/mercurial/cext/dirs.c Wed Nov 06 16:54:34 2019 +0100 +++ b/mercurial/cext/dirs.c Fri Nov 08 23:26:50 2019 +0900 @@ -66,6 +66,14 @@ while ((pos = _finddir(cpath, pos - 1)) != -1) { PyObject *val; + /* Sniff for trailing slashes, a marker of an invalid input. */ + if (pos > 0 && cpath[pos - 1] == '/') { + PyErr_SetString( + PyExc_ValueError, + "found invalid consecutive slashes in path"); + goto bail; + } + key = PyBytes_FromStringAndSize(cpath, pos); if (key == NULL) goto bail;
--- a/mercurial/changegroup.py Wed Nov 06 16:54:34 2019 +0100 +++ b/mercurial/changegroup.py Fri Nov 08 23:26:50 2019 +0900 @@ -315,15 +315,15 @@ ) self.callback = progress.increment - efiles = set() + efilesset = set() def onchangelog(cl, node): - efiles.update(cl.readfiles(node)) + efilesset.update(cl.readfiles(node)) self.changelogheader() deltas = self.deltaiter() cgnodes = cl.addgroup(deltas, csmap, trp, addrevisioncb=onchangelog) - efiles = len(efiles) + efiles = len(efilesset) if not cgnodes: repo.ui.develwarn( @@ -1150,7 +1150,9 @@ def makelookupmflinknode(tree, nodes): if fastpathlinkrev: assert not tree - return manifests.__getitem__ + return ( + manifests.__getitem__ # pytype: disable=unsupported-operands + ) def lookupmflinknode(x): """Callback for looking up the linknode for manifests.
--- a/mercurial/changelog.py Wed Nov 06 16:54:34 2019 +0100 +++ b/mercurial/changelog.py Fri Nov 08 23:26:50 2019 +0900 @@ -21,7 +21,6 @@ error, pycompat, revlog, - util, ) from .utils import ( dateutil, @@ -405,110 +404,6 @@ self.filteredrevs = frozenset() self._copiesstorage = opener.options.get(b'copies-storage') - def tiprev(self): - for i in pycompat.xrange(len(self) - 1, -2, -1): - if i not in self.filteredrevs: - return i - - def tip(self): - """filtered version of revlog.tip""" - return self.node(self.tiprev()) - - def __contains__(self, rev): - """filtered version of revlog.__contains__""" - return 0 <= rev < len(self) and rev not in self.filteredrevs - - def __iter__(self): - """filtered version of revlog.__iter__""" - if len(self.filteredrevs) == 0: - return revlog.revlog.__iter__(self) - - def filterediter(): - for i in pycompat.xrange(len(self)): - if i not in self.filteredrevs: - yield i - - return filterediter() - - def revs(self, start=0, stop=None): - """filtered version of revlog.revs""" - for i in super(changelog, self).revs(start, stop): - if i not in self.filteredrevs: - yield i - - def _checknofilteredinrevs(self, revs): - """raise the appropriate error if 'revs' contains a filtered revision - - This returns a version of 'revs' to be used thereafter by the caller. - In particular, if revs is an iterator, it is converted into a set. 
- """ - safehasattr = util.safehasattr - if safehasattr(revs, '__next__'): - # Note that inspect.isgenerator() is not true for iterators, - revs = set(revs) - - filteredrevs = self.filteredrevs - if safehasattr(revs, 'first'): # smartset - offenders = revs & filteredrevs - else: - offenders = filteredrevs.intersection(revs) - - for rev in offenders: - raise error.FilteredIndexError(rev) - return revs - - def headrevs(self, revs=None): - if revs is None and self.filteredrevs: - try: - return self.index.headrevsfiltered(self.filteredrevs) - # AttributeError covers non-c-extension environments and - # old c extensions without filter handling. - except AttributeError: - return self._headrevs() - - if self.filteredrevs: - revs = self._checknofilteredinrevs(revs) - return super(changelog, self).headrevs(revs) - - def strip(self, *args, **kwargs): - # XXX make something better than assert - # We can't expect proper strip behavior if we are filtered. - assert not self.filteredrevs - super(changelog, self).strip(*args, **kwargs) - - def rev(self, node): - """filtered version of revlog.rev""" - r = super(changelog, self).rev(node) - if r in self.filteredrevs: - raise error.FilteredLookupError( - hex(node), self.indexfile, _(b'filtered node') - ) - return r - - def node(self, rev): - """filtered version of revlog.node""" - if rev in self.filteredrevs: - raise error.FilteredIndexError(rev) - return super(changelog, self).node(rev) - - def linkrev(self, rev): - """filtered version of revlog.linkrev""" - if rev in self.filteredrevs: - raise error.FilteredIndexError(rev) - return super(changelog, self).linkrev(rev) - - def parentrevs(self, rev): - """filtered version of revlog.parentrevs""" - if rev in self.filteredrevs: - raise error.FilteredIndexError(rev) - return super(changelog, self).parentrevs(rev) - - def flags(self, rev): - """filtered version of revlog.flags""" - if rev in self.filteredrevs: - raise error.FilteredIndexError(rev) - return super(changelog, 
self).flags(rev) - def delayupdate(self, tr): b"delay visibility of index updates to other readers"
--- a/mercurial/cmdutil.py Wed Nov 06 16:54:34 2019 +0100 +++ b/mercurial/cmdutil.py Fri Nov 08 23:26:50 2019 +0900 @@ -347,7 +347,7 @@ ui, originalhunks, recordfn, operation ) except crecordmod.fallbackerror as e: - ui.warn(b'%s\n' % e.message) + ui.warn(b'%s\n' % e.message) # pytype: disable=attribute-error ui.warn(_(b'falling back to text mode\n')) return patch.filterpatch(ui, originalhunks, match, operation) @@ -435,7 +435,7 @@ copymod.copy(status[3]), copymod.copy(status[4]), copymod.copy(status[5]), - copymod.copy(status[6]), + copymod.copy(status[6]), # pytype: disable=wrong-arg-count ) # Force allows -X subrepo to skip the subrepo. @@ -1289,7 +1289,7 @@ if isinstance(r, revlog.revlog): pass elif util.safehasattr(r, b'_revlog'): - r = r._revlog + r = r._revlog # pytype: disable=attribute-error elif r is not None: raise error.Abort(_(b'%r does not appear to be a revlog') % r)
--- a/mercurial/color.py Wed Nov 06 16:54:34 2019 +0100 +++ b/mercurial/color.py Fri Nov 08 23:26:50 2019 +0900 @@ -435,7 +435,7 @@ if pycompat.iswindows: import ctypes - _kernel32 = ctypes.windll.kernel32 + _kernel32 = ctypes.windll.kernel32 # pytype: disable=module-attr _WORD = ctypes.c_ushort
--- a/mercurial/commands.py Wed Nov 06 16:54:34 2019 +0100 +++ b/mercurial/commands.py Fri Nov 08 23:26:50 2019 +0900 @@ -3446,6 +3446,9 @@ def grepbody(fn, rev, body): matches[rev].setdefault(fn, []) m = matches[rev][fn] + if body is None: + return + for lnum, cstart, cend, line in matchlines(body): s = linestate(line, lnum, cstart, cend) m.append(s) @@ -3581,6 +3584,19 @@ getrenamed = scmutil.getrenamedfn(repo) + def get_file_content(filename, filelog, filenode, context, revision): + try: + content = filelog.read(filenode) + except error.WdirUnsupported: + content = context[filename].data() + except error.CensoredNodeError: + content = None + ui.warn( + _(b'cannot search in censored file: %(filename)s:%(revnum)s\n') + % {b'filename': filename, b'revnum': pycompat.bytestr(revision)} + ) + return content + def prep(ctx, fns): rev = ctx.rev() pctx = ctx.p1() @@ -3607,17 +3623,15 @@ files.append(fn) if fn not in matches[rev]: - try: - content = flog.read(fnode) - except error.WdirUnsupported: - content = ctx[fn].data() + content = get_file_content(fn, flog, fnode, ctx, rev) grepbody(fn, rev, content) pfn = copy or fn if pfn not in matches[parent]: try: - fnode = pctx.filenode(pfn) - grepbody(pfn, parent, flog.read(fnode)) + pfnode = pctx.filenode(pfn) + pcontent = get_file_content(pfn, flog, pfnode, pctx, parent) + grepbody(pfn, parent, pcontent) except error.LookupError: pass
--- a/mercurial/dirstate.py Wed Nov 06 16:54:34 2019 +0100 +++ b/mercurial/dirstate.py Fri Nov 08 23:26:50 2019 +0900 @@ -687,8 +687,7 @@ delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite') if delaywrite > 0: # do we have any files to delay for? - items = pycompat.iteritems(self._map) - for f, e in items: + for f, e in pycompat.iteritems(self._map): if e[0] == b'n' and e[3] == now: import time # to avoid useless import @@ -700,12 +699,6 @@ time.sleep(end - clock) now = end # trust our estimate that the end is near now break - # since the iterator is potentially not deleted, - # delete the iterator to release the reference for the Rust - # implementation. - # TODO make the Rust implementation behave like Python - # since this would not work with a non ref-counting GC. - del items self._map.write(st, now) self._lastnormaltime = 0 @@ -1127,7 +1120,6 @@ ) = rustmod.status( dmap._rustmap, self._rootdir, - match.files(), bool(listclean), self._lastnormaltime, self._checkexec,
--- a/mercurial/encoding.py Wed Nov 06 16:54:34 2019 +0100 +++ b/mercurial/encoding.py Fri Nov 08 23:26:50 2019 +0900 @@ -241,8 +241,15 @@ strfromlocal = unifromlocal strmethod = unimethod else: - strtolocal = pycompat.identity - strfromlocal = pycompat.identity + + def strtolocal(s): + # type: (str) -> bytes + return s + + def strfromlocal(s): + # type: (bytes) -> str + return s + strmethod = pycompat.identity if not _nativeenviron:
--- a/mercurial/fancyopts.py Wed Nov 06 16:54:34 2019 +0100 +++ b/mercurial/fancyopts.py Fri Nov 08 23:26:50 2019 +0900 @@ -205,7 +205,7 @@ return parsedopts, parsedargs -class customopt(object): +class customopt(object): # pytype: disable=ignored-metaclass """Manage defaults and mutations for any type of opt.""" __metaclass__ = abc.ABCMeta
--- a/mercurial/linelog.py Wed Nov 06 16:54:34 2019 +0100 +++ b/mercurial/linelog.py Fri Nov 08 23:26:50 2019 +0900 @@ -53,7 +53,7 @@ return iter(self.lines) -class _llinstruction(object): +class _llinstruction(object): # pytype: disable=ignored-metaclass __metaclass__ = abc.ABCMeta @@ -278,8 +278,14 @@ fakejge = _decodeone(buf, 0) if isinstance(fakejge, _jump): maxrev = 0 + elif isinstance(fakejge, (_jge, _jl)): + maxrev = fakejge._cmprev else: - maxrev = fakejge._cmprev + raise LineLogError( + 'Expected one of _jump, _jge, or _jl. Got %s.' + % type(fakejge).__name__ + ) + assert isinstance(fakejge, (_jump, _jge, _jl)) # help pytype numentries = fakejge._target if expected != numentries: raise LineLogError(
--- a/mercurial/match.py Wed Nov 06 16:54:34 2019 +0100 +++ b/mercurial/match.py Fri Nov 08 23:26:50 2019 +0900 @@ -345,7 +345,10 @@ ): kindpats.append((k, p, source or pat)) except error.Abort as inst: - raise error.Abort(b'%s: %s' % (pat, inst[0])) + raise error.Abort( + b'%s: %s' + % (pat, inst[0]) # pytype: disable=unsupported-operands + ) except IOError as inst: if warn: warn(
--- a/mercurial/mdiff.py Wed Nov 06 16:54:34 2019 +0100 +++ b/mercurial/mdiff.py Fri Nov 08 23:26:50 2019 +0900 @@ -38,6 +38,7 @@ splitnewlines = bdiff.splitnewlines +# TODO: this looks like it could be an attrs, which might help pytype class diffopts(object): '''context is the number of context lines text treats all files as text @@ -52,6 +53,8 @@ upgrade generates git diffs to avoid data loss ''' + _HAS_DYNAMIC_ATTRIBUTES = True + defaults = { b'context': 3, b'text': False,
--- a/mercurial/posix.py Wed Nov 06 16:54:34 2019 +0100 +++ b/mercurial/posix.py Fri Nov 08 23:26:50 2019 +0900 @@ -323,7 +323,10 @@ fullpath = os.path.join(cachedir, target) open(fullpath, b'w').close() except IOError as inst: - if inst[0] == errno.EACCES: + if ( + inst[0] # pytype: disable=unsupported-operands + == errno.EACCES + ): # If we can't write to cachedir, just pretend # that the fs is readonly and by association # that the fs won't support symlinks. This
--- a/mercurial/pvec.py Wed Nov 06 16:54:34 2019 +0100 +++ b/mercurial/pvec.py Fri Nov 08 23:26:50 2019 +0900 @@ -48,7 +48,7 @@ different branches ''' -from __future__ import absolute_import +from __future__ import absolute_import, division from .node import nullrev from . import ( @@ -57,12 +57,12 @@ ) _size = 448 # 70 chars b85-encoded -_bytes = _size / 8 +_bytes = _size // 8 _depthbits = 24 -_depthbytes = _depthbits / 8 +_depthbytes = _depthbits // 8 _vecbytes = _bytes - _depthbytes _vecbits = _vecbytes * 8 -_radius = (_vecbits - 30) / 2 # high probability vectors are related +_radius = (_vecbits - 30) // 2 # high probability vectors are related def _bin(bs): @@ -74,9 +74,10 @@ def _str(v, l): + # type: (int, int) -> bytes bs = b"" for p in pycompat.xrange(l): - bs = chr(v & 255) + bs + bs = pycompat.bytechr(v & 255) + bs v >>= 8 return bs @@ -131,7 +132,7 @@ if hdist > ddist: # if delta = 10 and hdist = 100, then we need to go up 55 steps # to the ancestor and down 45 - changes = (hdist - ddist + 1) / 2 + changes = (hdist - ddist + 1) // 2 else: # must make at least one change changes = 1
--- a/mercurial/pycompat.py Wed Nov 06 16:54:34 2019 +0100 +++ b/mercurial/pycompat.py Fri Nov 08 23:26:50 2019 +0900 @@ -94,6 +94,13 @@ import io import struct + if os.name == r'nt' and sys.version_info >= (3, 6): + # MBCS (or ANSI) filesystem encoding must be used as before. + # Otherwise non-ASCII filenames in existing repositories would be + # corrupted. + # This must be set once prior to any fsencode/fsdecode calls. + sys._enablelegacywindowsfsencoding() + fsencode = os.fsencode fsdecode = os.fsdecode oscurdir = os.curdir.encode('ascii') @@ -139,8 +146,8 @@ # # https://hg.python.org/cpython/file/v3.5.1/Programs/python.c#l55 # - # TODO: On Windows, the native argv is wchar_t, so we'll need a different - # workaround to simulate the Python 2 (i.e. ANSI Win32 API) behavior. + # On Windows, the native argv is unicode and is converted to MBCS bytes + # since we do enable the legacy filesystem encoding. if getattr(sys, 'argv', None) is not None: sysargv = list(map(os.fsencode, sys.argv))
--- a/mercurial/repoview.py Wed Nov 06 16:54:34 2019 +0100 +++ b/mercurial/repoview.py Fri Nov 08 23:26:50 2019 +0900 @@ -11,13 +11,18 @@ import copy import weakref -from .node import nullrev +from .i18n import _ +from .node import ( + hex, + nullrev, +) from .pycompat import ( delattr, getattr, setattr, ) from . import ( + error, obsolete, phases, pycompat, @@ -218,6 +223,117 @@ return repo.filteredrevcache[filtername] +def wrapchangelog(unfichangelog, filteredrevs): + cl = copy.copy(unfichangelog) + cl.filteredrevs = filteredrevs + + cl.__class__ = type( + 'filteredchangelog', (filteredchangelogmixin, cl.__class__), {} + ) + + return cl + + +class filteredchangelogmixin(object): + def tiprev(self): + """filtered version of revlog.tiprev""" + for i in pycompat.xrange(len(self) - 1, -2, -1): + if i not in self.filteredrevs: + return i + + def __contains__(self, rev): + """filtered version of revlog.__contains__""" + return 0 <= rev < len(self) and rev not in self.filteredrevs + + def __iter__(self): + """filtered version of revlog.__iter__""" + + def filterediter(): + for i in pycompat.xrange(len(self)): + if i not in self.filteredrevs: + yield i + + return filterediter() + + def revs(self, start=0, stop=None): + """filtered version of revlog.revs""" + for i in super(filteredchangelogmixin, self).revs(start, stop): + if i not in self.filteredrevs: + yield i + + def _checknofilteredinrevs(self, revs): + """raise the appropriate error if 'revs' contains a filtered revision + + This returns a version of 'revs' to be used thereafter by the caller. + In particular, if revs is an iterator, it is converted into a set. 
+ """ + safehasattr = util.safehasattr + if safehasattr(revs, '__next__'): + # Note that inspect.isgenerator() is not true for iterators, + revs = set(revs) + + filteredrevs = self.filteredrevs + if safehasattr(revs, 'first'): # smartset + offenders = revs & filteredrevs + else: + offenders = filteredrevs.intersection(revs) + + for rev in offenders: + raise error.FilteredIndexError(rev) + return revs + + def headrevs(self, revs=None): + if revs is None: + try: + return self.index.headrevsfiltered(self.filteredrevs) + # AttributeError covers non-c-extension environments and + # old c extensions without filter handling. + except AttributeError: + return self._headrevs() + + revs = self._checknofilteredinrevs(revs) + return super(filteredchangelogmixin, self).headrevs(revs) + + def strip(self, *args, **kwargs): + # XXX make something better than assert + # We can't expect proper strip behavior if we are filtered. + assert not self.filteredrevs + super(filteredchangelogmixin, self).strip(*args, **kwargs) + + def rev(self, node): + """filtered version of revlog.rev""" + r = super(filteredchangelogmixin, self).rev(node) + if r in self.filteredrevs: + raise error.FilteredLookupError( + hex(node), self.indexfile, _(b'filtered node') + ) + return r + + def node(self, rev): + """filtered version of revlog.node""" + if rev in self.filteredrevs: + raise error.FilteredIndexError(rev) + return super(filteredchangelogmixin, self).node(rev) + + def linkrev(self, rev): + """filtered version of revlog.linkrev""" + if rev in self.filteredrevs: + raise error.FilteredIndexError(rev) + return super(filteredchangelogmixin, self).linkrev(rev) + + def parentrevs(self, rev): + """filtered version of revlog.parentrevs""" + if rev in self.filteredrevs: + raise error.FilteredIndexError(rev) + return super(filteredchangelogmixin, self).parentrevs(rev) + + def flags(self, rev): + """filtered version of revlog.flags""" + if rev in self.filteredrevs: + raise error.FilteredIndexError(rev) + return 
super(filteredchangelogmixin, self).flags(rev) + + class repoview(object): """Provide a read/write view of a repo through a filtered changelog @@ -286,8 +402,8 @@ cl = None # could have been made None by the previous if if cl is None: - cl = copy.copy(unfichangelog) - cl.filteredrevs = revs + # Only filter if there's something to filter + cl = wrapchangelog(unfichangelog, revs) if revs else unfichangelog object.__setattr__(self, r'_clcache', cl) object.__setattr__(self, r'_clcachekey', newkey) return cl
--- a/mercurial/revlog.py Wed Nov 06 16:54:34 2019 +0100 +++ b/mercurial/revlog.py Fri Nov 08 23:26:50 2019 +0900 @@ -593,8 +593,11 @@ with func() as fp: yield fp + def tiprev(self): + return len(self.index) - 1 + def tip(self): - return self.node(len(self.index) - 1) + return self.node(self.tiprev()) def __contains__(self, rev): return 0 <= rev < len(self)
--- a/mercurial/scmposix.py Wed Nov 06 16:54:34 2019 +0100 +++ b/mercurial/scmposix.py Fri Nov 08 23:26:50 2019 +0900 @@ -90,7 +90,7 @@ except ValueError: pass except IOError as e: - if e[0] == errno.EINVAL: + if e[0] == errno.EINVAL: # pytype: disable=unsupported-operands pass else: raise
--- a/mercurial/simplemerge.py Wed Nov 06 16:54:34 2019 +0100 +++ b/mercurial/simplemerge.py Fri Nov 08 23:26:50 2019 +0900 @@ -291,7 +291,19 @@ if region[0] != b"conflict": yield region continue - issue, z1, z2, a1, a2, b1, b2 = region + # pytype thinks this tuple contains only 3 things, but + # that's clearly not true because this code successfully + # executes. It might be wise to rework merge_regions to be + # some kind of attrs type. + ( + issue, + z1, + z2, + a1, + a2, + b1, + b2, + ) = region # pytype: disable=bad-unpacking alen = a2 - a1 blen = b2 - b1
--- a/mercurial/statprof.py Wed Nov 06 16:54:34 2019 +0100 +++ b/mercurial/statprof.py Fri Nov 08 23:26:50 2019 +0900 @@ -352,7 +352,7 @@ frame = inspect.currentframe() tid = [k for k, f in sys._current_frames().items() if f == frame][0] state.thread = threading.Thread( - target=samplerthread, args=(tid,), name=b"samplerthread" + target=samplerthread, args=(tid,), name="samplerthread" ) state.thread.start()
--- a/mercurial/templater.py Wed Nov 06 16:54:34 2019 +0100 +++ b/mercurial/templater.py Fri Nov 08 23:26:50 2019 +0900 @@ -611,7 +611,7 @@ return s[1:-1] -class resourcemapper(object): +class resourcemapper(object): # pytype: disable=ignored-metaclass """Mapper of internal template resources""" __metaclass__ = abc.ABCMeta
--- a/mercurial/templateutil.py Wed Nov 06 16:54:34 2019 +0100 +++ b/mercurial/templateutil.py Fri Nov 08 23:26:50 2019 +0900 @@ -31,7 +31,7 @@ pass -class wrapped(object): +class wrapped(object): # pytype: disable=ignored-metaclass """Object requiring extra conversion prior to displaying or processing as value @@ -108,9 +108,11 @@ """ -class mappable(object): +class mappable(object): # pytype: disable=ignored-metaclass """Object which can be converted to a single template mapping""" + __metaclass__ = abc.ABCMeta + def itermaps(self, context): yield self.tomap(context)
--- a/mercurial/util.py Wed Nov 06 16:54:34 2019 +0100 +++ b/mercurial/util.py Fri Nov 08 23:26:50 2019 +0900 @@ -1304,7 +1304,7 @@ """ -class transactional(object): +class transactional(object): # pytype: disable=ignored-metaclass """Base class for making a transactional type into a context manager.""" __metaclass__ = abc.ABCMeta @@ -3515,6 +3515,10 @@ def addpath(self, path): dirs = self._dirs for base in finddirs(path): + if base.endswith(b'/'): + raise ValueError( + "found invalid consecutive slashes in path: %r" % base + ) if base in dirs: dirs[base] += 1 return
--- a/mercurial/utils/compression.py Wed Nov 06 16:54:34 2019 +0100 +++ b/mercurial/utils/compression.py Fri Nov 08 23:26:50 2019 +0900 @@ -646,7 +646,7 @@ # Not all installs have the zstd module available. So defer importing # until first access. try: - from .. import zstd + from .. import zstd # pytype: disable=import-error # Force delayed import. zstd.__version__
--- a/mercurial/utils/procutil.py Wed Nov 06 16:54:34 2019 +0100 +++ b/mercurial/utils/procutil.py Fri Nov 08 23:26:50 2019 +0900 @@ -378,7 +378,9 @@ if pycompat.iswindows: def shelltonative(cmd, env): - return platform.shelltocmdexe(cmd, shellenviron(env)) + return platform.shelltocmdexe( # pytype: disable=module-attr + cmd, shellenviron(env) + ) tonativestr = encoding.strfromlocal else: @@ -545,7 +547,10 @@ # Following creation flags might create a console GUI window. # Using subprocess.CREATE_NEW_CONSOLE might helps. # See https://phab.mercurial-scm.org/D1701 for discussion - _creationflags = DETACHED_PROCESS | subprocess.CREATE_NEW_PROCESS_GROUP + _creationflags = ( + DETACHED_PROCESS + | subprocess.CREATE_NEW_PROCESS_GROUP # pytype: disable=module-attr + ) def runbgcommand( script, env, shell=False, stdout=None, stderr=None, ensurestart=True
--- a/mercurial/vfs.py Wed Nov 06 16:54:34 2019 +0100 +++ b/mercurial/vfs.py Fri Nov 08 23:26:50 2019 +0900 @@ -52,11 +52,17 @@ def __init__(self, *args, **kwargs): '''Prevent instantiation; don't call this from subclasses.''' - raise NotImplementedError(b'attempted instantiating ' + str(type(self))) + raise NotImplementedError('attempted instantiating ' + str(type(self))) + + def __call__(self, path, mode=b'rb', **kwargs): + raise NotImplementedError def _auditpath(self, path, mode): raise NotImplementedError + def join(self, path, *insidef): + raise NotImplementedError + def tryread(self, path): '''gracefully return an empty string for missing files''' try: @@ -301,7 +307,10 @@ # Sharing backgroundfilecloser between threads is complex and using # multiple instances puts us at risk of running out of file descriptors # only allow to use backgroundfilecloser when in main thread. - if not isinstance(threading.currentThread(), threading._MainThread): + if not isinstance( + threading.currentThread(), + threading._MainThread, # pytype: disable=module-attr + ): yield return vfs = getattr(self, 'vfs', self) @@ -312,10 +321,14 @@ with backgroundfilecloser(ui, expectedcount=expectedcount) as bfc: try: - vfs._backgroundfilecloser = bfc + vfs._backgroundfilecloser = ( + bfc # pytype: disable=attribute-error + ) yield bfc finally: - vfs._backgroundfilecloser = None + vfs._backgroundfilecloser = ( + None # pytype: disable=attribute-error + ) class vfs(abstractvfs): @@ -471,9 +484,12 @@ fp = checkambigatclosing(fp) if backgroundclose and isinstance( - threading.currentThread(), threading._MainThread + threading.currentThread(), + threading._MainThread, # pytype: disable=module-attr ): - if not self._backgroundfilecloser: + if ( + not self._backgroundfilecloser # pytype: disable=attribute-error + ): raise error.Abort( _( b'backgroundclose can only be used when a ' @@ -481,7 +497,10 @@ ) ) - fp = delayclosedfile(fp, self._backgroundfilecloser) + fp = delayclosedfile( + fp, + 
self._backgroundfilecloser, # pytype: disable=attribute-error + ) return fp @@ -589,10 +608,10 @@ return self def __exit__(self, exc_type, exc_value, exc_tb): - raise NotImplementedError(b'attempted instantiating ' + str(type(self))) + raise NotImplementedError('attempted instantiating ' + str(type(self))) def close(self): - raise NotImplementedError(b'attempted instantiating ' + str(type(self))) + raise NotImplementedError('attempted instantiating ' + str(type(self))) class delayclosedfile(closewrapbase): @@ -649,7 +668,7 @@ self._running = True for i in range(threadcount): - t = threading.Thread(target=self._worker, name=b'backgroundcloser') + t = threading.Thread(target=self._worker, name='backgroundcloser') self._threads.append(t) t.start()
--- a/mercurial/wireprototypes.py Wed Nov 06 16:54:34 2019 +0100 +++ b/mercurial/wireprototypes.py Fri Nov 08 23:26:50 2019 +0900 @@ -404,7 +404,7 @@ ) % config, hint=_(b'usable compression engines: %s') - % b', '.sorted(validnames), + % b', '.sorted(validnames), # pytype: disable=attribute-error ) return compengines
--- a/rust/hg-core/src/dirstate/status.rs Wed Nov 06 16:54:34 2019 +0100 +++ b/rust/hg-core/src/dirstate/status.rs Fri Nov 08 23:26:50 2019 +0900 @@ -10,68 +10,14 @@ //! and will only be triggered in narrow cases. use crate::utils::files::HgMetadata; -use crate::utils::hg_path::{hg_path_to_path_buf, HgPath, HgPathBuf}; +use crate::utils::hg_path::{hg_path_to_path_buf, HgPathBuf}; use crate::{DirstateEntry, DirstateMap, EntryState}; use rayon::prelude::*; -use std::collections::HashMap; -use std::fs::Metadata; use std::path::Path; -/// Get stat data about the files explicitly specified by match. -/// TODO subrepos -fn walk_explicit( - files: &[impl AsRef<HgPath> + Sync], - dmap: &DirstateMap, - root_dir: impl AsRef<Path> + Sync, -) -> std::io::Result<HashMap<HgPathBuf, Option<HgMetadata>>> { - let mut results = HashMap::new(); - - // A tuple of the normalized filename and the `Result` of the call to - // `symlink_metadata` for separate handling. - type WalkTuple<'a> = (&'a HgPath, std::io::Result<Metadata>); - - let stats_res: std::io::Result<Vec<WalkTuple>> = files - .par_iter() - .map(|filename| { - // TODO normalization - let normalized = filename.as_ref(); - - let target_filename = - root_dir.as_ref().join(hg_path_to_path_buf(normalized)?); - - Ok((normalized, target_filename.symlink_metadata())) - }) - .collect(); - - for res in stats_res? { - match res { - (normalized, Ok(stat)) => { - if stat.is_file() { - results.insert( - normalized.to_owned(), - Some(HgMetadata::from_metadata(stat)), - ); - } else { - if dmap.contains_key(normalized) { - results.insert(normalized.to_owned(), None); - } - } - } - (normalized, Err(_)) => { - if dmap.contains_key(normalized) { - results.insert(normalized.to_owned(), None); - } - } - }; - } - - Ok(results) -} - // Stat all entries in the `DirstateMap` and return their new metadata. 
pub fn stat_dmap_entries( dmap: &DirstateMap, - results: &HashMap<HgPathBuf, Option<HgMetadata>>, root_dir: impl AsRef<Path> + Sync, ) -> std::io::Result<Vec<(HgPathBuf, Option<HgMetadata>)>> { dmap.par_iter() @@ -81,9 +27,6 @@ |(filename, _)| -> Option< std::io::Result<(HgPathBuf, Option<HgMetadata>)> > { - if results.contains_key(filename) { - return None; - } let meta = match hg_path_to_path_buf(filename) { Ok(p) => root_dir.as_ref().join(p).symlink_metadata(), Err(e) => return Some(Err(e.into())), @@ -132,7 +75,7 @@ list_clean: bool, last_normal_time: i64, check_exec: bool, - results: HashMap<HgPathBuf, Option<HgMetadata>>, + results: Vec<(HgPathBuf, Option<HgMetadata>)>, ) -> (Vec<HgPathBuf>, StatusResult) { let mut lookup = vec![]; let mut modified = vec![]; @@ -229,14 +172,11 @@ pub fn status( dmap: &DirstateMap, root_dir: impl AsRef<Path> + Sync + Copy, - files: &[impl AsRef<HgPath> + Sync], list_clean: bool, last_normal_time: i64, check_exec: bool, ) -> std::io::Result<(Vec<HgPathBuf>, StatusResult)> { - let mut results = walk_explicit(files, &dmap, root_dir)?; - - results.extend(stat_dmap_entries(&dmap, &results, root_dir)?); + let results = stat_dmap_entries(&dmap, root_dir)?; Ok(build_response( &dmap,
--- a/rust/hg-core/src/lib.rs Wed Nov 06 16:54:34 2019 +0100 +++ b/rust/hg-core/src/lib.rs Fri Nov 08 23:26:50 2019 +0900 @@ -17,6 +17,7 @@ StateMap, StateMapIter, }; mod filepatterns; +pub mod matchers; pub mod utils; use crate::utils::hg_path::HgPathBuf;
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/rust/hg-core/src/matchers.rs Fri Nov 08 23:26:50 2019 +0900 @@ -0,0 +1,105 @@ +// matchers.rs +// +// Copyright 2019 Raphaël Gomès <rgomes@octobus.net> +// +// This software may be used and distributed according to the terms of the +// GNU General Public License version 2 or any later version. + +//! Structs and types for matching files and directories. + +use crate::utils::hg_path::{HgPath, HgPathBuf}; +use std::collections::HashSet; + +pub enum VisitChildrenSet { + /// Don't visit anything + Empty, + /// Only visit this directory + This, + /// Visit this directory and these subdirectories + /// TODO Should we implement a `NonEmptyHashSet`? + Set(HashSet<HgPathBuf>), + /// Visit this directory and all subdirectories + Recursive, +} + +pub trait Matcher { + /// Explicitly listed files + fn file_set(&self) -> HashSet<&HgPath>; + /// Returns whether `filename` is in `file_set` + fn exact_match(&self, _filename: impl AsRef<HgPath>) -> bool { + false + } + /// Returns whether `filename` is matched by this matcher + fn matches(&self, _filename: impl AsRef<HgPath>) -> bool { + false + } + /// Decides whether a directory should be visited based on whether it + /// has potential matches in it or one of its subdirectories, and + /// potentially lists which subdirectories of that directory should be + /// visited. This is based on the match's primary, included, and excluded + /// patterns. + /// + /// # Example + /// + /// Assume matchers `['path:foo/bar', 'rootfilesin:qux']`, we would + /// return the following values (assuming the implementation of + /// visit_children_set is capable of recognizing this; some implementations + /// are not). 
+ /// + /// ```ignore + /// '' -> {'foo', 'qux'} + /// 'baz' -> set() + /// 'foo' -> {'bar'} + /// // Ideally this would be `Recursive`, but since the prefix nature of + /// // matchers is applied to the entire matcher, we have to downgrade this + /// // to `This` due to the (yet to be implemented in Rust) non-prefix + /// // `RootFilesIn'-kind matcher being mixed in. + /// 'foo/bar' -> 'this' + /// 'qux' -> 'this' + /// ``` + /// # Important + /// + /// Most matchers do not know if they're representing files or + /// directories. They see `['path:dir/f']` and don't know whether `f` is a + /// file or a directory, so `visit_children_set('dir')` for most matchers + /// will return `HashSet{ HgPath { "f" } }`, but if the matcher knows it's + /// a file (like the yet to be implemented in Rust `ExactMatcher` does), + /// it may return `VisitChildrenSet::This`. + /// Do not rely on the return being a `HashSet` indicating that there are + /// no files in this dir to investigate (or equivalently that if there are + /// files to investigate in 'dir' that it will always return + /// `VisitChildrenSet::This`). + fn visit_children_set( + &self, + _directory: impl AsRef<HgPath>, + ) -> VisitChildrenSet { + VisitChildrenSet::This + } + /// Matcher will match everything and `files_set()` will be empty: + /// optimization might be possible. + fn matches_everything(&self) -> bool { + false + } + /// Matcher will match exactly the files in `files_set()`: optimization + /// might be possible. + fn is_exact(&self) -> bool { + false + } +} + +/// Matches everything. +#[derive(Debug)] +pub struct AlwaysMatcher; + +impl Matcher for AlwaysMatcher { + fn file_set(&self) -> HashSet<&HgPath> { + HashSet::new() + } + + fn visit_children_set( + &self, + _directory: impl AsRef<HgPath>, + ) -> VisitChildrenSet { + VisitChildrenSet::Recursive + } +}
--- a/rust/hg-cpython/src/dirstate.rs Wed Nov 06 16:54:34 2019 +0100 +++ b/rust/hg-cpython/src/dirstate.rs Fri Nov 08 23:26:50 2019 +0900 @@ -17,8 +17,8 @@ dirs_multiset::Dirs, dirstate_map::DirstateMap, status::status_wrapper, }; use cpython::{ - exc, PyBytes, PyDict, PyErr, PyList, PyModule, PyObject, PyResult, - PySequence, Python, + exc, PyBytes, PyDict, PyErr, PyModule, PyObject, PyResult, PySequence, + Python, }; use hg::{ utils::hg_path::HgPathBuf, DirstateEntry, DirstateParseError, EntryState, @@ -116,7 +116,6 @@ status_wrapper( dmap: DirstateMap, root_dir: PyObject, - files: PyList, list_clean: bool, last_normal_time: i64, check_exec: bool
--- a/rust/hg-cpython/src/dirstate/copymap.rs Wed Nov 06 16:54:34 2019 +0100 +++ b/rust/hg-cpython/src/dirstate/copymap.rs Fri Nov 08 23:26:50 2019 +0900 @@ -12,7 +12,7 @@ use std::cell::RefCell; use crate::dirstate::dirstate_map::DirstateMap; -use crate::ref_sharing::PyLeakedRef; +use crate::ref_sharing::PyLeaked; use hg::{utils::hg_path::HgPathBuf, CopyMapIter}; py_class!(pub class CopyMap |py| { @@ -104,14 +104,14 @@ py_shared_iterator!( CopyMapKeysIterator, - PyLeakedRef<CopyMapIter<'static>>, + PyLeaked<CopyMapIter<'static>>, CopyMap::translate_key, Option<PyBytes> ); py_shared_iterator!( CopyMapItemsIterator, - PyLeakedRef<CopyMapIter<'static>>, + PyLeaked<CopyMapIter<'static>>, CopyMap::translate_key_value, Option<(PyBytes, PyBytes)> );
--- a/rust/hg-cpython/src/dirstate/dirs_multiset.rs Wed Nov 06 16:54:34 2019 +0100 +++ b/rust/hg-cpython/src/dirstate/dirs_multiset.rs Fri Nov 08 23:26:50 2019 +0900 @@ -17,7 +17,7 @@ }; use crate::dirstate::extract_dirstate; -use crate::ref_sharing::{PyLeakedRef, PySharedRefCell}; +use crate::ref_sharing::{PyLeaked, PySharedRefCell}; use hg::{ utils::hg_path::{HgPath, HgPathBuf}, DirsMultiset, DirsMultisetIter, DirstateMapError, DirstateParseError, @@ -92,7 +92,7 @@ }) } def __iter__(&self) -> PyResult<DirsMultisetKeysIterator> { - let leaked_ref = self.inner_shared(py).leak_immutable()?; + let leaked_ref = self.inner_shared(py).leak_immutable(); DirsMultisetKeysIterator::from_inner( py, unsafe { leaked_ref.map(py, |o| o.iter()) }, @@ -123,7 +123,7 @@ py_shared_iterator!( DirsMultisetKeysIterator, - PyLeakedRef<DirsMultisetIter<'static>>, + PyLeaked<DirsMultisetIter<'static>>, Dirs::translate_key, Option<PyBytes> );
--- a/rust/hg-cpython/src/dirstate/dirstate_map.rs Wed Nov 06 16:54:34 2019 +0100 +++ b/rust/hg-cpython/src/dirstate/dirstate_map.rs Fri Nov 08 23:26:50 2019 +0900 @@ -20,7 +20,7 @@ use crate::{ dirstate::copymap::{CopyMap, CopyMapItemsIterator, CopyMapKeysIterator}, dirstate::{dirs_multiset::Dirs, make_dirstate_tuple}, - ref_sharing::{PyLeakedRef, PySharedRefCell}, + ref_sharing::{PyLeaked, PySharedRefCell}, }; use hg::{ utils::hg_path::{HgPath, HgPathBuf}, @@ -304,7 +304,7 @@ } def keys(&self) -> PyResult<DirstateMapKeysIterator> { - let leaked_ref = self.inner_shared(py).leak_immutable()?; + let leaked_ref = self.inner_shared(py).leak_immutable(); DirstateMapKeysIterator::from_inner( py, unsafe { leaked_ref.map(py, |o| o.iter()) }, @@ -312,7 +312,7 @@ } def items(&self) -> PyResult<DirstateMapItemsIterator> { - let leaked_ref = self.inner_shared(py).leak_immutable()?; + let leaked_ref = self.inner_shared(py).leak_immutable(); DirstateMapItemsIterator::from_inner( py, unsafe { leaked_ref.map(py, |o| o.iter()) }, @@ -320,7 +320,7 @@ } def __iter__(&self) -> PyResult<DirstateMapKeysIterator> { - let leaked_ref = self.inner_shared(py).leak_immutable()?; + let leaked_ref = self.inner_shared(py).leak_immutable(); DirstateMapKeysIterator::from_inner( py, unsafe { leaked_ref.map(py, |o| o.iter()) }, @@ -437,7 +437,7 @@ } def copymapiter(&self) -> PyResult<CopyMapKeysIterator> { - let leaked_ref = self.inner_shared(py).leak_immutable()?; + let leaked_ref = self.inner_shared(py).leak_immutable(); CopyMapKeysIterator::from_inner( py, unsafe { leaked_ref.map(py, |o| o.copy_map.iter()) }, @@ -445,7 +445,7 @@ } def copymapitemsiter(&self) -> PyResult<CopyMapItemsIterator> { - let leaked_ref = self.inner_shared(py).leak_immutable()?; + let leaked_ref = self.inner_shared(py).leak_immutable(); CopyMapItemsIterator::from_inner( py, unsafe { leaked_ref.map(py, |o| o.copy_map.iter()) }, @@ -483,14 +483,14 @@ py_shared_iterator!( DirstateMapKeysIterator, - 
PyLeakedRef<StateMapIter<'static>>, + PyLeaked<StateMapIter<'static>>, DirstateMap::translate_key, Option<PyBytes> ); py_shared_iterator!( DirstateMapItemsIterator, - PyLeakedRef<StateMapIter<'static>>, + PyLeaked<StateMapIter<'static>>, DirstateMap::translate_key_value, Option<(PyBytes, PyObject)> );
--- a/rust/hg-cpython/src/dirstate/status.rs Wed Nov 06 16:54:34 2019 +0100 +++ b/rust/hg-cpython/src/dirstate/status.rs Fri Nov 08 23:26:50 2019 +0900 @@ -6,7 +6,8 @@ // GNU General Public License version 2 or any later version. //! Bindings for the `hg::status` module provided by the -//! `hg-core` crate. From Python, this will be seen as `rustext.dirstate.status`. +//! `hg-core` crate. From Python, this will be seen as +//! `rustext.dirstate.status`. //! use crate::dirstate::DirstateMap; @@ -17,8 +18,8 @@ }; use hg::utils::files::get_path_from_bytes; +use hg::status; use hg::utils::hg_path::HgPath; -use hg::{status, utils::hg_path::HgPathBuf}; /// This will be useless once trait impls for collection are added to `PyBytes` /// upstream. @@ -43,7 +44,6 @@ py: Python, dmap: DirstateMap, root_dir: PyObject, - files: PyList, list_clean: bool, last_normal_time: i64, check_exec: bool, @@ -54,21 +54,9 @@ let dmap: DirstateMap = dmap.to_py_object(py); let dmap = dmap.get_inner(py); - let files: PyResult<Vec<HgPathBuf>> = files - .iter(py) - .map(|f| Ok(HgPathBuf::from_bytes(f.extract::<PyBytes>(py)?.data(py)))) - .collect(); - let files = files?; - - let (lookup, status_res) = status( - &dmap, - &root_dir, - &files, - list_clean, - last_normal_time, - check_exec, - ) - .map_err(|e| PyErr::new::<ValueError, _>(py, e.to_string()))?; + let (lookup, status_res) = + status(&dmap, &root_dir, list_clean, last_normal_time, check_exec) + .map_err(|e| PyErr::new::<ValueError, _>(py, e.to_string()))?; let modified = collect_pybytes_list(py, status_res.modified.as_ref()); let added = collect_pybytes_list(py, status_res.added.as_ref());
--- a/rust/hg-cpython/src/ref_sharing.rs Wed Nov 06 16:54:34 2019 +0100 +++ b/rust/hg-cpython/src/ref_sharing.rs Fri Nov 08 23:26:50 2019 +0900 @@ -23,53 +23,56 @@ //! Macros for use in the `hg-cpython` bridge library. use crate::exceptions::AlreadyBorrowed; -use cpython::{PyClone, PyObject, PyResult, Python}; -use std::cell::{Cell, Ref, RefCell, RefMut}; +use cpython::{exc, PyClone, PyErr, PyObject, PyResult, Python}; +use std::cell::{Ref, RefCell, RefMut}; +use std::ops::{Deref, DerefMut}; +use std::sync::atomic::{AtomicUsize, Ordering}; /// Manages the shared state between Python and Rust +/// +/// `PySharedState` is owned by `PySharedRefCell`, and is shared across its +/// derived references. The consistency of these references are guaranteed +/// as follows: +/// +/// - The immutability of `py_class!` object fields. Any mutation of +/// `PySharedRefCell` is allowed only through its `borrow_mut()`. +/// - The `py: Python<'_>` token, which makes sure that any data access is +/// synchronized by the GIL. +/// - The underlying `RefCell`, which prevents `PySharedRefCell` data from +/// being directly borrowed or leaked while it is mutably borrowed. +/// - The `borrow_count`, which is the number of references borrowed from +/// `PyLeaked`. Just like `RefCell`, mutation is prohibited while `PyLeaked` +/// is borrowed. +/// - The `generation` counter, which increments on `borrow_mut()`. `PyLeaked` +/// reference is valid only if the `current_generation()` equals to the +/// `generation` at the time of `leak_immutable()`. #[derive(Debug, Default)] struct PySharedState { - leak_count: Cell<usize>, - mutably_borrowed: Cell<bool>, + // The counter variable could be Cell<usize> since any operation on + // PySharedState is synchronized by the GIL, but being "atomic" makes + // PySharedState inherently Sync. The ordering requirement doesn't + // matter thanks to the GIL. 
+ borrow_count: AtomicUsize, + generation: AtomicUsize, } -// &PySharedState can be Send because any access to inner cells is -// synchronized by the GIL. -unsafe impl Sync for PySharedState {} - impl PySharedState { fn borrow_mut<'a, T>( &'a self, py: Python<'a>, pyrefmut: RefMut<'a, T>, - ) -> PyResult<PyRefMut<'a, T>> { - if self.mutably_borrowed.get() { - return Err(AlreadyBorrowed::new( - py, - "Cannot borrow mutably while there exists another \ - mutable reference in a Python object", - )); - } - match self.leak_count.get() { + ) -> PyResult<RefMut<'a, T>> { + match self.current_borrow_count(py) { 0 => { - self.mutably_borrowed.replace(true); - Ok(PyRefMut::new(py, pyrefmut, self)) + // Note that this wraps around to the same value if mutably + // borrowed more than usize::MAX times, which wouldn't happen + // in practice. + self.generation.fetch_add(1, Ordering::Relaxed); + Ok(pyrefmut) } - // TODO - // For now, this works differently than Python references - // in the case of iterators. - // Python does not complain when the data an iterator - // points to is modified if the iterator is never used - // afterwards. - // Here, we are stricter than this by refusing to give a - // mutable reference if it is already borrowed. - // While the additional safety might be argued for, it - // breaks valid programming patterns in Python and we need - // to fix this issue down the line. _ => Err(AlreadyBorrowed::new( py, - "Cannot borrow mutably while there are \ - immutable references in Python objects", + "Cannot borrow mutably while immutably borrowed", )), } } @@ -84,41 +87,60 @@ /// extended. Do not call this function directly. 
unsafe fn leak_immutable<T>( &self, - py: Python, - data: &PySharedRefCell<T>, - ) -> PyResult<(&'static T, &'static PySharedState)> { - if self.mutably_borrowed.get() { - return Err(AlreadyBorrowed::new( - py, - "Cannot borrow immutably while there is a \ - mutable reference in Python objects", - )); - } - // TODO: it's weird that self is data.py_shared_state. Maybe we - // can move stuff to PySharedRefCell? - let ptr = data.as_ptr(); - let state_ptr: *const PySharedState = &data.py_shared_state; - self.leak_count.replace(self.leak_count.get() + 1); - Ok((&*ptr, &*state_ptr)) + _py: Python, + data: Ref<T>, + ) -> (&'static T, &'static PySharedState) { + let ptr: *const T = &*data; + let state_ptr: *const PySharedState = self; + (&*ptr, &*state_ptr) + } + + fn current_borrow_count(&self, _py: Python) -> usize { + self.borrow_count.load(Ordering::Relaxed) + } + + fn increase_borrow_count(&self, _py: Python) { + // Note that this wraps around if there are more than usize::MAX + // borrowed references, which shouldn't happen due to memory limit. + self.borrow_count.fetch_add(1, Ordering::Relaxed); + } + + fn decrease_borrow_count(&self, _py: Python) { + let prev_count = self.borrow_count.fetch_sub(1, Ordering::Relaxed); + assert!(prev_count > 0); } - /// # Safety - /// - /// It's up to you to make sure the reference is about to be deleted - /// when updating the leak count. - fn decrease_leak_count(&self, _py: Python, mutable: bool) { - if mutable { - assert_eq!(self.leak_count.get(), 0); - assert!(self.mutably_borrowed.get()); - self.mutably_borrowed.replace(false); - } else { - let count = self.leak_count.get(); - assert!(count > 0); - self.leak_count.replace(count - 1); + fn current_generation(&self, _py: Python) -> usize { + self.generation.load(Ordering::Relaxed) + } +} + +/// Helper to keep the borrow count updated while the shared object is +/// immutably borrowed without using the `RefCell` interface. 
+struct BorrowPyShared<'a> { + py: Python<'a>, + py_shared_state: &'a PySharedState, +} + +impl<'a> BorrowPyShared<'a> { + fn new( + py: Python<'a>, + py_shared_state: &'a PySharedState, + ) -> BorrowPyShared<'a> { + py_shared_state.increase_borrow_count(py); + BorrowPyShared { + py, + py_shared_state, } } } +impl Drop for BorrowPyShared<'_> { + fn drop(&mut self) { + self.py_shared_state.decrease_borrow_count(self.py); + } +} + /// `RefCell` wrapper to be safely used in conjunction with `PySharedState`. /// /// This object can be stored in a `py_class!` object as a data field. Any @@ -144,15 +166,11 @@ self.inner.borrow() } - fn as_ptr(&self) -> *mut T { - self.inner.as_ptr() - } - // TODO: maybe this should be named as try_borrow_mut(), and use // inner.try_borrow_mut(). The current implementation panics if // self.inner has been borrowed, but returns error if py_shared_state // refuses to borrow. - fn borrow_mut<'a>(&'a self, py: Python<'a>) -> PyResult<PyRefMut<'a, T>> { + fn borrow_mut<'a>(&'a self, py: Python<'a>) -> PyResult<RefMut<'a, T>> { self.py_shared_state.borrow_mut(py, self.inner.borrow_mut()) } } @@ -181,78 +199,31 @@ self.data.borrow(self.py) } - pub fn borrow_mut(&self) -> PyResult<PyRefMut<'a, T>> { + pub fn borrow_mut(&self) -> PyResult<RefMut<'a, T>> { self.data.borrow_mut(self.py) } /// Returns a leaked reference. - pub fn leak_immutable(&self) -> PyResult<PyLeakedRef<&'static T>> { + /// + /// # Panics + /// + /// Panics if this is mutably borrowed. + pub fn leak_immutable(&self) -> PyLeaked<&'static T> { let state = &self.data.py_shared_state; + // make sure self.data isn't mutably borrowed; otherwise the + // generation number can't be trusted. 
+ let data_ref = self.borrow(); unsafe { let (static_ref, static_state_ref) = - state.leak_immutable(self.py, self.data)?; - Ok(PyLeakedRef::new( - self.py, - self.owner, - static_ref, - static_state_ref, - )) + state.leak_immutable(self.py, data_ref); + PyLeaked::new(self.py, self.owner, static_ref, static_state_ref) } } } -/// Holds a mutable reference to data shared between Python and Rust. -pub struct PyRefMut<'a, T> { - py: Python<'a>, - inner: RefMut<'a, T>, - py_shared_state: &'a PySharedState, -} - -impl<'a, T> PyRefMut<'a, T> { - // Must be constructed by PySharedState after checking its leak_count. - // Otherwise, drop() would incorrectly update the state. - fn new( - py: Python<'a>, - inner: RefMut<'a, T>, - py_shared_state: &'a PySharedState, - ) -> Self { - Self { - py, - inner, - py_shared_state, - } - } -} - -impl<'a, T> std::ops::Deref for PyRefMut<'a, T> { - type Target = RefMut<'a, T>; - - fn deref(&self) -> &Self::Target { - &self.inner - } -} -impl<'a, T> std::ops::DerefMut for PyRefMut<'a, T> { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.inner - } -} - -impl<'a, T> Drop for PyRefMut<'a, T> { - fn drop(&mut self) { - self.py_shared_state.decrease_leak_count(self.py, true); - } -} - /// Allows a `py_class!` generated struct to share references to one of its /// data members with Python. /// -/// # Warning -/// -/// TODO allow Python container types: for now, integration with the garbage -/// collector does not extend to Rust structs holding references to Python -/// objects. Should the need surface, `__traverse__` and `__clear__` will -/// need to be written as per the `rust-cpython` docs on GC integration. -/// /// # Parameters /// /// * `$name` is the same identifier used in for `py_class!` macro call. @@ -307,16 +278,22 @@ } /// Manage immutable references to `PyObject` leaked into Python iterators. -pub struct PyLeakedRef<T> { +/// +/// This reference will be invalidated once the original value is mutably +/// borrowed. 
+pub struct PyLeaked<T> { inner: PyObject, data: Option<T>, py_shared_state: &'static PySharedState, + /// Generation counter of data `T` captured when PyLeaked is created. + generation: usize, } -// DO NOT implement Deref for PyLeakedRef<T>! Dereferencing PyLeakedRef -// without taking Python GIL wouldn't be safe. +// DO NOT implement Deref for PyLeaked<T>! Dereferencing PyLeaked +// without taking Python GIL wouldn't be safe. Also, the underling reference +// is invalid if generation != py_shared_state.generation. -impl<T> PyLeakedRef<T> { +impl<T> PyLeaked<T> { /// # Safety /// /// The `py_shared_state` must be owned by the `inner` Python object. @@ -330,20 +307,39 @@ inner: inner.clone_ref(py), data: Some(data), py_shared_state, + generation: py_shared_state.current_generation(py), } } - /// Returns an immutable reference to the inner value. - pub fn get_ref<'a>(&'a self, _py: Python<'a>) -> &'a T { - self.data.as_ref().unwrap() + /// Immutably borrows the wrapped value. + /// + /// Borrowing fails if the underlying reference has been invalidated. + pub fn try_borrow<'a>( + &'a self, + py: Python<'a>, + ) -> PyResult<PyLeakedRef<'a, T>> { + self.validate_generation(py)?; + Ok(PyLeakedRef { + _borrow: BorrowPyShared::new(py, self.py_shared_state), + data: self.data.as_ref().unwrap(), + }) } - /// Returns a mutable reference to the inner value. + /// Mutably borrows the wrapped value. + /// + /// Borrowing fails if the underlying reference has been invalidated. /// /// Typically `T` is an iterator. If `T` is an immutable reference, /// `get_mut()` is useless since the inner value can't be mutated. 
- pub fn get_mut<'a>(&'a mut self, _py: Python<'a>) -> &'a mut T { - self.data.as_mut().unwrap() + pub fn try_borrow_mut<'a>( + &'a mut self, + py: Python<'a>, + ) -> PyResult<PyLeakedRefMut<'a, T>> { + self.validate_generation(py)?; + Ok(PyLeakedRefMut { + _borrow: BorrowPyShared::new(py, self.py_shared_state), + data: self.data.as_mut().unwrap(), + }) } /// Converts the inner value by the given function. @@ -351,41 +347,85 @@ /// Typically `T` is a static reference to a container, and `U` is an /// iterator of that container. /// + /// # Panics + /// + /// Panics if the underlying reference has been invalidated. + /// + /// This is typically called immediately after the `PyLeaked` is obtained. + /// In which case, the reference must be valid and no panic would occur. + /// /// # Safety /// /// The lifetime of the object passed in to the function `f` is cheated. /// It's typically a static reference, but is valid only while the - /// corresponding `PyLeakedRef` is alive. Do not copy it out of the + /// corresponding `PyLeaked` is alive. Do not copy it out of the /// function call. pub unsafe fn map<U>( mut self, py: Python, f: impl FnOnce(T) -> U, - ) -> PyLeakedRef<U> { + ) -> PyLeaked<U> { + // Needs to test the generation value to make sure self.data reference + // is still intact. + self.validate_generation(py) + .expect("map() over invalidated leaked reference"); + // f() could make the self.data outlive. That's why map() is unsafe. // In order to make this function safe, maybe we'll need a way to // temporarily restrict the lifetime of self.data and translate the // returned object back to Something<'static>. 
let new_data = f(self.data.take().unwrap()); - PyLeakedRef { + PyLeaked { inner: self.inner.clone_ref(py), data: Some(new_data), py_shared_state: self.py_shared_state, + generation: self.generation, + } + } + + fn validate_generation(&self, py: Python) -> PyResult<()> { + if self.py_shared_state.current_generation(py) == self.generation { + Ok(()) + } else { + Err(PyErr::new::<exc::RuntimeError, _>( + py, + "Cannot access to leaked reference after mutation", + )) } } } -impl<T> Drop for PyLeakedRef<T> { - fn drop(&mut self) { - // py_shared_state should be alive since we do have - // a Python reference to the owner object. Taking GIL makes - // sure that the state is only accessed by this thread. - let gil = Python::acquire_gil(); - let py = gil.python(); - if self.data.is_none() { - return; // moved to another PyLeakedRef - } - self.py_shared_state.decrease_leak_count(py, false); +/// Immutably borrowed reference to a leaked value. +pub struct PyLeakedRef<'a, T> { + _borrow: BorrowPyShared<'a>, + data: &'a T, +} + +impl<T> Deref for PyLeakedRef<'_, T> { + type Target = T; + + fn deref(&self) -> &T { + self.data + } +} + +/// Mutably borrowed reference to a leaked value. 
+pub struct PyLeakedRefMut<'a, T> { + _borrow: BorrowPyShared<'a>, + data: &'a mut T, +} + +impl<T> Deref for PyLeakedRefMut<'_, T> { + type Target = T; + + fn deref(&self) -> &T { + self.data + } +} + +impl<T> DerefMut for PyLeakedRefMut<'_, T> { + fn deref_mut(&mut self) -> &mut T { + self.data } } @@ -414,7 +454,7 @@ /// data inner: PySharedRefCell<MyStruct>; /// /// def __iter__(&self) -> PyResult<MyTypeItemsIterator> { -/// let leaked_ref = self.inner_shared(py).leak_immutable()?; +/// let leaked_ref = self.inner_shared(py).leak_immutable(); /// MyTypeItemsIterator::from_inner( /// py, /// unsafe { leaked_ref.map(py, |o| o.iter()) }, @@ -439,7 +479,7 @@ /// /// py_shared_iterator!( /// MyTypeItemsIterator, -/// PyLeakedRef<HashMap<'static, Vec<u8>, Vec<u8>>>, +/// PyLeaked<HashMap<'static, Vec<u8>, Vec<u8>>>, /// MyType::translate_key_value, /// Option<(PyBytes, PyBytes)> /// ); @@ -452,23 +492,14 @@ $success_type: ty ) => { py_class!(pub class $name |py| { - data inner: RefCell<Option<$leaked>>; + data inner: RefCell<$leaked>; def __next__(&self) -> PyResult<$success_type> { - let mut inner_opt = self.inner(py).borrow_mut(); - if let Some(leaked) = inner_opt.as_mut() { - match leaked.get_mut(py).next() { - None => { - // replace Some(inner) by None, drop $leaked - inner_opt.take(); - Ok(None) - } - Some(res) => { - $success_func(py, res) - } - } - } else { - Ok(None) + let mut leaked = self.inner(py).borrow_mut(); + let mut iter = leaked.try_borrow_mut(py)?; + match iter.next() { + None => Ok(None), + Some(res) => $success_func(py, res), } } @@ -484,7 +515,7 @@ ) -> PyResult<Self> { Self::create_instance( py, - RefCell::new(Some(leaked)), + RefCell::new(leaked), ) } } @@ -512,12 +543,94 @@ } #[test] - fn test_borrow_mut_while_leaked() { + fn test_leaked_borrow() { + let (gil, owner) = prepare_env(); + let py = gil.python(); + let leaked = owner.string_shared(py).leak_immutable(); + let leaked_ref = leaked.try_borrow(py).unwrap(); + assert_eq!(*leaked_ref, 
"new"); + } + + #[test] + fn test_leaked_borrow_mut() { + let (gil, owner) = prepare_env(); + let py = gil.python(); + let leaked = owner.string_shared(py).leak_immutable(); + let mut leaked_iter = unsafe { leaked.map(py, |s| s.chars()) }; + let mut leaked_ref = leaked_iter.try_borrow_mut(py).unwrap(); + assert_eq!(leaked_ref.next(), Some('n')); + assert_eq!(leaked_ref.next(), Some('e')); + assert_eq!(leaked_ref.next(), Some('w')); + assert_eq!(leaked_ref.next(), None); + } + + #[test] + fn test_leaked_borrow_after_mut() { + let (gil, owner) = prepare_env(); + let py = gil.python(); + let leaked = owner.string_shared(py).leak_immutable(); + owner.string_shared(py).borrow_mut().unwrap().clear(); + assert!(leaked.try_borrow(py).is_err()); + } + + #[test] + fn test_leaked_borrow_mut_after_mut() { + let (gil, owner) = prepare_env(); + let py = gil.python(); + let leaked = owner.string_shared(py).leak_immutable(); + let mut leaked_iter = unsafe { leaked.map(py, |s| s.chars()) }; + owner.string_shared(py).borrow_mut().unwrap().clear(); + assert!(leaked_iter.try_borrow_mut(py).is_err()); + } + + #[test] + #[should_panic(expected = "map() over invalidated leaked reference")] + fn test_leaked_map_after_mut() { + let (gil, owner) = prepare_env(); + let py = gil.python(); + let leaked = owner.string_shared(py).leak_immutable(); + owner.string_shared(py).borrow_mut().unwrap().clear(); + let _leaked_iter = unsafe { leaked.map(py, |s| s.chars()) }; + } + + #[test] + fn test_borrow_mut_while_leaked_ref() { let (gil, owner) = prepare_env(); let py = gil.python(); assert!(owner.string_shared(py).borrow_mut().is_ok()); - let _leaked = owner.string_shared(py).leak_immutable().unwrap(); - // TODO: will be allowed - assert!(owner.string_shared(py).borrow_mut().is_err()); + let leaked = owner.string_shared(py).leak_immutable(); + { + let _leaked_ref = leaked.try_borrow(py).unwrap(); + assert!(owner.string_shared(py).borrow_mut().is_err()); + { + let _leaked_ref2 = 
leaked.try_borrow(py).unwrap(); + assert!(owner.string_shared(py).borrow_mut().is_err()); + } + assert!(owner.string_shared(py).borrow_mut().is_err()); + } + assert!(owner.string_shared(py).borrow_mut().is_ok()); + } + + #[test] + fn test_borrow_mut_while_leaked_ref_mut() { + let (gil, owner) = prepare_env(); + let py = gil.python(); + assert!(owner.string_shared(py).borrow_mut().is_ok()); + let leaked = owner.string_shared(py).leak_immutable(); + let mut leaked_iter = unsafe { leaked.map(py, |s| s.chars()) }; + { + let _leaked_ref = leaked_iter.try_borrow_mut(py).unwrap(); + assert!(owner.string_shared(py).borrow_mut().is_err()); + } + assert!(owner.string_shared(py).borrow_mut().is_ok()); + } + + #[test] + #[should_panic(expected = "mutably borrowed")] + fn test_leak_while_borrow_mut() { + let (gil, owner) = prepare_env(); + let py = gil.python(); + let _mut_ref = owner.string_shared(py).borrow_mut(); + owner.string_shared(py).leak_immutable(); } }
--- a/tests/hghave.py Wed Nov 06 16:54:34 2019 +0100 +++ b/tests/hghave.py Fri Nov 08 23:26:50 2019 +0900 @@ -431,7 +431,8 @@ @check("symlink", "symbolic links") def has_symlink(): - if getattr(os, "symlink", None) is None: + # mercurial.windows.checklink() is a hard 'no' at the moment + if os.name == 'nt' or getattr(os, "symlink", None) is None: return False name = tempfile.mktemp(dir='.', prefix=tempprefix) try: @@ -999,3 +1000,11 @@ version = matchoutput(blackcmd, version_regex) sv = distutils.version.StrictVersion return version and sv(_strpath(version.group(1))) >= sv('19.10b0') + + +@check('pytype', 'the pytype type checker') +def has_pytype(): + pytypecmd = 'pytype --version' + version = matchoutput(pytypecmd, b'[0-9a-b.]+') + sv = distutils.version.StrictVersion + return version and sv(_strpath(version.group(0))) >= sv('2019.10.17')
--- a/tests/test-byteify-strings.t Wed Nov 06 16:54:34 2019 +0100 +++ b/tests/test-byteify-strings.t Fri Nov 08 23:26:50 2019 +0900 @@ -1,7 +1,7 @@ #require py37 $ byteify_strings () { - > $PYTHON "$TESTDIR/../contrib/byteify-strings.py" "$@" + > "$PYTHON" "$TESTDIR/../contrib/byteify-strings.py" "$@" > } Test version
--- a/tests/test-censor.t Wed Nov 06 16:54:34 2019 +0100 +++ b/tests/test-censor.t Fri Nov 08 23:26:50 2019 +0900 @@ -442,6 +442,33 @@ checking files checked 14 changesets with 15 changes to 2 files +Grepping only warns, doesn't error out + + $ cd ../rpull + $ hg grep 'Normal file' + bystander:Normal file v2 + $ hg grep nothing + target:Re-sanitized; nothing to see here + $ hg grep --diff 'Normal file' + cannot search in censored file: target:7 + cannot search in censored file: target:10 + cannot search in censored file: target:12 + bystander:6:-:Normal file v2 + cannot search in censored file: target:1 + cannot search in censored file: target:2 + cannot search in censored file: target:3 + bystander:2:-:Normal file here + bystander:2:+:Normal file v2 + bystander:0:+:Normal file here + $ hg grep --diff nothing + cannot search in censored file: target:7 + cannot search in censored file: target:10 + cannot search in censored file: target:12 + target:13:+:Re-sanitized; nothing to see here + cannot search in censored file: target:1 + cannot search in censored file: target:2 + cannot search in censored file: target:3 + Censored nodes can be imported on top of censored nodes, consecutively $ hg init ../rimport
--- a/tests/test-contrib-perf.t Wed Nov 06 16:54:34 2019 +0100 +++ b/tests/test-contrib-perf.t Fri Nov 08 23:26:50 2019 +0900 @@ -248,6 +248,7 @@ $ hg perfrevset 'all()' $ hg perfstartup $ hg perfstatus + $ hg perfstatus --dirstate $ hg perftags $ hg perftemplating $ hg perfvolatilesets
# tests/test-dirs.py — file newly added by this changeset (reconstructed from
# the collapsed diff hunk; every source line carried a '+' prefix).
from __future__ import absolute_import

import unittest

import silenttestrunner

from mercurial import util


class dirstests(unittest.TestCase):
    """Unit tests for util.dirs (directory-name accounting helper)."""

    def testdirs(self):
        # Iterating the dirs object after addpath() yields the ancestor
        # directories of the added path — including the empty root
        # component b'' — but not the full path itself (per the expected
        # lists below).
        for case, want in [
            (b'a/a/a', [b'a', b'a/a', b'']),
            (b'alpha/beta/gamma', [b'', b'alpha', b'alpha/beta']),
        ]:
            d = util.dirs({})
            d.addpath(case)
            self.assertEqual(sorted(d), sorted(want))

    def testinvalid(self):
        # A doubled slash (i.e. an empty path component) must be rejected
        # with ValueError rather than silently accepted.
        with self.assertRaises(ValueError):
            d = util.dirs({})
            d.addpath(b'a//b')


if __name__ == '__main__':
    silenttestrunner.main(__name__)
--- a/tests/test-fileset.t Wed Nov 06 16:54:34 2019 +0100 +++ b/tests/test-fileset.t Fri Nov 08 23:26:50 2019 +0900 @@ -853,7 +853,7 @@ M b2 A 1k A 2k - A b2link (no-windows !) + A b2link (symlink !) A bin A c1 A con.xml (no-windows !) @@ -864,7 +864,7 @@ M b2 A 1k A 2k - A b2link (no-windows !) + A b2link (symlink !) A bin A c1 A con.xml (no-windows !) @@ -997,7 +997,7 @@ A .hgsubstate A 1k A 2k - A b2link (no-windows !) + A b2link (symlink !) A bin A c1 A con.xml (no-windows !) @@ -1006,7 +1006,7 @@ .hgsubstate 1k 2k - b2link (no-windows !) + b2link (symlink !) bin c1 con.xml (no-windows !)