changeset 44470:9d2b2df2c2ba
cleanup: run pyupgrade on our source tree to clean up varying things
Built with:
hg files 'set:**.py - mercurial/thirdparty/** - "contrib/python-zstandard/**" - hgext/fsmonitor/pywatchman/**' | xargs pyupgrade --keep-percent-format --keep-extraneous-parens
and then blackened. pyupgrade comes from
https://github.com/asottile/pyupgrade, with a patch that lets me preserve
extraneous parens (which we use for marking strings that shouldn't be
translated). It lets us clean up a bunch of idioms that have cruftily
accumulated over the years.
# skip-blame no-op automated code cleanups
Differential Revision: https://phab.mercurial-scm.org/D8255
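The diff below is long but mechanical: nearly every hunk is one of a handful of rewrites that pyupgrade performs. As a rough orientation, here is a minimal before/after sketch of those idioms; the variable names and values are made up for illustration and do not come from the files touched by this change.

# Hypothetical data, purely for illustration.
names = ["alice", "bob"]
node = "9d2b2df2c2ba"

# Before: generator expressions fed to set()/dict(), set([...]) and
# set((x,)) literals, and explicitly numbered str.format() fields.
users = set(n.upper() for n in names)
owners = dict((n, node) for n in names)
pending = set([node])
msg = "unknown revision {0}".format(node)

# After pyupgrade: set/dict comprehensions, set literals, and
# auto-numbered format fields.
users = {n.upper() for n in names}
owners = {n: node for n in names}
pending = {node}
msg = "unknown revision {}".format(node)

assert users == {"ALICE", "BOB"}
assert owners == {"alice": node, "bob": node}
assert pending == {node}
assert msg == "unknown revision 9d2b2df2c2ba"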
--- a/contrib/benchmarks/__init__.py Fri Mar 06 10:52:44 2020 +0100
+++ b/contrib/benchmarks/__init__.py Fri Mar 06 13:27:41 2020 -0500
@@ -81,7 +81,7 @@
        output = ui.popbuffer()
        match = outputre.search(output)
        if not match:
-            raise ValueError("Invalid output {0}".format(output))
+            raise ValueError("Invalid output {}".format(output))
        return float(match.group(1))
--- a/contrib/check-py3-compat.py Fri Mar 06 10:52:44 2020 +0100
+++ b/contrib/check-py3-compat.py Fri Mar 06 13:27:41 2020 -0500
@@ -32,7 +32,7 @@
    for node in ast.walk(root):
        if isinstance(node, ast.ImportFrom):
            if node.module == '__future__':
-                futures |= set(n.name for n in node.names)
+                futures |= {n.name for n in node.names}
        elif isinstance(node, ast.Print):
            haveprint = True
--- a/contrib/perf.py Fri Mar 06 10:52:44 2020 +0100
+++ b/contrib/perf.py Fri Mar 06 13:27:41 2020 -0500
@@ -2523,7 +2523,7 @@
    }

    for diffopt in ('', 'w', 'b', 'B', 'wB'):
-        opts = dict((options[c], b'1') for c in diffopt)
+        opts = {options[c]: b'1' for c in diffopt}

        def d():
            ui.pushbuffer()
@@ -3048,7 +3048,7 @@
    # Verify engines argument.
    if engines:
-        engines = set(e.strip() for e in engines.split(b','))
+        engines = {e.strip() for e in engines.split(b',')}
        for engine in engines:
            try:
                util.compressionengines[engine]
--- a/hgext/absorb.py Fri Mar 06 10:52:44 2020 +0100
+++ b/hgext/absorb.py Fri Mar 06 13:27:41 2020 -0500
@@ -407,7 +407,7 @@
        involved = [
            annotated[i] for i in nearbylinenums if annotated[i][0] != 1
        ]
-        involvedrevs = list(set(r for r, l in involved))
+        involvedrevs = list({r for r, l in involved})
        newfixups = []
        if len(involvedrevs) == 1 and self._iscontinuous(a1, a2 - 1, True):
            # chunk belongs to a single revision
@@ -734,10 +734,10 @@
    @property
    def chunkstats(self):
        """-> {path: chunkstats}. collect chunkstats from filefixupstates"""
-        return dict(
-            (path, state.chunkstats)
+        return {
+            path: state.chunkstats
            for path, state in pycompat.iteritems(self.fixupmap)
-        )
+        }

    def commit(self):
        """commit changes. update self.finalnode, self.replacemap"""
--- a/hgext/closehead.py Fri Mar 06 10:52:44 2020 +0100
+++ b/hgext/closehead.py Fri Mar 06 13:27:41 2020 -0500
@@ -76,7 +76,7 @@
        heads = []
        for branch in repo.branchmap():
            heads.extend(repo.branchheads(branch))
-        heads = set(repo[h].rev() for h in heads)
+        heads = {repo[h].rev() for h in heads}
        for rev in revs:
            if rev not in heads:
                raise error.Abort(_(b'revision is not an open head: %d') % rev)
--- a/hgext/convert/hg.py Fri Mar 06 10:52:44 2020 +0100
+++ b/hgext/convert/hg.py Fri Mar 06 13:27:41 2020 -0500
@@ -677,13 +677,9 @@
            for t in self.repo.tagslist()
            if self.repo.tagtype(t[0]) == b'global'
        ]
-        return dict(
-            [
-                (name, nodemod.hex(node))
-                for name, node in tags
-                if self.keep(node)
-            ]
-        )
+        return {
+            name: nodemod.hex(node) for name, node in tags if self.keep(node)
+        }

    def getchangedfiles(self, rev, i):
        ctx = self._changectx(rev)
--- a/hgext/convert/subversion.py Fri Mar 06 10:52:44 2020 +0100
+++ b/hgext/convert/subversion.py Fri Mar 06 13:27:41 2020 -0500
@@ -710,11 +710,11 @@
                # Here/tags/tag.1 discarded as well as its children.
                # It happens with tools like cvs2svn. Such tags cannot
                # be represented in mercurial.
-                addeds = dict(
-                    (p, e.copyfrom_path)
+                addeds = {
+                    p: e.copyfrom_path
                    for p, e in pycompat.iteritems(origpaths)
                    if e.action == b'A' and e.copyfrom_path
-                )
+                }
                badroots = set()
                for destroot in addeds:
                    for source, sourcerev, dest in pendings:
--- a/hgext/eol.py Fri Mar 06 10:52:44 2020 +0100
+++ b/hgext/eol.py Fri Mar 06 13:27:41 2020 -0500
@@ -221,7 +221,7 @@
        self.match = match.match(root, b'', [], include, exclude)

    def copytoui(self, ui):
-        newpatterns = set(pattern for pattern, key, m in self.patterns)
+        newpatterns = {pattern for pattern, key, m in self.patterns}
        for section in (b'decode', b'encode'):
            for oldpattern, _filter in ui.configitems(section):
                if oldpattern not in newpatterns:
--- a/hgext/fastannotate/commands.py Fri Mar 06 10:52:44 2020 +0100
+++ b/hgext/fastannotate/commands.py Fri Mar 06 13:27:41 2020 -0500
@@ -233,7 +233,7 @@
            showlines=(showlines and not showdeleted),
        )
        if showdeleted:
-            existinglines = set((l[0], l[1]) for l in result)
+            existinglines = {(l[0], l[1]) for l in result}
            result = a.annotatealllines(
                rev, showpath=showpath, showlines=showlines
            )
--- a/hgext/fsmonitor/__init__.py Fri Mar 06 10:52:44 2020 +0100
+++ b/hgext/fsmonitor/__init__.py Fri Mar 06 13:27:41 2020 -0500
@@ -397,7 +397,7 @@
    # for file paths which require normalization and we encounter a case
    # collision, we store our own foldmap
    if normalize:
-        foldmap = dict((normcase(k), k) for k in results)
+        foldmap = {normcase(k): k for k in results}

    switch_slashes = pycompat.ossep == b'\\'
    # The order of the results is, strictly speaking, undefined.
@@ -459,22 +459,16 @@
    if normalize:
        # any notable files that have changed case will already be handled
        # above, so just check membership in the foldmap
-        notefiles = set(
-            (
-                normalize(f, True, True)
-                for f in notefiles
-                if normcase(f) not in foldmap
-            )
-        )
-        visit = set(
-            (
-                f
+        notefiles = {
+            normalize(f, True, True)
            for f in notefiles
-                if (
-                    f not in results and matchfn(f) and (f in dmap or not ignore(f))
-                )
-            )
-        )
+            if normcase(f) not in foldmap
+        }
+        visit = {
+            f
+            for f in notefiles
+            if (f not in results and matchfn(f) and (f in dmap or not ignore(f)))
+        }

    if not fresh_instance:
        if matchalways:
--- a/hgext/histedit.py Fri Mar 06 10:52:44 2020 +0100
+++ b/hgext/histedit.py Fri Mar 06 13:27:41 2020 -0500
@@ -835,10 +835,10 @@
            return ctx, [(self.node, (parentctxnode,))]

        parentctx = repo[parentctxnode]
-        newcommits = set(
+        newcommits = {
            c.node()
            for c in repo.set(b'(%d::. - %d)', parentctx.rev(), parentctx.rev())
-        )
+        }
        if not newcommits:
            repo.ui.warn(
                _(
@@ -2412,7 +2412,7 @@
    Will abort if there are to many or too few rules, a malformed rule,
    or a rule on a changeset outside of the user-given range.
    """
-    expected = set(c.node() for c in ctxs)
+    expected = {c.node() for c in ctxs}
    seen = set()
    prev = None
--- a/hgext/largefiles/basestore.py Fri Mar 06 10:52:44 2020 +0100
+++ b/hgext/largefiles/basestore.py Fri Mar 06 13:27:41 2020 -0500
@@ -67,7 +67,7 @@
        ui = self.ui

        at = 0
-        available = self.exists(set(hash for (_filename, hash) in files))
+        available = self.exists({hash for (_filename, hash) in files})
        with ui.makeprogress(
            _(b'getting largefiles'), unit=_(b'files'), total=len(files)
        ) as progress:
--- a/hgext/largefiles/overrides.py Fri Mar 06 10:52:44 2020 +0100
+++ b/hgext/largefiles/overrides.py Fri Mar 06 13:27:41 2020 -0500
@@ -1564,11 +1564,11 @@
def overriderollback(orig, ui, repo, **opts):
    with repo.wlock():
        before = repo.dirstate.parents()
-        orphans = set(
+        orphans = {
            f
            for f in repo.dirstate
            if lfutil.isstandin(f) and repo.dirstate[f] != b'r'
-        )
+        }
        result = orig(ui, repo, **opts)
        after = repo.dirstate.parents()
        if before == after:
--- a/hgext/largefiles/remotestore.py Fri Mar 06 10:52:44 2020 +0100
+++ b/hgext/largefiles/remotestore.py Fri Mar 06 13:27:41 2020 -0500
@@ -48,12 +48,12 @@
        )

    def exists(self, hashes):
-        return dict(
-            (h, s == 0)
+        return {
+            h: s == 0
            for (h, s) in pycompat.iteritems(
                self._stat(hashes)
            )  # dict-from-generator
-        )
+        }

    def sendfile(self, filename, hash):
        self.ui.debug(b'remotestore: sendfile(%s, %s)\n' % (filename, hash))
--- a/hgext/mq.py Fri Mar 06 10:52:44 2020 +0100
+++ b/hgext/mq.py Fri Mar 06 13:27:41 2020 -0500
@@ -1162,7 +1162,7 @@
        if unknown:
            if numrevs:
-                rev = dict((entry.name, entry.node) for entry in qfinished)
+                rev = {entry.name: entry.node for entry in qfinished}
                for p in unknown:
                    msg = _(b'revision %s refers to unknown patches: %s\n')
                    self.ui.warn(msg % (short(rev[p]), p))
@@ -3361,7 +3361,7 @@
        ui.write(b'\n')

    q = repo.mq
-    applied = set(p.name for p in q.applied)
+    applied = {p.name for p in q.applied}
    patch = None
    args = list(args)
    if opts.get('list'):
--- a/hgext/phabricator.py Fri Mar 06 10:52:44 2020 +0100
+++ b/hgext/phabricator.py Fri Mar 06 13:27:41 2020 -0500
@@ -483,7 +483,7 @@
        ]

        # "precursors" as known by Phabricator
-        phprecset = set(getnode(d) for d in diffs)
+        phprecset = {getnode(d) for d in diffs}

        # Ignore if precursors (Phabricator and local repo) do not overlap,
        # and force is not set (when commit message says nothing)
@@ -1062,7 +1062,7 @@
    # username not found is not an error of the API. So check if we have missed
    # some names here.
    data = result[b'data']
-    resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
+    resolved = {entry[b'fields'][b'username'].lower() for entry in data}
    unresolved = set(names) - resolved
    if unresolved:
        raise error.Abort(
@@ -1635,7 +1635,7 @@
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs
-    diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
+    diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})

    patches = []
@@ -1792,11 +1792,11 @@
    """Phabricator differiential status"""
    revs = repo.revs('sort(_underway(), topo)')
    drevmap = getdrevmap(repo, revs)
-    unknownrevs, drevids, revsbydrevid = [], set([]), {}
+    unknownrevs, drevids, revsbydrevid = [], set(), {}
    for rev, drevid in pycompat.iteritems(drevmap):
        if drevid is not None:
            drevids.add(drevid)
-            revsbydrevid.setdefault(drevid, set([])).add(rev)
+            revsbydrevid.setdefault(drevid, set()).add(rev)
        else:
            unknownrevs.append(rev)
--- a/hgext/rebase.py Fri Mar 06 10:52:44 2020 +0100
+++ b/hgext/rebase.py Fri Mar 06 13:27:41 2020 -0500
@@ -1936,7 +1936,7 @@
    # applied patch. But it prevents messing up the working directory when
    # a partially completed rebase is blocked by mq.
    if b'qtip' in repo.tags():
-        mqapplied = set(repo[s.node].rev() for s in repo.mq.applied)
+        mqapplied = {repo[s.node].rev() for s in repo.mq.applied}
        if set(destmap.values()) & mqapplied:
            raise error.Abort(_(b'cannot rebase onto an applied mq patch'))

@@ -2121,7 +2121,7 @@
def _filterobsoleterevs(repo, revs):
    """returns a set of the obsolete revisions in revs"""
-    return set(r for r in revs if repo[r].obsolete())
+    return {r for r in revs if repo[r].obsolete()}


def _computeobsoletenotrebased(repo, rebaseobsrevs, destmap):
--- a/hgext/remotefilelog/__init__.py Fri Mar 06 10:52:44 2020 +0100
+++ b/hgext/remotefilelog/__init__.py Fri Mar 06 13:27:41 2020 -0500
@@ -737,7 +737,7 @@
        # "link" is actually wrong here (it is set to len(changelog))
        # if changelog remains unchanged, skip writing file revisions
        # but still do a sanity check about pending multiple revisions
-        if len(set(x[3] for x in pendingfilecommits)) > 1:
+        if len({x[3] for x in pendingfilecommits}) > 1:
            raise error.ProgrammingError(
                b'pending multiple integer revisions are not supported'
            )
--- a/hgext/remotefilelog/basepack.py Fri Mar 06 10:52:44 2020 +0100
+++ b/hgext/remotefilelog/basepack.py Fri Mar 06 13:27:41 2020 -0500
@@ -101,7 +101,7 @@
                self._lastpack = pack
                yield pack

-        cachedpacks = set(pack for pack in self._lrucache)
+        cachedpacks = {pack for pack in self._lrucache}
        # Yield for paths not in the cache.
        for pack in self._packs - cachedpacks:
            self._lastpack = pack
@@ -259,7 +259,7 @@
        newpacks = []
        if now > self.lastrefresh + REFRESHRATE:
            self.lastrefresh = now
-            previous = set(p.path for p in self.packs)
+            previous = {p.path for p in self.packs}
            for filepath, __, __ in self._getavailablepackfilessorted():
                if filepath not in previous:
                    newpack = self.getpack(filepath)
--- a/hgext/remotefilelog/contentstore.py Fri Mar 06 10:52:44 2020 +0100
+++ b/hgext/remotefilelog/contentstore.py Fri Mar 06 13:27:41 2020 -0500
@@ -300,7 +300,7 @@
        rl = self._revlog(name)
        ancestors = {}
-        missing = set((node,))
+        missing = {node}
        for ancrev in rl.ancestors([rl.rev(node)], inclusive=True):
            ancnode = rl.node(ancrev)
            missing.discard(ancnode)
--- a/hgext/remotefilelog/datapack.py Fri Mar 06 10:52:44 2020 +0100
+++ b/hgext/remotefilelog/datapack.py Fri Mar 06 13:27:41 2020 -0500
@@ -271,9 +271,9 @@
    def cleanup(self, ledger):
        entries = ledger.sources.get(self, [])
        allkeys = set(self)
-        repackedkeys = set(
+        repackedkeys = {
            (e.filename, e.node) for e in entries if e.datarepacked or e.gced
-        )
+        }

        if len(allkeys - repackedkeys) == 0:
            if self.path not in ledger.created:
--- a/hgext/remotefilelog/historypack.py Fri Mar 06 10:52:44 2020 +0100
+++ b/hgext/remotefilelog/historypack.py Fri Mar 06 13:27:41 2020 -0500
@@ -132,7 +132,7 @@
        known = set()
        section = self._findsection(name)
        filename, offset, size, nodeindexoffset, nodeindexsize = section
-        pending = set((node,))
+        pending = {node}
        o = 0
        while o < size:
            if not pending:
@@ -291,9 +291,9 @@
    def cleanup(self, ledger):
        entries = ledger.sources.get(self, [])
        allkeys = set(self)
-        repackedkeys = set(
+        repackedkeys = {
            (e.filename, e.node) for e in entries if e.historyrepacked
-        )
+        }

        if len(allkeys - repackedkeys) == 0:
            if self.path not in ledger.created:
@@ -452,7 +452,7 @@
        sectionstart = self.packfp.tell()

        # Write the file section content
-        entrymap = dict((e[0], e) for e in entries)
+        entrymap = {e[0]: e for e in entries}

        def parentfunc(node):
            x, p1, p2, x, x, x = entrymap[node]
--- a/hgext/remotefilelog/remotefilelog.py Fri Mar 06 10:52:44 2020 +0100
+++ b/hgext/remotefilelog/remotefilelog.py Fri Mar 06 13:27:41 2020 -0500
@@ -429,7 +429,7 @@
            return nullid

        revmap, parentfunc = self._buildrevgraph(a, b)
-        nodemap = dict(((v, k) for (k, v) in pycompat.iteritems(revmap)))
+        nodemap = {v: k for (k, v) in pycompat.iteritems(revmap)}

        ancs = ancestor.ancestors(parentfunc, revmap[a], revmap[b])
        if ancs:
@@ -444,7 +444,7 @@
            return nullid

        revmap, parentfunc = self._buildrevgraph(a, b)
-        nodemap = dict(((v, k) for (k, v) in pycompat.iteritems(revmap)))
+        nodemap = {v: k for (k, v) in pycompat.iteritems(revmap)}

        ancs = ancestor.commonancestorsheads(parentfunc, revmap[a], revmap[b])
        return map(nodemap.__getitem__, ancs)
--- a/hgext/remotefilelog/repack.py Fri Mar 06 10:52:44 2020 +0100
+++ b/hgext/remotefilelog/repack.py Fri Mar 06 13:27:41 2020 -0500
@@ -321,7 +321,7 @@
def _allpackfileswithsuffix(files, packsuffix, indexsuffix):
    result = []
-    fileset = set(fn for fn, mode, stat in files)
+    fileset = {fn for fn, mode, stat in files}
    for filename, mode, stat in files:
        if not filename.endswith(packsuffix):
            continue
--- a/hgext/schemes.py Fri Mar 06 10:52:44 2020 +0100
+++ b/hgext/schemes.py Fri Mar 06 13:27:41 2020 -0500
@@ -97,7 +97,7 @@
            parts = parts[:-1]
        else:
            tail = b''
-        context = dict((b'%d' % (i + 1), v) for i, v in enumerate(parts))
+        context = {b'%d' % (i + 1): v for i, v in enumerate(parts)}
        return b''.join(self.templater.process(self.url, context)) + tail
--- a/hgext/sparse.py Fri Mar 06 10:52:44 2020 +0100
+++ b/hgext/sparse.py Fri Mar 06 13:27:41 2020 -0500
@@ -246,7 +246,7 @@
        if changedfiles is not None:
            # In _rebuild, these files will be deleted from the dirstate
            # when they are not found to be in allfiles
-            dirstatefilestoremove = set(f for f in self if not matcher(f))
+            dirstatefilestoremove = {f for f in self if not matcher(f)}
            changedfiles = dirstatefilestoremove.union(changedfiles)

        return orig(self, parent, allfiles, changedfiles)
--- a/hgext/strip.py Fri Mar 06 10:52:44 2020 +0100
+++ b/hgext/strip.py Fri Mar 06 13:27:41 2020 -0500
@@ -228,7 +228,7 @@
            for p in repo.dirstate.parents()
        )

-        rootnodes = set(cl.node(r) for r in roots)
+        rootnodes = {cl.node(r) for r in roots}

        q = getattr(repo, 'mq', None)
        if q is not None and q.applied:
--- a/hgext/transplant.py Fri Mar 06 10:52:44 2020 +0100
+++ b/hgext/transplant.py Fri Mar 06 13:27:41 2020 -0500
@@ -840,10 +840,10 @@
        tf = tp.transplantfilter(repo, source, p1)
        if opts.get(b'prune'):
-            prune = set(
+            prune = {
                source[r].node()
                for r in scmutil.revrange(source, opts.get(b'prune'))
-            )
+            }
            matchfn = lambda x: tf(x) and x not in prune
        else:
            matchfn = tf
--- a/hgext/uncommit.py Fri Mar 06 10:52:44 2020 +0100
+++ b/hgext/uncommit.py Fri Mar 06 13:27:41 2020 -0500
@@ -65,7 +65,7 @@
    base = ctx.p1()
    # ctx
    initialfiles = set(ctx.files())
-    exclude = set(f for f in initialfiles if match(f))
+    exclude = {f for f in initialfiles if match(f)}

    # No files matched commit, so nothing excluded
    if not exclude:
@@ -78,9 +78,9 @@
        files = initialfiles - exclude
    # Filter copies
    copied = copiesmod.pathcopies(base, ctx)
-    copied = dict(
-        (dst, src) for dst, src in pycompat.iteritems(copied) if dst in files
-    )
+    copied = {
+        dst: src for dst, src in pycompat.iteritems(copied) if dst in files
+    }

    def filectxfn(repo, memctx, path, contentctx=ctx, redirect=()):
        if path not in contentctx:
--- a/i18n/polib.py Fri Mar 06 10:52:44 2020 +0100
+++ b/i18n/polib.py Fri Mar 06 13:27:41 2020 -0500
@@ -722,8 +722,8 @@
        object POFile, the reference catalog.
        """
        # Store entries in dict/set for faster access
-        self_entries = dict((entry.msgid, entry) for entry in self)
-        refpot_msgids = set(entry.msgid for entry in refpot)
+        self_entries = {entry.msgid: entry for entry in self}
+        refpot_msgids = {entry.msgid for entry in refpot}
        # Merge entries that are in the refpot
        for entry in refpot:
            e = self_entries.get(entry.msgid)
@@ -1808,9 +1808,9 @@
                entry = self._build_entry(
                    msgid=msgid_tokens[0],
                    msgid_plural=msgid_tokens[1],
-                    msgstr_plural=dict(
-                        (k, v) for k, v in enumerate(msgstr.split(b('\0')))
-                    ),
+                    msgstr_plural={
+                        k: v for k, v in enumerate(msgstr.split(b('\0')))
+                    },
                )
            else:
                entry = self._build_entry(msgid=msgid, msgstr=msgstr)
--- a/mercurial/ancestor.py Fri Mar 06 10:52:44 2020 +0100
+++ b/mercurial/ancestor.py Fri Mar 06 13:27:41 2020 -0500
@@ -138,7 +138,7 @@
            k = 0
            for i in interesting:
                k |= i
-            return set(n for (i, n) in mapping if k & i)
+            return {n for (i, n) in mapping if k & i}

    gca = commonancestorsheads(pfunc, *orignodes)
--- a/mercurial/branchmap.py Fri Mar 06 10:52:44 2020 +0100
+++ b/mercurial/branchmap.py Fri Mar 06 13:27:41 2020 -0500
@@ -446,7 +446,7 @@
        # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
        for branch, newheadrevs in pycompat.iteritems(newbranches):
            bheads = self._entries.setdefault(branch, [])
-            bheadset = set(cl.rev(node) for node in bheads)
+            bheadset = {cl.rev(node) for node in bheads}

            # This have been tested True on all internal usage of this function.
            # run it again in case of doubt
@@ -582,7 +582,7 @@
    @util.propertycache
    def _namesreverse(self):
-        return dict((b, r) for r, b in enumerate(self._names))
+        return {b: r for r, b in enumerate(self._names)}

    def branchinfo(self, rev):
        """Return branch name and close flag for rev, using and updating
--- a/mercurial/changegroup.py Fri Mar 06 10:52:44 2020 +0100
+++ b/mercurial/changegroup.py Fri Mar 06 13:27:41 2020 -0500
@@ -993,7 +993,7 @@
            ]

            manifests.clear()
-            clrevs = set(cl.rev(x) for x in clnodes)
+            clrevs = {cl.rev(x) for x in clnodes}

        it = self.generatefiles(
            changedfiles,
@@ -1282,9 +1282,7 @@
            flinkrev = store.linkrev
            fnode = store.node
            revs = ((r, flinkrev(r)) for r in store)
-            return dict(
-                (fnode(r), cln(lr)) for r, lr in revs if lr in clrevs
-            )
+            return {fnode(r): cln(lr) for r, lr in revs if lr in clrevs}

        clrevtolocalrev = {}
--- a/mercurial/cmdutil.py Fri Mar 06 10:52:44 2020 +0100
+++ b/mercurial/cmdutil.py Fri Mar 06 13:27:41 2020 -0500
@@ -3125,7 +3125,7 @@
        ms = mergemod.mergestate.read(repo)
        mergeutil.checkunresolved(ms)

-        filestoamend = set(f for f in wctx.files() if matcher(f))
+        filestoamend = {f for f in wctx.files() if matcher(f)}

        changes = len(filestoamend) > 0
        if changes:
@@ -3917,7 +3917,7 @@
            # Apply changes
            fp = stringio()
            # chunks are serialized per file, but files aren't sorted
-            for f in sorted(set(c.header.filename() for c in chunks if ishunk(c))):
+            for f in sorted({c.header.filename() for c in chunks if ishunk(c)}):
                prntstatusmsg(b'revert', f)
            files = set()
            for c in chunks:
--- a/mercurial/commands.py Fri Mar 06 10:52:44 2020 +0100
+++ b/mercurial/commands.py Fri Mar 06 13:27:41 2020 -0500
@@ -3721,9 +3721,9 @@
    heads = [repo[h] for h in heads]

    if branchrevs:
-        branches = set(
+        branches = {
            repo[r].branch() for r in scmutil.revrange(repo, branchrevs)
-        )
+        }
        heads = [h for h in heads if h.branch() in branches]

    if opts.get(b'active') and branchrevs:
@@ -3731,7 +3731,7 @@
        heads = [h for h in heads if h.node() in dagheads]

    if branchrevs:
-        haveheads = set(h.branch() for h in heads)
+        haveheads = {h.branch() for h in heads}
        if branches - haveheads:
            headless = b', '.join(b for b in branches - haveheads)
            msg = _(b'no open branch heads found on branches %s')
--- a/mercurial/debugcommands.py Fri Mar 06 10:52:44 2020 +0100
+++ b/mercurial/debugcommands.py Fri Mar 06 13:27:41 2020 -0500
@@ -584,7 +584,7 @@
    dots = opts.get('dots')
    if file_:
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
-        revs = set((int(r) for r in revs))
+        revs = {int(r) for r in revs}

        def events():
            for r in rlog:
@@ -1134,7 +1134,7 @@
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
-    stagenames = set(n for n, f in stages)
+    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
@@ -2598,7 +2598,7 @@
        dirstatefiles = set(dirstate)
        manifestonly = manifestfiles - dirstatefiles
        dsonly = dirstatefiles - manifestfiles
-        dsnotadded = set(f for f in dsonly if dirstate[f] != b'a')
+        dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
        changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
@@ -3165,7 +3165,7 @@
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
-    stagenames = set(n for n, f in stages)
+    stagenames = {n for n, f in stages}

    showalways = set()
    showchanged = set()
--- a/mercurial/discovery.py Fri Mar 06 10:52:44 2020 +0100
+++ b/mercurial/discovery.py Fri Mar 06 13:27:41 2020 -0500
@@ -188,7 +188,7 @@
        # ancestors of missing
        og._computecommonmissing()
        cl = repo.changelog
-        missingrevs = set(cl.rev(n) for n in og._missing)
+        missingrevs = {cl.rev(n) for n in og._missing}
        og._common = set(cl.ancestors(missingrevs)) - missingrevs
        commonheads = set(og.commonheads)
        og.missingheads = [h for h in og.missingheads if h not in commonheads]
@@ -264,8 +264,8 @@
    # If there are no obsstore, no post processing are needed.
    if repo.obsstore:
        torev = repo.changelog.rev
-        futureheads = set(torev(h) for h in outgoing.missingheads)
-        futureheads |= set(torev(h) for h in outgoing.commonheads)
+        futureheads = {torev(h) for h in outgoing.missingheads}
+        futureheads |= {torev(h) for h in outgoing.commonheads}
        allfuturecommon = repo.changelog.ancestors(futureheads, inclusive=True)
        for branch, heads in sorted(pycompat.iteritems(headssum)):
            remoteheads, newheads, unsyncedheads, placeholder = heads
--- a/mercurial/dispatch.py Fri Mar 06 10:52:44 2020 +0100
+++ b/mercurial/dispatch.py Fri Mar 06 13:27:41 2020 -0500
@@ -514,7 +514,7 @@
    '''
    # util.interpolate can't deal with "$@" (with quotes) because it's only
    # built to match prefix + patterns.
-    replacemap = dict((b'$%d' % (i + 1), arg) for i, arg in enumerate(args))
+    replacemap = {b'$%d' % (i + 1): arg for i, arg in enumerate(args)}
    replacemap[b'$0'] = name
    replacemap[b'$$'] = b'$'
    replacemap[b'$@'] = b' '.join(args)
--- a/mercurial/encoding.py Fri Mar 06 10:52:44 2020 +0100
+++ b/mercurial/encoding.py Fri Mar 06 13:27:41 2020 -0500
@@ -86,10 +86,10 @@
else:
    # preferred encoding isn't known yet; use utf-8 to avoid unicode error
    # and recreate it once encoding is settled
-    environ = dict(
-        (k.encode('utf-8'), v.encode('utf-8'))
+    environ = {
+        k.encode('utf-8'): v.encode('utf-8')
        for k, v in os.environ.items()  # re-exports
-    )
+    }

_encodingrewrites = {
    b'646': b'ascii',
@@ -285,10 +285,10 @@
if not _nativeenviron:
    # now encoding and helper functions are available, recreate the environ
    # dict to be exported to other modules
-    environ = dict(
-        (tolocal(k.encode('utf-8')), tolocal(v.encode('utf-8')))
+    environ = {
+        tolocal(k.encode('utf-8')): tolocal(v.encode('utf-8'))
        for k, v in os.environ.items()  # re-exports
-    )
+    }

if pycompat.ispy3:
    # os.getcwd() on Python 3 returns string, but it has os.getcwdb() which
--- a/mercurial/exchange.py Fri Mar 06 10:52:44 2020 +0100
+++ b/mercurial/exchange.py Fri Mar 06 13:27:41 2020 -0500
@@ -1679,12 +1679,12 @@
    def headsofdiff(h1, h2):
        """Returns heads(h1 % h2)"""
        res = unfi.set(b'heads(%ln %% %ln)', h1, h2)
-        return set(ctx.node() for ctx in res)
+        return {ctx.node() for ctx in res}

    def headsofunion(h1, h2):
        """Returns heads((h1 + h2) - null)"""
        res = unfi.set(b'heads((%ln + %ln - null))', h1, h2)
-        return set(ctx.node() for ctx in res)
+        return {ctx.node() for ctx in res}

    while True:
        old_heads = unficl.heads()
--- a/mercurial/extensions.py Fri Mar 06 10:52:44 2020 +0100
+++ b/mercurial/extensions.py Fri Mar 06 13:27:41 2020 -0500
@@ -787,11 +787,11 @@
    try:
        from hgext import __index__  # pytype: disable=import-error

-        return dict(
-            (name, gettext(desc))
+        return {
+            name: gettext(desc)
            for name, desc in pycompat.iteritems(__index__.docs)
            if name not in _order
-        )
+        }
    except (ImportError, AttributeError):
        pass
--- a/mercurial/fancyopts.py Fri Mar 06 10:52:44 2020 +0100
+++ b/mercurial/fancyopts.py Fri Mar 06 13:27:41 2020 -0500
@@ -314,7 +314,7 @@
    argmap = {}
    defmap = {}
    negations = {}
-    alllong = set(o[1] for o in options)
+    alllong = {o[1] for o in options}

    for option in options:
        if len(option) == 5:
--- a/mercurial/graphmod.py Fri Mar 06 10:52:44 2020 +0100
+++ b/mercurial/graphmod.py Fri Mar 06 13:27:41 2020 -0500
@@ -58,7 +58,7 @@
        # partition into parents in the rev set and missing parents, then
        # augment the lists with markers, to inform graph drawing code about
        # what kind of edge to draw between nodes.
-        pset = set(p.rev() for p in ctx.parents() if p.rev() in revs)
+        pset = {p.rev() for p in ctx.parents() if p.rev() in revs}
        mpars = [
            p.rev()
            for p in ctx.parents()
@@ -95,9 +95,9 @@
    include = set(nodes)
    for node in nodes:
        ctx = repo[node]
-        parents = set(
+        parents = {
            (PARENT, p.rev()) for p in ctx.parents() if p.node() in include
-        )
+        }
        yield (ctx.rev(), CHANGESET, ctx, sorted(parents))
--- a/mercurial/hbisect.py Fri Mar 06 10:52:44 2020 +0100
+++ b/mercurial/hbisect.py Fri Mar 06 13:27:41 2020 -0500
@@ -137,7 +137,7 @@
        side = state[b'bad']
    else:
        side = state[b'good']
-    num = len(set(i.node() for i in parents) & set(side))
+    num = len({i.node() for i in parents} & set(side))
    if num == 1:
        return parents[0].ancestor(parents[1])
    return None
--- a/mercurial/localrepo.py Fri Mar 06 10:52:44 2020 +0100
+++ b/mercurial/localrepo.py Fri Mar 06 13:27:41 2020 -0500
@@ -1809,7 +1809,7 @@
        # map tag name to (node, hist)
        alltags = tagsmod.findglobaltags(self.ui, self)
        # map tag name to tag type
-        tagtypes = dict((tag, b'global') for tag in alltags)
+        tagtypes = {tag: b'global' for tag in alltags}

        tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
@@ -1822,12 +1822,10 @@
            if node != nullid:
                tags[encoding.tolocal(name)] = node
        tags[b'tip'] = self.changelog.tip()
-        tagtypes = dict(
-            [
-                (encoding.tolocal(name), value)
-                for (name, value) in pycompat.iteritems(tagtypes)
-            ]
-        )
+        tagtypes = {
+            encoding.tolocal(name): value
+            for (name, value) in pycompat.iteritems(tagtypes)
+        }
        return (tags, tagtypes)

    def tagtype(self, tagname):
--- a/mercurial/match.py Fri Mar 06 10:52:44 2020 +0100
+++ b/mercurial/match.py Fri Mar 06 13:27:41 2020 -0500
@@ -772,7 +772,7 @@
        candidates = self._fileset | self._dirs - {b''}
        if dir != b'':
            d = dir + b'/'
-            candidates = set(c[len(d) :] for c in candidates if c.startswith(d))
+            candidates = {c[len(d) :] for c in candidates if c.startswith(d)}
        # self._dirs includes all of the directories, recursively, so if
        # we're attempting to match foo/bar/baz.txt, it'll have '', 'foo',
        # 'foo/bar' in it. Thus we can safely ignore a candidate that has a
--- a/mercurial/mdiff.py Fri Mar 06 10:52:44 2020 +0100
+++ b/mercurial/mdiff.py Fri Mar 06 13:27:41 2020 -0500
@@ -91,7 +91,7 @@
    )

    def copy(self, **kwargs):
-        opts = dict((k, getattr(self, k)) for k in self.defaults)
+        opts = {k: getattr(self, k) for k in self.defaults}
        opts = pycompat.strkwargs(opts)
        opts.update(kwargs)
        return diffopts(**opts)
--- a/mercurial/merge.py Fri Mar 06 10:52:44 2020 +0100
+++ b/mercurial/merge.py Fri Mar 06 13:27:41 2020 -0500
@@ -1796,8 +1796,8 @@
def emptyactions():
    """create an actions dict, to be populated and passed to applyupdates()"""
-    return dict(
-        (m, [])
+    return {
+        m: []
        for m in (
            ACTION_ADD,
            ACTION_ADD_MODIFIED,
@@ -1814,7 +1814,7 @@
            ACTION_PATH_CONFLICT,
            ACTION_PATH_CONFLICT_RESOLVE,
        )
-    )
+    }


def applyupdates(
@@ -2070,7 +2070,7 @@
    extraactions = ms.actions()
    if extraactions:
-        mfiles = set(a[0] for a in actions[ACTION_MERGE])
+        mfiles = {a[0] for a in actions[ACTION_MERGE]}
        for k, acts in pycompat.iteritems(extraactions):
            actions[k].extend(acts)
            if k == ACTION_GET and wantfiledata:
--- a/mercurial/obsolete.py Fri Mar 06 10:52:44 2020 +0100
+++ b/mercurial/obsolete.py Fri Mar 06 13:27:41 2020 -0500
@@ -939,7 +939,7 @@
    getnode = repo.changelog.node
    notpublic = _mutablerevs(repo)
    isobs = repo.obsstore.successors.__contains__
-    obs = set(r for r in notpublic if isobs(getnode(r)))
+    obs = {r for r in notpublic if isobs(getnode(r))}
    return obs

@@ -965,7 +965,7 @@
def _computesuspendedset(repo):
    """the set of obsolete parents with non obsolete descendants"""
    suspended = repo.changelog.ancestors(getrevs(repo, b'orphan'))
-    return set(r for r in getrevs(repo, b'obsolete') if r in suspended)
+    return {r for r in getrevs(repo, b'obsolete') if r in suspended}


@cachefor(b'extinct')
--- a/mercurial/obsutil.py Fri Mar 06 10:52:44 2020 +0100
+++ b/mercurial/obsutil.py Fri Mar 06 13:27:41 2020 -0500
@@ -194,7 +194,7 @@
def _filterprunes(markers):
    """return a set with no prune markers"""
-    return set(m for m in markers if m[1])
+    return {m for m in markers if m[1]}


def exclusivemarkers(repo, nodes):
@@ -338,12 +338,12 @@
    # compute the whole set of successors or descendants
    while len(foreground) != plen:
        plen = len(foreground)
-        succs = set(c.node() for c in foreground)
+        succs = {c.node() for c in foreground}
        mutable = [c.node() for c in foreground if c.mutable()]
        succs.update(allsuccessors(repo.obsstore, mutable))
        known = (n for n in succs if has_node(n))
        foreground = set(repo.set(b'%ln::', known))
-    return set(c.node() for c in foreground)
+    return {c.node() for c in foreground}


# effectflag field
@@ -855,11 +855,11 @@
    """ Returns a sorted list of markers users without duplicates
    """
    markersmeta = [dict(m[3]) for m in markers]
-    users = set(
+    users = {
        encoding.tolocal(meta[b'user'])
        for meta in markersmeta
        if meta.get(b'user')
-    )
+    }

    return sorted(users)
@@ -868,9 +868,9 @@
    """ Returns a sorted list of markers operations without duplicates
    """
    markersmeta = [dict(m[3]) for m in markers]
-    operations = set(
+    operations = {
        meta.get(b'operation') for meta in markersmeta if meta.get(b'operation')
-    )
+    }

    return sorted(operations)
--- a/mercurial/patch.py Fri Mar 06 10:52:44 2020 +0100
+++ b/mercurial/patch.py Fri Mar 06 13:27:41 2020 -0500
@@ -2888,7 +2888,7 @@
    or 'rename' (the latter two only if opts.git is set).'''
    gone = set()

-    copyto = dict([(v, k) for k, v in copy.items()])
+    copyto = {v: k for k, v in copy.items()}

    addedset, removedset = set(added), set(removed)
--- a/mercurial/phases.py Fri Mar 06 10:52:44 2020 +0100
+++ b/mercurial/phases.py Fri Mar 06 13:27:41 2020 -0500
@@ -445,10 +445,10 @@
                    phasetracking, r, self.phase(repo, r), targetphase
                )

-            roots = set(
+            roots = {
                ctx.node()
                for ctx in repo.set(b'roots((%ln::) - %ld)', olds, affected)
-            )
+            }
            if olds != roots:
                self._updateroots(phase, roots, tr)
                # some roots may need to be declared for lower phases
@@ -518,9 +518,7 @@
        ]
        updatedroots = repo.set(b'roots(%ln::)', aboveroots)

-        finalroots = set(
-            n for n in currentroots if repo[n].rev() < minnewroot
-        )
+        finalroots = {n for n in currentroots if repo[n].rev() < minnewroot}
        finalroots.update(ctx.node() for ctx in updatedroots)
        if finalroots != oldroots:
            self._updateroots(targetphase, finalroots, tr)
@@ -760,7 +758,7 @@
    if not heads or heads == [nullid]:
        return []
    # The logic operated on revisions, convert arguments early for convenience
-    new_heads = set(rev(n) for n in heads if n != nullid)
+    new_heads = {rev(n) for n in heads if n != nullid}
    roots = [rev(n) for n in roots]
    # compute the area we need to remove
    affected_zone = repo.revs(b"(%ld::%ld)", roots, new_heads)
--- a/mercurial/pycompat.py Fri Mar 06 10:52:44 2020 +0100
+++ b/mercurial/pycompat.py Fri Mar 06 13:27:41 2020 -0500
@@ -334,7 +334,7 @@
        they can be passed as keyword arguments as dictonaries with bytes keys
        can't be passed as keyword arguments to functions on Python 3.
        """
-        dic = dict((k.decode('latin-1'), v) for k, v in dic.items())
+        dic = {k.decode('latin-1'): v for k, v in dic.items()}
        return dic

    def byteskwargs(dic):
@@ -342,7 +342,7 @@
        Converts keys of python dictonaries to bytes as they were converted to
        str to pass that dictonary as a keyword argument on Python 3.
        """
-        dic = dict((k.encode('latin-1'), v) for k, v in dic.items())
+        dic = {k.encode('latin-1'): v for k, v in dic.items()}
        return dic

    # TODO: handle shlex.shlex().
--- a/mercurial/repair.py Fri Mar 06 10:52:44 2020 +0100
+++ b/mercurial/repair.py Fri Mar 06 13:27:41 2020 -0500
@@ -351,7 +351,7 @@
def safestriproots(ui, repo, nodes):
    """return list of roots of nodes where descendants are covered by nodes"""
    torev = repo.unfiltered().changelog.rev
-    revs = set(torev(n) for n in nodes)
+    revs = {torev(n) for n in nodes}
    # tostrip = wanted - unsafe = wanted - ancestors(orphaned)
    # orphaned = affected - wanted
    # affected = descendants(roots(wanted))
--- a/mercurial/revlog.py Fri Mar 06 10:52:44 2020 +0100
+++ b/mercurial/revlog.py Fri Mar 06 13:27:41 2020 -0500
@@ -1286,7 +1286,7 @@
        else:
            start = self.rev(start)

-        stoprevs = set(self.rev(n) for n in stop or [])
+        stoprevs = {self.rev(n) for n in stop or []}

        revs = dagop.headrevssubset(
            self.revs, self.parentrevs, startrev=start, stoprevs=stoprevs
--- a/mercurial/revset.py Fri Mar 06 10:52:44 2020 +0100
+++ b/mercurial/revset.py Fri Mar 06 13:27:41 2020 -0500
@@ -1875,7 +1875,7 @@
    The set of all parents for all changesets in set, or the working directory.
    """
    if x is None:
-        ps = set(p.rev() for p in repo[x].parents())
+        ps = {p.rev() for p in repo[x].parents()}
    else:
        ps = set()
        cl = repo.changelog
@@ -2437,7 +2437,7 @@
    cl = repo.unfiltered().changelog
    torev = cl.index.get_rev
    tonode = cl.node
-    result = set(torev(n) for n in f(tonode(r) for r in s))
+    result = {torev(n) for n in f(tonode(r) for r in s)}
    result.discard(None)
    return smartset.baseset(result - repo.changelog.filteredrevs)
--- a/mercurial/scmutil.py Fri Mar 06 10:52:44 2020 +0100
+++ b/mercurial/scmutil.py Fri Mar 06 13:27:41 2020 -0500
@@ -1457,10 +1457,10 @@
            # Merge old parent and old working dir copies
            oldcopies = copiesmod.pathcopies(newctx, oldctx, match)
            oldcopies.update(copies)
-            copies = dict(
-                (dst, oldcopies.get(src, src))
+            copies = {
+                dst: oldcopies.get(src, src)
                for dst, src in pycompat.iteritems(oldcopies)
-            )
+            }
            # Adjust the dirstate copies
            for dst, src in pycompat.iteritems(copies):
                if src not in newctx or dst in newctx or ds[dst] != b'a':
--- a/mercurial/store.py Fri Mar 06 10:52:44 2020 +0100
+++ b/mercurial/store.py Fri Mar 06 13:27:41 2020 -0500
@@ -137,7 +137,7 @@
    asciistr = list(map(xchr, range(127)))
    capitals = list(range(ord(b"A"), ord(b"Z") + 1))

-    cmap = dict((x, x) for x in asciistr)
+    cmap = {x: x for x in asciistr}
    for x in _reserved():
        cmap[xchr(x)] = b"~%02x" % x
    for x in capitals + [ord(e)]:
@@ -200,7 +200,7 @@
    'the~07quick~adshot'
    '''
    xchr = pycompat.bytechr
-    cmap = dict([(xchr(x), xchr(x)) for x in pycompat.xrange(127)])
+    cmap = {xchr(x): xchr(x) for x in pycompat.xrange(127)}
    for x in _reserved():
        cmap[xchr(x)] = b"~%02x" % x
    for x in range(ord(b"A"), ord(b"Z") + 1):
--- a/mercurial/upgrade.py Fri Mar 06 10:52:44 2020 +0100
+++ b/mercurial/upgrade.py Fri Mar 06 13:27:41 2020 -0500
@@ -1129,7 +1129,7 @@
    """Upgrade a repository in place."""
    if optimize is None:
        optimize = []
-    optimize = set(legacy_opts_map.get(o, o) for o in optimize)
+    optimize = {legacy_opts_map.get(o, o) for o in optimize}
    repo = repo.unfiltered()

    revlogs = set(UPGRADE_ALL_REVLOGS)
--- a/mercurial/url.py Fri Mar 06 10:52:44 2020 +0100
+++ b/mercurial/url.py Fri Mar 06 13:27:41 2020 -0500
@@ -224,13 +224,11 @@

def _generic_proxytunnel(self):
-    proxyheaders = dict(
-        [
-            (x, self.headers[x])
-            for x in self.headers
-            if x.lower().startswith('proxy-')
-        ]
-    )
+    proxyheaders = {
+        x: self.headers[x]
+        for x in self.headers
+        if x.lower().startswith('proxy-')
+    }
    self.send(b'CONNECT %s HTTP/1.0\r\n' % self.realhostport)
    for header in pycompat.iteritems(proxyheaders):
        self.send(b'%s: %s\r\n' % header)
--- a/mercurial/util.py Fri Mar 06 10:52:44 2020 +0100
+++ b/mercurial/util.py Fri Mar 06 13:27:41 2020 -0500
@@ -2213,7 +2213,7 @@
    '''

    def _makefspathcacheentry(dir):
-        return dict((normcase(n), n) for n in os.listdir(dir))
+        return {normcase(n): n for n in os.listdir(dir)}

    seps = pycompat.ossep
    if pycompat.osaltsep:
--- a/mercurial/utils/storageutil.py Fri Mar 06 10:52:44 2020 +0100
+++ b/mercurial/utils/storageutil.py Fri Mar 06 13:27:41 2020 -0500
@@ -364,7 +364,7 @@
    if nodesorder == b'nodes':
        revs = [frev(n) for n in nodes]
    elif nodesorder == b'linear':
-        revs = set(frev(n) for n in nodes)
+        revs = {frev(n) for n in nodes}
        revs = dagop.linearize(revs, store.parentrevs)
    else:  # storage and default
        revs = sorted(frev(n) for n in nodes)
--- a/mercurial/windows.py Fri Mar 06 10:52:44 2020 +0100
+++ b/mercurial/windows.py Fri Mar 06 13:27:41 2020 -0500
@@ -535,13 +535,11 @@
        cache = dircache.get(dir, None)
        if cache is None:
            try:
-                dmap = dict(
-                    [
-                        (normcase(n), s)
-                        for n, k, s in listdir(dir, True)
-                        if getkind(s.st_mode) in _wantedkinds
-                    ]
-                )
+                dmap = {
+                    normcase(n): s
+                    for n, k, s in listdir(dir, True)
+                    if getkind(s.st_mode) in _wantedkinds
+                }
            except OSError as err:
                # Python >= 2.5 returns ENOENT and adds winerror field
                # EINVAL is raised if dir is not a directory.
--- a/mercurial/wireprototypes.py Fri Mar 06 10:52:44 2020 +0100
+++ b/mercurial/wireprototypes.py Fri Mar 06 13:27:41 2020 -0500
@@ -383,8 +383,8 @@
    # reason for it (like server operators wanting to achieve specific
    # performance characteristics). So fail fast if the config references
    # unusable compression engines.
-    validnames = set(e.name() for e in compengines)
-    invalidnames = set(e for e in configengines if e not in validnames)
+    validnames = {e.name() for e in compengines}
+    invalidnames = {e for e in configengines if e not in validnames}
    if invalidnames:
        raise error.Abort(
            _(b'invalid compression engine defined in %s: %s')
--- a/setup.py Fri Mar 06 10:52:44 2020 +0100
+++ b/setup.py Fri Mar 06 13:27:41 2020 -0500
@@ -935,11 +935,11 @@
            normalizecrlf('doc/%s.html' % root)

        # This logic is duplicated in doc/Makefile.
-        sources = set(
+        sources = {
            f
            for f in os.listdir('mercurial/helptext')
            if re.search(r'[0-9]\.txt$', f)
-        )
+        }

        # common.txt is a one-off.
        gentxt('common')
@@ -979,7 +979,7 @@
        # Screen out egg related commands to prevent egg generation. But allow
        # mercurial.egg-info generation, since that is part of modern
        # packaging.
-        excl = set(['bdist_egg'])
+        excl = {'bdist_egg'}
        return filter(lambda x: x not in excl, install.get_sub_commands(self))
--- a/tests/run-tests.py Fri Mar 06 10:52:44 2020 +0100
+++ b/tests/run-tests.py Fri Mar 06 13:27:41 2020 -0500
@@ -1553,7 +1553,7 @@
    NEEDESCAPE = re.compile(br'[\x00-\x08\x0b-\x1f\x7f-\xff]').search

    ESCAPESUB = re.compile(br'[\x00-\x08\x0b-\x1f\\\x7f-\xff]').sub
-    ESCAPEMAP = dict((bchr(i), br'\x%02x' % i) for i in range(256))
+    ESCAPEMAP = {bchr(i): br'\x%02x' % i for i in range(256)}
    ESCAPEMAP.update({b'\\': b'\\\\', b'\r': br'\r'})

    def __init__(self, path, *args, **kwds):
@@ -2524,7 +2524,7 @@
def savetimes(outputdir, result):
    saved = dict(loadtimes(outputdir))
    maxruns = 5
-    skipped = set([str(t[0]) for t in result.skipped])
+    skipped = {str(t[0]) for t in result.skipped}
    for tdata in result.times:
        test, real = tdata[0], tdata[3]
        if test not in skipped:
@@ -2737,7 +2737,7 @@
    @staticmethod
    def _writexunit(result, outf):
        # See http://llg.cubic.org/docs/junit/ for a reference.
-        timesd = dict((t[0], t[3]) for t in result.times)
+        timesd = {t[0]: t[3] for t in result.times}
        doc = minidom.Document()
        s = doc.createElement('testsuite')
        s.setAttribute('errors', "0")  # TODO
@@ -3343,7 +3343,7 @@
        tmpdir = os.path.join(self._hgtmp, b'child%d' % count)

        # extra keyword parameters. 'case' is used by .t tests
-        kwds = dict((k, testdesc[k]) for k in ['case'] if k in testdesc)
+        kwds = {k: testdesc[k] for k in ['case'] if k in testdesc}

        t = testcls(
            refpath,
--- a/tests/simplestorerepo.py Fri Mar 06 10:52:44 2020 +0100
+++ b/tests/simplestorerepo.py Fri Mar 06 13:27:41 2020 -0500
@@ -588,7 +588,7 @@
            start = nullid
        if stop is None:
            stop = []
-        stoprevs = set([self.rev(n) for n in stop])
+        stoprevs = {self.rev(n) for n in stop}
        startrev = self.rev(start)
        reachable = {startrev}
        heads = {startrev}
--- a/tests/test-fastannotate-revmap.py Fri Mar 06 10:52:44 2020 +0100
+++ b/tests/test-fastannotate-revmap.py Fri Mar 06 13:27:41 2020 -0500
@@ -165,7 +165,7 @@
    rm2.flush()

    # two files should be the same
-    ensure(len(set(util.readfile(p) for p in [path, path2])) == 1)
+    ensure(len({util.readfile(p) for p in [path, path2]}) == 1)

    os.unlink(path)
    os.unlink(path2)
--- a/tests/test-hgweb-auth.py Fri Mar 06 10:52:44 2020 +0100
+++ b/tests/test-hgweb-auth.py Fri Mar 06 13:27:41 2020 -0500
@@ -52,7 +52,7 @@
        for name in (b'.username', b'.password'):
            if (p + name) not in auth:
                auth[p + name] = p
-    auth = dict((k, v) for k, v in auth.items() if v is not None)
+    auth = {k: v for k, v in auth.items() if v is not None}

    ui = writeauth(auth)
--- a/tests/test-revlog-raw.py Fri Mar 06 10:52:44 2020 +0100
+++ b/tests/test-revlog-raw.py Fri Mar 06 13:27:41 2020 -0500
@@ -229,7 +229,7 @@
    # Gray Code. See https://en.wikipedia.org/wiki/Gray_code
    gray = lambda x: x ^ (x >> 1)
-    reversegray = dict((gray(i), i) for i in range(m))
+    reversegray = {gray(i): i for i in range(m)}

    # Generate (n * 2) bit gray code, yield lower n bits as X, and look for
    # the next unused gray code where higher n bits equal to X.
--- a/tests/wireprotosimplecache.py Fri Mar 06 10:52:44 2020 +0100
+++ b/tests/wireprotosimplecache.py Fri Mar 06 13:27:41 2020 -0500
@@ -116,7 +116,7 @@
            redirectable = False
        else:
            clienttargets = set(self.redirecttargets)
-            ourtargets = set(t[b'name'] for t in loadredirecttargets(self.ui))
+            ourtargets = {t[b'name'] for t in loadredirecttargets(self.ui)}

            # We only ever redirect to a single target (for now).  So we don't
            # need to store which target matched.