# HG changeset patch # User Pierre-Yves David # Date 1590576973 -7200 # Node ID 25512a65cefdf84a4960561c145ccbd14d253a02 # Parent edd08aa193fbb2a24c9947dba50f9c9fedb5a172 metadata: filter the `removed` set to only contain relevant data The `files` entry can be bogus and contains too many entries. This can combine badly with the computation of `removed`, inflating the set size. This can lead the changeset-centric rename computation to process much more data than needed, slowing it down (and increasing space taken by data storage). In practice, newer commits already use that reduced set; this applies the "fix" to older changesets. Differential Revision: https://phab.mercurial-scm.org/D8589 diff -r edd08aa193fb -r 25512a65cefd mercurial/metadata.py --- a/mercurial/metadata.py Wed May 27 12:45:39 2020 +0200 +++ b/mercurial/metadata.py Wed May 27 12:56:13 2020 +0200 @@ -94,6 +94,9 @@ for f in ctx.files(): if f not in ctx: removed.append(f) + if removed: + rf = get_removal_filter(ctx) + removed = [r for r in removed if not rf(r)] return removed