view hgext/fastexport.py @ 44363:f7459da77f23
nodemap: introduce an option to use mmap to read the nodemap mapping
The performance and memory benefits are much greater if we do not have to copy all
the data into memory for each use. So we introduce an option (on by
default) to read the data using mmap.
This changeset is the last one defining the API that indexes must implement to
support nodemap data (they have to be able to use the mmapping).
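For readers unfamiliar with the technique, here is a minimal sketch of what mmap-backed
reading looks like in Python: the file is mapped into the process address space and only
the bytes actually touched are paged in, instead of the whole file being copied up front.
The file path and record layout below are hypothetical; this is not the persistent
nodemap's actual on-disk format.

```python
import mmap

def read_record(path, offset, size):
    """Return `size` bytes starting at `offset` without loading the whole file."""
    with open(path, "rb") as fh:
        # Map the entire file read-only (length 0 means "whole file").
        m = mmap.mmap(fh.fileno(), 0, access=mmap.ACCESS_READ)
        try:
            # Slicing the map copies only the requested bytes; untouched parts
            # of the file are never read into the process.
            return m[offset : offset + size]
        finally:
            m.close()
```

As described above, the new option simply controls whether the nodemap data is consumed
through such a mapping or read fully into memory.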
Below are some benchmarks comparing the best we currently have in 5.3 with the
final step of this series (using the persistent nodemap implementation in
Rust). The benchmarks run `hg perfindex` with various revsets and the following
variants:
Before:
* do not use the persistent nodemap
* use the CPython implementation of the index for nodemap
* use mmapping of the changelog index
After:
* use the MixedIndex Rust code, with the NodeTree object for nodemap access
(still in review)
* use the persistent nodemap data from disk
* access the persistent nodemap data through mmap
* use mmapping of the changelog index
The persistent nodemap greatly speeds up most operations on very large
repositories. Some of the previously very fast lookups end up a bit slower because
the persistent nodemap has to be set up first. However, the absolute slowdown is very
small and won't matter in the big picture.
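As a rough mental model only (the toy code below uses an invented layout, not
Mercurial's actual nodemap structures): without persisted data, the very first
node-to-rev lookup has to build the whole mapping in memory, while with persisted
data a lookup only pays a small fixed cost to open the mmapped file before
answering the query.

```python
import bisect

# Toy model only: invented data layout, not Mercurial's nodemap format.
# The point is where the work happens, not how the real structure looks.

def lookup_build_then_query(nodes, wanted):
    # Fallback behaviour: materialize the whole node -> rev mapping before
    # the first answer, which is O(number of revisions).
    nodemap = {node: rev for rev, node in enumerate(nodes)}
    return nodemap[wanted]

def lookup_from_persisted(sorted_pairs, wanted):
    # Persistent behaviour: the mapping already exists on disk (modelled here
    # as a pre-sorted list of (node, rev) pairs); answering is O(log n).
    i = bisect.bisect_left(sorted_pairs, (wanted,))
    if i < len(sorted_pairs) and sorted_pairs[i][0] == wanted:
        return sorted_pairs[i][1]
    raise LookupError(wanted)
```

In the tables below, this shows up as large wins for revsets that force a lookup of
an old node (e.g. `0`) and a small fixed overhead for revsets that never needed the
full mapping anyway (e.g. `tip`).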
Here are some numbers (in seconds) for the reference copy of mozilla-try
(abs-change is after - before; speedup is before / after):
Revset              Before     After       abs-change   speedup
-10000:             0.004622   0.005532     0.000910    ×    0.83
-10:                0.000050   0.000132     0.000082    ×    0.37
tip                 0.000052   0.000085     0.000033    ×    0.61
0 + (-10000:)       0.028222   0.005337    -0.022885    ×    5.29
0                   0.023521   0.000084    -0.023437    ×  280.01
(-10000:) + 0       0.235539   0.005308    -0.230231    ×   44.37
(-10:) + :9         0.232883   0.000180    -0.232703    × 1293.79
(-10000:) + (:99)   0.238735   0.005358    -0.233377    ×   44.55
:99 + (-10000:)     0.317942   0.005593    -0.312349    ×   56.84
:9 + (-10:)         0.313372   0.000179    -0.313193    × 1750.68
:9                  0.316450   0.000143    -0.316307    × 2212.93
On smaller repositories, the cost of nodemap-related operations is not as big, so
the win is much more modest. Yet it helps shave a handful of milliseconds here
and there.
Here are some numbers (in seconds) for the reference copy of mercurial:
Revset              Before     After       abs-change   speedup
-10:                0.000065   0.000097     0.000032    ×  0.67
tip                 0.000063   0.000078     0.000015    ×  0.80
0                   0.000561   0.000079    -0.000482    ×  7.10
-10000:             0.004609   0.003648    -0.000961    ×  1.26
0 + (-10000:)       0.005023   0.003715    -0.001307    ×  1.35
(-10:) + :9         0.002187   0.000108    -0.002079    × 20.25
(-10000:) + 0       0.006252   0.003716    -0.002536    ×  1.68
(-10000:) + (:99)   0.006367   0.003707    -0.002660    ×  1.71
:9 + (-10:)         0.003846   0.000110    -0.003736    × 34.96
:9                  0.003854   0.000099    -0.003755    × 38.92
:99 + (-10000:)     0.007644   0.003778    -0.003866    ×  2.02
Differential Revision: https://phab.mercurial-scm.org/D7894
author    Pierre-Yves David <pierre-yves.david@octobus.net>
date      Tue, 11 Feb 2020 11:18:52 +0100
parents   93a05cb223da
children  44aff45b556d
# Copyright 2020 Joerg Sonnenberger <joerg@bec.de>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
"""export repositories as git fast-import stream"""

# The format specification for fast-import streams can be found at
# https://git-scm.com/docs/git-fast-import#_input_format

from __future__ import absolute_import

import re

from mercurial.i18n import _
from mercurial.node import hex, nullrev
from mercurial.utils import stringutil
from mercurial import (
    error,
    pycompat,
    registrar,
    scmutil,
)
from .convert import convcmd

# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b"ships-with-hg-core"

cmdtable = {}
command = registrar.command(cmdtable)

GIT_PERSON_PROHIBITED = re.compile(b'[<>\n"]')
GIT_EMAIL_PROHIBITED = re.compile(b"[<> \n]")


def convert_to_git_user(authormap, user, rev):
    mapped_user = authormap.get(user, user)
    user_person = stringutil.person(mapped_user)
    user_email = stringutil.email(mapped_user)
    if GIT_EMAIL_PROHIBITED.match(user_email) or GIT_PERSON_PROHIBITED.match(
        user_person
    ):
        raise error.Abort(
            _(b"Unable to parse user into person and email for revision " + rev)
        )
    if user_person:
        return b'"' + user_person + b'" <' + user_email + b'>'
    else:
        return b"<" + user_email + b">"


def convert_to_git_date(date):
    timestamp, utcoff = date
    tzsign = b"+" if utcoff < 0 else b"-"
    if utcoff % 60 != 0:
        raise error.Abort(
            _(b"UTC offset in %b is not an integer number of seconds") % (date,)
        )
    utcoff = abs(utcoff) // 60
    tzh = utcoff // 60
    tzmin = utcoff % 60
    return b"%d " % int(timestamp) + tzsign + b"%02d%02d" % (tzh, tzmin)


def convert_to_git_ref(branch):
    # XXX filter/map depending on git restrictions
    return b"refs/heads/" + branch


def write_data(buf, data, skip_newline):
    buf.append(b"data %d\n" % len(data))
    buf.append(data)
    if not skip_newline or data[-1:] != b"\n":
        buf.append(b"\n")


def export_commit(ui, repo, rev, marks, authormap):
    ctx = repo[rev]
    revid = ctx.hex()
    if revid in marks:
        ui.warn(_(b"warning: revision %s already exported, skipped\n") % revid)
        return
    parents = [p for p in ctx.parents() if p.rev() != nullrev]
    for p in parents:
        if p.hex() not in marks:
            ui.warn(
                _(b"warning: parent %s of %s has not been exported, skipped\n")
                % (p, revid)
            )
            return

    # For all files modified by the commit, check if they have already
    # been exported and otherwise dump the blob with the new mark.
    for fname in ctx.files():
        if fname not in ctx:
            continue
        filectx = ctx.filectx(fname)
        filerev = hex(filectx.filenode())
        if filerev not in marks:
            mark = len(marks) + 1
            marks[filerev] = mark
            data = filectx.data()
            buf = [b"blob\n", b"mark :%d\n" % mark]
            write_data(buf, data, False)
            ui.write(*buf, keepprogressbar=True)
            del buf

    # Assign a mark for the current revision for references by
    # latter merge commits.
    mark = len(marks) + 1
    marks[revid] = mark

    ref = convert_to_git_ref(ctx.branch())
    buf = [
        b"commit %s\n" % ref,
        b"mark :%d\n" % mark,
        b"committer %s %s\n"
        % (
            convert_to_git_user(authormap, ctx.user(), revid),
            convert_to_git_date(ctx.date()),
        ),
    ]
    write_data(buf, ctx.description(), True)
    if parents:
        buf.append(b"from :%d\n" % marks[parents[0].hex()])
    if len(parents) == 2:
        buf.append(b"merge :%d\n" % marks[parents[1].hex()])
        p0ctx = repo[parents[0]]
        files = ctx.manifest().diff(p0ctx.manifest())
    else:
        files = ctx.files()
    filebuf = []
    for fname in files:
        if fname not in ctx:
            filebuf.append((fname, b"D %s\n" % fname))
        else:
            filectx = ctx.filectx(fname)
            filerev = filectx.filenode()
            fileperm = b"755" if filectx.isexec() else b"644"
            changed = b"M %s :%d %s\n" % (fileperm, marks[hex(filerev)], fname)
            filebuf.append((fname, changed))
    filebuf.sort()
    buf.extend(changed for (fname, changed) in filebuf)
    del filebuf
    buf.append(b"\n")

    ui.write(*buf, keepprogressbar=True)
    del buf


isrev = re.compile(b"^[0-9a-f]{40}$")


@command(
    b"fastexport",
    [
        (b"r", b"rev", [], _(b"revisions to export"), _(b"REV")),
        (b"i", b"import-marks", b"", _(b"old marks file to read"), _(b"FILE")),
        (b"e", b"export-marks", b"", _(b"new marks file to write"), _(b"FILE")),
        (
            b"A",
            b"authormap",
            b"",
            _(b"remap usernames using this file"),
            _(b"FILE"),
        ),
    ],
    _(b"[OPTION]... [REV]..."),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def fastexport(ui, repo, *revs, **opts):
    """export repository as git fast-import stream

    This command lets you dump a repository as a human-readable text stream.
    It can be piped into corresponding import routines like "git fast-import".
    Incremental dumps can be created by using marks files.
    """
    opts = pycompat.byteskwargs(opts)

    revs += tuple(opts.get(b"rev", []))
    if not revs:
        revs = scmutil.revrange(repo, [b":"])
    else:
        revs = scmutil.revrange(repo, revs)
    if not revs:
        raise error.Abort(_(b"no revisions matched"))
    authorfile = opts.get(b"authormap")
    if authorfile:
        authormap = convcmd.readauthormap(ui, authorfile)
    else:
        authormap = {}

    import_marks = opts.get(b"import_marks")
    marks = {}
    if import_marks:
        with open(import_marks, "rb") as import_marks_file:
            for line in import_marks_file:
                line = line.strip()
                if not isrev.match(line) or line in marks:
                    raise error.Abort(_(b"Corrupted marks file"))
                marks[line] = len(marks) + 1

    revs.sort()
    with ui.makeprogress(
        _(b"exporting"), unit=_(b"revisions"), total=len(revs)
    ) as progress:
        for rev in revs:
            export_commit(ui, repo, rev, marks, authormap)
            progress.increment()

    export_marks = opts.get(b"export_marks")
    if export_marks:
        with open(export_marks, "wb") as export_marks_file:
            output_marks = [None] * len(marks)
            for k, v in marks.items():
                output_marks[v - 1] = k
            for k in output_marks:
                export_marks_file.write(k + b"\n")
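As a quick illustration of two helpers defined above (assuming a Mercurial installation
where the bundled fastexport extension is importable as `hgext.fastexport`; the sample
values are made up):

```python
from hgext.fastexport import convert_to_git_date, write_data

# Mercurial stores dates as (unixtime, offset in seconds west of UTC), so
# 2020-02-11 11:18:52 at UTC+01:00 is represented as (1581416332, -3600).
assert convert_to_git_date((1581416332, -3600)) == b"1581416332 +0100"

# write_data() emits the git fast-import "data <length>" framing used for
# blobs and commit messages.
buf = []
write_data(buf, b"hello\n", False)
assert buf == [b"data 6\n", b"hello\n", b"\n"]
```

In line with the command's docstring, the output stream is meant to be piped into an
importer, e.g. `hg fastexport | git fast-import` inside an empty git repository, with
`--export-marks`/`--import-marks` carrying state between incremental runs.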