view tests/f @ 44118:f81c17ec303c
hgdemandimport: apply lazy module loading to sys.meta_path finders
Python's `sys.meta_path` finders are the primary objects whose job it
is to find a module at import time. When `import` is called, Python
iterates the objects in this list and calls each one's `find_spec(...)`
method to obtain a `ModuleSpec` (or `None` if that finder couldn't find
the module). If no meta path finder can find a module, the import fails.
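For illustration, here is a minimal sketch of that lookup loop
(simplified; the real logic lives in `importlib._bootstrap` and also
handles caching and import locking):

```
import sys

def find_module_spec(fullname, path=None):
    # Ask each meta path finder in order; the first non-None
    # ModuleSpec wins.
    for finder in sys.meta_path:
        spec = finder.find_spec(fullname, path)
        if spec is not None:
            return spec
    # No meta path finder could find the module: the import fails.
    raise ModuleNotFoundError('No module named %r' % fullname)
```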
One of the default meta path finders is `PathFinder`. It imports
modules from the filesystem and is probably the most important
importer. This finder looks at `sys.path` and `sys.path_hooks` to do
its job.
The `ModuleSpec` returned by `MetaPathFinder.find_spec()` has a
`loader` attribute, which defines the concrete module loader to use.
`sys.path_hooks` is a hook point for teaching `PathFinder` to
instantiate custom loader types.
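Roughly, `PathFinder` turns each `sys.path` entry into a path entry
finder by asking the hooks, as in this simplified sketch (the real
version also caches results in `sys.path_importer_cache`):

```
import sys

def finder_for_path_entry(path_entry):
    # The first hook that does not raise ImportError claims the entry.
    for hook in sys.path_hooks:
        try:
            return hook(path_entry)
        except ImportError:
            continue
    return None
```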
Previously, we injected a custom hook into `sys.path_hooks` that told
`PathFinder` to wrap the default loaders with a loader that creates a
lazy module object.
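As a sketch of that previous approach (this mirrors the idea using the
stock `importlib.util.LazyLoader`, not Mercurial's exact code):

```
import importlib.machinery
import importlib.util
import sys

# Wrap the stock source loader so a module's code only executes on
# first attribute access.
_lazy_source = importlib.util.LazyLoader.factory(
    importlib.machinery.SourceFileLoader
)

# A path hook producing FileFinders that hand out the lazy loader for
# .py files.
_lazy_path_hook = importlib.machinery.FileFinder.path_hook(
    (_lazy_source, importlib.machinery.SOURCE_SUFFIXES)
)

# Installed ahead of the default hooks so PathFinder prefers it:
# sys.path_hooks.insert(0, _lazy_path_hook)
```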
This approach worked. But its main limitation was that it only applied
to the `PathFinder` meta path importer. Other meta path importers are
registered as well. And when PyOxidizer loads modules from memory,
`PathFinder` doesn't come into play at all, since PyOxidizer's own meta
path importer handles all imports.
This commit changes our approach to lazy module loading by proxying
all meta path importers. Specifically, we overload the `find_spec()`
method to swap in a wrapped loader on the `ModuleSpec` before it
is returned. The end result is that all meta path importers should
be lazy.
As much as I would have loved to use `.__class__` manipulation to
achieve this, some meta path importers are implemented in C/Rust
in such a way that they cannot be monkeypatched. This is why we
use `__getattribute__` to define a proxy.
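The shape of that proxy looks something like this (a sketch using
`importlib.util.LazyLoader` to stand in for the wrapped loader; the
names are illustrative, not the exact hgdemandimport code):

```
import importlib.util
import sys

class LazyFinderProxy(object):
    """Proxies a meta path finder, intercepting only find_spec()."""

    __slots__ = ('_finder',)

    def __init__(self, finder):
        object.__setattr__(self, '_finder', finder)

    def __getattribute__(self, name):
        if name in ('_finder', 'find_spec'):
            return object.__getattribute__(self, name)
        # Everything else is served by the real finder, even one
        # implemented in C/Rust that we couldn't monkeypatch directly.
        return getattr(object.__getattribute__(self, '_finder'), name)

    def find_spec(self, fullname, path, target=None):
        finder = object.__getattribute__(self, '_finder')
        spec = finder.find_spec(fullname, path, target)
        # LazyLoader requires a loader that implements exec_module().
        if spec is not None and getattr(spec.loader, 'exec_module', None):
            spec.loader = importlib.util.LazyLoader(spec.loader)
        return spec

# Activated by wrapping every registered finder:
# sys.meta_path = [LazyFinderProxy(f) for f in sys.meta_path]
```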
Also, this change could theoretically open us up to regressions in
meta path importers whose loaders create module objects that can't
be monkeypatched. But I'm not aware of any of these in the wild, so
I think we'll be safe.
According to hyperfine, this change yields a decent startup time win of
5-6ms:
```
Benchmark #1: ~/.pyenv/versions/3.6.10/bin/python ./hg version

before:
Time (mean ± σ):     86.8 ms ±  0.5 ms    [User: 78.0 ms, System: 8.7 ms]
Range (min … max):   86.0 ms … 89.1 ms    50 runs

after:
Time (mean ± σ):     81.1 ms ±  2.7 ms    [User: 74.5 ms, System: 6.5 ms]
Range (min … max):   77.8 ms … 90.5 ms    50 runs

Benchmark #2: ~/.pyenv/versions/3.7.6/bin/python ./hg version

before:
Time (mean ± σ):     78.9 ms ±  0.6 ms    [User: 70.2 ms, System: 8.7 ms]
Range (min … max):   78.1 ms … 81.2 ms    50 runs

after:
Time (mean ± σ):     73.4 ms ±  0.6 ms    [User: 65.3 ms, System: 8.0 ms]
Range (min … max):   72.4 ms … 75.7 ms    50 runs

Benchmark #3: ~/.pyenv/versions/3.8.1/bin/python ./hg version

before:
Time (mean ± σ):     78.1 ms ±  0.6 ms    [User: 70.2 ms, System: 7.9 ms]
Range (min … max):   77.4 ms … 80.9 ms    50 runs

after:
Time (mean ± σ):     72.1 ms ±  0.4 ms    [User: 64.4 ms, System: 7.6 ms]
Range (min … max):   71.4 ms … 74.1 ms    50 runs
```
Differential Revision: https://phab.mercurial-scm.org/D7954
author | Gregory Szorc <gregory.szorc@gmail.com>
---|---
date | Mon, 20 Jan 2020 23:51:25 -0800
parents | 47ef023d0165
children | c102b704edb5
line source
```
#!/usr/bin/env python

"""Utility for inspecting files in various ways.

This tool is like the collection of tools found in a unix environment but are
cross platform and stable and suitable for our needs in the test suite.

This can be used instead of tools like:
[
dd
find
head
hexdump
ls
md5sum
readlink
sha1sum
stat
tail
test
readlink.py
md5sum.py
"""

from __future__ import absolute_import

import binascii
import glob
import hashlib
import optparse
import os
import re
import sys

# Python 3 adapters
ispy3 = sys.version_info[0] >= 3
if ispy3:

    def iterbytes(s):
        for i in range(len(s)):
            yield s[i : i + 1]


else:
    iterbytes = iter


def visit(opts, filenames, outfile):
    """Process filenames in the way specified in opts, writing output to
    outfile."""
    for f in sorted(filenames):
        isstdin = f == '-'
        if not isstdin and not os.path.lexists(f):
            outfile.write(b'%s: file not found\n' % f.encode('utf-8'))
            continue
        quiet = opts.quiet and not opts.recurse or isstdin
        isdir = os.path.isdir(f)
        islink = os.path.islink(f)
        isfile = os.path.isfile(f) and not islink
        dirfiles = None
        content = None
        facts = []
        if isfile:
            if opts.type:
                facts.append(b'file')
            if any((opts.hexdump, opts.dump, opts.md5, opts.sha1, opts.sha256)):
                with open(f, 'rb') as fobj:
                    content = fobj.read()
        elif islink:
            if opts.type:
                facts.append(b'link')
            content = os.readlink(f).encode('utf8')
        elif isstdin:
            content = getattr(sys.stdin, 'buffer', sys.stdin).read()
            if opts.size:
                facts.append(b'size=%d' % len(content))
        elif isdir:
            if opts.recurse or opts.type:
                dirfiles = glob.glob(f + '/*')
                facts.append(b'directory with %d files' % len(dirfiles))
        elif opts.type:
            facts.append(b'type unknown')
        if not isstdin:
            stat = os.lstat(f)
            if opts.size and not isdir:
                facts.append(b'size=%d' % stat.st_size)
            if opts.mode and not islink:
                facts.append(b'mode=%o' % (stat.st_mode & 0o777))
            if opts.links:
                facts.append(b'links=%d' % stat.st_nlink)
            if opts.newer:
                # mtime might be in whole seconds so newer file might be same
                if stat.st_mtime >= os.stat(opts.newer).st_mtime:
                    facts.append(
                        b'newer than %s' % opts.newer.encode('utf8', 'replace')
                    )
                else:
                    facts.append(
                        b'older than %s' % opts.newer.encode('utf8', 'replace')
                    )
        if opts.md5 and content is not None:
            h = hashlib.md5(content)
            facts.append(b'md5=%s' % binascii.hexlify(h.digest())[: opts.bytes])
        if opts.sha1 and content is not None:
            h = hashlib.sha1(content)
            facts.append(
                b'sha1=%s' % binascii.hexlify(h.digest())[: opts.bytes]
            )
        if opts.sha256 and content is not None:
            h = hashlib.sha256(content)
            facts.append(
                b'sha256=%s' % binascii.hexlify(h.digest())[: opts.bytes]
            )
        if isstdin:
            outfile.write(b', '.join(facts) + b'\n')
        elif facts:
            outfile.write(b'%s: %s\n' % (f.encode('utf-8'), b', '.join(facts)))
        elif not quiet:
            outfile.write(b'%s:\n' % f.encode('utf-8'))
        if content is not None:
            chunk = content
            if not islink:
                if opts.lines:
                    if opts.lines >= 0:
                        chunk = b''.join(chunk.splitlines(True)[: opts.lines])
                    else:
                        chunk = b''.join(chunk.splitlines(True)[opts.lines :])
                if opts.bytes:
                    if opts.bytes >= 0:
                        chunk = chunk[: opts.bytes]
                    else:
                        chunk = chunk[opts.bytes :]
            if opts.hexdump:
                for i in range(0, len(chunk), 16):
                    s = chunk[i : i + 16]
                    outfile.write(
                        b'%04x: %-47s |%s|\n'
                        % (
                            i,
                            b' '.join(b'%02x' % ord(c) for c in iterbytes(s)),
                            re.sub(b'[^ -~]', b'.', s),
                        )
                    )
            if opts.dump:
                if not quiet:
                    outfile.write(b'>>>\n')
                outfile.write(chunk)
                if not quiet:
                    if chunk.endswith(b'\n'):
                        outfile.write(b'<<<\n')
                    else:
                        outfile.write(b'\n<<< no trailing newline\n')
        if opts.recurse and dirfiles:
            assert not isstdin
            visit(opts, dirfiles, outfile)


if __name__ == "__main__":
    parser = optparse.OptionParser("%prog [options] [filenames]")
    parser.add_option(
        "-t",
        "--type",
        action="store_true",
        help="show file type (file or directory)",
    )
    parser.add_option(
        "-m", "--mode", action="store_true", help="show file mode"
    )
    parser.add_option(
        "-l", "--links", action="store_true", help="show number of links"
    )
    parser.add_option(
        "-s", "--size", action="store_true", help="show size of file"
    )
    parser.add_option(
        "-n", "--newer", action="store", help="check if file is newer (or same)"
    )
    parser.add_option(
        "-r", "--recurse", action="store_true", help="recurse into directories"
    )
    parser.add_option(
        "-S",
        "--sha1",
        action="store_true",
        help="show sha1 hash of the content",
    )
    parser.add_option(
        "",
        "--sha256",
        action="store_true",
        help="show sha256 hash of the content",
    )
    parser.add_option(
        "-M", "--md5", action="store_true", help="show md5 hash of the content"
    )
    parser.add_option(
        "-D", "--dump", action="store_true", help="dump file content"
    )
    parser.add_option(
        "-H", "--hexdump", action="store_true", help="hexdump file content"
    )
    parser.add_option(
        "-B", "--bytes", type="int", help="number of characters to dump"
    )
    parser.add_option(
        "-L", "--lines", type="int", help="number of lines to dump"
    )
    parser.add_option(
        "-q", "--quiet", action="store_true", help="no default output"
    )
    (opts, filenames) = parser.parse_args(sys.argv[1:])
    if not filenames:
        filenames = ['-']

    visit(opts, filenames, getattr(sys.stdout, 'buffer', sys.stdout))
```