view tests/f @ 45214:6063c1857d0a stable

exchange: backout changeset c26335fa4225

Changeset c26335fa4225 has good intentions but introduces significant behavior regressions for multiple important cases. In short, there are many cases where push would previously have caught instability creation/propagation that are no longer covered. These behaviors have been covered for many years, and even if some related cases are not currently caught, the covered ones should not be regressed. The next four changesets introduce tests for some of these cases. However, we could produce many more test cases, since the area is wide and there are many possible combinations. (And we should cover them when getting back to this issue.)

Since 5.5 is one week away, the most reasonable approach seems to be backing this out while we devise a new way forward that preserves the current behavior, catches more issues, and also improves the situation that c26335fa4225 targets.

In addition to the behavior change, c26335fa4225 also introduced output changes. These output changes do not require a backout per se, but they are part of the same changeset. They also come with a couple of issues that require attention:

1) The bulk of the error message has been shoehorned into a multi-line abort message. This is quite different from what we usually do. The abort message should be a compact and efficient message, with extra details issued as normal error output beforehand (with --verbose/--quiet support).

2) The current output is unbounded, so if many (tens, hundreds, thousands, …) unstable/obsolete changesets are involved in the push, the output can quickly become a scary and unusable wall of text. We need some limitation here (similar to the remote head list that says "A, B, C and # others").
author Pierre-Yves David <pierre-yves.david@octobus.net>
date Thu, 23 Jul 2020 16:57:56 +0200
parents 47ef023d0165
children c102b704edb5

#!/usr/bin/env python

"""
Utility for inspecting files in various ways.

This tool is like the collection of tools found in a unix environment, but it
is cross-platform, stable, and suited to our needs in the test suite.

This can be used instead of tools like:
  [
  dd
  find
  head
  hexdump
  ls
  md5sum
  readlink
  sha1sum
  stat
  tail
  test
  readlink.py
  md5sum.py
"""

from __future__ import absolute_import

import binascii
import glob
import hashlib
import optparse
import os
import re
import sys

# Python 3 adapters
ispy3 = sys.version_info[0] >= 3
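# On Python 3, indexing a bytes object yields integers rather than one-byte
# bytes objects, so iterate by slicing to preserve byte-string semantics.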
if ispy3:

    def iterbytes(s):
        for i in range(len(s)):
            yield s[i : i + 1]


else:
    iterbytes = iter


def visit(opts, filenames, outfile):
    """Process filenames in the way specified in opts, writing output to
    outfile."""
    for f in sorted(filenames):
        isstdin = f == '-'
        if not isstdin and not os.path.lexists(f):
            outfile.write(b'%s: file not found\n' % f.encode('utf-8'))
            continue
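        # Suppress the default per-file banner and dump markers when --quiet
        # is given (unless recursing) and always for stdin; the expression
        # groups as (opts.quiet and not opts.recurse) or isstdin.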
        quiet = opts.quiet and not opts.recurse or isstdin
        isdir = os.path.isdir(f)
        islink = os.path.islink(f)
        isfile = os.path.isfile(f) and not islink
        dirfiles = None
        content = None
        facts = []
        if isfile:
            if opts.type:
                facts.append(b'file')
            if any((opts.hexdump, opts.dump, opts.md5, opts.sha1, opts.sha256)):
                with open(f, 'rb') as fobj:
                    content = fobj.read()
        elif islink:
            if opts.type:
                facts.append(b'link')
            content = os.readlink(f).encode('utf8')
        elif isstdin:
            content = getattr(sys.stdin, 'buffer', sys.stdin).read()
            if opts.size:
                facts.append(b'size=%d' % len(content))
        elif isdir:
            if opts.recurse or opts.type:
                dirfiles = glob.glob(f + '/*')
                facts.append(b'directory with %d files' % len(dirfiles))
        elif opts.type:
            facts.append(b'type unknown')
        if not isstdin:
            stat = os.lstat(f)
            if opts.size and not isdir:
                facts.append(b'size=%d' % stat.st_size)
            if opts.mode and not islink:
                facts.append(b'mode=%o' % (stat.st_mode & 0o777))
            if opts.links:
                facts.append(b'links=%d' % stat.st_nlink)
            if opts.newer:
                # mtime may only have whole-second resolution, so a genuinely
                # newer file can still have the same mtime
                if stat.st_mtime >= os.stat(opts.newer).st_mtime:
                    facts.append(
                        b'newer than %s' % opts.newer.encode('utf8', 'replace')
                    )
                else:
                    facts.append(
                        b'older than %s' % opts.newer.encode('utf8', 'replace')
                    )
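        # Note: opts.bytes (-B) also limits the hex digests below, not just
        # the amount of content dumped.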
        if opts.md5 and content is not None:
            h = hashlib.md5(content)
            facts.append(b'md5=%s' % binascii.hexlify(h.digest())[: opts.bytes])
        if opts.sha1 and content is not None:
            h = hashlib.sha1(content)
            facts.append(
                b'sha1=%s' % binascii.hexlify(h.digest())[: opts.bytes]
            )
        if opts.sha256 and content is not None:
            h = hashlib.sha256(content)
            facts.append(
                b'sha256=%s' % binascii.hexlify(h.digest())[: opts.bytes]
            )
        if isstdin:
            outfile.write(b', '.join(facts) + b'\n')
        elif facts:
            outfile.write(b'%s: %s\n' % (f.encode('utf-8'), b', '.join(facts)))
        elif not quiet:
            outfile.write(b'%s:\n' % f.encode('utf-8'))
        if content is not None:
            chunk = content
            if not islink:
                if opts.lines:
                    if opts.lines >= 0:
                        chunk = b''.join(chunk.splitlines(True)[: opts.lines])
                    else:
                        chunk = b''.join(chunk.splitlines(True)[opts.lines :])
                if opts.bytes:
                    if opts.bytes >= 0:
                        chunk = chunk[: opts.bytes]
                    else:
                        chunk = chunk[opts.bytes :]
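            # Hexdump line format: 4-digit hex offset, up to 16 space-separated
            # hex bytes, then the printable-ASCII rendering between pipes
            # (non-printable bytes shown as '.').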
            if opts.hexdump:
                for i in range(0, len(chunk), 16):
                    s = chunk[i : i + 16]
                    outfile.write(
                        b'%04x: %-47s |%s|\n'
                        % (
                            i,
                            b' '.join(b'%02x' % ord(c) for c in iterbytes(s)),
                            re.sub(b'[^ -~]', b'.', s),
                        )
                    )
            if opts.dump:
                if not quiet:
                    outfile.write(b'>>>\n')
                outfile.write(chunk)
                if not quiet:
                    if chunk.endswith(b'\n'):
                        outfile.write(b'<<<\n')
                    else:
                        outfile.write(b'\n<<< no trailing newline\n')
        if opts.recurse and dirfiles:
            assert not isstdin
            visit(opts, dirfiles, outfile)


if __name__ == "__main__":
    parser = optparse.OptionParser("%prog [options] [filenames]")
    parser.add_option(
        "-t",
        "--type",
        action="store_true",
        help="show file type (file or directory)",
    )
    parser.add_option(
        "-m", "--mode", action="store_true", help="show file mode"
    )
    parser.add_option(
        "-l", "--links", action="store_true", help="show number of links"
    )
    parser.add_option(
        "-s", "--size", action="store_true", help="show size of file"
    )
    parser.add_option(
        "-n", "--newer", action="store", help="check if file is newer (or same)"
    )
    parser.add_option(
        "-r", "--recurse", action="store_true", help="recurse into directories"
    )
    parser.add_option(
        "-S",
        "--sha1",
        action="store_true",
        help="show sha1 hash of the content",
    )
    parser.add_option(
        "",
        "--sha256",
        action="store_true",
        help="show sha256 hash of the content",
    )
    parser.add_option(
        "-M", "--md5", action="store_true", help="show md5 hash of the content"
    )
    parser.add_option(
        "-D", "--dump", action="store_true", help="dump file content"
    )
    parser.add_option(
        "-H", "--hexdump", action="store_true", help="hexdump file content"
    )
    parser.add_option(
        "-B", "--bytes", type="int", help="number of characters to dump"
    )
    parser.add_option(
        "-L", "--lines", type="int", help="number of lines to dump"
    )
    parser.add_option(
        "-q", "--quiet", action="store_true", help="no default output"
    )
    (opts, filenames) = parser.parse_args(sys.argv[1:])
    if not filenames:
        filenames = ['-']

    visit(opts, filenames, getattr(sys.stdout, 'buffer', sys.stdout))