view tests/test-encoding-func.py @ 40132:e67522413ca8

wireprotov2: define and use stream encoders

Now that we have basic support for defining stream encoding, it is time to start doing something with it.

We define various classes implementing stream encoders/decoders for the defined encoding profiles. This is relatively straightforward.

We teach the inputstream and outputstream classes how to encode, decode, and flush data. We then teach the clientreactor how to filter received data through the inputstream decoder.

One of the features of the framing format is that streams can span requests. This is a differentiating feature from, say, HTTP/2, which associates streams with requests. By allowing streams to span requests, we can reuse compression context data across requests/responses. But in order to do this, we need a mechanism to "flush" the encoder at logical boundaries so that receivers see all the data where it is expected. A "flush" event is distinct from a "finish" event from the perspective of certain compressors, because a "flush" retains compression context state whereas a "finish" does not. This is why encoders have both a flush() and a finish(), each using the appropriate flushing semantics on the underlying compressor.

The added tests verify various behaviors of decoders via clientreactor. These tests do exercise some compression behavior through outputstream. But for all intents and purposes, server reactor support for encoding is not yet implemented.

Differential Revision: https://phab.mercurial-scm.org/D4921
author Gregory Szorc <gregory.szorc@gmail.com>
date Mon, 08 Oct 2018 17:10:59 -0700
parents 3ea3c96ada54
children 2372284d9457
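The flush()/finish() distinction described in the changeset message is the core design point. The following is a minimal sketch of that split, not the actual Mercurial encoder classes; it assumes zlib as the underlying compressor purely for illustration.

import zlib

class IllustrativeZlibEncoder(object):
    """Toy encoder showing flush() vs. finish() semantics."""

    def __init__(self):
        self._compressor = zlib.compressobj()

    def encode(self, data):
        # The compressor may buffer internally; output emitted so far is not
        # necessarily decodable on its own.
        return self._compressor.compress(data)

    def flush(self):
        # Z_SYNC_FLUSH emits everything buffered so a receiver can decode all
        # data seen so far, while keeping the compression context alive for
        # reuse by later requests on the same stream.
        return self._compressor.flush(zlib.Z_SYNC_FLUSH)

    def finish(self):
        # Z_FINISH terminates the stream; the context cannot be reused after.
        return self._compressor.flush(zlib.Z_FINISH)

A receiver feeding each chunk into zlib.decompressobj() can fully recover everything written up to a flush() boundary, which is what allows a single compression context to span multiple requests.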
line source

from __future__ import absolute_import

import unittest

from mercurial import (
    encoding,
)

class IsasciistrTest(unittest.TestCase):
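    # Byte strings containing only ASCII bytes; a NUL byte still counts as ASCII.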
    asciistrs = [
        b'a',
        b'ab',
        b'abc',
        b'abcd',
        b'abcde',
        b'abcdefghi',
        b'abcd\0fghi',
    ]

    def testascii(self):
        for s in self.asciistrs:
            self.assertTrue(encoding.isasciistr(s))

    def testnonasciichar(self):
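        # Setting the high bit of any single byte must make isasciistr() return False.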
        for s in self.asciistrs:
            for i in range(len(s)):
                t = bytearray(s)
                t[i] |= 0x80
                self.assertFalse(encoding.isasciistr(bytes(t)))

class LocalEncodingTest(unittest.TestCase):
    def testasciifastpath(self):
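        # ASCII-only input is returned as the same object (no copy), proving the fast path is taken.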
        s = b'\0' * 100
        self.assertTrue(s is encoding.tolocal(s))
        self.assertTrue(s is encoding.fromlocal(s))

class Utf8bEncodingTest(unittest.TestCase):
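    # toutf8b()/fromutf8b() must round-trip UTF-8 data through the local
    # encoding, even when conversion to the local encoding is lossy.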
    def setUp(self):
        self.origencoding = encoding.encoding

    def tearDown(self):
        encoding.encoding = self.origencoding

    def testasciifastpath(self):
        s = b'\0' * 100
        self.assertTrue(s is encoding.toutf8b(s))
        self.assertTrue(s is encoding.fromutf8b(s))

    def testlossylatin(self):
        encoding.encoding = b'ascii'
        s = u'\xc0'.encode('utf-8')
        l = encoding.tolocal(s)
        self.assertEqual(l, b'?')  # lossy
        self.assertEqual(s, encoding.toutf8b(l))  # utf8 sequence preserved

    def testlosslesslatin(self):
        encoding.encoding = b'latin-1'
        s = u'\xc0'.encode('utf-8')
        l = encoding.tolocal(s)
        self.assertEqual(l, b'\xc0')  # lossless
        self.assertEqual(s, encoding.toutf8b(l))  # convert back to utf-8

    def testlossy0xed(self):
        encoding.encoding = b'euc-kr'  # U+Dxxx Hangul
        s = u'\ud1bc\xc0'.encode('utf-8')
        l = encoding.tolocal(s)
        self.assertIn(b'\xed', l)
        self.assertTrue(l.endswith(b'?'))  # lossy
        self.assertEqual(s, encoding.toutf8b(l))  # utf8 sequence preserved

    def testlossless0xed(self):
        encoding.encoding = b'euc-kr'  # U+Dxxx Hangul
        s = u'\ud1bc'.encode('utf-8')
        l = encoding.tolocal(s)
        self.assertEqual(l, b'\xc5\xed')  # lossless
        self.assertEqual(s, encoding.toutf8b(l))  # convert back to utf-8

if __name__ == '__main__':
    import silenttestrunner
    silenttestrunner.main(__name__)