diff tests/test-remotefilelog-datapack.py @ 43076:2372284d9457

formatting: blacken the codebase

This is using my patch to black (https://github.com/psf/black/pull/826) so we
don't un-wrap collection literals.

Done with:

  hg files 'set:**.py - mercurial/thirdparty/** - "contrib/python-zstandard/**"' | xargs black -S

# skip-blame mass-reformatting only
# no-check-commit reformats foo_bar functions

Differential Revision: https://phab.mercurial-scm.org/D6971
author Augie Fackler <augie@google.com>
date Sun, 06 Oct 2019 09:45:02 -0400
parents eb37d95cc486
children 37458d8b0c1e
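For readers who want to preview the same kind of rewrite on a single snippet, a minimal sketch follows; it is not part of this changeset. It assumes a black release exposing black.FileMode and black.format_str, and string_normalization=False is the API equivalent of the -S flag quoted in the commit message. The input is one of the dict literals touched in the hunks below.

    import black

    # One of the pre-black literals from this file (syntax only; the names are
    # unresolved, which is fine because black parses but never executes code).
    src = (
        "meta = {constants.METAKEYFLAG: i ** 4,\n"
        "        constants.METAKEYSIZE: len(content),\n"
        "        b'Z': b'random_string',\n"
        "        b'_': b'\\0' * i}\n"
    )

    # string_normalization=False mirrors -S: existing quote styles are kept.
    mode = black.FileMode(string_normalization=False)
    print(black.format_str(src, mode=mode))

The printed output wraps the literal one item per line with a trailing comma, matching the reformatting shown in the diff.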
--- a/tests/test-remotefilelog-datapack.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-remotefilelog-datapack.py	Sun Oct 06 09:45:02 2019 -0400
@@ -27,6 +27,7 @@
     datapack,
 )
 
+
 class datapacktestsbase(object):
     def __init__(self, datapackreader, paramsavailable):
         self.datapackreader = datapackreader
@@ -48,8 +49,9 @@
         return hashlib.sha1(content).digest()
 
     def getFakeHash(self):
-        return b''.join(pycompat.bytechr(random.randint(0, 255))
-                        for _ in range(20))
+        return b''.join(
+            pycompat.bytechr(random.randint(0, 255)) for _ in range(20)
+        )
 
     def createPack(self, revisions=None, packdir=None):
         if revisions is None:
@@ -80,8 +82,9 @@
         revisions = [(filename, node, nullid, content)]
         pack = self.createPack(revisions)
         if self.paramsavailable:
-            self.assertEqual(pack.params.fanoutprefix,
-                             basepack.SMALLFANOUTPREFIX)
+            self.assertEqual(
+                pack.params.fanoutprefix, basepack.SMALLFANOUTPREFIX
+            )
 
         chain = pack.getdeltachain(filename, node)
         self.assertEqual(content, chain[0][4])
@@ -171,10 +174,12 @@
             filename = b'%d.txt' % i
             content = b'put-something-here \n' * i
             node = self.getHash(content)
-            meta = {constants.METAKEYFLAG: i ** 4,
-                    constants.METAKEYSIZE: len(content),
-                    b'Z': b'random_string',
-                    b'_': b'\0' * i}
+            meta = {
+                constants.METAKEYFLAG: i ** 4,
+                constants.METAKEYSIZE: len(content),
+                b'Z': b'random_string',
+                b'_': b'\0' * i,
+            }
             revisions.append((filename, node, nullid, content, meta))
         pack = self.createPack(revisions)
         for name, node, x, content, origmeta in revisions:
@@ -201,13 +206,15 @@
         missing = pack.getmissing([(b"foo", revisions[0][1])])
         self.assertFalse(missing)
 
-        missing = pack.getmissing([(b"foo", revisions[0][1]),
-                                   (b"foo", revisions[1][1])])
+        missing = pack.getmissing(
+            [(b"foo", revisions[0][1]), (b"foo", revisions[1][1])]
+        )
         self.assertFalse(missing)
 
         fakenode = self.getFakeHash()
-        missing = pack.getmissing([(b"foo", revisions[0][1]),
-                                   (b"foo", fakenode)])
+        missing = pack.getmissing(
+            [(b"foo", revisions[0][1]), (b"foo", fakenode)]
+        )
         self.assertEqual(missing, [(b"foo", fakenode)])
 
     def testAddThrows(self):
@@ -257,8 +264,9 @@
 
         pack = self.createPack(revisions)
         if self.paramsavailable:
-            self.assertEqual(pack.params.fanoutprefix,
-                             basepack.LARGEFANOUTPREFIX)
+            self.assertEqual(
+                pack.params.fanoutprefix, basepack.LARGEFANOUTPREFIX
+            )
 
         for (filename, node), content in blobs.items():
             actualcontent = pack.getdeltachain(filename, node)[0][4]
@@ -284,7 +292,7 @@
                     b'%d' % i,
                     self.getFakeHash(),
                     revision[1],
-                    self.getFakeHash()
+                    self.getFakeHash(),
                 )
 
             self.createPack(chain, packdir)
@@ -303,8 +311,7 @@
 
             mostrecentpack = next(iter(store.packs), None)
             self.assertEqual(
-                mostrecentpack.getdeltachain(revision[0], revision[1]),
-                chain
+                mostrecentpack.getdeltachain(revision[0], revision[1]), chain
             )
 
             self.assertEqual(randomchain.index(revision) + 1, len(chain))
@@ -341,6 +348,7 @@
 
             # Perf of large multi-get
             import gc
+
             gc.disable()
             pack = self.datapackreader(path)
             for lookupsize in lookupsizes:
@@ -352,10 +360,14 @@
                 start = time.time()
                 pack.getmissing(findnodes[:lookupsize])
                 elapsed = time.time() - start
-                print ("%s pack %d lookups = %0.04f" %
-                       (('%d' % packsize).rjust(7),
+                print(
+                    "%s pack %d lookups = %0.04f"
+                    % (
+                        ('%d' % packsize).rjust(7),
                         ('%d' % lookupsize).rjust(7),
-                        elapsed))
+                        elapsed,
+                    )
+                )
 
             print("")
             gc.enable()
@@ -364,11 +376,13 @@
         # so the user sees the output.
         raise RuntimeError("perf test always fails")
 
+
 class datapacktests(datapacktestsbase, unittest.TestCase):
     def __init__(self, *args, **kwargs):
         datapacktestsbase.__init__(self, datapack.datapack, True)
         unittest.TestCase.__init__(self, *args, **kwargs)
 
+
 # TODO:
 # datapack store:
 # - getmissing
@@ -376,5 +390,5 @@
 
 if __name__ == '__main__':
     if pycompat.iswindows:
-        sys.exit(80)    # Skip on Windows
+        sys.exit(80)  # Skip on Windows
     silenttestrunner.main(__name__)