xdiff: use int64 for hash table size
This is a follow-up to the previous "long" -> "int64" change. xdiff now uses
plain int only for return values and small integers (e.g. booleans, the
shifting score, the number of bits in the hash table size), so it should be
able to handle large input.
Differential Revision: https://phab.mercurial-scm.org/D2765
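
As an illustration (not part of this patch), here is a minimal standalone
sketch of why the old "(unsigned int) size" cast mattered: for inputs with
more than UINT_MAX records, the size was silently truncated before the hash
table was sized, whereas the new signature compares in 64 bits:

    #include <limits.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Old behavior: size truncated to 32 bits before sizing the table. */
    static unsigned int hashbits_old(int64_t size) {
        unsigned int truncated = (unsigned int) size; /* silent truncation */
        unsigned int val = 1, bits = 0;
        for (; val < truncated && bits < CHAR_BIT * sizeof(unsigned int);
             val <<= 1, bits++);
        return bits ? bits : 1;
    }

    /* New behavior: the size comparison is done in 64 bits, as in the patch. */
    static unsigned int hashbits_new(int64_t size) {
        int64_t val = 1;
        unsigned int bits = 0;
        for (; val < size && bits < (int64_t) CHAR_BIT * sizeof(unsigned int);
             val <<= 1, bits++);
        return bits ? bits : 1;
    }

    int main(void) {
        int64_t size = (int64_t) UINT_MAX + 2; /* input larger than 32 bits */
        printf("old: %u\n", hashbits_old(size)); /* 1: size truncated to 1 */
        printf("new: %u\n", hashbits_new(size)); /* 32: full-size table */
        return 0;
    }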
--- a/mercurial/thirdparty/xdiff/xprepare.c Fri Mar 09 14:39:35 2018 -0800
+++ b/mercurial/thirdparty/xdiff/xprepare.c Fri Mar 09 14:47:29 2018 -0800
@@ -70,7 +70,7 @@
static int xdl_init_classifier(xdlclassifier_t *cf, int64_t size, int64_t flags) {
cf->flags = flags;
- cf->hbits = xdl_hashbits((unsigned int) size);
+ cf->hbits = xdl_hashbits(size);
cf->hsize = 1 << cf->hbits;
if (xdl_cha_init(&cf->ncha, sizeof(xdlclass_t), size / 4 + 1) < 0) {
@@ -262,7 +262,7 @@
goto abort;
{
- hbits = xdl_hashbits((unsigned int) narec);
+ hbits = xdl_hashbits(narec);
hsize = 1 << hbits;
if (!(rhash = (xrecord_t **) xdl_malloc(hsize * sizeof(xrecord_t *))))
goto abort;
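
For context, xdl_hashbits returns the number of bits b such that 1 << b is
the smallest power of two no smaller than size (with a minimum of 1 bit),
presumably so that bucket selection can be a mask rather than a modulo. A
sketch of that indexing scheme (the function name below is illustrative, not
from xdiff):

    #include <stdint.h>

    /* Power-of-two table sizes make bucket selection a single AND:
     * hash % hsize == hash & (hsize - 1) when hsize is a power of two. */
    static uint64_t bucket_of(uint64_t hash, unsigned int hbits) {
        uint64_t hsize = (uint64_t) 1 << hbits; /* hsize = 2^hbits */
        return hash & (hsize - 1);
    }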
--- a/mercurial/thirdparty/xdiff/xutils.c Fri Mar 09 14:39:35 2018 -0800
+++ b/mercurial/thirdparty/xdiff/xutils.c Fri Mar 09 14:47:29 2018 -0800
@@ -141,9 +141,10 @@
return ha;
}
-unsigned int xdl_hashbits(unsigned int size) {
- unsigned int val = 1, bits = 0;
+unsigned int xdl_hashbits(int64_t size) {
+ int64_t val = 1;
+ unsigned int bits = 0;
- for (; val < size && bits < CHAR_BIT * sizeof(unsigned int); val <<= 1, bits++);
+ for (; val < size && bits < (int64_t) CHAR_BIT * sizeof(unsigned int); val <<= 1, bits++);
return bits ? bits: 1;
}
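
A usage note (illustrative, assuming CHAR_BIT == 8 and a 32-bit unsigned
int): the return value is still clamped to the bit width of unsigned int, so
even very large inputs get at most a 2^32-bucket table:

    /* assumes xutils.h (for the xdl_hashbits prototype) is included */
    void example(void) {
        unsigned int a = xdl_hashbits(1);                 /* -> 1: never returns 0 */
        unsigned int b = xdl_hashbits(1000);              /* -> 10: 1 << 10 == 1024 >= 1000 */
        unsigned int c = xdl_hashbits((int64_t) 1 << 40); /* -> 32: clamped to 32 bits */
        (void) a; (void) b; (void) c;
    }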
--- a/mercurial/thirdparty/xdiff/xutils.h Fri Mar 09 14:39:35 2018 -0800
+++ b/mercurial/thirdparty/xdiff/xutils.h Fri Mar 09 14:47:29 2018 -0800
@@ -32,7 +32,7 @@
int64_t xdl_guess_lines(mmfile_t *mf, int64_t sample);
int xdl_recmatch(const char *l1, int64_t s1, const char *l2, int64_t s2);
uint64_t xdl_hash_record(char const **data, char const *top);
-unsigned int xdl_hashbits(unsigned int size);
+unsigned int xdl_hashbits(int64_t size);