comparison contrib/python-zstandard/tests/test_compressor_fuzzing.py @ 43994:de7838053207

zstandard: vendor python-zstandard 0.13.0

Version 0.13.0 of the package was just released. It contains an upgraded zstd C library (which can result in some performance wins), official support for Python 3.8, and a blackened code base.

There were no meaningful code or functionality changes in this release of python-zstandard: just reformatting and an upgraded zstd library version. So the diff seems much larger than it really is. Files were added without modifications.

The clang-format-ignorelist file was updated to reflect a new header file in the zstd distribution.

# no-check-commit because 3rd party code has different style guidelines

Differential Revision: https://phab.mercurial-scm.org/D7770
author Gregory Szorc <gregory.szorc@gmail.com>
date Sat, 28 Dec 2019 09:55:45 -0800
parents 675775c33ab6
children 5e84a96d865b
comparing 43993:873d0fecb9a3 (old) with 43994:de7838053207 (new)
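Most of the fuzzing tests below follow one shape: hypothesis draws an input, a compression level, and one or more read/write sizes, and the test asserts that the streaming API under test reproduces, byte for byte, the frame a one-shot ZstdCompressor.compress() call yields (the compressobj flush and chunker tests instead verify a round trip through a decompressor). A minimal, self-contained sketch of that pattern against the public zstandard and hypothesis APIs follows; the function name and the strategies.binary() input strategy are illustrative stand-ins, since the real tests sample from random_input_data() in .common and also vary the source read size.

import io

import hypothesis
import hypothesis.strategies as strategies

import zstandard as zstd


@hypothesis.given(
    original=strategies.binary(min_size=1, max_size=8192),
    level=strategies.integers(min_value=1, max_value=5),
    read_size=strategies.integers(min_value=1, max_value=8192),
)
def check_stream_reader_matches_one_shot(original, level, read_size):
    # Reference frame produced by the one-shot API.
    ref_frame = zstd.ZstdCompressor(level=level).compress(original)

    # The same data pulled through the streaming reader in read_size steps.
    cctx = zstd.ZstdCompressor(level=level)
    chunks = []
    with cctx.stream_reader(io.BytesIO(original), size=len(original)) as reader:
        while True:
            chunk = reader.read(read_size)
            if not chunk:
                break
            chunks.append(chunk)

    assert b"".join(chunks) == ref_frame


if __name__ == "__main__":
    # Calling a @hypothesis.given-decorated function runs the property test.
    check_stream_reader_matches_one_shot()

Beyond that pattern, the only change visible in the new column is black's formatting: one keyword argument per line with a trailing comma, double-quoted string literals, and spaced slice colons (original[i : i + input_size]).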
4 4
5 try: 5 try:
6 import hypothesis 6 import hypothesis
7 import hypothesis.strategies as strategies 7 import hypothesis.strategies as strategies
8 except ImportError: 8 except ImportError:
9 raise unittest.SkipTest('hypothesis not available') 9 raise unittest.SkipTest("hypothesis not available")
10 10
11 import zstandard as zstd 11 import zstandard as zstd
12 12
13 from . common import ( 13 from .common import (
14 make_cffi, 14 make_cffi,
15 NonClosingBytesIO, 15 NonClosingBytesIO,
16 random_input_data, 16 random_input_data,
17 TestCase,
17 ) 18 )
18 19
19 20
20 @unittest.skipUnless('ZSTD_SLOW_TESTS' in os.environ, 'ZSTD_SLOW_TESTS not set') 21 @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set")
21 @make_cffi 22 @make_cffi
22 class TestCompressor_stream_reader_fuzzing(unittest.TestCase): 23 class TestCompressor_stream_reader_fuzzing(TestCase):
23 @hypothesis.settings( 24 @hypothesis.settings(
24 suppress_health_check=[hypothesis.HealthCheck.large_base_example]) 25 suppress_health_check=[hypothesis.HealthCheck.large_base_example]
25 @hypothesis.given(original=strategies.sampled_from(random_input_data()), 26 )
26 level=strategies.integers(min_value=1, max_value=5), 27 @hypothesis.given(
27 source_read_size=strategies.integers(1, 16384), 28 original=strategies.sampled_from(random_input_data()),
28 read_size=strategies.integers(-1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE)) 29 level=strategies.integers(min_value=1, max_value=5),
29 def test_stream_source_read(self, original, level, source_read_size, 30 source_read_size=strategies.integers(1, 16384),
30 read_size): 31 read_size=strategies.integers(-1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE),
32 )
33 def test_stream_source_read(self, original, level, source_read_size, read_size):
31 if read_size == 0: 34 if read_size == 0:
32 read_size = -1 35 read_size = -1
33 36
34 refctx = zstd.ZstdCompressor(level=level) 37 refctx = zstd.ZstdCompressor(level=level)
35 ref_frame = refctx.compress(original) 38 ref_frame = refctx.compress(original)
36 39
37 cctx = zstd.ZstdCompressor(level=level) 40 cctx = zstd.ZstdCompressor(level=level)
38 with cctx.stream_reader(io.BytesIO(original), size=len(original), 41 with cctx.stream_reader(
39 read_size=source_read_size) as reader: 42 io.BytesIO(original), size=len(original), read_size=source_read_size
43 ) as reader:
40 chunks = [] 44 chunks = []
41 while True: 45 while True:
42 chunk = reader.read(read_size) 46 chunk = reader.read(read_size)
43 if not chunk: 47 if not chunk:
44 break 48 break
45 49
46 chunks.append(chunk) 50 chunks.append(chunk)
47 51
48 self.assertEqual(b''.join(chunks), ref_frame) 52 self.assertEqual(b"".join(chunks), ref_frame)
49 53
50 @hypothesis.settings( 54 @hypothesis.settings(
51 suppress_health_check=[hypothesis.HealthCheck.large_base_example]) 55 suppress_health_check=[hypothesis.HealthCheck.large_base_example]
52 @hypothesis.given(original=strategies.sampled_from(random_input_data()), 56 )
53 level=strategies.integers(min_value=1, max_value=5), 57 @hypothesis.given(
54 source_read_size=strategies.integers(1, 16384), 58 original=strategies.sampled_from(random_input_data()),
55 read_size=strategies.integers(-1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE)) 59 level=strategies.integers(min_value=1, max_value=5),
56 def test_buffer_source_read(self, original, level, source_read_size, 60 source_read_size=strategies.integers(1, 16384),
57 read_size): 61 read_size=strategies.integers(-1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE),
62 )
63 def test_buffer_source_read(self, original, level, source_read_size, read_size):
58 if read_size == 0: 64 if read_size == 0:
59 read_size = -1 65 read_size = -1
60 66
61 refctx = zstd.ZstdCompressor(level=level) 67 refctx = zstd.ZstdCompressor(level=level)
62 ref_frame = refctx.compress(original) 68 ref_frame = refctx.compress(original)
63 69
64 cctx = zstd.ZstdCompressor(level=level) 70 cctx = zstd.ZstdCompressor(level=level)
65 with cctx.stream_reader(original, size=len(original), 71 with cctx.stream_reader(
66 read_size=source_read_size) as reader: 72 original, size=len(original), read_size=source_read_size
73 ) as reader:
67 chunks = [] 74 chunks = []
68 while True: 75 while True:
69 chunk = reader.read(read_size) 76 chunk = reader.read(read_size)
70 if not chunk: 77 if not chunk:
71 break 78 break
72 79
73 chunks.append(chunk) 80 chunks.append(chunk)
74 81
75 self.assertEqual(b''.join(chunks), ref_frame) 82 self.assertEqual(b"".join(chunks), ref_frame)
76 83
77 @hypothesis.settings( 84 @hypothesis.settings(
78 suppress_health_check=[hypothesis.HealthCheck.large_base_example]) 85 suppress_health_check=[
79 @hypothesis.given(original=strategies.sampled_from(random_input_data()), 86 hypothesis.HealthCheck.large_base_example,
80 level=strategies.integers(min_value=1, max_value=5), 87 hypothesis.HealthCheck.too_slow,
81 source_read_size=strategies.integers(1, 16384), 88 ]
82 read_sizes=strategies.data()) 89 )
83 def test_stream_source_read_variance(self, original, level, source_read_size, 90 @hypothesis.given(
84 read_sizes): 91 original=strategies.sampled_from(random_input_data()),
85 refctx = zstd.ZstdCompressor(level=level) 92 level=strategies.integers(min_value=1, max_value=5),
86 ref_frame = refctx.compress(original) 93 source_read_size=strategies.integers(1, 16384),
87 94 read_sizes=strategies.data(),
88 cctx = zstd.ZstdCompressor(level=level) 95 )
89 with cctx.stream_reader(io.BytesIO(original), size=len(original), 96 def test_stream_source_read_variance(
90 read_size=source_read_size) as reader: 97 self, original, level, source_read_size, read_sizes
98 ):
99 refctx = zstd.ZstdCompressor(level=level)
100 ref_frame = refctx.compress(original)
101
102 cctx = zstd.ZstdCompressor(level=level)
103 with cctx.stream_reader(
104 io.BytesIO(original), size=len(original), read_size=source_read_size
105 ) as reader:
91 chunks = [] 106 chunks = []
92 while True: 107 while True:
93 read_size = read_sizes.draw(strategies.integers(-1, 16384)) 108 read_size = read_sizes.draw(strategies.integers(-1, 16384))
94 chunk = reader.read(read_size) 109 chunk = reader.read(read_size)
95 if not chunk and read_size: 110 if not chunk and read_size:
96 break 111 break
97 112
98 chunks.append(chunk) 113 chunks.append(chunk)
99 114
100 self.assertEqual(b''.join(chunks), ref_frame) 115 self.assertEqual(b"".join(chunks), ref_frame)
101 116
102 @hypothesis.settings( 117 @hypothesis.settings(
103 suppress_health_check=[hypothesis.HealthCheck.large_base_example]) 118 suppress_health_check=[
104 @hypothesis.given(original=strategies.sampled_from(random_input_data()), 119 hypothesis.HealthCheck.large_base_example,
105 level=strategies.integers(min_value=1, max_value=5), 120 hypothesis.HealthCheck.too_slow,
106 source_read_size=strategies.integers(1, 16384), 121 ]
107 read_sizes=strategies.data()) 122 )
108 def test_buffer_source_read_variance(self, original, level, source_read_size, 123 @hypothesis.given(
109 read_sizes): 124 original=strategies.sampled_from(random_input_data()),
110 125 level=strategies.integers(min_value=1, max_value=5),
111 refctx = zstd.ZstdCompressor(level=level) 126 source_read_size=strategies.integers(1, 16384),
112 ref_frame = refctx.compress(original) 127 read_sizes=strategies.data(),
113 128 )
114 cctx = zstd.ZstdCompressor(level=level) 129 def test_buffer_source_read_variance(
115 with cctx.stream_reader(original, size=len(original), 130 self, original, level, source_read_size, read_sizes
116 read_size=source_read_size) as reader: 131 ):
132
133 refctx = zstd.ZstdCompressor(level=level)
134 ref_frame = refctx.compress(original)
135
136 cctx = zstd.ZstdCompressor(level=level)
137 with cctx.stream_reader(
138 original, size=len(original), read_size=source_read_size
139 ) as reader:
117 chunks = [] 140 chunks = []
118 while True: 141 while True:
119 read_size = read_sizes.draw(strategies.integers(-1, 16384)) 142 read_size = read_sizes.draw(strategies.integers(-1, 16384))
120 chunk = reader.read(read_size) 143 chunk = reader.read(read_size)
121 if not chunk and read_size: 144 if not chunk and read_size:
122 break 145 break
123 146
124 chunks.append(chunk) 147 chunks.append(chunk)
125 148
126 self.assertEqual(b''.join(chunks), ref_frame) 149 self.assertEqual(b"".join(chunks), ref_frame)
127 150
128 @hypothesis.settings( 151 @hypothesis.settings(
129 suppress_health_check=[hypothesis.HealthCheck.large_base_example]) 152 suppress_health_check=[hypothesis.HealthCheck.large_base_example]
130 @hypothesis.given(original=strategies.sampled_from(random_input_data()), 153 )
131 level=strategies.integers(min_value=1, max_value=5), 154 @hypothesis.given(
132 source_read_size=strategies.integers(1, 16384), 155 original=strategies.sampled_from(random_input_data()),
133 read_size=strategies.integers(1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE)) 156 level=strategies.integers(min_value=1, max_value=5),
134 def test_stream_source_readinto(self, original, level, 157 source_read_size=strategies.integers(1, 16384),
135 source_read_size, read_size): 158 read_size=strategies.integers(1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE),
136 refctx = zstd.ZstdCompressor(level=level) 159 )
137 ref_frame = refctx.compress(original) 160 def test_stream_source_readinto(self, original, level, source_read_size, read_size):
138 161 refctx = zstd.ZstdCompressor(level=level)
139 cctx = zstd.ZstdCompressor(level=level) 162 ref_frame = refctx.compress(original)
140 with cctx.stream_reader(io.BytesIO(original), size=len(original), 163
141 read_size=source_read_size) as reader: 164 cctx = zstd.ZstdCompressor(level=level)
165 with cctx.stream_reader(
166 io.BytesIO(original), size=len(original), read_size=source_read_size
167 ) as reader:
142 chunks = [] 168 chunks = []
143 while True: 169 while True:
144 b = bytearray(read_size) 170 b = bytearray(read_size)
145 count = reader.readinto(b) 171 count = reader.readinto(b)
146 172
147 if not count: 173 if not count:
148 break 174 break
149 175
150 chunks.append(bytes(b[0:count])) 176 chunks.append(bytes(b[0:count]))
151 177
152 self.assertEqual(b''.join(chunks), ref_frame) 178 self.assertEqual(b"".join(chunks), ref_frame)
153 179
154 @hypothesis.settings( 180 @hypothesis.settings(
155 suppress_health_check=[hypothesis.HealthCheck.large_base_example]) 181 suppress_health_check=[hypothesis.HealthCheck.large_base_example]
156 @hypothesis.given(original=strategies.sampled_from(random_input_data()), 182 )
157 level=strategies.integers(min_value=1, max_value=5), 183 @hypothesis.given(
158 source_read_size=strategies.integers(1, 16384), 184 original=strategies.sampled_from(random_input_data()),
159 read_size=strategies.integers(1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE)) 185 level=strategies.integers(min_value=1, max_value=5),
160 def test_buffer_source_readinto(self, original, level, 186 source_read_size=strategies.integers(1, 16384),
161 source_read_size, read_size): 187 read_size=strategies.integers(1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE),
162 188 )
163 refctx = zstd.ZstdCompressor(level=level) 189 def test_buffer_source_readinto(self, original, level, source_read_size, read_size):
164 ref_frame = refctx.compress(original) 190
165 191 refctx = zstd.ZstdCompressor(level=level)
166 cctx = zstd.ZstdCompressor(level=level) 192 ref_frame = refctx.compress(original)
167 with cctx.stream_reader(original, size=len(original), 193
168 read_size=source_read_size) as reader: 194 cctx = zstd.ZstdCompressor(level=level)
195 with cctx.stream_reader(
196 original, size=len(original), read_size=source_read_size
197 ) as reader:
169 chunks = [] 198 chunks = []
170 while True: 199 while True:
171 b = bytearray(read_size) 200 b = bytearray(read_size)
172 count = reader.readinto(b) 201 count = reader.readinto(b)
173 202
174 if not count: 203 if not count:
175 break 204 break
176 205
177 chunks.append(bytes(b[0:count])) 206 chunks.append(bytes(b[0:count]))
178 207
179 self.assertEqual(b''.join(chunks), ref_frame) 208 self.assertEqual(b"".join(chunks), ref_frame)
180 209
181 @hypothesis.settings( 210 @hypothesis.settings(
182 suppress_health_check=[hypothesis.HealthCheck.large_base_example]) 211 suppress_health_check=[
183 @hypothesis.given(original=strategies.sampled_from(random_input_data()), 212 hypothesis.HealthCheck.large_base_example,
184 level=strategies.integers(min_value=1, max_value=5), 213 hypothesis.HealthCheck.too_slow,
185 source_read_size=strategies.integers(1, 16384), 214 ]
186 read_sizes=strategies.data()) 215 )
187 def test_stream_source_readinto_variance(self, original, level, 216 @hypothesis.given(
188 source_read_size, read_sizes): 217 original=strategies.sampled_from(random_input_data()),
189 refctx = zstd.ZstdCompressor(level=level) 218 level=strategies.integers(min_value=1, max_value=5),
190 ref_frame = refctx.compress(original) 219 source_read_size=strategies.integers(1, 16384),
191 220 read_sizes=strategies.data(),
192 cctx = zstd.ZstdCompressor(level=level) 221 )
193 with cctx.stream_reader(io.BytesIO(original), size=len(original), 222 def test_stream_source_readinto_variance(
194 read_size=source_read_size) as reader: 223 self, original, level, source_read_size, read_sizes
224 ):
225 refctx = zstd.ZstdCompressor(level=level)
226 ref_frame = refctx.compress(original)
227
228 cctx = zstd.ZstdCompressor(level=level)
229 with cctx.stream_reader(
230 io.BytesIO(original), size=len(original), read_size=source_read_size
231 ) as reader:
195 chunks = [] 232 chunks = []
196 while True: 233 while True:
197 read_size = read_sizes.draw(strategies.integers(1, 16384)) 234 read_size = read_sizes.draw(strategies.integers(1, 16384))
198 b = bytearray(read_size) 235 b = bytearray(read_size)
199 count = reader.readinto(b) 236 count = reader.readinto(b)
201 if not count: 238 if not count:
202 break 239 break
203 240
204 chunks.append(bytes(b[0:count])) 241 chunks.append(bytes(b[0:count]))
205 242
206 self.assertEqual(b''.join(chunks), ref_frame) 243 self.assertEqual(b"".join(chunks), ref_frame)
207 244
208 @hypothesis.settings( 245 @hypothesis.settings(
209 suppress_health_check=[hypothesis.HealthCheck.large_base_example]) 246 suppress_health_check=[
210 @hypothesis.given(original=strategies.sampled_from(random_input_data()), 247 hypothesis.HealthCheck.large_base_example,
211 level=strategies.integers(min_value=1, max_value=5), 248 hypothesis.HealthCheck.too_slow,
212 source_read_size=strategies.integers(1, 16384), 249 ]
213 read_sizes=strategies.data()) 250 )
214 def test_buffer_source_readinto_variance(self, original, level, 251 @hypothesis.given(
215 source_read_size, read_sizes): 252 original=strategies.sampled_from(random_input_data()),
216 253 level=strategies.integers(min_value=1, max_value=5),
217 refctx = zstd.ZstdCompressor(level=level) 254 source_read_size=strategies.integers(1, 16384),
218 ref_frame = refctx.compress(original) 255 read_sizes=strategies.data(),
219 256 )
220 cctx = zstd.ZstdCompressor(level=level) 257 def test_buffer_source_readinto_variance(
221 with cctx.stream_reader(original, size=len(original), 258 self, original, level, source_read_size, read_sizes
222 read_size=source_read_size) as reader: 259 ):
260
261 refctx = zstd.ZstdCompressor(level=level)
262 ref_frame = refctx.compress(original)
263
264 cctx = zstd.ZstdCompressor(level=level)
265 with cctx.stream_reader(
266 original, size=len(original), read_size=source_read_size
267 ) as reader:
223 chunks = [] 268 chunks = []
224 while True: 269 while True:
225 read_size = read_sizes.draw(strategies.integers(1, 16384)) 270 read_size = read_sizes.draw(strategies.integers(1, 16384))
226 b = bytearray(read_size) 271 b = bytearray(read_size)
227 count = reader.readinto(b) 272 count = reader.readinto(b)
229 if not count: 274 if not count:
230 break 275 break
231 276
232 chunks.append(bytes(b[0:count])) 277 chunks.append(bytes(b[0:count]))
233 278
234 self.assertEqual(b''.join(chunks), ref_frame) 279 self.assertEqual(b"".join(chunks), ref_frame)
235 280
236 @hypothesis.settings( 281 @hypothesis.settings(
237 suppress_health_check=[hypothesis.HealthCheck.large_base_example]) 282 suppress_health_check=[hypothesis.HealthCheck.large_base_example]
238 @hypothesis.given(original=strategies.sampled_from(random_input_data()), 283 )
239 level=strategies.integers(min_value=1, max_value=5), 284 @hypothesis.given(
240 source_read_size=strategies.integers(1, 16384), 285 original=strategies.sampled_from(random_input_data()),
241 read_size=strategies.integers(-1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE)) 286 level=strategies.integers(min_value=1, max_value=5),
242 def test_stream_source_read1(self, original, level, source_read_size, 287 source_read_size=strategies.integers(1, 16384),
243 read_size): 288 read_size=strategies.integers(-1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE),
289 )
290 def test_stream_source_read1(self, original, level, source_read_size, read_size):
244 if read_size == 0: 291 if read_size == 0:
245 read_size = -1 292 read_size = -1
246 293
247 refctx = zstd.ZstdCompressor(level=level) 294 refctx = zstd.ZstdCompressor(level=level)
248 ref_frame = refctx.compress(original) 295 ref_frame = refctx.compress(original)
249 296
250 cctx = zstd.ZstdCompressor(level=level) 297 cctx = zstd.ZstdCompressor(level=level)
251 with cctx.stream_reader(io.BytesIO(original), size=len(original), 298 with cctx.stream_reader(
252 read_size=source_read_size) as reader: 299 io.BytesIO(original), size=len(original), read_size=source_read_size
300 ) as reader:
253 chunks = [] 301 chunks = []
254 while True: 302 while True:
255 chunk = reader.read1(read_size) 303 chunk = reader.read1(read_size)
256 if not chunk: 304 if not chunk:
257 break 305 break
258 306
259 chunks.append(chunk) 307 chunks.append(chunk)
260 308
261 self.assertEqual(b''.join(chunks), ref_frame) 309 self.assertEqual(b"".join(chunks), ref_frame)
262 310
263 @hypothesis.settings( 311 @hypothesis.settings(
264 suppress_health_check=[hypothesis.HealthCheck.large_base_example]) 312 suppress_health_check=[hypothesis.HealthCheck.large_base_example]
265 @hypothesis.given(original=strategies.sampled_from(random_input_data()), 313 )
266 level=strategies.integers(min_value=1, max_value=5), 314 @hypothesis.given(
267 source_read_size=strategies.integers(1, 16384), 315 original=strategies.sampled_from(random_input_data()),
268 read_size=strategies.integers(-1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE)) 316 level=strategies.integers(min_value=1, max_value=5),
269 def test_buffer_source_read1(self, original, level, source_read_size, 317 source_read_size=strategies.integers(1, 16384),
270 read_size): 318 read_size=strategies.integers(-1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE),
319 )
320 def test_buffer_source_read1(self, original, level, source_read_size, read_size):
271 if read_size == 0: 321 if read_size == 0:
272 read_size = -1 322 read_size = -1
273 323
274 refctx = zstd.ZstdCompressor(level=level) 324 refctx = zstd.ZstdCompressor(level=level)
275 ref_frame = refctx.compress(original) 325 ref_frame = refctx.compress(original)
276 326
277 cctx = zstd.ZstdCompressor(level=level) 327 cctx = zstd.ZstdCompressor(level=level)
278 with cctx.stream_reader(original, size=len(original), 328 with cctx.stream_reader(
279 read_size=source_read_size) as reader: 329 original, size=len(original), read_size=source_read_size
330 ) as reader:
280 chunks = [] 331 chunks = []
281 while True: 332 while True:
282 chunk = reader.read1(read_size) 333 chunk = reader.read1(read_size)
283 if not chunk: 334 if not chunk:
284 break 335 break
285 336
286 chunks.append(chunk) 337 chunks.append(chunk)
287 338
288 self.assertEqual(b''.join(chunks), ref_frame) 339 self.assertEqual(b"".join(chunks), ref_frame)
289 340
290 @hypothesis.settings( 341 @hypothesis.settings(
291 suppress_health_check=[hypothesis.HealthCheck.large_base_example]) 342 suppress_health_check=[
292 @hypothesis.given(original=strategies.sampled_from(random_input_data()), 343 hypothesis.HealthCheck.large_base_example,
293 level=strategies.integers(min_value=1, max_value=5), 344 hypothesis.HealthCheck.too_slow,
294 source_read_size=strategies.integers(1, 16384), 345 ]
295 read_sizes=strategies.data()) 346 )
296 def test_stream_source_read1_variance(self, original, level, source_read_size, 347 @hypothesis.given(
297 read_sizes): 348 original=strategies.sampled_from(random_input_data()),
298 refctx = zstd.ZstdCompressor(level=level) 349 level=strategies.integers(min_value=1, max_value=5),
299 ref_frame = refctx.compress(original) 350 source_read_size=strategies.integers(1, 16384),
300 351 read_sizes=strategies.data(),
301 cctx = zstd.ZstdCompressor(level=level) 352 )
302 with cctx.stream_reader(io.BytesIO(original), size=len(original), 353 def test_stream_source_read1_variance(
303 read_size=source_read_size) as reader: 354 self, original, level, source_read_size, read_sizes
355 ):
356 refctx = zstd.ZstdCompressor(level=level)
357 ref_frame = refctx.compress(original)
358
359 cctx = zstd.ZstdCompressor(level=level)
360 with cctx.stream_reader(
361 io.BytesIO(original), size=len(original), read_size=source_read_size
362 ) as reader:
304 chunks = [] 363 chunks = []
305 while True: 364 while True:
306 read_size = read_sizes.draw(strategies.integers(-1, 16384)) 365 read_size = read_sizes.draw(strategies.integers(-1, 16384))
307 chunk = reader.read1(read_size) 366 chunk = reader.read1(read_size)
308 if not chunk and read_size: 367 if not chunk and read_size:
309 break 368 break
310 369
311 chunks.append(chunk) 370 chunks.append(chunk)
312 371
313 self.assertEqual(b''.join(chunks), ref_frame) 372 self.assertEqual(b"".join(chunks), ref_frame)
314 373
315 @hypothesis.settings( 374 @hypothesis.settings(
316 suppress_health_check=[hypothesis.HealthCheck.large_base_example]) 375 suppress_health_check=[
317 @hypothesis.given(original=strategies.sampled_from(random_input_data()), 376 hypothesis.HealthCheck.large_base_example,
318 level=strategies.integers(min_value=1, max_value=5), 377 hypothesis.HealthCheck.too_slow,
319 source_read_size=strategies.integers(1, 16384), 378 ]
320 read_sizes=strategies.data()) 379 )
321 def test_buffer_source_read1_variance(self, original, level, source_read_size, 380 @hypothesis.given(
322 read_sizes): 381 original=strategies.sampled_from(random_input_data()),
323 382 level=strategies.integers(min_value=1, max_value=5),
324 refctx = zstd.ZstdCompressor(level=level) 383 source_read_size=strategies.integers(1, 16384),
325 ref_frame = refctx.compress(original) 384 read_sizes=strategies.data(),
326 385 )
327 cctx = zstd.ZstdCompressor(level=level) 386 def test_buffer_source_read1_variance(
328 with cctx.stream_reader(original, size=len(original), 387 self, original, level, source_read_size, read_sizes
329 read_size=source_read_size) as reader: 388 ):
389
390 refctx = zstd.ZstdCompressor(level=level)
391 ref_frame = refctx.compress(original)
392
393 cctx = zstd.ZstdCompressor(level=level)
394 with cctx.stream_reader(
395 original, size=len(original), read_size=source_read_size
396 ) as reader:
330 chunks = [] 397 chunks = []
331 while True: 398 while True:
332 read_size = read_sizes.draw(strategies.integers(-1, 16384)) 399 read_size = read_sizes.draw(strategies.integers(-1, 16384))
333 chunk = reader.read1(read_size) 400 chunk = reader.read1(read_size)
334 if not chunk and read_size: 401 if not chunk and read_size:
335 break 402 break
336 403
337 chunks.append(chunk) 404 chunks.append(chunk)
338 405
339 self.assertEqual(b''.join(chunks), ref_frame) 406 self.assertEqual(b"".join(chunks), ref_frame)
340 407
341 408 @hypothesis.settings(
342 @hypothesis.settings( 409 suppress_health_check=[hypothesis.HealthCheck.large_base_example]
343 suppress_health_check=[hypothesis.HealthCheck.large_base_example]) 410 )
344 @hypothesis.given(original=strategies.sampled_from(random_input_data()), 411 @hypothesis.given(
345 level=strategies.integers(min_value=1, max_value=5), 412 original=strategies.sampled_from(random_input_data()),
346 source_read_size=strategies.integers(1, 16384), 413 level=strategies.integers(min_value=1, max_value=5),
347 read_size=strategies.integers(1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE)) 414 source_read_size=strategies.integers(1, 16384),
348 def test_stream_source_readinto1(self, original, level, source_read_size, 415 read_size=strategies.integers(1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE),
349 read_size): 416 )
417 def test_stream_source_readinto1(
418 self, original, level, source_read_size, read_size
419 ):
350 if read_size == 0: 420 if read_size == 0:
351 read_size = -1 421 read_size = -1
352 422
353 refctx = zstd.ZstdCompressor(level=level) 423 refctx = zstd.ZstdCompressor(level=level)
354 ref_frame = refctx.compress(original) 424 ref_frame = refctx.compress(original)
355 425
356 cctx = zstd.ZstdCompressor(level=level) 426 cctx = zstd.ZstdCompressor(level=level)
357 with cctx.stream_reader(io.BytesIO(original), size=len(original), 427 with cctx.stream_reader(
358 read_size=source_read_size) as reader: 428 io.BytesIO(original), size=len(original), read_size=source_read_size
429 ) as reader:
359 chunks = [] 430 chunks = []
360 while True: 431 while True:
361 b = bytearray(read_size) 432 b = bytearray(read_size)
362 count = reader.readinto1(b) 433 count = reader.readinto1(b)
363 434
364 if not count: 435 if not count:
365 break 436 break
366 437
367 chunks.append(bytes(b[0:count])) 438 chunks.append(bytes(b[0:count]))
368 439
369 self.assertEqual(b''.join(chunks), ref_frame) 440 self.assertEqual(b"".join(chunks), ref_frame)
370 441
371 @hypothesis.settings( 442 @hypothesis.settings(
372 suppress_health_check=[hypothesis.HealthCheck.large_base_example]) 443 suppress_health_check=[hypothesis.HealthCheck.large_base_example]
373 @hypothesis.given(original=strategies.sampled_from(random_input_data()), 444 )
374 level=strategies.integers(min_value=1, max_value=5), 445 @hypothesis.given(
375 source_read_size=strategies.integers(1, 16384), 446 original=strategies.sampled_from(random_input_data()),
376 read_size=strategies.integers(1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE)) 447 level=strategies.integers(min_value=1, max_value=5),
377 def test_buffer_source_readinto1(self, original, level, source_read_size, 448 source_read_size=strategies.integers(1, 16384),
378 read_size): 449 read_size=strategies.integers(1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE),
450 )
451 def test_buffer_source_readinto1(
452 self, original, level, source_read_size, read_size
453 ):
379 if read_size == 0: 454 if read_size == 0:
380 read_size = -1 455 read_size = -1
381 456
382 refctx = zstd.ZstdCompressor(level=level) 457 refctx = zstd.ZstdCompressor(level=level)
383 ref_frame = refctx.compress(original) 458 ref_frame = refctx.compress(original)
384 459
385 cctx = zstd.ZstdCompressor(level=level) 460 cctx = zstd.ZstdCompressor(level=level)
386 with cctx.stream_reader(original, size=len(original), 461 with cctx.stream_reader(
387 read_size=source_read_size) as reader: 462 original, size=len(original), read_size=source_read_size
463 ) as reader:
388 chunks = [] 464 chunks = []
389 while True: 465 while True:
390 b = bytearray(read_size) 466 b = bytearray(read_size)
391 count = reader.readinto1(b) 467 count = reader.readinto1(b)
392 468
393 if not count: 469 if not count:
394 break 470 break
395 471
396 chunks.append(bytes(b[0:count])) 472 chunks.append(bytes(b[0:count]))
397 473
398 self.assertEqual(b''.join(chunks), ref_frame) 474 self.assertEqual(b"".join(chunks), ref_frame)
399 475
400 @hypothesis.settings( 476 @hypothesis.settings(
401 suppress_health_check=[hypothesis.HealthCheck.large_base_example]) 477 suppress_health_check=[
402 @hypothesis.given(original=strategies.sampled_from(random_input_data()), 478 hypothesis.HealthCheck.large_base_example,
403 level=strategies.integers(min_value=1, max_value=5), 479 hypothesis.HealthCheck.too_slow,
404 source_read_size=strategies.integers(1, 16384), 480 ]
405 read_sizes=strategies.data()) 481 )
406 def test_stream_source_readinto1_variance(self, original, level, source_read_size, 482 @hypothesis.given(
407 read_sizes): 483 original=strategies.sampled_from(random_input_data()),
408 refctx = zstd.ZstdCompressor(level=level) 484 level=strategies.integers(min_value=1, max_value=5),
409 ref_frame = refctx.compress(original) 485 source_read_size=strategies.integers(1, 16384),
410 486 read_sizes=strategies.data(),
411 cctx = zstd.ZstdCompressor(level=level) 487 )
412 with cctx.stream_reader(io.BytesIO(original), size=len(original), 488 def test_stream_source_readinto1_variance(
413 read_size=source_read_size) as reader: 489 self, original, level, source_read_size, read_sizes
490 ):
491 refctx = zstd.ZstdCompressor(level=level)
492 ref_frame = refctx.compress(original)
493
494 cctx = zstd.ZstdCompressor(level=level)
495 with cctx.stream_reader(
496 io.BytesIO(original), size=len(original), read_size=source_read_size
497 ) as reader:
414 chunks = [] 498 chunks = []
415 while True: 499 while True:
416 read_size = read_sizes.draw(strategies.integers(1, 16384)) 500 read_size = read_sizes.draw(strategies.integers(1, 16384))
417 b = bytearray(read_size) 501 b = bytearray(read_size)
418 count = reader.readinto1(b) 502 count = reader.readinto1(b)
420 if not count: 504 if not count:
421 break 505 break
422 506
423 chunks.append(bytes(b[0:count])) 507 chunks.append(bytes(b[0:count]))
424 508
425 self.assertEqual(b''.join(chunks), ref_frame) 509 self.assertEqual(b"".join(chunks), ref_frame)
426 510
427 @hypothesis.settings( 511 @hypothesis.settings(
428 suppress_health_check=[hypothesis.HealthCheck.large_base_example]) 512 suppress_health_check=[
429 @hypothesis.given(original=strategies.sampled_from(random_input_data()), 513 hypothesis.HealthCheck.large_base_example,
430 level=strategies.integers(min_value=1, max_value=5), 514 hypothesis.HealthCheck.too_slow,
431 source_read_size=strategies.integers(1, 16384), 515 ]
432 read_sizes=strategies.data()) 516 )
433 def test_buffer_source_readinto1_variance(self, original, level, source_read_size, 517 @hypothesis.given(
434 read_sizes): 518 original=strategies.sampled_from(random_input_data()),
435 519 level=strategies.integers(min_value=1, max_value=5),
436 refctx = zstd.ZstdCompressor(level=level) 520 source_read_size=strategies.integers(1, 16384),
437 ref_frame = refctx.compress(original) 521 read_sizes=strategies.data(),
438 522 )
439 cctx = zstd.ZstdCompressor(level=level) 523 def test_buffer_source_readinto1_variance(
440 with cctx.stream_reader(original, size=len(original), 524 self, original, level, source_read_size, read_sizes
441 read_size=source_read_size) as reader: 525 ):
526
527 refctx = zstd.ZstdCompressor(level=level)
528 ref_frame = refctx.compress(original)
529
530 cctx = zstd.ZstdCompressor(level=level)
531 with cctx.stream_reader(
532 original, size=len(original), read_size=source_read_size
533 ) as reader:
442 chunks = [] 534 chunks = []
443 while True: 535 while True:
444 read_size = read_sizes.draw(strategies.integers(1, 16384)) 536 read_size = read_sizes.draw(strategies.integers(1, 16384))
445 b = bytearray(read_size) 537 b = bytearray(read_size)
446 count = reader.readinto1(b) 538 count = reader.readinto1(b)
448 if not count: 540 if not count:
449 break 541 break
450 542
451 chunks.append(bytes(b[0:count])) 543 chunks.append(bytes(b[0:count]))
452 544
453 self.assertEqual(b''.join(chunks), ref_frame) 545 self.assertEqual(b"".join(chunks), ref_frame)
454 546
455 547
456 548 @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set")
457 @unittest.skipUnless('ZSTD_SLOW_TESTS' in os.environ, 'ZSTD_SLOW_TESTS not set')
458 @make_cffi 549 @make_cffi
459 class TestCompressor_stream_writer_fuzzing(unittest.TestCase): 550 class TestCompressor_stream_writer_fuzzing(TestCase):
460 @hypothesis.given(original=strategies.sampled_from(random_input_data()), 551 @hypothesis.given(
461 level=strategies.integers(min_value=1, max_value=5), 552 original=strategies.sampled_from(random_input_data()),
462 write_size=strategies.integers(min_value=1, max_value=1048576)) 553 level=strategies.integers(min_value=1, max_value=5),
554 write_size=strategies.integers(min_value=1, max_value=1048576),
555 )
463 def test_write_size_variance(self, original, level, write_size): 556 def test_write_size_variance(self, original, level, write_size):
464 refctx = zstd.ZstdCompressor(level=level) 557 refctx = zstd.ZstdCompressor(level=level)
465 ref_frame = refctx.compress(original) 558 ref_frame = refctx.compress(original)
466 559
467 cctx = zstd.ZstdCompressor(level=level) 560 cctx = zstd.ZstdCompressor(level=level)
468 b = NonClosingBytesIO() 561 b = NonClosingBytesIO()
469 with cctx.stream_writer(b, size=len(original), write_size=write_size) as compressor: 562 with cctx.stream_writer(
563 b, size=len(original), write_size=write_size
564 ) as compressor:
470 compressor.write(original) 565 compressor.write(original)
471 566
472 self.assertEqual(b.getvalue(), ref_frame) 567 self.assertEqual(b.getvalue(), ref_frame)
473 568
474 569
475 @unittest.skipUnless('ZSTD_SLOW_TESTS' in os.environ, 'ZSTD_SLOW_TESTS not set') 570 @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set")
476 @make_cffi 571 @make_cffi
477 class TestCompressor_copy_stream_fuzzing(unittest.TestCase): 572 class TestCompressor_copy_stream_fuzzing(TestCase):
478 @hypothesis.given(original=strategies.sampled_from(random_input_data()), 573 @hypothesis.given(
479 level=strategies.integers(min_value=1, max_value=5), 574 original=strategies.sampled_from(random_input_data()),
480 read_size=strategies.integers(min_value=1, max_value=1048576), 575 level=strategies.integers(min_value=1, max_value=5),
481 write_size=strategies.integers(min_value=1, max_value=1048576)) 576 read_size=strategies.integers(min_value=1, max_value=1048576),
577 write_size=strategies.integers(min_value=1, max_value=1048576),
578 )
482 def test_read_write_size_variance(self, original, level, read_size, write_size): 579 def test_read_write_size_variance(self, original, level, read_size, write_size):
483 refctx = zstd.ZstdCompressor(level=level) 580 refctx = zstd.ZstdCompressor(level=level)
484 ref_frame = refctx.compress(original) 581 ref_frame = refctx.compress(original)
485 582
486 cctx = zstd.ZstdCompressor(level=level) 583 cctx = zstd.ZstdCompressor(level=level)
487 source = io.BytesIO(original) 584 source = io.BytesIO(original)
488 dest = io.BytesIO() 585 dest = io.BytesIO()
489 586
490 cctx.copy_stream(source, dest, size=len(original), read_size=read_size, 587 cctx.copy_stream(
491 write_size=write_size) 588 source, dest, size=len(original), read_size=read_size, write_size=write_size
589 )
492 590
493 self.assertEqual(dest.getvalue(), ref_frame) 591 self.assertEqual(dest.getvalue(), ref_frame)
494 592
495 593
496 @unittest.skipUnless('ZSTD_SLOW_TESTS' in os.environ, 'ZSTD_SLOW_TESTS not set') 594 @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set")
497 @make_cffi 595 @make_cffi
498 class TestCompressor_compressobj_fuzzing(unittest.TestCase): 596 class TestCompressor_compressobj_fuzzing(TestCase):
499 @hypothesis.settings( 597 @hypothesis.settings(
500 suppress_health_check=[hypothesis.HealthCheck.large_base_example]) 598 suppress_health_check=[
501 @hypothesis.given(original=strategies.sampled_from(random_input_data()), 599 hypothesis.HealthCheck.large_base_example,
502 level=strategies.integers(min_value=1, max_value=5), 600 hypothesis.HealthCheck.too_slow,
503 chunk_sizes=strategies.data()) 601 ]
602 )
603 @hypothesis.given(
604 original=strategies.sampled_from(random_input_data()),
605 level=strategies.integers(min_value=1, max_value=5),
606 chunk_sizes=strategies.data(),
607 )
504 def test_random_input_sizes(self, original, level, chunk_sizes): 608 def test_random_input_sizes(self, original, level, chunk_sizes):
505 refctx = zstd.ZstdCompressor(level=level) 609 refctx = zstd.ZstdCompressor(level=level)
506 ref_frame = refctx.compress(original) 610 ref_frame = refctx.compress(original)
507 611
508 cctx = zstd.ZstdCompressor(level=level) 612 cctx = zstd.ZstdCompressor(level=level)
510 614
511 chunks = [] 615 chunks = []
512 i = 0 616 i = 0
513 while True: 617 while True:
514 chunk_size = chunk_sizes.draw(strategies.integers(1, 4096)) 618 chunk_size = chunk_sizes.draw(strategies.integers(1, 4096))
515 source = original[i:i + chunk_size] 619 source = original[i : i + chunk_size]
516 if not source: 620 if not source:
517 break 621 break
518 622
519 chunks.append(cobj.compress(source)) 623 chunks.append(cobj.compress(source))
520 i += chunk_size 624 i += chunk_size
521 625
522 chunks.append(cobj.flush()) 626 chunks.append(cobj.flush())
523 627
524 self.assertEqual(b''.join(chunks), ref_frame) 628 self.assertEqual(b"".join(chunks), ref_frame)
525 629
526 @hypothesis.settings( 630 @hypothesis.settings(
527 suppress_health_check=[hypothesis.HealthCheck.large_base_example]) 631 suppress_health_check=[
528 @hypothesis.given(original=strategies.sampled_from(random_input_data()), 632 hypothesis.HealthCheck.large_base_example,
529 level=strategies.integers(min_value=1, max_value=5), 633 hypothesis.HealthCheck.too_slow,
530 chunk_sizes=strategies.data(), 634 ]
531 flushes=strategies.data()) 635 )
636 @hypothesis.given(
637 original=strategies.sampled_from(random_input_data()),
638 level=strategies.integers(min_value=1, max_value=5),
639 chunk_sizes=strategies.data(),
640 flushes=strategies.data(),
641 )
532 def test_flush_block(self, original, level, chunk_sizes, flushes): 642 def test_flush_block(self, original, level, chunk_sizes, flushes):
533 cctx = zstd.ZstdCompressor(level=level) 643 cctx = zstd.ZstdCompressor(level=level)
534 cobj = cctx.compressobj() 644 cobj = cctx.compressobj()
535 645
536 dctx = zstd.ZstdDecompressor() 646 dctx = zstd.ZstdDecompressor()
539 compressed_chunks = [] 649 compressed_chunks = []
540 decompressed_chunks = [] 650 decompressed_chunks = []
541 i = 0 651 i = 0
542 while True: 652 while True:
543 input_size = chunk_sizes.draw(strategies.integers(1, 4096)) 653 input_size = chunk_sizes.draw(strategies.integers(1, 4096))
544 source = original[i:i + input_size] 654 source = original[i : i + input_size]
545 if not source: 655 if not source:
546 break 656 break
547 657
548 i += input_size 658 i += input_size
549 659
556 666
557 chunk = cobj.flush(zstd.COMPRESSOBJ_FLUSH_BLOCK) 667 chunk = cobj.flush(zstd.COMPRESSOBJ_FLUSH_BLOCK)
558 compressed_chunks.append(chunk) 668 compressed_chunks.append(chunk)
559 decompressed_chunks.append(dobj.decompress(chunk)) 669 decompressed_chunks.append(dobj.decompress(chunk))
560 670
561 self.assertEqual(b''.join(decompressed_chunks), original[0:i]) 671 self.assertEqual(b"".join(decompressed_chunks), original[0:i])
562 672
563 chunk = cobj.flush(zstd.COMPRESSOBJ_FLUSH_FINISH) 673 chunk = cobj.flush(zstd.COMPRESSOBJ_FLUSH_FINISH)
564 compressed_chunks.append(chunk) 674 compressed_chunks.append(chunk)
565 decompressed_chunks.append(dobj.decompress(chunk)) 675 decompressed_chunks.append(dobj.decompress(chunk))
566 676
567 self.assertEqual(dctx.decompress(b''.join(compressed_chunks), 677 self.assertEqual(
568 max_output_size=len(original)), 678 dctx.decompress(b"".join(compressed_chunks), max_output_size=len(original)),
569 original) 679 original,
570 self.assertEqual(b''.join(decompressed_chunks), original) 680 )
571 681 self.assertEqual(b"".join(decompressed_chunks), original)
572 @unittest.skipUnless('ZSTD_SLOW_TESTS' in os.environ, 'ZSTD_SLOW_TESTS not set') 682
683
684 @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set")
573 @make_cffi 685 @make_cffi
574 class TestCompressor_read_to_iter_fuzzing(unittest.TestCase): 686 class TestCompressor_read_to_iter_fuzzing(TestCase):
575 @hypothesis.given(original=strategies.sampled_from(random_input_data()), 687 @hypothesis.given(
576 level=strategies.integers(min_value=1, max_value=5), 688 original=strategies.sampled_from(random_input_data()),
577 read_size=strategies.integers(min_value=1, max_value=4096), 689 level=strategies.integers(min_value=1, max_value=5),
578 write_size=strategies.integers(min_value=1, max_value=4096)) 690 read_size=strategies.integers(min_value=1, max_value=4096),
691 write_size=strategies.integers(min_value=1, max_value=4096),
692 )
579 def test_read_write_size_variance(self, original, level, read_size, write_size): 693 def test_read_write_size_variance(self, original, level, read_size, write_size):
580 refcctx = zstd.ZstdCompressor(level=level) 694 refcctx = zstd.ZstdCompressor(level=level)
581 ref_frame = refcctx.compress(original) 695 ref_frame = refcctx.compress(original)
582 696
583 source = io.BytesIO(original) 697 source = io.BytesIO(original)
584 698
585 cctx = zstd.ZstdCompressor(level=level) 699 cctx = zstd.ZstdCompressor(level=level)
586 chunks = list(cctx.read_to_iter(source, size=len(original), 700 chunks = list(
587 read_size=read_size, 701 cctx.read_to_iter(
588 write_size=write_size)) 702 source, size=len(original), read_size=read_size, write_size=write_size
589 703 )
590 self.assertEqual(b''.join(chunks), ref_frame) 704 )
591 705
592 706 self.assertEqual(b"".join(chunks), ref_frame)
593 @unittest.skipUnless('ZSTD_SLOW_TESTS' in os.environ, 'ZSTD_SLOW_TESTS not set') 707
594 class TestCompressor_multi_compress_to_buffer_fuzzing(unittest.TestCase): 708
595 @hypothesis.given(original=strategies.lists(strategies.sampled_from(random_input_data()), 709 @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set")
596 min_size=1, max_size=1024), 710 class TestCompressor_multi_compress_to_buffer_fuzzing(TestCase):
597 threads=strategies.integers(min_value=1, max_value=8), 711 @hypothesis.given(
598 use_dict=strategies.booleans()) 712 original=strategies.lists(
713 strategies.sampled_from(random_input_data()), min_size=1, max_size=1024
714 ),
715 threads=strategies.integers(min_value=1, max_value=8),
716 use_dict=strategies.booleans(),
717 )
599 def test_data_equivalence(self, original, threads, use_dict): 718 def test_data_equivalence(self, original, threads, use_dict):
600 kwargs = {} 719 kwargs = {}
601 720
602 # Use a content dictionary because it is cheap to create. 721 # Use a content dictionary because it is cheap to create.
603 if use_dict: 722 if use_dict:
604 kwargs['dict_data'] = zstd.ZstdCompressionDict(original[0]) 723 kwargs["dict_data"] = zstd.ZstdCompressionDict(original[0])
605 724
606 cctx = zstd.ZstdCompressor(level=1, 725 cctx = zstd.ZstdCompressor(level=1, write_checksum=True, **kwargs)
607 write_checksum=True, 726
608 **kwargs) 727 if not hasattr(cctx, "multi_compress_to_buffer"):
609 728 self.skipTest("multi_compress_to_buffer not available")
610 if not hasattr(cctx, 'multi_compress_to_buffer'):
611 self.skipTest('multi_compress_to_buffer not available')
612 729
613 result = cctx.multi_compress_to_buffer(original, threads=-1) 730 result = cctx.multi_compress_to_buffer(original, threads=-1)
614 731
615 self.assertEqual(len(result), len(original)) 732 self.assertEqual(len(result), len(original))
616 733
622 739
623 for i, frame in enumerate(result): 740 for i, frame in enumerate(result):
624 self.assertEqual(dctx.decompress(frame), original[i]) 741 self.assertEqual(dctx.decompress(frame), original[i])
625 742
626 743
627 @unittest.skipUnless('ZSTD_SLOW_TESTS' in os.environ, 'ZSTD_SLOW_TESTS not set') 744 @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set")
628 @make_cffi 745 @make_cffi
629 class TestCompressor_chunker_fuzzing(unittest.TestCase): 746 class TestCompressor_chunker_fuzzing(TestCase):
630 @hypothesis.settings( 747 @hypothesis.settings(
631 suppress_health_check=[hypothesis.HealthCheck.large_base_example]) 748 suppress_health_check=[
632 @hypothesis.given(original=strategies.sampled_from(random_input_data()), 749 hypothesis.HealthCheck.large_base_example,
633 level=strategies.integers(min_value=1, max_value=5), 750 hypothesis.HealthCheck.too_slow,
634 chunk_size=strategies.integers( 751 ]
635 min_value=1, 752 )
636 max_value=32 * 1048576), 753 @hypothesis.given(
637 input_sizes=strategies.data()) 754 original=strategies.sampled_from(random_input_data()),
755 level=strategies.integers(min_value=1, max_value=5),
756 chunk_size=strategies.integers(min_value=1, max_value=32 * 1048576),
757 input_sizes=strategies.data(),
758 )
638 def test_random_input_sizes(self, original, level, chunk_size, input_sizes): 759 def test_random_input_sizes(self, original, level, chunk_size, input_sizes):
639 cctx = zstd.ZstdCompressor(level=level) 760 cctx = zstd.ZstdCompressor(level=level)
640 chunker = cctx.chunker(chunk_size=chunk_size) 761 chunker = cctx.chunker(chunk_size=chunk_size)
641 762
642 chunks = [] 763 chunks = []
643 i = 0 764 i = 0
644 while True: 765 while True:
645 input_size = input_sizes.draw(strategies.integers(1, 4096)) 766 input_size = input_sizes.draw(strategies.integers(1, 4096))
646 source = original[i:i + input_size] 767 source = original[i : i + input_size]
647 if not source: 768 if not source:
648 break 769 break
649 770
650 chunks.extend(chunker.compress(source)) 771 chunks.extend(chunker.compress(source))
651 i += input_size 772 i += input_size
652 773
653 chunks.extend(chunker.finish()) 774 chunks.extend(chunker.finish())
654 775
655 dctx = zstd.ZstdDecompressor() 776 dctx = zstd.ZstdDecompressor()
656 777
657 self.assertEqual(dctx.decompress(b''.join(chunks), 778 self.assertEqual(
658 max_output_size=len(original)), 779 dctx.decompress(b"".join(chunks), max_output_size=len(original)), original
659 original) 780 )
660 781
661 self.assertTrue(all(len(chunk) == chunk_size for chunk in chunks[:-1])) 782 self.assertTrue(all(len(chunk) == chunk_size for chunk in chunks[:-1]))
662 783
663 @hypothesis.settings( 784 @hypothesis.settings(
664 suppress_health_check=[hypothesis.HealthCheck.large_base_example]) 785 suppress_health_check=[
665 @hypothesis.given(original=strategies.sampled_from(random_input_data()), 786 hypothesis.HealthCheck.large_base_example,
666 level=strategies.integers(min_value=1, max_value=5), 787 hypothesis.HealthCheck.too_slow,
667 chunk_size=strategies.integers( 788 ]
668 min_value=1, 789 )
669 max_value=32 * 1048576), 790 @hypothesis.given(
670 input_sizes=strategies.data(), 791 original=strategies.sampled_from(random_input_data()),
671 flushes=strategies.data()) 792 level=strategies.integers(min_value=1, max_value=5),
672 def test_flush_block(self, original, level, chunk_size, input_sizes, 793 chunk_size=strategies.integers(min_value=1, max_value=32 * 1048576),
673 flushes): 794 input_sizes=strategies.data(),
795 flushes=strategies.data(),
796 )
797 def test_flush_block(self, original, level, chunk_size, input_sizes, flushes):
674 cctx = zstd.ZstdCompressor(level=level) 798 cctx = zstd.ZstdCompressor(level=level)
675 chunker = cctx.chunker(chunk_size=chunk_size) 799 chunker = cctx.chunker(chunk_size=chunk_size)
676 800
677 dctx = zstd.ZstdDecompressor() 801 dctx = zstd.ZstdDecompressor()
678 dobj = dctx.decompressobj() 802 dobj = dctx.decompressobj()
680 compressed_chunks = [] 804 compressed_chunks = []
681 decompressed_chunks = [] 805 decompressed_chunks = []
682 i = 0 806 i = 0
683 while True: 807 while True:
684 input_size = input_sizes.draw(strategies.integers(1, 4096)) 808 input_size = input_sizes.draw(strategies.integers(1, 4096))
685 source = original[i:i + input_size] 809 source = original[i : i + input_size]
686 if not source: 810 if not source:
687 break 811 break
688 812
689 i += input_size 813 i += input_size
690 814
691 chunks = list(chunker.compress(source)) 815 chunks = list(chunker.compress(source))
692 compressed_chunks.extend(chunks) 816 compressed_chunks.extend(chunks)
693 decompressed_chunks.append(dobj.decompress(b''.join(chunks))) 817 decompressed_chunks.append(dobj.decompress(b"".join(chunks)))
694 818
695 if not flushes.draw(strategies.booleans()): 819 if not flushes.draw(strategies.booleans()):
696 continue 820 continue
697 821
698 chunks = list(chunker.flush()) 822 chunks = list(chunker.flush())
699 compressed_chunks.extend(chunks) 823 compressed_chunks.extend(chunks)
700 decompressed_chunks.append(dobj.decompress(b''.join(chunks))) 824 decompressed_chunks.append(dobj.decompress(b"".join(chunks)))
701 825
702 self.assertEqual(b''.join(decompressed_chunks), original[0:i]) 826 self.assertEqual(b"".join(decompressed_chunks), original[0:i])
703 827
704 chunks = list(chunker.finish()) 828 chunks = list(chunker.finish())
705 compressed_chunks.extend(chunks) 829 compressed_chunks.extend(chunks)
706 decompressed_chunks.append(dobj.decompress(b''.join(chunks))) 830 decompressed_chunks.append(dobj.decompress(b"".join(chunks)))
707 831
708 self.assertEqual(dctx.decompress(b''.join(compressed_chunks), 832 self.assertEqual(
709 max_output_size=len(original)), 833 dctx.decompress(b"".join(compressed_chunks), max_output_size=len(original)),
710 original) 834 original,
711 self.assertEqual(b''.join(decompressed_chunks), original) 835 )
836 self.assertEqual(b"".join(decompressed_chunks), original)