@@ -21,7 +21,6 @@
 
 log = logging.getLogger(__name__)
 
-
 TEST_BUCKET = "test-bucket"
 TEST_KEY = "test-key"
 MOCK_OBJECT_INFO = Mock(ObjectInfo)
@@ -182,14 +181,17 @@ def test_over_read(stream: List[bytes], overread: int):
 def test_seeks_end():
     s3reader = S3Reader(TEST_BUCKET, TEST_KEY, lambda: None, lambda: iter([]))
     s3reader._size = 10
+    buf = memoryview(bytearray(10))
 
     assert s3reader.seek(0, SEEK_END) == 10
     assert s3reader.tell() == 10
     assert s3reader.read() == b""
+    assert s3reader.readinto(buf) == 0
 
     assert s3reader.seek(0, SEEK_CUR) == 10
     assert s3reader.tell() == 10
     assert s3reader.read() == b""
+    assert s3reader.readinto(buf) == 0
 
 
 def test_not_writable():
@@ -301,3 +303,107 @@ def test_s3reader_writes_size_after_read_all_explicit(stream: List[bytes]):
     assert s3reader.read(1) == b""
     # Once we've read past the end, we know how big the file is
     assert s3reader._size == total_length
+
+
+@given(
+    lists(binary(min_size=20, max_size=30), min_size=0, max_size=2),
+    integers(min_value=0, max_value=10),
+)
+def test_s3reader_readinto_buffer_smaller_than_chunks(
+    stream: List[bytes], buf_size: int
+):
+    s3reader = S3Reader(TEST_BUCKET, TEST_KEY, lambda: None, lambda: iter(stream))
+    assert s3reader._size is None
+    total_length = sum(map(len, stream))
+    buf = memoryview(bytearray(buf_size))
+    # We can read all the available data, or as much of it as fits in buf
+    if buf_size > 0 and total_length > 0:
+        assert s3reader.readinto(buf) == buf_size
+        assert s3reader.tell() == buf_size
+        # We haven't reached the end yet
+        assert s3reader._size is None
+        # Confirm that the data read matches the source
+        assert buf[:buf_size] == (b"".join(stream))[:buf_size]
+    else:
+        assert s3reader.readinto(buf) == 0
+        assert s3reader.tell() == 0
+
+
+@given(
+    lists(binary(min_size=20, max_size=30), min_size=2, max_size=3),
+    integers(min_value=30, max_value=40),
+)
+def test_s3reader_readinto_buffer_bigger_than_chunks(
+    stream: List[bytes], buf_size: int
+):
+    s3reader = S3Reader(TEST_BUCKET, TEST_KEY, lambda: None, lambda: iter(stream))
+    assert s3reader._size is None
+    buf = memoryview(bytearray(buf_size))
+    # We can read as much data as fits in buf
+    assert s3reader.readinto(buf) == buf_size
+    assert s3reader.tell() == buf_size
+    all_data = b"".join(stream)
+    # Confirm that the data read matches the source
+    assert buf == all_data[:buf_size]
+
+
+@given(
+    lists(binary(min_size=20, max_size=30), min_size=1, max_size=3),
+    integers(min_value=100, max_value=100),
+)
+def test_s3reader_readinto_buffer_bigger_than_whole_object(
+    stream: List[bytes], buf_size: int
+):
+    s3reader = S3Reader(TEST_BUCKET, TEST_KEY, lambda: None, lambda: iter(stream))
+    assert s3reader._size is None
+    total_length = sum(map(len, stream))
+    buf = memoryview(bytearray(buf_size))
+    # We can read all the available data
+    assert s3reader.readinto(buf) == total_length
+    assert s3reader.tell() == total_length
+    all_data = b"".join(stream)
+    # Confirm that the data read matches the source
+    assert buf[:total_length] == all_data
+    assert s3reader._size == total_length
+
+
+@given(
+    lists(binary(min_size=2, max_size=12), min_size=1, max_size=5),
+    integers(min_value=3, max_value=10),
+    integers(min_value=0, max_value=1),
+)
+def test_s3reader_mixing_readinto_and_read(
+    stream: List[bytes], buf_size: int, flip: int
+):
+    position = 0
+    loops_count = 20
+    all_data = b"".join(stream)
+    total_length = len(all_data)
+    buf = memoryview(bytearray(buf_size))
+    s3reader = S3Reader(TEST_BUCKET, TEST_KEY, lambda: None, lambda: iter(stream))
+    for i in range(loops_count):
+        if position >= total_length:
+            break
+
+        if (i + flip) % 2 == 0:
+            result = s3reader.read(buf_size)
+            # Confirm that the data read matches the source
+            if position + buf_size < total_length:
+                assert result[:buf_size] == all_data[position : position + buf_size]
+            else:
+                read_bytes = total_length - position
+                assert result[:read_bytes] == all_data[position:total_length]
+            position += buf_size
+        else:
+            read_bytes = s3reader.readinto(buf)
+            # readinto fills buf from the start; confirm it matches the source
+            assert buf[:read_bytes] == all_data[position : position + read_bytes]
+            position += read_bytes
+
+        if position > total_length:
+            # We've read all the data; time to stop
+            assert s3reader.tell() == total_length
+            break
+        else:
+            # Confirm that the position is as expected
+            assert s3reader.tell() == position
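
For context, here is a minimal sketch of the readinto contract these tests pin down, assuming a reader backed by an iterator of byte chunks. ChunkedReader is a hypothetical stand-in, not the actual S3Reader implementation: readinto drains chunks into the caller's buffer, carries any unconsumed tail of a chunk over to the next call, and returns the number of bytes written (0 once the stream is exhausted).

import io
from typing import Iterator


class ChunkedReader(io.RawIOBase):
    """Hypothetical stand-in for a chunk-backed reader (not the real S3Reader)."""

    def __init__(self, chunks: Iterator[bytes]):
        self._chunks = chunks
        self._leftover = b""  # tail of the last chunk not yet handed out
        self._position = 0

    def readable(self) -> bool:
        return True

    def readinto(self, buf) -> int:
        view = memoryview(buf).cast("B")
        written = 0
        while written < len(view):
            if not self._leftover:
                # Pull the next chunk; an empty result means end of stream
                self._leftover = next(self._chunks, b"")
                if not self._leftover:
                    break
            # Copy as much of the current chunk as still fits in the buffer
            n = min(len(view) - written, len(self._leftover))
            view[written : written + n] = self._leftover[:n]
            self._leftover = self._leftover[n:]
            written += n
        self._position += written
        return written

    def tell(self) -> int:
        return self._position

Under this sketch, ChunkedReader(iter([b"ab", b"cd"])) filling a 3-byte buffer returns 3 on the first readinto call and 1 on the second, matching the "buffer smaller/bigger than chunks" cases asserted above.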