from test import support
from test.support import bigmemtest, _4G

import unittest
from io import BytesIO, DEFAULT_BUFFER_SIZE
import os
import pickle
import glob
import tempfile
import pathlib
import random
import shutil
import subprocess
import threading
from test.support import unlink
import _compression
import sys


# Skip tests if the bz2 module doesn't exist.
bz2 = support.import_module('bz2')
from bz2 import BZ2File, BZ2Compressor, BZ2Decompressor

has_cmdline_bunzip2 = None

# Decompress with the command-line bunzip2 tool when it is available,
# otherwise fall back to bz2.decompress().
def ext_decompress(data):
    global has_cmdline_bunzip2
    if has_cmdline_bunzip2 is None:
        has_cmdline_bunzip2 = bool(shutil.which('bunzip2'))
    if has_cmdline_bunzip2:
        return subprocess.check_output(['bunzip2'], input=data)
    else:
        return bz2.decompress(data)

class BaseTest(unittest.TestCase):
    "Base for other testcases."

    TEXT_LINES = [
        b'root:x:0:0:root:/root:/bin/bash\n',
        b'bin:x:1:1:bin:/bin:\n',
        b'daemon:x:2:2:daemon:/sbin:\n',
        b'adm:x:3:4:adm:/var/adm:\n',
        b'lp:x:4:7:lp:/var/spool/lpd:\n',
        b'sync:x:5:0:sync:/sbin:/bin/sync\n',
        b'shutdown:x:6:0:shutdown:/sbin:/sbin/shutdown\n',
        b'halt:x:7:0:halt:/sbin:/sbin/halt\n',
        b'mail:x:8:12:mail:/var/spool/mail:\n',
        b'news:x:9:13:news:/var/spool/news:\n',
        b'uucp:x:10:14:uucp:/var/spool/uucp:\n',
        b'operator:x:11:0:operator:/root:\n',
        b'games:x:12:100:games:/usr/games:\n',
        b'gopher:x:13:30:gopher:/usr/lib/gopher-data:\n',
        b'ftp:x:14:50:FTP User:/var/ftp:/bin/bash\n',
        b'nobody:x:65534:65534:Nobody:/home:\n',
        b'postfix:x:100:101:postfix:/var/spool/postfix:\n',
        b'niemeyer:x:500:500::/home/niemeyer:/bin/bash\n',
        b'postgres:x:101:102:PostgreSQL Server:/var/lib/pgsql:/bin/bash\n',
        b'mysql:x:102:103:MySQL server:/var/lib/mysql:/bin/bash\n',
        b'www:x:103:104::/var/www:/bin/false\n',
        ]
    TEXT = b''.join(TEXT_LINES)
    DATA = b'BZh91AY&SY.\xc8N\x18\x00\x01>_\x80\x00\x10@\x02\xff\xf0\x01\x07n\x00?\xe7\xff\xe00\x01\x99\xaa\x00\xc0\x03F\x86\x8c#&\x83F\x9a\x03\x06\xa6\xd0\xa6\x93M\x0fQ\xa7\xa8\x06\x804hh\x12$\x11\xa4i4\xf14S\xd2<Q\xb5\x0fH\xd3\xd4\xdd\xd5\x87\xbb\xf8\x94\r\x8f\xafI\x12\xe1\xc9\xf8/E\x00pu\x89\x12]\xc9\xbbDL\nQ\x0e\t1\x12\xdf\xa0\xc0\x97\xac2O9\x89\x13\x94\x0e\x1c7\x0ed\x95I\x0c\xaaJ\xa4\x18L\x10\x05#\x9c\xaf\xba\xbc/\x97\x8a#C\xc8\xe1\x8cW\xf9\xe2\xd0\xd6M\xa7\x8bXa<e\x84t\xcbL\xb3\xa7\xd9\xcd\xd1\xcb\x84.\xaf\xb3\xab\xab\xad`n}\xa0lh\tE,\x8eZ\x15\x17VH>\x88\xe5\xcd9gd6\x0b\n\xe9\x9b\xd5\x8a\x99\xf7\x08.K\x8ev\xfb\xf7xw\xbb\xdf\xa1\x92\xf1\xdd|/";\xa2\xba\x9f\xd5\xb1#A\xb6\xf6\xb3o\xc9\xc5y\\\xebO\xe7\x85\x9a\xbc\xb6f8\x952\xd5\xd7"%\x89>V,\xf7\xa6z\xe2\x9f\xa3\xdf\x11\x11"\xd6E)I\xa9\x13^\xca\xf3r\xd0\x03U\x922\xf26\xec\xb6\xed\x8b\xc3U\x13\x9d\xc5\x170\xa4\xfa^\x92\xacDF\x8a\x97\xd6\x19\xfe\xdd\xb8\xbd\x1a\x9a\x19\xa3\x80ankR\x8b\xe5\xd83]\xa9\xc6\x08\x82f\xf6\xb9"6l$\xb8j@\xc0\x8a\xb0l1..\xbak\x83ls\x15\xbc\xf4\xc1\x13\xbe\xf8E\xb8\x9d\r\xa8\x9dk\x84\xd3n\xfa\xacQ\x07\xb1%y\xaav\xb4\x08\xe0z\x1b\x16\xf5\x04\xe9\xcc\xb9\x08z\x1en7.G\xfc]\xc9\x14\xe1B@\xbb!8`'
    EMPTY_DATA = b'BZh9\x17rE8P\x90\x00\x00\x00\x00'
    BAD_DATA = b'this is not a valid bzip2 file'

    # Some tests need more than one block of uncompressed data. Since one block
    # is at least 100,000 bytes, we gather some data dynamically and compress it.
    # Note that this assumes that compression works correctly, so we cannot
    # simply use the bigger test data for all tests.
    test_size = 0
    BIG_TEXT = bytearray(128*1024)
    for fname in glob.glob(os.path.join(glob.escape(os.path.dirname(__file__)), '*.py')):
        with open(fname, 'rb') as fh:
            test_size += fh.readinto(memoryview(BIG_TEXT)[test_size:])
        if test_size > 128*1024:
            break
    BIG_DATA = bz2.compress(BIG_TEXT, compresslevel=1)

    def setUp(self):
        fd, self.filename = tempfile.mkstemp()
        os.close(fd)

    def tearDown(self):
        unlink(self.filename)


class BZ2FileTest(BaseTest):
    "Test the BZ2File class."

    def createTempFile(self, streams=1, suffix=b""):
        with open(self.filename, "wb") as f:
            f.write(self.DATA * streams)
            f.write(suffix)

    def testBadArgs(self):
        self.assertRaises(TypeError, BZ2File, 123.456)
        self.assertRaises(ValueError, BZ2File, os.devnull, "z")
        self.assertRaises(ValueError, BZ2File, os.devnull, "rx")
        self.assertRaises(ValueError, BZ2File, os.devnull, "rbt")
        self.assertRaises(ValueError, BZ2File, os.devnull, compresslevel=0)
        self.assertRaises(ValueError, BZ2File, os.devnull, compresslevel=10)

        # compresslevel is keyword-only
        self.assertRaises(TypeError, BZ2File, os.devnull, "r", 3)

    def testRead(self):
        self.createTempFile()
        with BZ2File(self.filename) as bz2f:
            self.assertRaises(TypeError, bz2f.read, float())
            self.assertEqual(bz2f.read(), self.TEXT)

    def testReadBadFile(self):
        self.createTempFile(streams=0, suffix=self.BAD_DATA)
        with BZ2File(self.filename) as bz2f:
            self.assertRaises(OSError, bz2f.read)

    def testReadMultiStream(self):
        self.createTempFile(streams=5)
        with BZ2File(self.filename) as bz2f:
            self.assertRaises(TypeError, bz2f.read, float())
            self.assertEqual(bz2f.read(), self.TEXT * 5)

    def testReadMonkeyMultiStream(self):
        # Test BZ2File.read() on a multi-stream archive where a stream
        # boundary coincides with the end of the raw read buffer.
        buffer_size = _compression.BUFFER_SIZE
        _compression.BUFFER_SIZE = len(self.DATA)
        try:
            self.createTempFile(streams=5)
            with BZ2File(self.filename) as bz2f:
                self.assertRaises(TypeError, bz2f.read, float())
                self.assertEqual(bz2f.read(), self.TEXT * 5)
        finally:
            _compression.BUFFER_SIZE = buffer_size

    def testReadTrailingJunk(self):
        self.createTempFile(suffix=self.BAD_DATA)
        with BZ2File(self.filename) as bz2f:
            self.assertEqual(bz2f.read(), self.TEXT)

    def testReadMultiStreamTrailingJunk(self):
        self.createTempFile(streams=5, suffix=self.BAD_DATA)
        with BZ2File(self.filename) as bz2f:
            self.assertEqual(bz2f.read(), self.TEXT * 5)

    def testRead0(self):
        self.createTempFile()
        with BZ2File(self.filename) as bz2f:
            self.assertRaises(TypeError, bz2f.read, float())
            self.assertEqual(bz2f.read(0), b"")

    def testReadChunk10(self):
        self.createTempFile()
        with BZ2File(self.filename) as bz2f:
            text = b''
            while True:
                str = bz2f.read(10)
                if not str:
                    break
                text += str
            self.assertEqual(text, self.TEXT)

    def testReadChunk10MultiStream(self):
        self.createTempFile(streams=5)
        with BZ2File(self.filename) as bz2f:
            text = b''
            while True:
                str = bz2f.read(10)
                if not str:
                    break
                text += str
            self.assertEqual(text, self.TEXT * 5)

    def testRead100(self):
        self.createTempFile()
        with BZ2File(self.filename) as bz2f:
            self.assertEqual(bz2f.read(100), self.TEXT[:100])

    def testPeek(self):
        self.createTempFile()
        with BZ2File(self.filename) as bz2f:
            pdata = bz2f.peek()
            self.assertNotEqual(len(pdata), 0)
            self.assertTrue(self.TEXT.startswith(pdata))
            self.assertEqual(bz2f.read(), self.TEXT)

    def testReadInto(self):
        self.createTempFile()
        with BZ2File(self.filename) as bz2f:
            n = 128
            b = bytearray(n)
            self.assertEqual(bz2f.readinto(b), n)
            self.assertEqual(b, self.TEXT[:n])
            n = len(self.TEXT) - n
            b = bytearray(len(self.TEXT))
            self.assertEqual(bz2f.readinto(b), n)
            self.assertEqual(b[:n], self.TEXT[-n:])

    def testReadLine(self):
        self.createTempFile()
        with BZ2File(self.filename) as bz2f:
            self.assertRaises(TypeError, bz2f.readline, None)
            for line in self.TEXT_LINES:
                self.assertEqual(bz2f.readline(), line)

    def testReadLineMultiStream(self):
        self.createTempFile(streams=5)
        with BZ2File(self.filename) as bz2f:
            self.assertRaises(TypeError, bz2f.readline, None)
            for line in self.TEXT_LINES * 5:
                self.assertEqual(bz2f.readline(), line)

    def testReadLines(self):
        self.createTempFile()
        with BZ2File(self.filename) as bz2f:
            self.assertRaises(TypeError, bz2f.readlines, None)
            self.assertEqual(bz2f.readlines(), self.TEXT_LINES)

    def testReadLinesMultiStream(self):
        self.createTempFile(streams=5)
        with BZ2File(self.filename) as bz2f:
            self.assertRaises(TypeError, bz2f.readlines, None)
            self.assertEqual(bz2f.readlines(), self.TEXT_LINES * 5)

    def testIterator(self):
        self.createTempFile()
        with BZ2File(self.filename) as bz2f:
            self.assertEqual(list(iter(bz2f)), self.TEXT_LINES)

    def testIteratorMultiStream(self):
        self.createTempFile(streams=5)
        with BZ2File(self.filename) as bz2f:
            self.assertEqual(list(iter(bz2f)), self.TEXT_LINES * 5)

    def testClosedIteratorDeadlock(self):
        # Issue #3309: Iteration on a closed BZ2File should release the lock.
        self.createTempFile()
        bz2f = BZ2File(self.filename)
        bz2f.close()
        self.assertRaises(ValueError, next, bz2f)
        # This call will deadlock if the above call failed to release the lock.
        self.assertRaises(ValueError, bz2f.readlines)

    def testWrite(self):
        with BZ2File(self.filename, "w") as bz2f:
            self.assertRaises(TypeError, bz2f.write)
            bz2f.write(self.TEXT)
        with open(self.filename, 'rb') as f:
            self.assertEqual(ext_decompress(f.read()), self.TEXT)

    def testWriteChunks10(self):
        with BZ2File(self.filename, "w") as bz2f:
            n = 0
            while True:
                str = self.TEXT[n*10:(n+1)*10]
                if not str:
                    break
                bz2f.write(str)
                n += 1
        with open(self.filename, 'rb') as f:
            self.assertEqual(ext_decompress(f.read()), self.TEXT)

    def testWriteNonDefaultCompressLevel(self):
        expected = bz2.compress(self.TEXT, compresslevel=5)
        with BZ2File(self.filename, "w", compresslevel=5) as bz2f:
            bz2f.write(self.TEXT)
        with open(self.filename, "rb") as f:
            self.assertEqual(f.read(), expected)

    def testWriteLines(self):
        with BZ2File(self.filename, "w") as bz2f:
            self.assertRaises(TypeError, bz2f.writelines)
            bz2f.writelines(self.TEXT_LINES)
        # Issue #1535500: Calling writelines() on a closed BZ2File
        # should raise an exception.
        self.assertRaises(ValueError, bz2f.writelines, ["a"])
        with open(self.filename, 'rb') as f:
            self.assertEqual(ext_decompress(f.read()), self.TEXT)

    def testWriteMethodsOnReadOnlyFile(self):
        with BZ2File(self.filename, "w") as bz2f:
            bz2f.write(b"abc")

        with BZ2File(self.filename, "r") as bz2f:
            self.assertRaises(OSError, bz2f.write, b"a")
            self.assertRaises(OSError, bz2f.writelines, [b"a"])

    def testAppend(self):
        with BZ2File(self.filename, "w") as bz2f:
            self.assertRaises(TypeError, bz2f.write)
            bz2f.write(self.TEXT)
        with BZ2File(self.filename, "a") as bz2f:
            self.assertRaises(TypeError, bz2f.write)
            bz2f.write(self.TEXT)
        with open(self.filename, 'rb') as f:
            self.assertEqual(ext_decompress(f.read()), self.TEXT * 2)

    def testSeekForward(self):
        self.createTempFile()
        with BZ2File(self.filename) as bz2f:
            self.assertRaises(TypeError, bz2f.seek)
            bz2f.seek(150)
            self.assertEqual(bz2f.read(), self.TEXT[150:])

    def testSeekForwardAcrossStreams(self):
        self.createTempFile(streams=2)
        with BZ2File(self.filename) as bz2f:
            self.assertRaises(TypeError, bz2f.seek)
            bz2f.seek(len(self.TEXT) + 150)
            self.assertEqual(bz2f.read(), self.TEXT[150:])

    def testSeekBackwards(self):
        self.createTempFile()
        with BZ2File(self.filename) as bz2f:
            bz2f.read(500)
            bz2f.seek(-150, 1)
            self.assertEqual(bz2f.read(), self.TEXT[500-150:])

    def testSeekBackwardsAcrossStreams(self):
        self.createTempFile(streams=2)
        with BZ2File(self.filename) as bz2f:
            readto = len(self.TEXT) + 100
            while readto > 0:
                readto -= len(bz2f.read(readto))
            bz2f.seek(-150, 1)
            self.assertEqual(bz2f.read(), self.TEXT[100-150:] + self.TEXT)

    def testSeekBackwardsFromEnd(self):
        self.createTempFile()
        with BZ2File(self.filename) as bz2f:
            bz2f.seek(-150, 2)
            self.assertEqual(bz2f.read(), self.TEXT[len(self.TEXT)-150:])

    def testSeekBackwardsFromEndAcrossStreams(self):
        self.createTempFile(streams=2)
        with BZ2File(self.filename) as bz2f:
            bz2f.seek(-1000, 2)
            self.assertEqual(bz2f.read(), (self.TEXT * 2)[-1000:])

    def testSeekPostEnd(self):
        self.createTempFile()
        with BZ2File(self.filename) as bz2f:
            bz2f.seek(150000)
            self.assertEqual(bz2f.tell(), len(self.TEXT))
            self.assertEqual(bz2f.read(), b"")

    def testSeekPostEndMultiStream(self):
        self.createTempFile(streams=5)
        with BZ2File(self.filename) as bz2f:
            bz2f.seek(150000)
            self.assertEqual(bz2f.tell(), len(self.TEXT) * 5)
            self.assertEqual(bz2f.read(), b"")

    def testSeekPostEndTwice(self):
        self.createTempFile()
        with BZ2File(self.filename) as bz2f:
            bz2f.seek(150000)
            bz2f.seek(150000)
            self.assertEqual(bz2f.tell(), len(self.TEXT))
            self.assertEqual(bz2f.read(), b"")

    def testSeekPostEndTwiceMultiStream(self):
        self.createTempFile(streams=5)
        with BZ2File(self.filename) as bz2f:
            bz2f.seek(150000)
            bz2f.seek(150000)
            self.assertEqual(bz2f.tell(), len(self.TEXT) * 5)
            self.assertEqual(bz2f.read(), b"")

    def testSeekPreStart(self):
        self.createTempFile()
        with BZ2File(self.filename) as bz2f:
            bz2f.seek(-150)
            self.assertEqual(bz2f.tell(), 0)
            self.assertEqual(bz2f.read(), self.TEXT)

    def testSeekPreStartMultiStream(self):
        self.createTempFile(streams=2)
        with BZ2File(self.filename) as bz2f:
            bz2f.seek(-150)
            self.assertEqual(bz2f.tell(), 0)
            self.assertEqual(bz2f.read(), self.TEXT * 2)

    def testFileno(self):
        self.createTempFile()
        with open(self.filename, 'rb') as rawf:
            bz2f = BZ2File(rawf)
            try:
                self.assertEqual(bz2f.fileno(), rawf.fileno())
            finally:
                bz2f.close()
        self.assertRaises(ValueError, bz2f.fileno)

    def testSeekable(self):
        bz2f = BZ2File(BytesIO(self.DATA))
        try:
            self.assertTrue(bz2f.seekable())
            bz2f.read()
            self.assertTrue(bz2f.seekable())
        finally:
            bz2f.close()
        self.assertRaises(ValueError, bz2f.seekable)

        bz2f = BZ2File(BytesIO(), "w")
        try:
            self.assertFalse(bz2f.seekable())
        finally:
            bz2f.close()
        self.assertRaises(ValueError, bz2f.seekable)

        src = BytesIO(self.DATA)
        src.seekable = lambda: False
        bz2f = BZ2File(src)
        try:
            self.assertFalse(bz2f.seekable())
        finally:
            bz2f.close()
        self.assertRaises(ValueError, bz2f.seekable)

    def testReadable(self):
        bz2f = BZ2File(BytesIO(self.DATA))
        try:
            self.assertTrue(bz2f.readable())
            bz2f.read()
            self.assertTrue(bz2f.readable())
        finally:
            bz2f.close()
        self.assertRaises(ValueError, bz2f.readable)

        bz2f = BZ2File(BytesIO(), "w")
        try:
            self.assertFalse(bz2f.readable())
        finally:
            bz2f.close()
        self.assertRaises(ValueError, bz2f.readable)

    def testWritable(self):
        bz2f = BZ2File(BytesIO(self.DATA))
        try:
            self.assertFalse(bz2f.writable())
            bz2f.read()
            self.assertFalse(bz2f.writable())
        finally:
            bz2f.close()
        self.assertRaises(ValueError, bz2f.writable)

        bz2f = BZ2File(BytesIO(), "w")
        try:
            self.assertTrue(bz2f.writable())
        finally:
            bz2f.close()
        self.assertRaises(ValueError, bz2f.writable)

    def testOpenDel(self):
        self.createTempFile()
        for i in range(10000):
            o = BZ2File(self.filename)
            del o

    def testOpenNonexistent(self):
        self.assertRaises(OSError, BZ2File, "/non/existent")

    def testReadlinesNoNewline(self):
        # Issue #1191043: readlines() fails on a file containing no newline.
        data = b'BZh91AY&SY\xd9b\x89]\x00\x00\x00\x03\x80\x04\x00\x02\x00\x0c\x00 \x00!\x9ah3M\x13<]\xc9\x14\xe1BCe\x8a%t'
        with open(self.filename, "wb") as f:
            f.write(data)
        with BZ2File(self.filename) as bz2f:
            lines = bz2f.readlines()
        self.assertEqual(lines, [b'Test'])
        with BZ2File(self.filename) as bz2f:
            xlines = list(bz2f.readlines())
        self.assertEqual(xlines, [b'Test'])

    def testContextProtocol(self):
        f = None
        with BZ2File(self.filename, "wb") as f:
            f.write(b"xxx")
        f = BZ2File(self.filename, "rb")
        f.close()
        try:
            with f:
                pass
        except ValueError:
            pass
        else:
            self.fail("__enter__ on a closed file didn't raise an exception")
        try:
            with BZ2File(self.filename, "wb") as f:
                1/0
        except ZeroDivisionError:
            pass
        else:
            self.fail("1/0 didn't raise an exception")

    def testThreading(self):
        # Issue #7205: Using a BZ2File from several threads shouldn't deadlock.
        data = b"1" * 2**20
        nthreads = 10
        with BZ2File(self.filename, 'wb') as f:
            def comp():
                for i in range(5):
                    f.write(data)
            threads = [threading.Thread(target=comp) for i in range(nthreads)]
            with support.start_threads(threads):
                pass

    def testMixedIterationAndReads(self):
        self.createTempFile()
        linelen = len(self.TEXT_LINES[0])
        halflen = linelen // 2
        with BZ2File(self.filename) as bz2f:
            bz2f.read(halflen)
            self.assertEqual(next(bz2f), self.TEXT_LINES[0][halflen:])
            self.assertEqual(bz2f.read(), self.TEXT[linelen:])
        with BZ2File(self.filename) as bz2f:
            bz2f.readline()
            self.assertEqual(next(bz2f), self.TEXT_LINES[1])
            self.assertEqual(bz2f.readline(), self.TEXT_LINES[2])
        with BZ2File(self.filename) as bz2f:
            bz2f.readlines()
            self.assertRaises(StopIteration, next, bz2f)
            self.assertEqual(bz2f.readlines(), [])

    def testMultiStreamOrdering(self):
        # Test the ordering of streams when reading a multi-stream archive.
        data1 = b"foo" * 1000
        data2 = b"bar" * 1000
        with BZ2File(self.filename, "w") as bz2f:
            bz2f.write(data1)
        with BZ2File(self.filename, "a") as bz2f:
            bz2f.write(data2)
        with BZ2File(self.filename) as bz2f:
            self.assertEqual(bz2f.read(), data1 + data2)

    def testOpenBytesFilename(self):
        str_filename = self.filename
        try:
            bytes_filename = str_filename.encode("ascii")
        except UnicodeEncodeError:
            self.skipTest("Temporary file name needs to be ASCII")
        with BZ2File(bytes_filename, "wb") as f:
            f.write(self.DATA)
        with BZ2File(bytes_filename, "rb") as f:
            self.assertEqual(f.read(), self.DATA)
        # Sanity check that we are actually operating on the right file.
        with BZ2File(str_filename, "rb") as f:
            self.assertEqual(f.read(), self.DATA)

    def testOpenPathLikeFilename(self):
        filename = pathlib.Path(self.filename)
        with BZ2File(filename, "wb") as f:
            f.write(self.DATA)
        with BZ2File(filename, "rb") as f:
            self.assertEqual(f.read(), self.DATA)

    def testDecompressLimited(self):
        """Decompressed data buffering should be limited"""
        bomb = bz2.compress(b'\0' * int(2e6), compresslevel=9)
        self.assertLess(len(bomb), _compression.BUFFER_SIZE)

        decomp = BZ2File(BytesIO(bomb))
        self.assertEqual(decomp.read(1), b'\0')
        max_decomp = 1 + DEFAULT_BUFFER_SIZE
        self.assertLessEqual(decomp._buffer.raw.tell(), max_decomp,
                             "Excessive amount of data was decompressed")


    # Tests for a BZ2File wrapping another file object:

    def testReadBytesIO(self):
        with BytesIO(self.DATA) as bio:
            with BZ2File(bio) as bz2f:
                self.assertRaises(TypeError, bz2f.read, float())
                self.assertEqual(bz2f.read(), self.TEXT)
            self.assertFalse(bio.closed)

    def testPeekBytesIO(self):
        with BytesIO(self.DATA) as bio:
            with BZ2File(bio) as bz2f:
                pdata = bz2f.peek()
                self.assertNotEqual(len(pdata), 0)
                self.assertTrue(self.TEXT.startswith(pdata))
                self.assertEqual(bz2f.read(), self.TEXT)

    def testWriteBytesIO(self):
        with BytesIO() as bio:
            with BZ2File(bio, "w") as bz2f:
                self.assertRaises(TypeError, bz2f.write)
                bz2f.write(self.TEXT)
            self.assertEqual(ext_decompress(bio.getvalue()), self.TEXT)
            self.assertFalse(bio.closed)

    def testSeekForwardBytesIO(self):
        with BytesIO(self.DATA) as bio:
            with BZ2File(bio) as bz2f:
                self.assertRaises(TypeError, bz2f.seek)
                bz2f.seek(150)
                self.assertEqual(bz2f.read(), self.TEXT[150:])

    def testSeekBackwardsBytesIO(self):
        with BytesIO(self.DATA) as bio:
            with BZ2File(bio) as bz2f:
                bz2f.read(500)
                bz2f.seek(-150, 1)
                self.assertEqual(bz2f.read(), self.TEXT[500-150:])

    def test_read_truncated(self):
        # Drop the eos_magic field (6 bytes) and CRC (4 bytes).
        truncated = self.DATA[:-10]
        with BZ2File(BytesIO(truncated)) as f:
            self.assertRaises(EOFError, f.read)
        with BZ2File(BytesIO(truncated)) as f:
            self.assertEqual(f.read(len(self.TEXT)), self.TEXT)
            self.assertRaises(EOFError, f.read, 1)
        # Incomplete 4-byte file header, and block header of at least 146 bits.
        for i in range(22):
            with BZ2File(BytesIO(truncated[:i])) as f:
                self.assertRaises(EOFError, f.read, 1)


class BZ2CompressorTest(BaseTest):
    def testCompress(self):
        bz2c = BZ2Compressor()
        self.assertRaises(TypeError, bz2c.compress)
        data = bz2c.compress(self.TEXT)
        data += bz2c.flush()
        self.assertEqual(ext_decompress(data), self.TEXT)

    def testCompressEmptyString(self):
        bz2c = BZ2Compressor()
        data = bz2c.compress(b'')
        data += bz2c.flush()
        self.assertEqual(data, self.EMPTY_DATA)

    def testCompressChunks10(self):
        bz2c = BZ2Compressor()
        n = 0
        data = b''
        while True:
            str = self.TEXT[n*10:(n+1)*10]
            if not str:
                break
            data += bz2c.compress(str)
            n += 1
        data += bz2c.flush()
        self.assertEqual(ext_decompress(data), self.TEXT)

    @support.skip_if_pgo_task
    @bigmemtest(size=_4G + 100, memuse=2)
    def testCompress4G(self, size):
        # "Test BZ2Compressor.compress()/flush() with >4GiB input"
        bz2c = BZ2Compressor()
        data = b"x" * size
        try:
            compressed = bz2c.compress(data)
            compressed += bz2c.flush()
        finally:
            data = None  # Release memory
        data = bz2.decompress(compressed)
        try:
            self.assertEqual(len(data), size)
            self.assertEqual(len(data.strip(b"x")), 0)
        finally:
            data = None

    def testPickle(self):
        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
            with self.assertRaises(TypeError):
                pickle.dumps(BZ2Compressor(), proto)


class BZ2DecompressorTest(BaseTest):
    def test_Constructor(self):
        self.assertRaises(TypeError, BZ2Decompressor, 42)

    def testDecompress(self):
        bz2d = BZ2Decompressor()
        self.assertRaises(TypeError, bz2d.decompress)
        text = bz2d.decompress(self.DATA)
        self.assertEqual(text, self.TEXT)

    def testDecompressChunks10(self):
        bz2d = BZ2Decompressor()
        text = b''
        n = 0
        while True:
            str = self.DATA[n*10:(n+1)*10]
            if not str:
                break
            text += bz2d.decompress(str)
            n += 1
        self.assertEqual(text, self.TEXT)

    def testDecompressUnusedData(self):
        bz2d = BZ2Decompressor()
        unused_data = b"this is unused data"
        text = bz2d.decompress(self.DATA+unused_data)
        self.assertEqual(text, self.TEXT)
        self.assertEqual(bz2d.unused_data, unused_data)

    def testEOFError(self):
        bz2d = BZ2Decompressor()
        text = bz2d.decompress(self.DATA)
        self.assertRaises(EOFError, bz2d.decompress, b"anything")
        self.assertRaises(EOFError, bz2d.decompress, b"")

    @support.skip_if_pgo_task
    @bigmemtest(size=_4G + 100, memuse=3.3)
    def testDecompress4G(self, size):
        # "Test BZ2Decompressor.decompress() with >4GiB input"
        blocksize = 10 * 1024 * 1024
        block = random.randbytes(blocksize)
        try:
            data = block * (size // blocksize + 1)
            compressed = bz2.compress(data)
            bz2d = BZ2Decompressor()
            decompressed = bz2d.decompress(compressed)
            self.assertTrue(decompressed == data)
        finally:
            data = None
            compressed = None
            decompressed = None

    def testPickle(self):
        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
            with self.assertRaises(TypeError):
                pickle.dumps(BZ2Decompressor(), proto)

    def testDecompressorChunksMaxsize(self):
        bzd = BZ2Decompressor()
        max_length = 100
        out = []

        # Feed some input
        len_ = len(self.BIG_DATA) - 64
        out.append(bzd.decompress(self.BIG_DATA[:len_],
                                  max_length=max_length))
        self.assertFalse(bzd.needs_input)
        self.assertEqual(len(out[-1]), max_length)

        # Retrieve more data without providing more input
        out.append(bzd.decompress(b'', max_length=max_length))
        self.assertFalse(bzd.needs_input)
        self.assertEqual(len(out[-1]), max_length)

        # Retrieve more data while providing more input
        out.append(bzd.decompress(self.BIG_DATA[len_:],
                                  max_length=max_length))
        self.assertLessEqual(len(out[-1]), max_length)

        # Retrieve remaining uncompressed data
        while not bzd.eof:
            out.append(bzd.decompress(b'', max_length=max_length))
            self.assertLessEqual(len(out[-1]), max_length)

        out = b"".join(out)
        self.assertEqual(out, self.BIG_TEXT)
        self.assertEqual(bzd.unused_data, b"")

    def test_decompressor_inputbuf_1(self):
        # Test reusing input buffer after moving existing
        # contents to beginning
        bzd = BZ2Decompressor()
        out = []

        # Create input buffer and fill it
        self.assertEqual(bzd.decompress(self.DATA[:100],
                                        max_length=0), b'')

        # Retrieve some results, freeing capacity at beginning
        # of input buffer
        out.append(bzd.decompress(b'', 2))

        # Add more data that fits into input buffer after
        # moving existing data to beginning
        out.append(bzd.decompress(self.DATA[100:105], 15))

        # Decompress rest of data
        out.append(bzd.decompress(self.DATA[105:]))
        self.assertEqual(b''.join(out), self.TEXT)

    def test_decompressor_inputbuf_2(self):
        # Test reusing input buffer by appending data at the
        # end right away
        bzd = BZ2Decompressor()
        out = []

        # Create input buffer and empty it
        self.assertEqual(bzd.decompress(self.DATA[:200],
                                        max_length=0), b'')
        out.append(bzd.decompress(b''))

        # Fill buffer with new data
        out.append(bzd.decompress(self.DATA[200:280], 2))

        # Append some more data, not enough to require resize
        out.append(bzd.decompress(self.DATA[280:300], 2))

        # Decompress rest of data
        out.append(bzd.decompress(self.DATA[300:]))
        self.assertEqual(b''.join(out), self.TEXT)

    def test_decompressor_inputbuf_3(self):
        # Test reusing input buffer after extending it

        bzd = BZ2Decompressor()
        out = []

        # Create almost full input buffer
        out.append(bzd.decompress(self.DATA[:200], 5))

        # Add even more data to it, requiring resize
        out.append(bzd.decompress(self.DATA[200:300], 5))

        # Decompress rest of data
        out.append(bzd.decompress(self.DATA[300:]))
        self.assertEqual(b''.join(out), self.TEXT)

    def test_failure(self):
        bzd = BZ2Decompressor()
        self.assertRaises(Exception, bzd.decompress, self.BAD_DATA * 30)
        # Previously, a second call could crash due to internal inconsistency
        self.assertRaises(Exception, bzd.decompress, self.BAD_DATA * 30)

    @support.refcount_test
    def test_refleaks_in___init__(self):
        gettotalrefcount = support.get_attribute(sys, 'gettotalrefcount')
        bzd = BZ2Decompressor()
        refs_before = gettotalrefcount()
        for i in range(100):
            bzd.__init__()
        self.assertAlmostEqual(gettotalrefcount() - refs_before, 0, delta=10)


class CompressDecompressTest(BaseTest):
    def testCompress(self):
        data = bz2.compress(self.TEXT)
        self.assertEqual(ext_decompress(data), self.TEXT)

    def testCompressEmptyString(self):
        text = bz2.compress(b'')
        self.assertEqual(text, self.EMPTY_DATA)

    def testDecompress(self):
        text = bz2.decompress(self.DATA)
        self.assertEqual(text, self.TEXT)

    def testDecompressEmpty(self):
        text = bz2.decompress(b"")
        self.assertEqual(text, b"")

    def testDecompressToEmptyString(self):
        text = bz2.decompress(self.EMPTY_DATA)
        self.assertEqual(text, b'')

    def testDecompressIncomplete(self):
        self.assertRaises(ValueError, bz2.decompress, self.DATA[:-10])

    def testDecompressBadData(self):
        self.assertRaises(OSError, bz2.decompress, self.BAD_DATA)

    def testDecompressMultiStream(self):
        text = bz2.decompress(self.DATA * 5)
        self.assertEqual(text, self.TEXT * 5)

    def testDecompressTrailingJunk(self):
        text = bz2.decompress(self.DATA + self.BAD_DATA)
        self.assertEqual(text, self.TEXT)

    def testDecompressMultiStreamTrailingJunk(self):
        text = bz2.decompress(self.DATA * 5 + self.BAD_DATA)
        self.assertEqual(text, self.TEXT * 5)


class OpenTest(BaseTest):
    "Test the open function."

    def open(self, *args, **kwargs):
        return bz2.open(*args, **kwargs)

    def test_binary_modes(self):
        for mode in ("wb", "xb"):
            if mode == "xb":
                unlink(self.filename)
            with self.open(self.filename, mode) as f:
                f.write(self.TEXT)
            with open(self.filename, "rb") as f:
                file_data = ext_decompress(f.read())
                self.assertEqual(file_data, self.TEXT)
            with self.open(self.filename, "rb") as f:
                self.assertEqual(f.read(), self.TEXT)
            with self.open(self.filename, "ab") as f:
                f.write(self.TEXT)
            with open(self.filename, "rb") as f:
                file_data = ext_decompress(f.read())
                self.assertEqual(file_data, self.TEXT * 2)

    def test_implicit_binary_modes(self):
        # Test implicit binary modes (no "b" or "t" in mode string).
        for mode in ("w", "x"):
            if mode == "x":
                unlink(self.filename)
            with self.open(self.filename, mode) as f:
                f.write(self.TEXT)
            with open(self.filename, "rb") as f:
                file_data = ext_decompress(f.read())
                self.assertEqual(file_data, self.TEXT)
            with self.open(self.filename, "r") as f:
                self.assertEqual(f.read(), self.TEXT)
            with self.open(self.filename, "a") as f:
                f.write(self.TEXT)
            with open(self.filename, "rb") as f:
                file_data = ext_decompress(f.read())
                self.assertEqual(file_data, self.TEXT * 2)

    def test_text_modes(self):
        text = self.TEXT.decode("ascii")
        text_native_eol = text.replace("\n", os.linesep)
        for mode in ("wt", "xt"):
            if mode == "xt":
                unlink(self.filename)
            with self.open(self.filename, mode) as f:
                f.write(text)
            with open(self.filename, "rb") as f:
                file_data = ext_decompress(f.read()).decode("ascii")
                self.assertEqual(file_data, text_native_eol)
            with self.open(self.filename, "rt") as f:
                self.assertEqual(f.read(), text)
            with self.open(self.filename, "at") as f:
                f.write(text)
            with open(self.filename, "rb") as f:
                file_data = ext_decompress(f.read()).decode("ascii")
                self.assertEqual(file_data, text_native_eol * 2)

    def test_x_mode(self):
        for mode in ("x", "xb", "xt"):
            unlink(self.filename)
            with self.open(self.filename, mode) as f:
                pass
            with self.assertRaises(FileExistsError):
                with self.open(self.filename, mode) as f:
                    pass

    def test_fileobj(self):
        with self.open(BytesIO(self.DATA), "r") as f:
            self.assertEqual(f.read(), self.TEXT)
        with self.open(BytesIO(self.DATA), "rb") as f:
            self.assertEqual(f.read(), self.TEXT)
        text = self.TEXT.decode("ascii")
        with self.open(BytesIO(self.DATA), "rt") as f:
            self.assertEqual(f.read(), text)

    def test_bad_params(self):
        # Test invalid parameter combinations.
        self.assertRaises(ValueError,
                          self.open, self.filename, "wbt")
        self.assertRaises(ValueError,
                          self.open, self.filename, "xbt")
        self.assertRaises(ValueError,
                          self.open, self.filename, "rb", encoding="utf-8")
        self.assertRaises(ValueError,
                          self.open, self.filename, "rb", errors="ignore")
        self.assertRaises(ValueError,
                          self.open, self.filename, "rb", newline="\n")

    def test_encoding(self):
        # Test non-default encoding.
        text = self.TEXT.decode("ascii")
        text_native_eol = text.replace("\n", os.linesep)
        with self.open(self.filename, "wt", encoding="utf-16-le") as f:
            f.write(text)
        with open(self.filename, "rb") as f:
            file_data = ext_decompress(f.read()).decode("utf-16-le")
            self.assertEqual(file_data, text_native_eol)
        with self.open(self.filename, "rt", encoding="utf-16-le") as f:
            self.assertEqual(f.read(), text)

    def test_encoding_error_handler(self):
        # Test with non-default encoding error handler.
        with self.open(self.filename, "wb") as f:
            f.write(b"foo\xffbar")
        with self.open(self.filename, "rt", encoding="ascii", errors="ignore") \
                as f:
            self.assertEqual(f.read(), "foobar")

    def test_newline(self):
        # Test with explicit newline (universal newline mode disabled).
        text = self.TEXT.decode("ascii")
        with self.open(self.filename, "wt", newline="\n") as f:
            f.write(text)
        with self.open(self.filename, "rt", newline="\r") as f:
            self.assertEqual(f.readlines(), [text])


def test_main():
    support.run_unittest(
        BZ2FileTest,
        BZ2CompressorTest,
        BZ2DecompressorTest,
        CompressDecompressTest,
        OpenTest,
    )
    support.reap_children()

if __name__ == '__main__':
    test_main()