#!/usr/bin/python3
# Copyright 2017 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""unittest for utils.py

Exercises directory-summary building, merging and throttling from
autotest_lib.client.bin.result_tools.
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import json
import os
import shutil
import tempfile
import time
import unittest

import common
from autotest_lib.client.bin.result_tools import result_info
from autotest_lib.client.bin.result_tools import throttler_lib
from autotest_lib.client.bin.result_tools import utils as result_utils
from autotest_lib.client.bin.result_tools import utils_lib
from autotest_lib.client.bin.result_tools import view as result_view
from autotest_lib.client.bin.result_tools import unittest_lib
from six.moves import range

# Default file size (bytes) used by unittest_lib.create_file when no explicit
# size is passed.
SIZE = unittest_lib.SIZE

# Sizes used for testing throttling
LARGE_SIZE = 1 * 1024 * 1024
SMALL_SIZE = 1 * 1024

# Expected summary for the tree built in GetDirSummaryTest.setUp:
# file1, folder1/{file2, file3, symlink -> folder2}, folder2/file2.
# The symlinked directory is recorded with size 0 and no children.
EXPECTED_SUMMARY = {
    '': {utils_lib.ORIGINAL_SIZE_BYTES: 4 * SIZE,
         utils_lib.DIRS: [
             {'file1': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
             {'folder1': {utils_lib.ORIGINAL_SIZE_BYTES: 2 * SIZE,
                          utils_lib.DIRS: [
                              {'file2': {
                                  utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
                              {'file3': {
                                  utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
                              {'symlink': {
                                  utils_lib.ORIGINAL_SIZE_BYTES: 0,
                                  utils_lib.DIRS: []}}]}},
             {'folder2': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE,
                          utils_lib.DIRS:
                              [{'file2':
                                  {utils_lib.ORIGINAL_SIZE_BYTES:
                                       SIZE}}],
                          }}]}}

# Oldest client-side summary merged in MergeSummaryTest; `file4` was already
# trimmed when this summary was taken.
SUMMARY_1 = {
    '': {utils_lib.ORIGINAL_SIZE_BYTES: 6 * SIZE,
         utils_lib.TRIMMED_SIZE_BYTES: 5 * SIZE,
         utils_lib.DIRS: [
             {'file1': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
             {'file2': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
             {'file4': {utils_lib.ORIGINAL_SIZE_BYTES: 2 * SIZE,
                        utils_lib.TRIMMED_SIZE_BYTES: SIZE}},
             {'folder_not_overwritten':
                 {utils_lib.ORIGINAL_SIZE_BYTES: SIZE,
                  utils_lib.DIRS: [
                      {'file1': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}}
                  ]}},
             {'file_to_be_overwritten': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
         ]
         }
    }

# Newer client-side summary; deliberately conflicts with SUMMARY_1 to
# exercise the merge logic (size changes, new files, name collisions).
SUMMARY_2 = {
    '': {utils_lib.ORIGINAL_SIZE_BYTES: 27 * SIZE,
         utils_lib.DIRS: [
             # `file1` exists and has the same size.
             {'file1': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
             # Change the size of `file2` to make sure summary merge works.
             {'file2': {utils_lib.ORIGINAL_SIZE_BYTES: 2 * SIZE}},
             # `file3` is new.
             {'file3': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
             # `file4` is old but throttled earlier.
             {'file4': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
             # Add a new sub-directory.
             {'folder1': {utils_lib.ORIGINAL_SIZE_BYTES: 20 * SIZE,
                          utils_lib.TRIMMED_SIZE_BYTES: SIZE,
                          utils_lib.DIRS: [
                              # Add a file being trimmed.
                              {'file4': {
                                  utils_lib.ORIGINAL_SIZE_BYTES: 20 * SIZE,
                                  utils_lib.TRIMMED_SIZE_BYTES: SIZE}
                              }]
                          }},
             # Add a file whose name collides with the previous summary.
             {'folder_not_overwritten': {
                 utils_lib.ORIGINAL_SIZE_BYTES: 100 * SIZE}},
             # Add a directory whose name collides with the previous summary.
             {'file_to_be_overwritten':
                 {utils_lib.ORIGINAL_SIZE_BYTES: SIZE,
                  utils_lib.DIRS: [
                      {'file1': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}}]
                  }},
             # Folder was collected, not missing from the final result folder.
             {'folder_tobe_deleted':
                 {utils_lib.ORIGINAL_SIZE_BYTES: SIZE,
                  utils_lib.DIRS: [
                      {'file_tobe_deleted': {
                          utils_lib.ORIGINAL_SIZE_BYTES: SIZE}}]
                  }},
         ]
         }
    }

# Summary saved in a sub-directory (folder3/folder31) to test that nested
# summary files are discovered and merged.
SUMMARY_3 = {
    '': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE,
         utils_lib.DIRS: [
             {'file10': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
         ]
         }
    }

# On-disk sizes (bytes) of the summaries above once written with json.dump in
# MergeSummaryTest.setUp.
# NOTE(review): these values depend on json's default separators — confirm
# against the actual dumps if the fixtures change.
SUMMARY_1_SIZE = 224
SUMMARY_2_SIZE = 388
SUMMARY_3_SIZE = 48

# The final result dir has an extra folder and file, also with `file3` removed
# to test the case that client files are removed on the server side.
# Expected result of merging SUMMARY_1/2/3 plus the files present only on the
# server side (folder2/server_file and the dir_summary_*.json files).
EXPECTED_MERGED_SUMMARY = {
    '': {utils_lib.ORIGINAL_SIZE_BYTES:
             40 * SIZE + SUMMARY_1_SIZE + SUMMARY_2_SIZE + SUMMARY_3_SIZE,
         utils_lib.TRIMMED_SIZE_BYTES:
             19 * SIZE + SUMMARY_1_SIZE + SUMMARY_2_SIZE + SUMMARY_3_SIZE,
         # Size collected is SIZE bytes more than total size as an old `file2`
         # of SIZE bytes is overwritten by a newer file.
         utils_lib.COLLECTED_SIZE_BYTES:
             22 * SIZE + SUMMARY_1_SIZE + SUMMARY_2_SIZE + SUMMARY_3_SIZE,
         utils_lib.DIRS: [
             {'file1': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
             {'file2': {utils_lib.ORIGINAL_SIZE_BYTES: 2 * SIZE,
                        utils_lib.COLLECTED_SIZE_BYTES: 3 * SIZE}},
             {'file4': {utils_lib.ORIGINAL_SIZE_BYTES: 2 * SIZE,
                        utils_lib.TRIMMED_SIZE_BYTES: SIZE}},
             {'folder_not_overwritten':
                 {utils_lib.ORIGINAL_SIZE_BYTES: SIZE,
                  utils_lib.DIRS: [
                      {'file1': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}}]
                  }},
             {'file_to_be_overwritten':
                 {utils_lib.ORIGINAL_SIZE_BYTES: SIZE,
                  utils_lib.COLLECTED_SIZE_BYTES: 2 * SIZE,
                  utils_lib.TRIMMED_SIZE_BYTES: SIZE,
                  utils_lib.DIRS: [
                      {'file1': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}}]
                  }},
             {'file3': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
             {'folder1': {utils_lib.ORIGINAL_SIZE_BYTES: 20 * SIZE,
                          utils_lib.TRIMMED_SIZE_BYTES: SIZE,
                          utils_lib.DIRS: [
                              {'file4': {
                                  utils_lib.ORIGINAL_SIZE_BYTES: 20 * SIZE,
                                  utils_lib.TRIMMED_SIZE_BYTES: SIZE}
                              }]
                          }},
             {'folder_tobe_deleted':
                 {utils_lib.ORIGINAL_SIZE_BYTES: SIZE,
                  utils_lib.COLLECTED_SIZE_BYTES: SIZE,
                  utils_lib.TRIMMED_SIZE_BYTES: 0,
                  utils_lib.DIRS: [
                      {'file_tobe_deleted': {
                          utils_lib.ORIGINAL_SIZE_BYTES: SIZE,
                          utils_lib.COLLECTED_SIZE_BYTES: SIZE,
                          utils_lib.TRIMMED_SIZE_BYTES: 0}}]
                  }},
             {'folder3': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE + SUMMARY_3_SIZE,
                          utils_lib.DIRS: [
                              {'folder31': {
                                  utils_lib.ORIGINAL_SIZE_BYTES:
                                      SIZE + SUMMARY_3_SIZE,
                                  utils_lib.DIRS: [
                                      {'file10': {
                                          utils_lib.ORIGINAL_SIZE_BYTES:
                                              SIZE}},
                                      {'dir_summary_3.json': {
                                          utils_lib.ORIGINAL_SIZE_BYTES:
                                              SUMMARY_3_SIZE}},
                                  ]}},
                          ]
                          }},
             {'dir_summary_1.json': {
                 utils_lib.ORIGINAL_SIZE_BYTES: SUMMARY_1_SIZE}},
             {'dir_summary_2.json': {
                 utils_lib.ORIGINAL_SIZE_BYTES: SUMMARY_2_SIZE}},
             {'folder2': {utils_lib.ORIGINAL_SIZE_BYTES: 10 * SIZE,
                          utils_lib.DIRS: [
                              {'server_file': {
                                  utils_lib.ORIGINAL_SIZE_BYTES: 10 * SIZE}
                              }]
                          }},
         ]
         }
    }


class GetDirSummaryTest(unittest.TestCase):
    """Test class for ResultInfo.build_from_path method"""

    def setUp(self):
        """Setup directory for test.

        Builds the tree described by EXPECTED_SUMMARY: file1,
        folder1/{file2, file3, symlink -> folder2}, folder2/file2.
        """
        self.test_dir = tempfile.mkdtemp()
        file1 = os.path.join(self.test_dir, 'file1')
        unittest_lib.create_file(file1)
        folder1 = os.path.join(self.test_dir, 'folder1')
        os.mkdir(folder1)
        file2 = os.path.join(folder1, 'file2')
        unittest_lib.create_file(file2)
        file3 = os.path.join(folder1, 'file3')
        unittest_lib.create_file(file3)

        folder2 = os.path.join(self.test_dir, 'folder2')
        os.mkdir(folder2)
        file4 = os.path.join(folder2, 'file2')
        unittest_lib.create_file(file4)

        # Symlinked directory; EXPECTED_SUMMARY records it with size 0 and an
        # empty child list, i.e. it is not followed.
        symlink = os.path.join(folder1, 'symlink')
        os.symlink(folder2, symlink)

    def tearDown(self):
        """Cleanup the test directory."""
        shutil.rmtree(self.test_dir, ignore_errors=True)

    def test_BuildFromPath(self):
        """Test method ResultInfo.build_from_path."""
        summary = result_info.ResultInfo.build_from_path(self.test_dir)
        self.assertEqual(EXPECTED_SUMMARY, summary)


class MergeSummaryTest(unittest.TestCase):
    """Test class for merge_summaries method"""

    def setUp(self):
        """Setup directory to match the file structure in MERGED_SUMMARY."""
        self.test_dir = tempfile.mkdtemp() + '/'
        file1 = os.path.join(self.test_dir, 'file1')
        unittest_lib.create_file(file1)
        file2 = os.path.join(self.test_dir, 'file2')
        unittest_lib.create_file(file2, 2*SIZE)
        file3 = os.path.join(self.test_dir, 'file3')
        unittest_lib.create_file(file3, SIZE)
        file4 = os.path.join(self.test_dir, 'file4')
        unittest_lib.create_file(file4, SIZE)
        folder1 = os.path.join(self.test_dir, 'folder1')
        os.mkdir(folder1)
        file4 = os.path.join(folder1, 'file4')
        unittest_lib.create_file(file4, SIZE)

        # Used to test summary in subdirectory.
        folder3 = os.path.join(self.test_dir, 'folder3')
        os.mkdir(folder3)
        folder31 = os.path.join(folder3, 'folder31')
        os.mkdir(folder31)
        file10 = os.path.join(folder31, 'file10')
        unittest_lib.create_file(file10, SIZE)

        folder2 = os.path.join(self.test_dir, 'folder2')
        os.mkdir(folder2)
        server_file = os.path.join(folder2, 'server_file')
        unittest_lib.create_file(server_file, 10*SIZE)
        folder_not_overwritten = os.path.join(
                self.test_dir, 'folder_not_overwritten')
        os.mkdir(folder_not_overwritten)
        file1 = os.path.join(folder_not_overwritten, 'file1')
        unittest_lib.create_file(file1)
        # Named `file_to_be_overwritten` but created as a directory here: in
        # SUMMARY_1 it was a file, testing the file-replaced-by-dir merge case.
        file_to_be_overwritten = os.path.join(
                self.test_dir, 'file_to_be_overwritten')
        os.mkdir(file_to_be_overwritten)
        file1 = os.path.join(file_to_be_overwritten, 'file1')
        unittest_lib.create_file(file1)

        # Save summary file to test_dir
        self.summary_1 = os.path.join(self.test_dir, 'dir_summary_1.json')
        with open(self.summary_1, 'w') as f:
            json.dump(SUMMARY_1, f)
        # Wait for 10ms, to make sure summary_2 has a later time stamp.
        time.sleep(0.01)
        self.summary_2 = os.path.join(self.test_dir, 'dir_summary_2.json')
        with open(self.summary_2, 'w') as f:
            json.dump(SUMMARY_2, f)
        time.sleep(0.01)
        self.summary_3 = os.path.join(self.test_dir, 'folder3', 'folder31',
                                      'dir_summary_3.json')
        with open(self.summary_3, 'w') as f:
            json.dump(SUMMARY_3, f)

    def tearDown(self):
        """Cleanup the test directory."""
        shutil.rmtree(self.test_dir, ignore_errors=True)

    def testMergeSummaries(self):
        """Test method merge_summaries."""
        collected_bytes, merged_summary, files = result_utils.merge_summaries(
                self.test_dir)

        # In python3, the dict --> list conversion isn't guaranteed to be in
        # a fixed order, so instead of a single deep equality check this
        # drills down to the lowest level values and verifies each.
        def _checker(real, expected):
            # Leaf value: compare directly.
            if not isinstance(real, list) and not isinstance(real, dict):
                self.assertEqual(real, expected)
                return

            # List: match each element by key set, ignoring order.
            if isinstance(real, list):
                self.assertEqual(type(expected), list)
                for item in real:
                    _search_for_item(item, expected)
                return

            # Dict: every key in `real` must exist in `expected`; recurse.
            for k, v in real.items():
                assert(k in expected)
                _checker(real[k], expected[k])

        # NOTE(review): if no entry of `other` has the same key set as `item`,
        # this loop asserts nothing, so a missing or renamed entry would pass
        # silently — confirm this looseness is intended.
        def _search_for_item(item, other):
            for oth in other:
                if item.keys() == oth.keys():
                    self.assertEqual(item, oth)
                    _checker(item, oth)

        _checker(merged_summary, EXPECTED_MERGED_SUMMARY)
        self.assertEqual(collected_bytes, 12 * SIZE)
        self.assertEqual(len(files), 3)

    def testMergeSummariesFromNoHistory(self):
        """Test method merge_summaries can handle results with no existing
        summary.
        """
        os.remove(self.summary_1)
        os.remove(self.summary_2)
        os.remove(self.summary_3)
        client_collected_bytes, _, _ = result_utils.merge_summaries(
                self.test_dir)
        self.assertEqual(client_collected_bytes, 0)

    def testBuildView(self):
        """Test build method in result_view module."""
        client_collected_bytes, summary, _ = result_utils.merge_summaries(
                self.test_dir)
        html_file = os.path.join(self.test_dir,
                                 result_view.DEFAULT_RESULT_SUMMARY_NAME)
        result_view.build(client_collected_bytes, summary, html_file)
        # Make sure html_file is created with content.
        self.assertGreater(os.stat(html_file).st_size, 1000)


# Not throttled: the limit is large enough, so only ORIGINAL_SIZE_BYTES
# entries appear (nothing is trimmed).
EXPECTED_THROTTLED_SUMMARY_NO_THROTTLE = {
    '': {
        utils_lib.ORIGINAL_SIZE_BYTES:
            2 * LARGE_SIZE + 5 * SMALL_SIZE,
        utils_lib.DIRS: [
            {
                'files_to_dedupe': {
                    utils_lib.ORIGINAL_SIZE_BYTES:
                        5 * SMALL_SIZE,
                    utils_lib.DIRS: [
                        {
                            'file_0.dmp': {
                                utils_lib.ORIGINAL_SIZE_BYTES:
                                    SMALL_SIZE
                            }
                        },
                        {
                            'file_1.dmp': {
                                utils_lib.ORIGINAL_SIZE_BYTES:
                                    SMALL_SIZE
                            }
                        },
                        {
                            'file_2.dmp': {
                                utils_lib.ORIGINAL_SIZE_BYTES:
                                    SMALL_SIZE
                            }
                        },
                        {
                            'file_3.dmp': {
                                utils_lib.ORIGINAL_SIZE_BYTES:
                                    SMALL_SIZE
                            }
                        },
                        {
                            'file_4.dmp': {
                                utils_lib.ORIGINAL_SIZE_BYTES:
                                    SMALL_SIZE
                            }
                        },
                    ]
                }
            },
            {
                'files_to_delete': {
                    utils_lib.ORIGINAL_SIZE_BYTES:
                        LARGE_SIZE,
                    utils_lib.DIRS: [
                        {
                            'file.png': {
                                utils_lib.ORIGINAL_SIZE_BYTES:
                                    LARGE_SIZE
                            }
                        },
                    ]
                }
            },
            {
                'files_to_zip': {
                    utils_lib.ORIGINAL_SIZE_BYTES:
                        LARGE_SIZE,
                    utils_lib.DIRS: [
                        {
                            'file.xml': {
                                utils_lib.ORIGINAL_SIZE_BYTES:
                                    LARGE_SIZE
                            }
                        },
                    ]
                }
            },
        ]
    }
}

# Expected summary after deduping only: file_2.dmp and file_3.dmp are trimmed
# to 0 bytes while file_0, file_1 and file_4 are kept intact.
EXPECTED_THROTTLED_SUMMARY_WITH_DEDUPE = {
    '': {
        utils_lib.ORIGINAL_SIZE_BYTES:
            2 * LARGE_SIZE + 5 * SMALL_SIZE,
        utils_lib.TRIMMED_SIZE_BYTES:
            2 * LARGE_SIZE + 3 * SMALL_SIZE,
        utils_lib.DIRS: [
            {
                'files_to_dedupe': {
                    utils_lib.ORIGINAL_SIZE_BYTES:
                        5 * SMALL_SIZE,
                    utils_lib.TRIMMED_SIZE_BYTES:
                        3 * SMALL_SIZE,
                    utils_lib.DIRS: [
                        {
                            'file_0.dmp': {
                                utils_lib.ORIGINAL_SIZE_BYTES:
                                    SMALL_SIZE
                            }
                        },
                        {
                            'file_1.dmp': {
                                utils_lib.ORIGINAL_SIZE_BYTES:
                                    SMALL_SIZE
                            }
                        },
                        {
                            'file_2.dmp': {
                                utils_lib.ORIGINAL_SIZE_BYTES:
                                    SMALL_SIZE,
                                utils_lib.TRIMMED_SIZE_BYTES:
                                    0
                            }
                        },
                        {
                            'file_3.dmp': {
                                utils_lib.ORIGINAL_SIZE_BYTES:
                                    SMALL_SIZE,
                                utils_lib.TRIMMED_SIZE_BYTES:
                                    0
                            }
                        },
                        {
                            'file_4.dmp': {
                                utils_lib.ORIGINAL_SIZE_BYTES:
                                    SMALL_SIZE
                            }
                        },
                    ]
                }
            },
            {
                'files_to_delete': {
                    utils_lib.ORIGINAL_SIZE_BYTES:
                        LARGE_SIZE,
                    utils_lib.DIRS: [
                        {
                            'file.png': {
                                utils_lib.ORIGINAL_SIZE_BYTES:
                                    LARGE_SIZE
                            }
                        },
                    ]
                }
            },
            {
                'files_to_zip': {
                    utils_lib.ORIGINAL_SIZE_BYTES:
                        LARGE_SIZE,
                    utils_lib.DIRS: [
                        {
                            'file.xml': {
                                utils_lib.ORIGINAL_SIZE_BYTES:
                                    LARGE_SIZE
                            }
                        },
                    ]
                }
            },
        ]
    }
}


class ThrottleTest(unittest.TestCase):
    """Test class for _throttle_results method"""

    def setUp(self):
        """Setup directory to match the file structure in MERGED_SUMMARY.

        Creates files_to_zip/file.xml (LARGE_SIZE),
        files_to_delete/file.png (LARGE_SIZE) and five SMALL_SIZE .dmp files
        in files_to_dedupe.
        """
        self.test_dir = tempfile.mkdtemp()

        folder = os.path.join(self.test_dir, 'files_to_zip')
        os.mkdir(folder)
        file1 = os.path.join(folder, 'file.xml')
        unittest_lib.create_file(file1, LARGE_SIZE)

        folder = os.path.join(self.test_dir, 'files_to_delete')
        os.mkdir(folder)
        file1 = os.path.join(folder, 'file.png')
        unittest_lib.create_file(file1, LARGE_SIZE)

        folder = os.path.join(self.test_dir, 'files_to_dedupe')
        os.mkdir(folder)
        for i in range(5):
            # Sleep between creations so each .dmp file has a distinct mtime;
            # per EXPECTED_THROTTLED_SUMMARY_WITH_DEDUPE the middle files
            # (file_2, file_3) are the ones trimmed by dedupe.
            time.sleep(0.01)
            file1 = os.path.join(folder, 'file_%d.dmp' % i)
            unittest_lib.create_file(file1, SMALL_SIZE)

    def tearDown(self):
        """Cleanup the test directory."""
        shutil.rmtree(self.test_dir, ignore_errors=True)

    def testThrottleResults(self):
        """Test _throttle_results method."""
        summary = result_info.ResultInfo.build_from_path(self.test_dir)
        # Limit (in KB) is well above the total size, so nothing is throttled.
        result_utils._throttle_results(summary, LARGE_SIZE * 10 // 1024)
        self.assertEqual(EXPECTED_THROTTLED_SUMMARY_NO_THROTTLE, summary)

    def testThrottleResults_Dedupe(self):
        """Test _throttle_results method with dedupe triggered."""
        # Change AUTOTEST_LOG_PATTERN to protect file.xml from being compressed
        # before deduping kicks in.
        old_pattern = throttler_lib.AUTOTEST_LOG_PATTERN
        throttler_lib.AUTOTEST_LOG_PATTERN = '.*/file.xml'
        try:
            summary = result_info.ResultInfo.build_from_path(self.test_dir)
            result_utils._throttle_results(
                    summary, (2 * LARGE_SIZE + 3 * SMALL_SIZE) // 1024)
            self.assertEqual(EXPECTED_THROTTLED_SUMMARY_WITH_DEDUPE, summary)
        finally:
            # Always restore the module-level pattern for other tests.
            throttler_lib.AUTOTEST_LOG_PATTERN = old_pattern

    def testThrottleResults_Zip(self):
        """Test _throttle_results method with zip (compression) triggered."""
        summary = result_info.ResultInfo.build_from_path(self.test_dir)
        result_utils._throttle_results(
                summary, (LARGE_SIZE + 3 * SMALL_SIZE) // 1024 + 2)
        # Original size is preserved even after throttling.
        self.assertEqual(2 * LARGE_SIZE + 5 * SMALL_SIZE,
                         summary.original_size)

        # file.xml was replaced by a smaller file.xml.tgz entry.
        entry = summary.get_file('files_to_zip').get_file('file.xml.tgz')
        self.assertEqual(LARGE_SIZE, entry.original_size)
        self.assertTrue(LARGE_SIZE > entry.trimmed_size)

        # The compressed file size should be less than 2 KB.
        self.assertTrue(summary.trimmed_size < (LARGE_SIZE + 3 * SMALL_SIZE +
                                                2 * 1024))
        self.assertTrue(summary.trimmed_size > (LARGE_SIZE + 3 * SMALL_SIZE))

    def testThrottleResults_Delete(self):
        """Test _throttle_results method with delete triggered."""
        summary = result_info.ResultInfo.build_from_path(self.test_dir)
        result_utils._throttle_results(summary, (3 * SMALL_SIZE) // 1024 + 2)

        # Confirm the original size is preserved.
        self.assertEqual(2 * LARGE_SIZE + 5 * SMALL_SIZE,
                         summary.original_size)

        # Confirm the deduped and zipped files are not deleted.
        # The compressed file is at least 512 bytes.
        self.assertTrue(3 * SMALL_SIZE + 512 < summary.original_size)

        # Confirm the file to be zipped is compressed and not deleted.
        entry = summary.get_file('files_to_zip').get_file('file.xml.tgz')
        self.assertEqual(LARGE_SIZE, entry.original_size)
        self.assertTrue(LARGE_SIZE > entry.trimmed_size)
        self.assertTrue(entry.trimmed_size > 0)

        # Confirm the file to be deleted is removed.
        entry = summary.get_file('files_to_delete').get_file('file.png')
        self.assertEqual(0, entry.trimmed_size)
        self.assertEqual(LARGE_SIZE, entry.original_size)


# this is so the test can be run in standalone mode
if __name__ == '__main__':
    """Main"""
    unittest.main()