#!/usr/bin/env vpython3
# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from __future__ import print_function

import collections
import itertools
import sys
import tempfile
from typing import Iterable, Set
import unittest

import six

from pyfakefs import fake_filesystem_unittest

from unexpected_passes_common import data_types
from unexpected_passes_common import result_output
from unexpected_passes_common import unittest_utils as uu


def CreateTextOutputPermutations(text: str, inputs: Iterable[str]) -> Set[str]:
  """Creates permutations of |text| filled with the contents of |inputs|.

  Some output ordering is not guaranteed, so this acts as a way to generate
  all possible outputs instead of manually listing them.

  Args:
    text: A string containing a single string field to format.
    inputs: An iterable of strings to permute.

  Returns:
    A set of unique permutations of |text| filled with |inputs|. E.g. if |text|
    is '1%s2' and |inputs| is ['a', 'b'], the return value will be
    set(['1ab2', '1ba2']).
  """
  permutations = set()
  for p in itertools.permutations(inputs):
    permutations.add(text % ''.join(p))
  return permutations


class ConvertUnmatchedResultsToStringDictUnittest(unittest.TestCase):
  def testEmptyResults(self) -> None:
    """Tests that providing empty results is a no-op."""
    self.assertEqual(result_output._ConvertUnmatchedResultsToStringDict({}), {})

  def testMinimalData(self) -> None:
    """Tests that everything functions when minimal data is provided."""
    unmatched_results = {
        'builder': [
            data_types.Result('foo', [], 'Failure', 'step', 'build_id'),
        ],
    }
    expected_output = {
        'foo': {
            'builder': {
                'step': [
                    'Got "Failure" on http://ci.chromium.org/b/build_id with '
                    'tags []',
                ],
            },
        },
    }
    output = result_output._ConvertUnmatchedResultsToStringDict(
        unmatched_results)
    self.assertEqual(output, expected_output)

  def testRegularData(self) -> None:
    """Tests that everything functions when regular data is provided."""
    unmatched_results = {
        'builder': [
            data_types.Result('foo', ['win', 'intel'], 'Failure', 'step_name',
                              'build_id')
        ],
    }
    # TODO(crbug.com/1198237): Hard-code the tag string once only Python 3 is
    # supported.
    expected_output = {
        'foo': {
            'builder': {
                'step_name': [
                    'Got "Failure" on http://ci.chromium.org/b/build_id with '
                    'tags [%s]' % ' '.join(set(['win', 'intel'])),
                ],
            },
        },
    }
    output = result_output._ConvertUnmatchedResultsToStringDict(
        unmatched_results)
    self.assertEqual(output, expected_output)


class ConvertTestExpectationMapToStringDictUnittest(unittest.TestCase):
  def testEmptyMap(self) -> None:
    """Tests that providing an empty map is a no-op."""
    self.assertEqual(
        result_output._ConvertTestExpectationMapToStringDict(
            data_types.TestExpectationMap()), {})

  def testSemiStaleMap(self) -> None:
    """Tests that everything functions when regular data is provided."""
    expectation_map = data_types.TestExpectationMap({
        'expectation_file':
        data_types.ExpectationBuilderMap({
            data_types.Expectation('foo/test', ['win', 'intel'],
                                   ['RetryOnFailure']):
            data_types.BuilderStepMap({
                'builder':
                data_types.StepBuildStatsMap({
                    'all_pass': uu.CreateStatsWithPassFails(2, 0),
                    'all_fail': uu.CreateStatsWithPassFails(0, 2),
                    'some_pass': uu.CreateStatsWithPassFails(1, 1),
                }),
            }),
            data_types.Expectation('foo/test', ['linux', 'intel'],
                                   ['RetryOnFailure']):
            data_types.BuilderStepMap({
                'builder':
                data_types.StepBuildStatsMap({
                    'all_pass': uu.CreateStatsWithPassFails(2, 0),
                }),
            }),
            data_types.Expectation('foo/test', ['mac', 'intel'],
                                   ['RetryOnFailure']):
            data_types.BuilderStepMap({
                'builder':
                data_types.StepBuildStatsMap({
                    'all_fail': uu.CreateStatsWithPassFails(0, 2),
                }),
            }),
        }),
    })
    # TODO(crbug.com/1198237): Remove the Python 2 version once we are fully
    # switched to Python 3.
    if six.PY2:
      expected_output = {
          'expectation_file': {
              'foo/test': {
                  '"RetryOnFailure" expectation on "win intel"': {
                      'builder': {
                          'Fully passed in the following': [
                              'all_pass (2/2 passed)',
                          ],
                          'Never passed in the following': [
                              'all_fail (0/2 passed)',
                          ],
                          'Partially passed in the following': {
                              'some_pass (1/2 passed)': [
                                  data_types.BuildLinkFromBuildId('build_id0'),
                              ],
                          },
                      },
                  },
                  '"RetryOnFailure" expectation on "intel linux"': {
                      'builder': {
                          'Fully passed in the following': [
                              'all_pass (2/2 passed)',
                          ],
                      },
                  },
                  '"RetryOnFailure" expectation on "mac intel"': {
                      'builder': {
                          'Never passed in the following': [
                              'all_fail (0/2 passed)',
                          ],
                      },
                  },
              },
          },
      }
    else:
      # Set ordering does not appear to be stable between test runs, as we can
      # get either order of tags. So, generate them now instead of hard coding
      # them.
      linux_tags = ' '.join(set(['linux', 'intel']))
      win_tags = ' '.join(set(['win', 'intel']))
      mac_tags = ' '.join(set(['mac', 'intel']))
      expected_output = {
          'expectation_file': {
              'foo/test': {
                  '"RetryOnFailure" expectation on "%s"' % linux_tags: {
                      'builder': {
                          'Fully passed in the following': [
                              'all_pass (2/2 passed)',
                          ],
                      },
                  },
                  '"RetryOnFailure" expectation on "%s"' % win_tags: {
                      'builder': {
                          'Fully passed in the following': [
                              'all_pass (2/2 passed)',
                          ],
                          'Partially passed in the following': {
                              'some_pass (1/2 passed)': [
                                  data_types.BuildLinkFromBuildId('build_id0'),
                              ],
                          },
                          'Never passed in the following': [
                              'all_fail (0/2 passed)',
                          ],
                      },
                  },
                  '"RetryOnFailure" expectation on "%s"' % mac_tags: {
                      'builder': {
                          'Never passed in the following': [
                              'all_fail (0/2 passed)',
                          ],
                      },
                  },
              },
          },
      }

    str_dict = result_output._ConvertTestExpectationMapToStringDict(
        expectation_map)
    self.assertEqual(str_dict, expected_output)


class ConvertUnusedExpectationsToStringDictUnittest(unittest.TestCase):
  def testEmptyDict(self) -> None:
    """Tests that nothing blows up when given an empty dict."""
    self.assertEqual(result_output._ConvertUnusedExpectationsToStringDict({}),
                     {})

  def testBasic(self) -> None:
    """Basic functionality test."""
    unused = {
        'foo_file': [
            data_types.Expectation('foo/test', ['win', 'nvidia'],
                                   ['Failure', 'Timeout']),
        ],
        'bar_file': [
            data_types.Expectation('bar/test', ['win'], ['Failure']),
            data_types.Expectation('bar/test2', ['win'], ['RetryOnFailure'])
        ],
    }
    if six.PY2:
      expected_output = {
          'foo_file': [
              '[ win nvidia ] foo/test [ Failure Timeout ]',
          ],
          'bar_file': [
              '[ win ] bar/test [ Failure ]',
              '[ win ] bar/test2 [ RetryOnFailure ]',
          ],
      }
    else:
      # Set ordering does not appear to be stable between test runs, as we can
      # get either order of tags. So, generate them now instead of hard coding
      # them.
      tags = ' '.join(set(['win', 'nvidia']))
      results = ' '.join(set(['Failure', 'Timeout']))
      expected_output = {
          'foo_file': [
              '[ %s ] foo/test [ %s ]' % (tags, results),
          ],
          'bar_file': [
              '[ win ] bar/test [ Failure ]',
              '[ win ] bar/test2 [ RetryOnFailure ]',
          ],
      }
    self.assertEqual(
        result_output._ConvertUnusedExpectationsToStringDict(unused),
        expected_output)


class HtmlToFileUnittest(fake_filesystem_unittest.TestCase):
  def setUp(self) -> None:
    self.setUpPyfakefs()
    self._file_handle = tempfile.NamedTemporaryFile(delete=False, mode='w')
    self._filepath = self._file_handle.name

  def testLinkifyString(self) -> None:
    """Test for _LinkifyString()."""
    self._file_handle.close()
    s = 'a'
    self.assertEqual(result_output._LinkifyString(s), 'a')
    s = 'http://a'
    self.assertEqual(result_output._LinkifyString(s),
                     '<a href="http://a">http://a</a>')
    s = 'link to http://a, click it'
    self.assertEqual(result_output._LinkifyString(s),
                     'link to <a href="http://a">http://a</a>, click it')

  def testRecursiveHtmlToFileExpectationMap(self) -> None:
    """Tests _RecursiveHtmlToFile() with an expectation map as input."""
    expectation_map = {
        'foo': {
            '"RetryOnFailure" expectation on "win intel"': {
                'builder': {
                    'Fully passed in the following': [
                        'all_pass (2/2)',
                    ],
                    'Never passed in the following': [
                        'all_fail (0/2)',
                    ],
                    'Partially passed in the following': {
                        'some_pass (1/2)': [
                            data_types.BuildLinkFromBuildId('build_id0'),
                        ],
                    },
                },
            },
        },
    }
    result_output._RecursiveHtmlToFile(expectation_map, self._file_handle)
    self._file_handle.close()
    # pylint: disable=line-too-long
    # TODO(crbug.com/1198237): Remove the Python 2 version once we've fully
    # switched to Python 3.
    if six.PY2:
      expected_output = """\
<button type="button" class="collapsible_group">foo</button>
<div class="content">
  <button type="button" class="collapsible_group">"RetryOnFailure" expectation on "win intel"</button>
  <div class="content">
    <button type="button" class="collapsible_group">builder</button>
    <div class="content">
      <button type="button" class="collapsible_group">Never passed in the following</button>
      <div class="content">
        <p>all_fail (0/2)</p>
      </div>
      <button type="button" class="highlighted_collapsible_group">Fully passed in the following</button>
      <div class="content">
        <p>all_pass (2/2)</p>
      </div>
      <button type="button" class="collapsible_group">Partially passed in the following</button>
      <div class="content">
        <button type="button" class="collapsible_group">some_pass (1/2)</button>
        <div class="content">
          <p><a href="http://ci.chromium.org/b/build_id0">http://ci.chromium.org/b/build_id0</a></p>
        </div>
      </div>
    </div>
  </div>
</div>
"""
    else:
      expected_output = """\
<button type="button" class="collapsible_group">foo</button>
<div class="content">
  <button type="button" class="collapsible_group">"RetryOnFailure" expectation on "win intel"</button>
  <div class="content">
    <button type="button" class="collapsible_group">builder</button>
    <div class="content">
      <button type="button" class="highlighted_collapsible_group">Fully passed in the following</button>
      <div class="content">
        <p>all_pass (2/2)</p>
      </div>
      <button type="button" class="collapsible_group">Never passed in the following</button>
      <div class="content">
        <p>all_fail (0/2)</p>
      </div>
      <button type="button" class="collapsible_group">Partially passed in the following</button>
      <div class="content">
        <button type="button" class="collapsible_group">some_pass (1/2)</button>
        <div class="content">
          <p><a href="http://ci.chromium.org/b/build_id0">http://ci.chromium.org/b/build_id0</a></p>
        </div>
      </div>
    </div>
  </div>
</div>
"""
    # pylint: enable=line-too-long
    expected_output = _Dedent(expected_output)
    with open(self._filepath) as f:
      self.assertEqual(f.read(), expected_output)

  def testRecursiveHtmlToFileUnmatchedResults(self) -> None:
    """Tests _RecursiveHtmlToFile() with unmatched results as input."""
    unmatched_results = {
        'foo': {
            'builder': {
                None: [
                    'Expected "" on http://ci.chromium.org/b/build_id, got '
                    '"Failure" with tags []',
                ],
                'step_name': [
                    'Expected "Failure RetryOnFailure" on '
                    'http://ci.chromium.org/b/build_id, got '
                    '"Failure" with tags [win intel]',
                ],
            },
        },
    }
    result_output._RecursiveHtmlToFile(unmatched_results, self._file_handle)
    self._file_handle.close()
    # pylint: disable=line-too-long
    # Order is not guaranteed, so create permutations.
    expected_template = """\
<button type="button" class="collapsible_group">foo</button>
<div class="content">
  <button type="button" class="collapsible_group">builder</button>
  <div class="content">
    %s
  </div>
</div>
"""
    values = [
        """\
    <button type="button" class="collapsible_group">None</button>
    <div class="content">
      <p>Expected "" on <a href="http://ci.chromium.org/b/build_id">http://ci.chromium.org/b/build_id</a>, got "Failure" with tags []</p>
    </div>
""",
        """\
    <button type="button" class="collapsible_group">step_name</button>
    <div class="content">
      <p>Expected "Failure RetryOnFailure" on <a href="http://ci.chromium.org/b/build_id">http://ci.chromium.org/b/build_id</a>, got "Failure" with tags [win intel]</p>
    </div>
""",
    ]
    expected_output = CreateTextOutputPermutations(expected_template, values)
    # pylint: enable=line-too-long
    expected_output = [_Dedent(e) for e in expected_output]
    with open(self._filepath) as f:
      self.assertIn(f.read(), expected_output)


class PrintToFileUnittest(fake_filesystem_unittest.TestCase):
  def setUp(self) -> None:
    self.setUpPyfakefs()
    self._file_handle = tempfile.NamedTemporaryFile(delete=False, mode='w')
    self._filepath = self._file_handle.name

  def testRecursivePrintToFileExpectationMap(self) -> None:
    """Tests RecursivePrintToFile() with an expectation map as input."""
    expectation_map = {
        'foo': {
            '"RetryOnFailure" expectation on "win intel"': {
                'builder': {
                    'Fully passed in the following': [
                        'all_pass (2/2)',
                    ],
                    'Never passed in the following': [
                        'all_fail (0/2)',
                    ],
                    'Partially passed in the following': {
                        'some_pass (1/2)': [
                            data_types.BuildLinkFromBuildId('build_id0'),
                        ],
                    },
                },
            },
        },
    }
    result_output.RecursivePrintToFile(expectation_map, 0, self._file_handle)
    self._file_handle.close()

    # TODO(crbug.com/1198237): Keep the Python 3 version once we are fully
    # switched.
    if six.PY2:
      expected_output = """\
foo
  "RetryOnFailure" expectation on "win intel"
    builder
      Never passed in the following
        all_fail (0/2)
      Fully passed in the following
        all_pass (2/2)
      Partially passed in the following
        some_pass (1/2)
          http://ci.chromium.org/b/build_id0
"""
    else:
      expected_output = """\
foo
  "RetryOnFailure" expectation on "win intel"
    builder
      Fully passed in the following
        all_pass (2/2)
      Never passed in the following
        all_fail (0/2)
      Partially passed in the following
        some_pass (1/2)
          http://ci.chromium.org/b/build_id0
"""
    with open(self._filepath) as f:
      self.assertEqual(f.read(), expected_output)

  def testRecursivePrintToFileUnmatchedResults(self) -> None:
    """Tests RecursivePrintToFile() with unmatched results as input."""
    unmatched_results = {
        'foo': {
            'builder': {
                None: [
                    'Expected "" on http://ci.chromium.org/b/build_id, got '
                    '"Failure" with tags []',
                ],
                'step_name': [
                    'Expected "Failure RetryOnFailure" on '
                    'http://ci.chromium.org/b/build_id, got '
                    '"Failure" with tags [win intel]',
                ],
            },
        },
    }
    result_output.RecursivePrintToFile(unmatched_results, 0, self._file_handle)
    self._file_handle.close()
    # pylint: disable=line-too-long
    # Order is not guaranteed, so create permutations.
    expected_template = """\
foo
  builder%s
"""
    values = [
        """
    None
      Expected "" on http://ci.chromium.org/b/build_id, got "Failure" with tags []\
""",
        """
    step_name
      Expected "Failure RetryOnFailure" on http://ci.chromium.org/b/build_id, got "Failure" with tags [win intel]\
""",
    ]
    expected_output = CreateTextOutputPermutations(expected_template, values)
    # pylint: enable=line-too-long
    with open(self._filepath) as f:
      self.assertIn(f.read(), expected_output)


class OutputResultsUnittest(fake_filesystem_unittest.TestCase):
  def setUp(self) -> None:
    self.setUpPyfakefs()
    self._file_handle = tempfile.NamedTemporaryFile(delete=False, mode='w')
    self._filepath = self._file_handle.name

  def testOutputResultsUnsupportedFormat(self) -> None:
    """Tests that passing in an unsupported format is an error."""
    with self.assertRaises(RuntimeError):
      result_output.OutputResults(data_types.TestExpectationMap(),
                                  data_types.TestExpectationMap(),
                                  data_types.TestExpectationMap(), {}, {},
                                  'asdf')

  def testOutputResultsSmoketest(self) -> None:
    """Test that nothing blows up when outputting."""
    expectation_map = data_types.TestExpectationMap({
        'foo':
        data_types.ExpectationBuilderMap({
            data_types.Expectation('foo', ['win', 'intel'], 'RetryOnFailure'):
            data_types.BuilderStepMap({
                'stale':
                data_types.StepBuildStatsMap({
                    'all_pass': uu.CreateStatsWithPassFails(2, 0),
                }),
            }),
            data_types.Expectation('foo', ['linux'], 'Failure'):
            data_types.BuilderStepMap({
                'semi_stale':
                data_types.StepBuildStatsMap({
                    'all_pass': uu.CreateStatsWithPassFails(2, 0),
                    'some_pass': uu.CreateStatsWithPassFails(1, 1),
                    'none_pass': uu.CreateStatsWithPassFails(0, 2),
                }),
            }),
            data_types.Expectation('foo', ['mac'], 'Failure'):
            data_types.BuilderStepMap({
                'active':
                data_types.StepBuildStatsMap({
                    'none_pass': uu.CreateStatsWithPassFails(0, 2),
                }),
            }),
        }),
    })
    unmatched_results = {
        'builder': [
            data_types.Result('foo', ['win', 'intel'], 'Failure', 'step_name',
                              'build_id'),
        ],
    }
    unmatched_expectations = {
        'foo_file': [
            data_types.Expectation('foo', ['linux'], 'RetryOnFailure'),
        ],
    }

    stale, semi_stale, active = expectation_map.SplitByStaleness()

    result_output.OutputResults(stale, semi_stale, active, {}, {}, 'print',
                                self._file_handle)
    result_output.OutputResults(stale, semi_stale, active, unmatched_results,
                                {}, 'print', self._file_handle)
    result_output.OutputResults(stale, semi_stale, active, {},
                                unmatched_expectations, 'print',
                                self._file_handle)
    result_output.OutputResults(stale, semi_stale, active, unmatched_results,
                                unmatched_expectations, 'print',
                                self._file_handle)

    result_output.OutputResults(stale, semi_stale, active, {}, {}, 'html',
                                self._file_handle)
    result_output.OutputResults(stale, semi_stale, active, unmatched_results,
                                {}, 'html', self._file_handle)
    result_output.OutputResults(stale, semi_stale, active, {},
                                unmatched_expectations, 'html',
                                self._file_handle)
    result_output.OutputResults(stale, semi_stale, active, unmatched_results,
                                unmatched_expectations, 'html',
                                self._file_handle)


class OutputAffectedUrlsUnittest(fake_filesystem_unittest.TestCase):
  def setUp(self) -> None:
    self.setUpPyfakefs()
    self._file_handle = tempfile.NamedTemporaryFile(delete=False, mode='w')
    self._filepath = self._file_handle.name

  def testOutput(self) -> None:
    """Tests that the output is correct."""
    urls = [
        'https://crbug.com/1234',
        'https://crbug.com/angleproject/1234',
        'http://crbug.com/2345',
        'crbug.com/3456',
    ]
    orphaned_urls = ['https://crbug.com/1234', 'crbug.com/3456']
    result_output._OutputAffectedUrls(urls, orphaned_urls, self._file_handle)
    self._file_handle.close()
    with open(self._filepath) as f:
      self.assertEqual(f.read(), ('Affected bugs: '
                                  'https://crbug.com/1234 '
                                  'https://crbug.com/angleproject/1234 '
                                  'http://crbug.com/2345 '
                                  'https://crbug.com/3456\n'
                                  'Closable bugs: '
                                  'https://crbug.com/1234 '
                                  'https://crbug.com/3456\n'))


class OutputUrlsForClDescriptionUnittest(fake_filesystem_unittest.TestCase):
  def setUp(self) -> None:
    self.setUpPyfakefs()
    self._file_handle = tempfile.NamedTemporaryFile(delete=False, mode='w')
    self._filepath = self._file_handle.name

  def testSingleLine(self) -> None:
    """Tests when all bugs can fit on a single line."""
    urls = [
        'crbug.com/1234',
        'https://crbug.com/angleproject/2345',
    ]
    result_output._OutputUrlsForClDescription(urls, [], self._file_handle)
    self._file_handle.close()
    with open(self._filepath) as f:
      self.assertEqual(f.read(), ('Affected bugs for CL description:\n'
                                  'Bug: 1234, angleproject:2345\n'))

  def testBugLimit(self) -> None:
    """Tests that only a certain number of bugs are allowed per line."""
    urls = [
        'crbug.com/1',
        'crbug.com/2',
        'crbug.com/3',
        'crbug.com/4',
        'crbug.com/5',
        'crbug.com/6',
    ]
    result_output._OutputUrlsForClDescription(urls, [], self._file_handle)
    self._file_handle.close()
    with open(self._filepath) as f:
      self.assertEqual(f.read(), ('Affected bugs for CL description:\n'
                                  'Bug: 1, 2, 3, 4, 5\n'
                                  'Bug: 6\n'))

  def testLengthLimit(self) -> None:
    """Tests that only a certain number of characters are allowed per line."""
    urls = [
        'crbug.com/averylongprojectthatwillgooverthelinelength/1',
        'crbug.com/averylongprojectthatwillgooverthelinelength/2',
    ]
    result_output._OutputUrlsForClDescription(urls, [], self._file_handle)
    self._file_handle.close()
    with open(self._filepath) as f:
      self.assertEqual(f.read(),
                       ('Affected bugs for CL description:\n'
                        'Bug: averylongprojectthatwillgooverthelinelength:1\n'
                        'Bug: averylongprojectthatwillgooverthelinelength:2\n'))

    project_name = (result_output.MAX_CHARACTERS_PER_CL_LINE - len('Bug: ') -
                    len(':1, 2')) * 'a'
    urls = [
        'crbug.com/%s/1' % project_name,
        'crbug.com/2',
    ]
    with open(self._filepath, 'w') as f:
      result_output._OutputUrlsForClDescription(urls, [], f)
    with open(self._filepath) as f:
      self.assertEqual(f.read(), ('Affected bugs for CL description:\n'
                                  'Bug: 2, %s:1\n' % project_name))

    project_name += 'a'
    urls = [
        'crbug.com/%s/1' % project_name,
        'crbug.com/2',
    ]
    with open(self._filepath, 'w') as f:
      result_output._OutputUrlsForClDescription(urls, [], f)
    with open(self._filepath) as f:
      self.assertEqual(f.read(), ('Affected bugs for CL description:\n'
                                  'Bug: 2\nBug: %s:1\n' % project_name))

  def testSingleBugOverLineLimit(self) -> None:
    """Tests the behavior when a single bug by itself is over the line limit."""
    project_name = result_output.MAX_CHARACTERS_PER_CL_LINE * 'a'
    urls = [
        'crbug.com/%s/1' % project_name,
        'crbug.com/2',
    ]
    result_output._OutputUrlsForClDescription(urls, [], self._file_handle)
    self._file_handle.close()
    with open(self._filepath) as f:
      self.assertEqual(f.read(), ('Affected bugs for CL description:\n'
                                  'Bug: 2\n'
                                  'Bug: %s:1\n' % project_name))

  def testOrphanedBugs(self) -> None:
    """Tests that orphaned bugs are output properly alongside affected ones."""
    urls = [
        'crbug.com/1',
        'crbug.com/2',
        'crbug.com/3',
    ]
    orphaned_urls = ['crbug.com/2']
    result_output._OutputUrlsForClDescription(urls, orphaned_urls,
                                              self._file_handle)
    self._file_handle.close()
    with open(self._filepath) as f:
      self.assertEqual(f.read(), ('Affected bugs for CL description:\n'
                                  'Bug: 1, 3\n'
                                  'Fixed: 2\n'))

  def testOnlyOrphanedBugs(self) -> None:
    """Tests output when all affected bugs are orphaned bugs."""
    urls = [
        'crbug.com/1',
        'crbug.com/2',
    ]
    orphaned_urls = [
        'crbug.com/1',
        'crbug.com/2',
    ]
    result_output._OutputUrlsForClDescription(urls, orphaned_urls,
                                              self._file_handle)
    self._file_handle.close()
    with open(self._filepath) as f:
      self.assertEqual(f.read(), ('Affected bugs for CL description:\n'
                                  'Fixed: 1, 2\n'))


def _Dedent(s: str) -> str:
  """Removes all leading whitespace from each line in |s|."""
  output = ''
  for line in s.splitlines(True):
    output += line.lstrip()
  return output


if __name__ == '__main__':
  unittest.main(verbosity=2)