# -*- coding: utf-8 -*-
# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
# See https://llvm.org/LICENSE.txt for license information.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
"""Unit tests for libscanbuild.report: bug/crash report parsing, path
chopping, common-prefix computation, and SARIF result merging."""

import json
import libear
import libscanbuild.report as sut
import unittest
import os
import os.path


def run_bug_parse(content):
    """Write *content* lines into a temporary HTML file and return the
    first bug record that ``sut.parse_bug_html`` yields for it (or None
    when the parser yields nothing)."""
    with libear.TemporaryDirectory() as tmpdir:
        file_name = os.path.join(tmpdir, "test.html")
        with open(file_name, "w") as handle:
            handle.writelines(content)
        # parse_bug_html is a generator; only the first bug matters here.
        for bug in sut.parse_bug_html(file_name):
            return bug


def run_crash_parse(content, preproc):
    """Write *content* lines into ``<preproc>.info.txt`` in a temporary
    directory and return ``sut.parse_crash`` run against that file."""
    with libear.TemporaryDirectory() as tmpdir:
        file_name = os.path.join(tmpdir, preproc + ".info.txt")
        with open(file_name, "w") as handle:
            handle.writelines(content)
        return sut.parse_crash(file_name)


class ParseFileTest(unittest.TestCase):
    """Tests for parsing analyzer bug reports and crash info files."""

    def test_parse_bug(self):
        """A well-formed bug HTML comment header is fully extracted."""
        content = [
            "some header\n",
            "<!-- BUGDESC Division by zero -->\n",
            "<!-- BUGTYPE Division by zero -->\n",
            "<!-- BUGCATEGORY Logic error -->\n",
            "<!-- BUGFILE xx -->\n",
            "<!-- BUGLINE 5 -->\n",
            "<!-- BUGCOLUMN 22 -->\n",
            "<!-- BUGPATHLENGTH 4 -->\n",
            "<!-- BUGMETAEND -->\n",
            "<!-- REPORTHEADER -->\n",
            "some tails\n",
        ]
        result = run_bug_parse(content)
        self.assertEqual(result["bug_category"], "Logic error")
        self.assertEqual(result["bug_path_length"], 4)
        self.assertEqual(result["bug_line"], 5)
        self.assertEqual(result["bug_description"], "Division by zero")
        self.assertEqual(result["bug_type"], "Division by zero")
        self.assertEqual(result["bug_file"], "xx")

    def test_parse_bug_empty(self):
        """An empty report file falls back to the documented defaults."""
        content = []
        result = run_bug_parse(content)
        self.assertEqual(result["bug_category"], "Other")
        self.assertEqual(result["bug_path_length"], 1)
        self.assertEqual(result["bug_line"], 0)

    def test_parse_crash(self):
        """parse_crash reads source and problem from the first two lines
        and derives the file/info/stderr names from the info file name."""
        content = [
            "/some/path/file.c\n",
            "Some very serious Error\n",
            "bla\n",
            "bla-bla\n",
        ]
        result = run_crash_parse(content, "file.i")
        self.assertEqual(result["source"], content[0].rstrip())
        self.assertEqual(result["problem"], content[1].rstrip())
        self.assertEqual(os.path.basename(result["file"]), "file.i")
        self.assertEqual(os.path.basename(result["info"]), "file.i.info.txt")
        self.assertEqual(os.path.basename(result["stderr"]), "file.i.stderr.txt")

    def test_parse_real_crash(self):
        """End-to-end: report_failure writes a crash report for a broken
        translation unit and parse_crash reads it back consistently."""
        import libscanbuild.analyze as sut2
        import re

        with libear.TemporaryDirectory() as tmpdir:
            filename = os.path.join(tmpdir, "test.c")
            with open(filename, "w") as handle:
                handle.write("int main() { return 0")
            # produce failure report
            opts = {
                "clang": "clang",
                "directory": os.getcwd(),
                "flags": [],
                "file": filename,
                "output_dir": tmpdir,
                "language": "c",
                "error_type": "other_error",
                "error_output": "some output",
                "exit_code": 13,
            }
            sut2.report_failure(opts)
            # find the preprocessed (.i) file the failure report produced
            pp_file = None
            for root, _, files in os.walk(tmpdir):
                keys = [os.path.join(root, name) for name in files]
                for key in keys:
                    if re.match(r"^(.*/)+clang(.*)\.i$", key):
                        pp_file = key
            self.assertIsNotNone(pp_file)
            # read the failure report back
            result = sut.parse_crash(pp_file + ".info.txt")
            self.assertEqual(result["source"], filename)
            self.assertEqual(result["problem"], "Other Error")
            self.assertEqual(result["file"], pp_file)
            self.assertEqual(result["info"], pp_file + ".info.txt")
            self.assertEqual(result["stderr"], pp_file + ".stderr.txt")


class ReportMethodTest(unittest.TestCase):
    """Tests for path-manipulation helpers in libscanbuild.report."""

    def test_chop(self):
        """chop strips a prefix (with or without trailing slash)."""
        self.assertEqual("file", sut.chop("/prefix", "/prefix/file"))
        self.assertEqual("file", sut.chop("/prefix/", "/prefix/file"))
        self.assertEqual("lib/file", sut.chop("/prefix/", "/prefix/lib/file"))
        self.assertEqual("/prefix/file", sut.chop("", "/prefix/file"))

    def test_chop_when_cwd(self):
        """Paths outside the prefix are expressed relative to it."""
        self.assertEqual("../src/file", sut.chop("/cwd", "/src/file"))
        self.assertEqual("../src/file", sut.chop("/prefix/cwd", "/prefix/src/file"))


class GetPrefixFromCompilationDatabaseTest(unittest.TestCase):
    """Tests for commonprefix over lists of source file paths."""

    def test_with_different_filenames(self):
        self.assertEqual(sut.commonprefix(["/tmp/a.c", "/tmp/b.c"]), "/tmp")

    def test_with_different_dirnames(self):
        self.assertEqual(sut.commonprefix(["/tmp/abs/a.c", "/tmp/ack/b.c"]), "/tmp")

    def test_no_common_prefix(self):
        self.assertEqual(sut.commonprefix(["/tmp/abs/a.c", "/usr/ack/b.c"]), "/")

    def test_with_single_file(self):
        self.assertEqual(sut.commonprefix(["/tmp/a.c"]), "/tmp")

    def test_empty(self):
        self.assertEqual(sut.commonprefix([]), "")


class MergeSarifTest(unittest.TestCase):
    """Tests for merging multiple SARIF result files into one document."""

    def _write_sarif_inputs(self, tmpdir, contents):
        """Dump each SARIF document in *contents* into *tmpdir* as
        ``results-<index>.sarif`` (the input layout merge_sarif_files
        expects)."""
        for idx, content in enumerate(contents):
            file_name = os.path.join(tmpdir, "results-{}.sarif".format(idx))
            with open(file_name, "w") as handle:
                json.dump(content, handle)

    def test_merging_sarif(self):
        """Merging two SARIF files concatenates their runs verbatim."""
        sarif1 = {
            "$schema": "https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json",
            "runs": [
                {
                    "artifacts": [
                        {
                            "length": 100,
                            "location": {
                                "uri": "//clang/tools/scan-build-py/tests/unit/test_report.py"
                            },
                            "mimeType": "text/plain",
                            "roles": ["resultFile"],
                        }
                    ],
                    "columnKind": "unicodeCodePoints",
                    "results": [
                        {
                            "codeFlows": [
                                {
                                    "threadFlows": [
                                        {
                                            "locations": [
                                                {
                                                    "importance": "important",
                                                    "location": {
                                                        "message": {
                                                            "text": "test message 1"
                                                        },
                                                        "physicalLocation": {
                                                            "artifactLocation": {
                                                                "index": 0,
                                                                "uri": "//clang/tools/scan-build-py/tests/unit/test_report.py",
                                                            },
                                                            "region": {
                                                                "endColumn": 5,
                                                                "startColumn": 1,
                                                                "startLine": 2,
                                                            },
                                                        },
                                                    },
                                                }
                                            ]
                                        }
                                    ]
                                }
                            ]
                        },
                        {
                            "codeFlows": [
                                {
                                    "threadFlows": [
                                        {
                                            "locations": [
                                                {
                                                    "importance": "important",
                                                    "location": {
                                                        "message": {
                                                            "text": "test message 2"
                                                        },
                                                        "physicalLocation": {
                                                            "artifactLocation": {
                                                                "index": 0,
                                                                "uri": "//clang/tools/scan-build-py/tests/unit/test_report.py",
                                                            },
                                                            "region": {
                                                                "endColumn": 23,
                                                                "startColumn": 9,
                                                                "startLine": 10,
                                                            },
                                                        },
                                                    },
                                                }
                                            ]
                                        }
                                    ]
                                }
                            ]
                        },
                    ],
                    "tool": {
                        "driver": {
                            "fullName": "clang static analyzer",
                            "language": "en-US",
                            "name": "clang",
                            "rules": [
                                {
                                    "fullDescription": {
                                        "text": "test rule for merge sarif test"
                                    },
                                    "helpUrl": "//clang/tools/scan-build-py/tests/unit/test_report.py",
                                    "id": "testId",
                                    "name": "testName",
                                }
                            ],
                            "version": "test clang",
                        }
                    },
                }
            ],
            "version": "2.1.0",
        }
        sarif2 = {
            "$schema": "https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json",
            "runs": [
                {
                    "artifacts": [
                        {
                            "length": 1523,
                            "location": {
                                "uri": "//clang/tools/scan-build-py/tests/unit/test_report.py"
                            },
                            "mimeType": "text/plain",
                            "roles": ["resultFile"],
                        }
                    ],
                    "columnKind": "unicodeCodePoints",
                    "results": [
                        {
                            "codeFlows": [
                                {
                                    "threadFlows": [
                                        {
                                            "locations": [
                                                {
                                                    "importance": "important",
                                                    "location": {
                                                        "message": {
                                                            "text": "test message 3"
                                                        },
                                                        "physicalLocation": {
                                                            "artifactLocation": {
                                                                "index": 0,
                                                                "uri": "//clang/tools/scan-build-py/tests/unit/test_report.py",
                                                            },
                                                            "region": {
                                                                "endColumn": 99,
                                                                "startColumn": 99,
                                                                "startLine": 17,
                                                            },
                                                        },
                                                    },
                                                }
                                            ]
                                        }
                                    ]
                                }
                            ]
                        },
                        {
                            "codeFlows": [
                                {
                                    "threadFlows": [
                                        {
                                            "locations": [
                                                {
                                                    "importance": "important",
                                                    "location": {
                                                        "message": {
                                                            "text": "test message 4"
                                                        },
                                                        "physicalLocation": {
                                                            "artifactLocation": {
                                                                "index": 0,
                                                                "uri": "//clang/tools/scan-build-py/tests/unit/test_report.py",
                                                            },
                                                            "region": {
                                                                "endColumn": 305,
                                                                "startColumn": 304,
                                                                "startLine": 1,
                                                            },
                                                        },
                                                    },
                                                }
                                            ]
                                        }
                                    ]
                                }
                            ]
                        },
                    ],
                    "tool": {
                        "driver": {
                            "fullName": "clang static analyzer",
                            "language": "en-US",
                            "name": "clang",
                            "rules": [
                                {
                                    "fullDescription": {
                                        "text": "test rule for merge sarif test"
                                    },
                                    "helpUrl": "//clang/tools/scan-build-py/tests/unit/test_report.py",
                                    "id": "testId",
                                    "name": "testName",
                                }
                            ],
                            "version": "test clang",
                        }
                    },
                }
            ],
            "version": "2.1.0",
        }

        contents = [sarif1, sarif2]
        with libear.TemporaryDirectory() as tmpdir:
            self._write_sarif_inputs(tmpdir, contents)

            sut.merge_sarif_files(tmpdir, sort_files=True)

            self.assertIn("results-merged.sarif", os.listdir(tmpdir))
            with open(os.path.join(tmpdir, "results-merged.sarif")) as f:
                merged = json.load(f)
                self.assertEqual(len(merged["runs"]), 2)
                self.assertEqual(len(merged["runs"][0]["results"]), 2)
                self.assertEqual(len(merged["runs"][1]["results"]), 2)

                # NOTE: 'expected' aliases sarif1 and mutates it in place;
                # this is the fixtures' last use, so the aliasing is benign.
                expected = sarif1
                for run in sarif2["runs"]:
                    expected["runs"].append(run)

                self.assertEqual(merged, expected)

    def test_merge_updates_embedded_link(self):
        """Embedded sarif:/runs/N/... links are re-indexed so that each
        file's links keep pointing at their own (shifted) runs."""
        sarif1 = {
            "runs": [
                {
                    "results": [
                        {
                            "codeFlows": [
                                {
                                    "message": {
                                        "text": "test message 1-1 [link](sarif:/runs/1/results/0) [link2](sarif:/runs/1/results/0)"
                                    },
                                    "threadFlows": [
                                        {
                                            "message": {
                                                "text": "test message 1-2 [link](sarif:/runs/1/results/0)"
                                            }
                                        }
                                    ],
                                }
                            ]
                        }
                    ]
                },
                {
                    "results": [
                        {
                            "codeFlows": [
                                {
                                    "message": {
                                        "text": "test message 2-1 [link](sarif:/runs/0/results/0)"
                                    },
                                    "threadFlows": [
                                        {
                                            "message": {
                                                "text": "test message 2-2 [link](sarif:/runs/0/results/0)"
                                            }
                                        }
                                    ],
                                }
                            ]
                        }
                    ]
                },
            ]
        }
        sarif2 = {
            "runs": [
                {
                    "results": [
                        {
                            "codeFlows": [
                                {
                                    "message": {
                                        "text": "test message 3-1 [link](sarif:/runs/1/results/0) [link2](sarif:/runs/1/results/0)"
                                    },
                                    "threadFlows": [
                                        {
                                            "message": {
                                                "text": "test message 3-2 [link](sarif:/runs/1/results/0)"
                                            }
                                        }
                                    ],
                                }
                            ]
                        }
                    ],
                },
                {
                    "results": [
                        {
                            "codeFlows": [
                                {
                                    "message": {
                                        "text": "test message 4-1 [link](sarif:/runs/0/results/0)"
                                    },
                                    "threadFlows": [
                                        {
                                            "message": {
                                                "text": "test message 4-2 [link](sarif:/runs/0/results/0)"
                                            }
                                        }
                                    ],
                                }
                            ]
                        }
                    ]
                },
            ]
        }
        sarif3 = {
            "runs": [
                {
                    "results": [
                        {
                            "codeFlows": [
                                {
                                    "message": {
                                        "text": "test message 5-1 [link](sarif:/runs/1/results/0) [link2](sarif:/runs/1/results/0)"
                                    },
                                    "threadFlows": [
                                        {
                                            "message": {
                                                "text": "test message 5-2 [link](sarif:/runs/1/results/0)"
                                            }
                                        }
                                    ],
                                }
                            ]
                        }
                    ],
                },
                {
                    "results": [
                        {
                            "codeFlows": [
                                {
                                    "message": {
                                        "text": "test message 6-1 [link](sarif:/runs/0/results/0)"
                                    },
                                    "threadFlows": [
                                        {
                                            "message": {
                                                "text": "test message 6-2 [link](sarif:/runs/0/results/0)"
                                            }
                                        }
                                    ],
                                }
                            ]
                        }
                    ]
                },
            ]
        }

        contents = [sarif1, sarif2, sarif3]

        with libear.TemporaryDirectory() as tmpdir:
            self._write_sarif_inputs(tmpdir, contents)

            sut.merge_sarif_files(tmpdir, sort_files=True)

            self.assertIn("results-merged.sarif", os.listdir(tmpdir))
            with open(os.path.join(tmpdir, "results-merged.sarif")) as f:
                merged = json.load(f)
                self.assertEqual(len(merged["runs"]), 6)

                code_flows = [
                    merged["runs"][x]["results"][0]["codeFlows"][0]["message"]["text"]
                    for x in range(6)
                ]
                thread_flows = [
                    merged["runs"][x]["results"][0]["codeFlows"][0]["threadFlows"][0][
                        "message"
                    ]["text"]
                    for x in range(6)
                ]

                # The run index should be updated for the second and third sets of runs
                self.assertEqual(
                    code_flows,
                    [
                        "test message 1-1 [link](sarif:/runs/1/results/0) [link2](sarif:/runs/1/results/0)",
                        "test message 2-1 [link](sarif:/runs/0/results/0)",
                        "test message 3-1 [link](sarif:/runs/3/results/0) [link2](sarif:/runs/3/results/0)",
                        "test message 4-1 [link](sarif:/runs/2/results/0)",
                        "test message 5-1 [link](sarif:/runs/5/results/0) [link2](sarif:/runs/5/results/0)",
                        "test message 6-1 [link](sarif:/runs/4/results/0)",
                    ],
                )
                self.assertEqual(
                    thread_flows,
                    [
                        "test message 1-2 [link](sarif:/runs/1/results/0)",
                        "test message 2-2 [link](sarif:/runs/0/results/0)",
                        "test message 3-2 [link](sarif:/runs/3/results/0)",
                        "test message 4-2 [link](sarif:/runs/2/results/0)",
                        "test message 5-2 [link](sarif:/runs/5/results/0)",
                        "test message 6-2 [link](sarif:/runs/4/results/0)",
                    ],
                )

    def test_overflow_run_count(self):
        """Re-indexing embedded links is correct when the shifted run
        indices gain an extra digit (single- to double-digit rollover)."""
        sarif1 = {
            "runs": [
                {
                    "results": [
                        {"message": {"text": "run 1-0 [link](sarif:/runs/1/results/0)"}}
                    ]
                },
                {
                    "results": [
                        {"message": {"text": "run 1-1 [link](sarif:/runs/2/results/0)"}}
                    ]
                },
                {
                    "results": [
                        {"message": {"text": "run 1-2 [link](sarif:/runs/3/results/0)"}}
                    ]
                },
                {
                    "results": [
                        {"message": {"text": "run 1-3 [link](sarif:/runs/4/results/0)"}}
                    ]
                },
                {
                    "results": [
                        {"message": {"text": "run 1-4 [link](sarif:/runs/5/results/0)"}}
                    ]
                },
                {
                    "results": [
                        {"message": {"text": "run 1-5 [link](sarif:/runs/6/results/0)"}}
                    ]
                },
                {
                    "results": [
                        {"message": {"text": "run 1-6 [link](sarif:/runs/7/results/0)"}}
                    ]
                },
                {
                    "results": [
                        {"message": {"text": "run 1-7 [link](sarif:/runs/8/results/0)"}}
                    ]
                },
                {
                    "results": [
                        {"message": {"text": "run 1-8 [link](sarif:/runs/9/results/0)"}}
                    ]
                },
                {
                    "results": [
                        {"message": {"text": "run 1-9 [link](sarif:/runs/0/results/0)"}}
                    ]
                },
            ]
        }
        sarif2 = {
            "runs": [
                {
                    "results": [
                        {
                            "message": {
                                "text": "run 2-0 [link](sarif:/runs/1/results/0) [link2](sarif:/runs/2/results/0)"
                            }
                        }
                    ]
                },
                {
                    "results": [
                        {"message": {"text": "run 2-1 [link](sarif:/runs/2/results/0)"}}
                    ]
                },
                {
                    "results": [
                        {"message": {"text": "run 2-2 [link](sarif:/runs/3/results/0)"}}
                    ]
                },
                {
                    "results": [
                        {"message": {"text": "run 2-3 [link](sarif:/runs/4/results/0)"}}
                    ]
                },
                {
                    "results": [
                        {"message": {"text": "run 2-4 [link](sarif:/runs/5/results/0)"}}
                    ]
                },
                {
                    "results": [
                        {"message": {"text": "run 2-5 [link](sarif:/runs/6/results/0)"}}
                    ]
                },
                {
                    "results": [
                        {"message": {"text": "run 2-6 [link](sarif:/runs/7/results/0)"}}
                    ]
                },
                {
                    "results": [
                        {"message": {"text": "run 2-7 [link](sarif:/runs/8/results/0)"}}
                    ]
                },
                {
                    "results": [
                        {"message": {"text": "run 2-8 [link](sarif:/runs/9/results/0)"}}
                    ]
                },
                {
                    "results": [
                        {"message": {"text": "run 2-9 [link](sarif:/runs/0/results/0)"}}
                    ]
                },
            ]
        }

        contents = [sarif1, sarif2]
        with libear.TemporaryDirectory() as tmpdir:
            self._write_sarif_inputs(tmpdir, contents)

            sut.merge_sarif_files(tmpdir, sort_files=True)

            self.assertIn("results-merged.sarif", os.listdir(tmpdir))
            with open(os.path.join(tmpdir, "results-merged.sarif")) as f:
                merged = json.load(f)
                self.assertEqual(len(merged["runs"]), 20)

                messages = [
                    merged["runs"][x]["results"][0]["message"]["text"]
                    for x in range(20)
                ]
                self.assertEqual(
                    messages,
                    [
                        "run 1-0 [link](sarif:/runs/1/results/0)",
                        "run 1-1 [link](sarif:/runs/2/results/0)",
                        "run 1-2 [link](sarif:/runs/3/results/0)",
                        "run 1-3 [link](sarif:/runs/4/results/0)",
                        "run 1-4 [link](sarif:/runs/5/results/0)",
                        "run 1-5 [link](sarif:/runs/6/results/0)",
                        "run 1-6 [link](sarif:/runs/7/results/0)",
                        "run 1-7 [link](sarif:/runs/8/results/0)",
                        "run 1-8 [link](sarif:/runs/9/results/0)",
                        "run 1-9 [link](sarif:/runs/0/results/0)",
                        "run 2-0 [link](sarif:/runs/11/results/0) [link2](sarif:/runs/12/results/0)",
                        "run 2-1 [link](sarif:/runs/12/results/0)",
                        "run 2-2 [link](sarif:/runs/13/results/0)",
                        "run 2-3 [link](sarif:/runs/14/results/0)",
                        "run 2-4 [link](sarif:/runs/15/results/0)",
                        "run 2-5 [link](sarif:/runs/16/results/0)",
                        "run 2-6 [link](sarif:/runs/17/results/0)",
                        "run 2-7 [link](sarif:/runs/18/results/0)",
                        "run 2-8 [link](sarif:/runs/19/results/0)",
                        "run 2-9 [link](sarif:/runs/10/results/0)",
                    ],
                )