/* Optimize and expand sanitizer functions.
   Copyright (C) 2014-2020 Free Software Foundation, Inc.
   Contributed by Marek Polacek <polacek@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "tree-pass.h"
#include "tree-ssa-operands.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "gimple-iterator.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "ubsan.h"
#include "tree-hash-traits.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "gimplify.h"
#include "gimple-walk.h"
#include "cfghooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "varasm.h"

/* This is used to carry information about basic blocks.  It is
   attached to the AUX field of the standard CFG block.  */

struct sanopt_info
{
  /* True if this BB might call (directly or indirectly) free/munmap
     or a similar operation.  */
  bool has_freeing_call_p;

  /* True if HAS_FREEING_CALL_P flag has been computed.  */
  bool has_freeing_call_computed_p;

  /* True if there is a block with HAS_FREEING_CALL_P flag set
     on any path between an immediate dominator of BB, denoted
     imm(BB), and BB.  */
  bool imm_dom_path_with_freeing_call_p;

  /* True if IMM_DOM_PATH_WITH_FREEING_CALL_P has been computed.  */
  bool imm_dom_path_with_freeing_call_computed_p;

  /* Number of possibly freeing calls encountered in this bb
     (so far).  */
  uint64_t freeing_call_events;

  /* True if BB is currently being visited during computation
     of IMM_DOM_PATH_WITH_FREEING_CALL_P flag.  */
  bool being_visited_p;

  /* True if this BB has been visited in the dominator walk.  */
  bool visited_p;
};

/* If T has a single definition of form T = T2, return T2.  */

static tree
maybe_get_single_definition (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      gimple *g = SSA_NAME_DEF_STMT (t);
      if (gimple_assign_single_p (g))
	return gimple_assign_rhs1 (g);
    }
  return NULL_TREE;
}

/* Tree triplet for vptr_check_map.  */
struct sanopt_tree_triplet
{
  tree t1, t2, t3;
};
/* Traits class for tree triplet hash maps below.  */

struct sanopt_tree_triplet_hash : typed_noop_remove <sanopt_tree_triplet>
{
  typedef sanopt_tree_triplet value_type;
  typedef sanopt_tree_triplet compare_type;

  static hashval_t
  hash (const sanopt_tree_triplet &ref)
  {
    inchash::hash hstate (0);
    inchash::add_expr (ref.t1, hstate);
    inchash::add_expr (ref.t2, hstate);
    inchash::add_expr (ref.t3, hstate);
    return hstate.end ();
  }

  static bool
  equal (const sanopt_tree_triplet &ref1, const sanopt_tree_triplet &ref2)
  {
    return operand_equal_p (ref1.t1, ref2.t1, 0)
	   && operand_equal_p (ref1.t2, ref2.t2, 0)
	   && operand_equal_p (ref1.t3, ref2.t3, 0);
  }

  static void
  mark_deleted (sanopt_tree_triplet &ref)
  {
    ref.t1 = reinterpret_cast<tree> (1);
  }

  static const bool empty_zero_p = true;

  static void
  mark_empty (sanopt_tree_triplet &ref)
  {
    ref.t1 = NULL;
  }

  static bool
  is_deleted (const sanopt_tree_triplet &ref)
  {
    return ref.t1 == reinterpret_cast<tree> (1);
  }

  static bool
  is_empty (const sanopt_tree_triplet &ref)
  {
    return ref.t1 == NULL;
  }
};

/* Tree couple for ptr_check_map.  */
struct sanopt_tree_couple
{
  tree ptr;
  bool pos_p;
};

/* Traits class for tree couple hash maps below.  */

struct sanopt_tree_couple_hash : typed_noop_remove <sanopt_tree_couple>
{
  typedef sanopt_tree_couple value_type;
  typedef sanopt_tree_couple compare_type;

  static hashval_t
  hash (const sanopt_tree_couple &ref)
  {
    inchash::hash hstate (0);
    inchash::add_expr (ref.ptr, hstate);
    hstate.add_int (ref.pos_p);
    return hstate.end ();
  }

  static bool
  equal (const sanopt_tree_couple &ref1, const sanopt_tree_couple &ref2)
  {
    return operand_equal_p (ref1.ptr, ref2.ptr, 0)
	   && ref1.pos_p == ref2.pos_p;
  }

  static void
  mark_deleted (sanopt_tree_couple &ref)
  {
    ref.ptr = reinterpret_cast<tree> (1);
  }

  static const bool empty_zero_p = true;

  static void
  mark_empty (sanopt_tree_couple &ref)
  {
    ref.ptr = NULL;
  }

  static bool
  is_deleted (const sanopt_tree_couple &ref)
  {
    return ref.ptr == reinterpret_cast<tree> (1);
  }

  static bool
  is_empty (const sanopt_tree_couple &ref)
  {
    return ref.ptr == NULL;
  }
};

/* This is used to carry various hash maps and variables used
   in sanopt_optimize_walker.  */

class sanopt_ctx
{
public:
  /* This map maps a pointer (the first argument of UBSAN_NULL) to
     a vector of UBSAN_NULL call statements that check this pointer.  */
  hash_map<tree, auto_vec<gimple *> > null_check_map;

  /* This map maps a pointer (the second argument of ASAN_CHECK) to
     a vector of ASAN_CHECK call statements that check the access.  */
  hash_map<tree_operand_hash, auto_vec<gimple *> > asan_check_map;

  /* This map maps a tree triplet (the first, second and fourth argument
     of UBSAN_VPTR) to a vector of UBSAN_VPTR call statements that check
     that virtual table pointer.  */
  hash_map<sanopt_tree_triplet_hash, auto_vec<gimple *> > vptr_check_map;

  /* This map maps a couple (tree and boolean) to a vector of UBSAN_PTR
     call statements that check that pointer overflow.  */
  hash_map<sanopt_tree_couple_hash, auto_vec<gimple *> > ptr_check_map;

  /* Number of IFN_ASAN_CHECK statements.  */
  int asan_num_accesses;

  /* True when the current function contains an ASAN_MARK.  */
  bool contains_asan_mark;
};
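
/* A note on how these maps are used (an editorial sketch of the walk
   described at sanopt_optimize_walker below): during the dominator walk
   each vector in the maps behaves like a stack.  Statements are pushed
   when their block is entered, and maybe_get_dominating_check lazily
   pops statements whose blocks have already been left (marked
   visited_p), since those can no longer dominate the current block.  */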
/* Return true if there might be any call to a free/munmap-like operation
   on any path in between DOM (which should be imm(BB)) and BB.  */

static bool
imm_dom_path_with_freeing_call (basic_block bb, basic_block dom)
{
  sanopt_info *info = (sanopt_info *) bb->aux;
  edge e;
  edge_iterator ei;

  if (info->imm_dom_path_with_freeing_call_computed_p)
    return info->imm_dom_path_with_freeing_call_p;

  info->being_visited_p = true;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      sanopt_info *pred_info = (sanopt_info *) e->src->aux;

      if (e->src == dom)
	continue;

      if ((pred_info->imm_dom_path_with_freeing_call_computed_p
	   && pred_info->imm_dom_path_with_freeing_call_p)
	  || (pred_info->has_freeing_call_computed_p
	      && pred_info->has_freeing_call_p))
	{
	  info->imm_dom_path_with_freeing_call_computed_p = true;
	  info->imm_dom_path_with_freeing_call_p = true;
	  info->being_visited_p = false;
	  return true;
	}
    }

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      sanopt_info *pred_info = (sanopt_info *) e->src->aux;

      if (e->src == dom)
	continue;

      if (pred_info->has_freeing_call_computed_p)
	continue;

      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (e->src); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  gasm *asm_stmt;

	  if ((is_gimple_call (stmt) && !nonfreeing_call_p (stmt))
	      || ((asm_stmt = dyn_cast <gasm *> (stmt))
		  && (gimple_asm_clobbers_memory_p (asm_stmt)
		      || gimple_asm_volatile_p (asm_stmt))))
	    {
	      pred_info->has_freeing_call_p = true;
	      break;
	    }
	}

      pred_info->has_freeing_call_computed_p = true;
      if (pred_info->has_freeing_call_p)
	{
	  info->imm_dom_path_with_freeing_call_computed_p = true;
	  info->imm_dom_path_with_freeing_call_p = true;
	  info->being_visited_p = false;
	  return true;
	}
    }

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if (e->src == dom)
	continue;

      basic_block src;
      for (src = e->src; src != dom; )
	{
	  sanopt_info *pred_info = (sanopt_info *) src->aux;
	  if (pred_info->being_visited_p)
	    break;
	  basic_block imm = get_immediate_dominator (CDI_DOMINATORS, src);
	  if (imm_dom_path_with_freeing_call (src, imm))
	    {
	      info->imm_dom_path_with_freeing_call_computed_p = true;
	      info->imm_dom_path_with_freeing_call_p = true;
	      info->being_visited_p = false;
	      return true;
	    }
	  src = imm;
	}
    }

  info->imm_dom_path_with_freeing_call_computed_p = true;
  info->imm_dom_path_with_freeing_call_p = false;
  info->being_visited_p = false;
  return false;
}

/* Get the first dominating check from the list of stored checks.
   Non-dominating checks are silently dropped.  */

static gimple *
maybe_get_dominating_check (auto_vec<gimple *> &v)
{
  for (; !v.is_empty (); v.pop ())
    {
      gimple *g = v.last ();
      sanopt_info *si = (sanopt_info *) gimple_bb (g)->aux;
      if (!si->visited_p)
	/* At this point we shouldn't have any statements
	   that aren't dominating the current BB.  */
	return g;
    }
  return NULL;
}
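
/* An illustrative example of the redundancy removed below (simplified
   GIMPLE; names are hypothetical):

     UBSAN_NULL (p_2(D), UBSAN_LOAD_OF, 0);
     _1 = *p_2(D);
     UBSAN_NULL (p_2(D), UBSAN_LOAD_OF, 0);
     _3 = *p_2(D);

   The second UBSAN_NULL is dominated by a check of the same pointer
   with the same (or weaker) alignment requirement, so
   maybe_optimize_ubsan_null_ifn can remove it.  */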
/* Optimize away redundant UBSAN_NULL calls.  */

static bool
maybe_optimize_ubsan_null_ifn (class sanopt_ctx *ctx, gimple *stmt)
{
  gcc_assert (gimple_call_num_args (stmt) == 3);
  tree ptr = gimple_call_arg (stmt, 0);
  tree cur_align = gimple_call_arg (stmt, 2);
  gcc_assert (TREE_CODE (cur_align) == INTEGER_CST);
  bool remove = false;

  auto_vec<gimple *> &v = ctx->null_check_map.get_or_insert (ptr);
  gimple *g = maybe_get_dominating_check (v);
  if (!g)
    {
      /* For this PTR we don't have any UBSAN_NULL stmts recorded, so there's
	 nothing to optimize yet.  */
      v.safe_push (stmt);
      return false;
    }

  /* We have already recorded a UBSAN_NULL check for this pointer.  Perhaps
     we can drop this one.  But only if this check doesn't specify stricter
     alignment.  */

  tree align = gimple_call_arg (g, 2);
  int kind = tree_to_shwi (gimple_call_arg (g, 1));
  /* If this is a NULL pointer check where we would have segfaulted anyway,
     we can remove it.  */
  if (integer_zerop (align)
      && (kind == UBSAN_LOAD_OF
	  || kind == UBSAN_STORE_OF
	  || kind == UBSAN_MEMBER_ACCESS))
    remove = true;
  /* Otherwise remove the check in non-recovering mode, or if the
     stmts have the same location.  */
  else if (integer_zerop (align))
    remove = (flag_sanitize_recover & SANITIZE_NULL) == 0
	     || flag_sanitize_undefined_trap_on_error
	     || gimple_location (g) == gimple_location (stmt);
  else if (tree_int_cst_le (cur_align, align))
    remove = (flag_sanitize_recover & SANITIZE_ALIGNMENT) == 0
	     || flag_sanitize_undefined_trap_on_error
	     || gimple_location (g) == gimple_location (stmt);

  if (!remove && gimple_bb (g) == gimple_bb (stmt)
      && tree_int_cst_compare (cur_align, align) == 0)
    v.pop ();

  if (!remove)
    v.safe_push (stmt);
  return remove;
}

/* Return true when pointer PTR for a given CUR_OFFSET is already sanitized
   in a given sanitization context CTX.  */

static bool
has_dominating_ubsan_ptr_check (sanopt_ctx *ctx, tree ptr,
				offset_int &cur_offset)
{
  bool pos_p = !wi::neg_p (cur_offset);
  sanopt_tree_couple couple;
  couple.ptr = ptr;
  couple.pos_p = pos_p;

  auto_vec<gimple *> &v = ctx->ptr_check_map.get_or_insert (couple);
  gimple *g = maybe_get_dominating_check (v);
  if (!g)
    return false;

  /* We have already recorded a UBSAN_PTR check for this pointer.  Perhaps
     we can drop this one.  But only if this check doesn't specify a larger
     offset.  */
  tree offset = gimple_call_arg (g, 1);
  gcc_assert (TREE_CODE (offset) == INTEGER_CST);
  offset_int ooffset = wi::sext (wi::to_offset (offset), POINTER_SIZE);

  if (pos_p)
    {
      if (wi::les_p (cur_offset, ooffset))
	return true;
    }
  else if (!pos_p && wi::les_p (ooffset, cur_offset))
    return true;

  return false;
}

/* Record the UBSAN_PTR check STMT in context CTX: register that pointer PTR
   with the given OFFSET is handled by GIMPLE statement STMT.  */

static void
record_ubsan_ptr_check_stmt (sanopt_ctx *ctx, gimple *stmt, tree ptr,
			     const offset_int &offset)
{
  sanopt_tree_couple couple;
  couple.ptr = ptr;
  couple.pos_p = !wi::neg_p (offset);

  auto_vec<gimple *> &v = ctx->ptr_check_map.get_or_insert (couple);
  v.safe_push (stmt);
}
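
/* For instance, once UBSAN_PTR (p_1, 16) has been recorded and still
   dominates the current block, a later UBSAN_PTR (p_1, 8) is redundant:
   offsets of the same sign are compared by magnitude, and the larger
   dominating offset subsumes the smaller one.  Offsets of opposite signs
   are tracked separately via the pos_p flag, since they guard overflow
   in different directions.  */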
/* Optimize away redundant UBSAN_PTR calls.  */

static bool
maybe_optimize_ubsan_ptr_ifn (sanopt_ctx *ctx, gimple *stmt)
{
  poly_int64 bitsize, pbitpos;
  machine_mode mode;
  int volatilep = 0, reversep, unsignedp = 0;
  tree offset;

  gcc_assert (gimple_call_num_args (stmt) == 2);
  tree ptr = gimple_call_arg (stmt, 0);
  tree off = gimple_call_arg (stmt, 1);

  if (TREE_CODE (off) != INTEGER_CST)
    return false;

  if (integer_zerop (off))
    return true;

  offset_int cur_offset = wi::sext (wi::to_offset (off), POINTER_SIZE);
  if (has_dominating_ubsan_ptr_check (ctx, ptr, cur_offset))
    return true;

  tree base = ptr;
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      base = TREE_OPERAND (base, 0);

      HOST_WIDE_INT bitpos;
      base = get_inner_reference (base, &bitsize, &pbitpos, &offset, &mode,
				  &unsignedp, &reversep, &volatilep);
      if ((offset == NULL_TREE || TREE_CODE (offset) == INTEGER_CST)
	  && DECL_P (base)
	  && ((!VAR_P (base)
	       && TREE_CODE (base) != PARM_DECL
	       && TREE_CODE (base) != RESULT_DECL)
	      || !DECL_REGISTER (base))
	  && pbitpos.is_constant (&bitpos))
	{
	  offset_int expr_offset;
	  if (offset)
	    expr_offset = wi::to_offset (offset) + bitpos / BITS_PER_UNIT;
	  else
	    expr_offset = bitpos / BITS_PER_UNIT;
	  expr_offset = wi::sext (expr_offset, POINTER_SIZE);
	  offset_int total_offset = expr_offset + cur_offset;
	  if (total_offset != wi::sext (total_offset, POINTER_SIZE))
	    {
	      record_ubsan_ptr_check_stmt (ctx, stmt, ptr, cur_offset);
	      return false;
	    }

	  /* If BASE is a fixed size automatic variable or
	     global variable defined in the current TU, we don't have
	     to instrument anything if the offset stays within the bounds
	     of the variable.  */
	  if ((VAR_P (base)
	       || TREE_CODE (base) == PARM_DECL
	       || TREE_CODE (base) == RESULT_DECL)
	      && DECL_SIZE_UNIT (base)
	      && TREE_CODE (DECL_SIZE_UNIT (base)) == INTEGER_CST
	      && (!is_global_var (base) || decl_binds_to_current_def_p (base)))
	    {
	      offset_int base_size = wi::to_offset (DECL_SIZE_UNIT (base));
	      if (!wi::neg_p (expr_offset)
		  && wi::les_p (total_offset, base_size))
		{
		  if (!wi::neg_p (total_offset)
		      && wi::les_p (total_offset, base_size))
		    return true;
		}
	    }

	  /* An expression UBSAN_PTR (&MEM_REF[ptr + x], y) can be
	     handled as follows:

	     1) sign (x) == sign (y), then check for a dominating check
		of (x + y)
	     2) sign (x) != sign (y), then first check if we have a dominating
		check for ptr + x.  If so, then we have 2 situations:
		a) sign (x) == sign (x + y), here we are done, example:
		   UBSAN_PTR (&MEM_REF[ptr + 100], -50)
		b) check for a dominating check of ptr + x + y.
	     */

	  bool sign_cur_offset = !wi::neg_p (cur_offset);
	  bool sign_expr_offset = !wi::neg_p (expr_offset);

	  tree base_addr
	    = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (base)), base);

	  bool add = false;
	  if (sign_cur_offset == sign_expr_offset)
	    {
	      if (has_dominating_ubsan_ptr_check (ctx, base_addr, total_offset))
		return true;
	      else
		add = true;
	    }
	  else
	    {
	      if (!has_dominating_ubsan_ptr_check (ctx, base_addr, expr_offset))
		; /* Don't record base_addr + expr_offset, it's not a guarding
		     check.  */
	      else
		{
		  bool sign_total_offset = !wi::neg_p (total_offset);
		  if (sign_expr_offset == sign_total_offset)
		    return true;
		  else
		    {
		      if (has_dominating_ubsan_ptr_check (ctx, base_addr,
							  total_offset))
			return true;
		      else
			add = true;
		    }
		}
	    }

	  /* Record a new dominating check for base_addr + total_offset.  */
	  if (add && !operand_equal_p (base, base_addr, 0))
	    record_ubsan_ptr_check_stmt (ctx, stmt, base_addr,
					 total_offset);
	}
    }

  /* For this PTR we don't have any UBSAN_PTR stmts recorded, so there's
     nothing to optimize yet.  */
  record_ubsan_ptr_check_stmt (ctx, stmt, ptr, cur_offset);

  return false;
}

/* Optimize away redundant UBSAN_VPTR calls.  The second argument
   is the value loaded from the virtual table, so rely on FRE to find out
   when we can actually optimize.  */

static bool
maybe_optimize_ubsan_vptr_ifn (class sanopt_ctx *ctx, gimple *stmt)
{
  gcc_assert (gimple_call_num_args (stmt) == 5);
  sanopt_tree_triplet triplet;
  triplet.t1 = gimple_call_arg (stmt, 0);
  triplet.t2 = gimple_call_arg (stmt, 1);
  triplet.t3 = gimple_call_arg (stmt, 3);

  auto_vec<gimple *> &v = ctx->vptr_check_map.get_or_insert (triplet);
  gimple *g = maybe_get_dominating_check (v);
  if (!g)
    {
      /* For this PTR we don't have any UBSAN_VPTR stmts recorded, so there's
	 nothing to optimize yet.  */
      v.safe_push (stmt);
      return false;
    }

  return true;
}
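
/* An example of the length reasoning used below: an ASAN_CHECK of
   4 bytes at P that dominates another ASAN_CHECK of 4 (or fewer) bytes
   at P makes the latter redundant, provided no potentially freeing call
   may occur between them; a dominated check of a larger length has to
   be kept.  */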
/* Returns TRUE if ASan check of length LEN in block BB can be removed
   if preceded by checks in V.  */

static bool
can_remove_asan_check (auto_vec<gimple *> &v, tree len, basic_block bb)
{
  unsigned int i;
  gimple *g;
  gimple *to_pop = NULL;
  bool remove = false;
  basic_block last_bb = bb;
  bool cleanup = false;

  FOR_EACH_VEC_ELT_REVERSE (v, i, g)
    {
      basic_block gbb = gimple_bb (g);
      sanopt_info *si = (sanopt_info *) gbb->aux;
      if (gimple_uid (g) < si->freeing_call_events)
	{
	  /* If there is a potentially freeing call after g in gbb, we should
	     remove it from the vector; it can't be used in the
	     optimization.  */
	  cleanup = true;
	  continue;
	}

      tree glen = gimple_call_arg (g, 2);
      gcc_assert (TREE_CODE (glen) == INTEGER_CST);

      /* If we've checked only a smaller length than we want to check now,
	 we can't remove the current stmt.  If g is in the same basic block,
	 we want to remove it though, as the current stmt is better.  */
      if (tree_int_cst_lt (glen, len))
	{
	  if (gbb == bb)
	    {
	      to_pop = g;
	      cleanup = true;
	    }
	  continue;
	}

      while (last_bb != gbb)
	{
	  /* Paths from last_bb to bb have been checked before.
	     gbb is necessarily a dominator of last_bb, but not necessarily
	     the immediate dominator.  */
	  if (((sanopt_info *) last_bb->aux)->freeing_call_events)
	    break;

	  basic_block imm = get_immediate_dominator (CDI_DOMINATORS, last_bb);
	  gcc_assert (imm);
	  if (imm_dom_path_with_freeing_call (last_bb, imm))
	    break;

	  last_bb = imm;
	}
      if (last_bb == gbb)
	remove = true;
      break;
    }

  if (cleanup)
    {
      unsigned int j = 0, l = v.length ();
      for (i = 0; i < l; i++)
	if (v[i] != to_pop
	    && (gimple_uid (v[i])
		== ((sanopt_info *)
		    gimple_bb (v[i])->aux)->freeing_call_events))
	  {
	    if (i != j)
	      v[j] = v[i];
	    j++;
	  }
      v.truncate (j);
    }

  return remove;
}

/* Optimize away redundant ASAN_CHECK calls.  */

static bool
maybe_optimize_asan_check_ifn (class sanopt_ctx *ctx, gimple *stmt)
{
  gcc_assert (gimple_call_num_args (stmt) == 4);
  tree ptr = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  basic_block bb = gimple_bb (stmt);
  sanopt_info *info = (sanopt_info *) bb->aux;

  if (TREE_CODE (len) != INTEGER_CST)
    return false;
  if (integer_zerop (len))
    return false;

  gimple_set_uid (stmt, info->freeing_call_events);

  auto_vec<gimple *> *ptr_checks = &ctx->asan_check_map.get_or_insert (ptr);

  tree base_addr = maybe_get_single_definition (ptr);
  auto_vec<gimple *> *base_checks = NULL;
  if (base_addr)
    {
      base_checks = &ctx->asan_check_map.get_or_insert (base_addr);
      /* Original pointer might have been invalidated.  */
      ptr_checks = ctx->asan_check_map.get (ptr);
    }

  gimple *g = maybe_get_dominating_check (*ptr_checks);
  gimple *g2 = NULL;

  if (base_checks)
    /* Try with base address as well.  */
    g2 = maybe_get_dominating_check (*base_checks);

  if (g == NULL && g2 == NULL)
    {
      /* For this PTR we don't have any ASAN_CHECK stmts recorded, so there's
	 nothing to optimize yet.  */
      ptr_checks->safe_push (stmt);
      if (base_checks)
	base_checks->safe_push (stmt);
      return false;
    }

  bool remove = false;

  if (ptr_checks)
    remove = can_remove_asan_check (*ptr_checks, len, bb);

  if (!remove && base_checks)
    /* Try with base address as well.  */
    remove = can_remove_asan_check (*base_checks, len, bb);

  if (!remove)
    {
      ptr_checks->safe_push (stmt);
      if (base_checks)
	base_checks->safe_push (stmt);
    }

  return remove;
}
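
/* A simplified sketch of the base-address trick above (GIMPLE names are
   hypothetical):

     base_3 = &buf;
     ASAN_CHECK (flags, base_3, 4, 4);
     ...
     ASAN_CHECK (flags, base_3, 4, 4);

   Each check is recorded both under the SSA name base_3 and, through
   maybe_get_single_definition, under &buf itself, so a dominated
   duplicate phrased either way can be dropped.  */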
/* Try to optimize away redundant UBSAN_NULL and ASAN_CHECK calls.

   We walk blocks in the CFG via a depth first search of the dominator
   tree; we push unique UBSAN_NULL or ASAN_CHECK statements into a vector
   in the NULL_CHECK_MAP or ASAN_CHECK_MAP hash maps as we enter the
   blocks.  When leaving a block, we mark the block as visited; then
   when checking the statements in the vector, we ignore statements that
   are coming from already visited blocks, because these cannot dominate
   anything anymore.  CTX is a sanopt context.  */

static void
sanopt_optimize_walker (basic_block bb, class sanopt_ctx *ctx)
{
  basic_block son;
  gimple_stmt_iterator gsi;
  sanopt_info *info = (sanopt_info *) bb->aux;
  bool asan_check_optimize = (flag_sanitize & SANITIZE_ADDRESS) != 0;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
    {
      gimple *stmt = gsi_stmt (gsi);
      bool remove = false;

      if (!is_gimple_call (stmt))
	{
	  /* Handle asm volatile or asm with "memory" clobber
	     the same as a potentially freeing call.  */
	  gasm *asm_stmt = dyn_cast <gasm *> (stmt);
	  if (asm_stmt
	      && asan_check_optimize
	      && (gimple_asm_clobbers_memory_p (asm_stmt)
		  || gimple_asm_volatile_p (asm_stmt)))
	    info->freeing_call_events++;
	  gsi_next (&gsi);
	  continue;
	}

      if (asan_check_optimize && !nonfreeing_call_p (stmt))
	info->freeing_call_events++;

      /* If __asan_before_dynamic_init ("module"); is followed by
	 __asan_after_dynamic_init (); without intervening memory loads/stores,
	 there is nothing to guard, so optimize both away.  */
      if (asan_check_optimize
	  && gimple_call_builtin_p (stmt, BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT))
	{
	  use_operand_p use;
	  gimple *use_stmt;
	  if (single_imm_use (gimple_vdef (stmt), &use, &use_stmt))
	    {
	      if (is_gimple_call (use_stmt)
		  && gimple_call_builtin_p (use_stmt,
					    BUILT_IN_ASAN_AFTER_DYNAMIC_INIT))
		{
		  unlink_stmt_vdef (use_stmt);
		  gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
		  gsi_remove (&gsi2, true);
		  remove = true;
		}
	    }
	}

      if (gimple_call_internal_p (stmt))
	switch (gimple_call_internal_fn (stmt))
	  {
	  case IFN_UBSAN_NULL:
	    remove = maybe_optimize_ubsan_null_ifn (ctx, stmt);
	    break;
	  case IFN_UBSAN_VPTR:
	    remove = maybe_optimize_ubsan_vptr_ifn (ctx, stmt);
	    break;
	  case IFN_UBSAN_PTR:
	    remove = maybe_optimize_ubsan_ptr_ifn (ctx, stmt);
	    break;
	  case IFN_ASAN_CHECK:
	    if (asan_check_optimize)
	      remove = maybe_optimize_asan_check_ifn (ctx, stmt);
	    if (!remove)
	      ctx->asan_num_accesses++;
	    break;
	  case IFN_ASAN_MARK:
	    ctx->contains_asan_mark = true;
	    break;
	  default:
	    break;
	  }

      if (remove)
	{
	  /* Drop this check.  */
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Optimizing out: ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	    }
	  unlink_stmt_vdef (stmt);
	  gsi_remove (&gsi, true);
	}
      else
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Leaving: ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	    }

	  gsi_next (&gsi);
	}
    }

  if (asan_check_optimize)
    {
      info->has_freeing_call_p = info->freeing_call_events != 0;
      info->has_freeing_call_computed_p = true;
    }

  for (son = first_dom_son (CDI_DOMINATORS, bb);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    sanopt_optimize_walker (son, ctx);

  /* We're leaving this BB, so mark it to that effect.  */
  info->visited_p = true;
}

/* Try to remove redundant sanitizer checks in function FUN.  */

static int
sanopt_optimize (function *fun, bool *contains_asan_mark)
{
  class sanopt_ctx ctx;
  ctx.asan_num_accesses = 0;
  ctx.contains_asan_mark = false;

  /* Set up block info for each basic block.  */
  alloc_aux_for_blocks (sizeof (sanopt_info));

  /* We're going to do a dominator walk, so ensure that we have
     dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);

  /* Recursively walk the dominator tree optimizing away
     redundant checks.  */
  sanopt_optimize_walker (ENTRY_BLOCK_PTR_FOR_FN (fun), &ctx);

  free_aux_for_blocks ();

  *contains_asan_mark = ctx.contains_asan_mark;
  return ctx.asan_num_accesses;
}
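
/* For example, a dynamically initialized namespace-scope variable whose
   constructor has been inlined and optimized away can leave behind

     __asan_before_dynamic_init ("module");
     __asan_after_dynamic_init ();

   with no memory access in between; the walker above deletes such a
   pair, since there is nothing left to guard.  */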
/* Perform optimization of sanitize functions.  */

namespace {

const pass_data pass_data_sanopt =
{
  GIMPLE_PASS, /* type */
  "sanopt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_sanopt : public gimple_opt_pass
{
public:
  pass_sanopt (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_sanopt, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_sanitize; }
  virtual unsigned int execute (function *);

}; // class pass_sanopt

/* Sanitize all ASAN_MARK unpoison calls that are not reachable from a BB
   that contains an ASAN_MARK poison.  All these ASAN_MARK unpoison calls
   can be removed as all variables are unpoisoned in a function prologue.  */

static void
sanitize_asan_mark_unpoison (void)
{
  /* 1) Find all BBs that contain an ASAN_MARK poison call.  */
  auto_sbitmap with_poison (last_basic_block_for_fn (cfun) + 1);
  bitmap_clear (with_poison);
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      if (bitmap_bit_p (with_poison, bb->index))
	continue;

      gimple_stmt_iterator gsi;
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  if (asan_mark_p (stmt, ASAN_MARK_POISON))
	    {
	      bitmap_set_bit (with_poison, bb->index);
	      break;
	    }
	}
    }

  auto_sbitmap poisoned (last_basic_block_for_fn (cfun) + 1);
  bitmap_clear (poisoned);
  auto_sbitmap worklist (last_basic_block_for_fn (cfun) + 1);
  bitmap_copy (worklist, with_poison);

  /* 2) Propagate the information to all reachable blocks.  */
  while (!bitmap_empty_p (worklist))
    {
      unsigned i = bitmap_first_set_bit (worklist);
      bitmap_clear_bit (worklist, i);
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      gcc_assert (bb);

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (!bitmap_bit_p (poisoned, e->dest->index))
	  {
	    bitmap_set_bit (poisoned, e->dest->index);
	    bitmap_set_bit (worklist, e->dest->index);
	  }
    }

  /* 3) Iterate all BBs not included in POISONED BBs and remove unpoison
	ASAN_MARKs preceding an ASAN_MARK poison (which can still happen).  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      if (bitmap_bit_p (poisoned, bb->index))
	continue;

      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	{
	  gimple *stmt = gsi_stmt (gsi);
	  if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
	    {
	      if (asan_mark_p (stmt, ASAN_MARK_POISON))
		break;
	      else
		{
		  if (dump_file)
		    fprintf (dump_file, "Removing ASAN_MARK unpoison\n");
		  unlink_stmt_vdef (stmt);
		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		  continue;
		}
	    }

	  gsi_next (&gsi);
	}
    }
}
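
/* Sketch of the effect: if no ASAN_MARK poison can reach block B, an
   ASAN_MARK (UNPOISON, &x, size) in B has nothing to undo, because all
   variables start out unpoisoned in the function prologue, so the call
   is removed.  */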
/* Return true when STMT is either an ASAN_CHECK call or a call of a function
   that can contain an ASAN_CHECK.  */

static bool
maybe_contains_asan_check (gimple *stmt)
{
  if (is_gimple_call (stmt))
    {
      if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
	return false;
      else
	return !(gimple_call_flags (stmt) & ECF_CONST);
    }
  else if (is_a<gasm *> (stmt))
    return true;

  return false;
}

/* Sanitize all ASAN_MARK poison calls that are not followed by an ASAN_CHECK
   call.  These calls can be removed.  */

static void
sanitize_asan_mark_poison (void)
{
  /* 1) Find all BBs that possibly contain an ASAN_CHECK.  */
  auto_sbitmap with_check (last_basic_block_for_fn (cfun) + 1);
  bitmap_clear (with_check);
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  if (maybe_contains_asan_check (stmt))
	    {
	      bitmap_set_bit (with_check, bb->index);
	      break;
	    }
	}
    }

  auto_sbitmap can_reach_check (last_basic_block_for_fn (cfun) + 1);
  bitmap_clear (can_reach_check);
  auto_sbitmap worklist (last_basic_block_for_fn (cfun) + 1);
  bitmap_copy (worklist, with_check);

  /* 2) Propagate the information backwards to all predecessor blocks.  */
  while (!bitmap_empty_p (worklist))
    {
      unsigned i = bitmap_first_set_bit (worklist);
      bitmap_clear_bit (worklist, i);
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      gcc_assert (bb);

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
	if (!bitmap_bit_p (can_reach_check, e->src->index))
	  {
	    bitmap_set_bit (can_reach_check, e->src->index);
	    bitmap_set_bit (worklist, e->src->index);
	  }
    }

  /* 3) Iterate all BBs not included in CAN_REACH_CHECK BBs and remove poison
	ASAN_MARKs not followed by a call to a function containing an
	ASAN_CHECK.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      if (bitmap_bit_p (can_reach_check, bb->index))
	continue;

      gimple_stmt_iterator gsi;
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
	{
	  gimple *stmt = gsi_stmt (gsi);
	  if (maybe_contains_asan_check (stmt))
	    break;
	  else if (asan_mark_p (stmt, ASAN_MARK_POISON))
	    {
	      if (dump_file)
		fprintf (dump_file, "Removing ASAN_MARK poison\n");
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	      gimple_stmt_iterator gsi2 = gsi;
	      gsi_prev (&gsi);
	      gsi_remove (&gsi2, true);
	      continue;
	    }

	  gsi_prev (&gsi);
	}
    }
}

/* Rewrite all usages of tree OP which is a PARM_DECL with a VAR_DECL
   that is its DECL_VALUE_EXPR.  */

static tree
rewrite_usage_of_param (tree *op, int *walk_subtrees, void *)
{
  if (TREE_CODE (*op) == PARM_DECL && DECL_HAS_VALUE_EXPR_P (*op))
    {
      *op = DECL_VALUE_EXPR (*op);
      *walk_subtrees = 0;
    }

  return NULL;
}
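
/* As an illustrative C-level example (names are hypothetical), a function

     int foo (int x) { int *p = &x; return *p; }

   is rewritten by sanitize_rewrite_addressable_params below roughly as

     int foo (int x) { int x_copy = x; int *p = &x_copy; return *p; }

   so the address-taken entity becomes an ordinary automatic variable
   that the ASan stack instrumentation already knows how to protect.  */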
/* For a given function FUN, rewrite all addressable parameters so that
   a new automatic variable is introduced.  Right after function entry
   the parameter is assigned to the variable.  */

static void
sanitize_rewrite_addressable_params (function *fun)
{
  gimple *g;
  gimple_seq stmts = NULL;
  bool has_any_addressable_param = false;
  auto_vec<tree> clear_value_expr_list;

  for (tree arg = DECL_ARGUMENTS (current_function_decl);
       arg; arg = DECL_CHAIN (arg))
    {
      tree type = TREE_TYPE (arg);
      if (TREE_ADDRESSABLE (arg)
	  && !TREE_ADDRESSABLE (type)
	  && !TREE_THIS_VOLATILE (arg)
	  && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
	{
	  TREE_ADDRESSABLE (arg) = 0;
	  /* The parameter is no longer addressable.  */
	  has_any_addressable_param = true;

	  /* Create a new automatic variable.  */
	  tree var = build_decl (DECL_SOURCE_LOCATION (arg),
				 VAR_DECL, DECL_NAME (arg), type);
	  TREE_ADDRESSABLE (var) = 1;
	  DECL_IGNORED_P (var) = 1;

	  gimple_add_tmp_var (var);

	  /* We skip parameters that have a DECL_VALUE_EXPR.  */
	  if (DECL_HAS_VALUE_EXPR_P (arg))
	    continue;

	  if (dump_file)
	    {
	      fprintf (dump_file,
		       "Rewriting parameter whose address is taken: ");
	      print_generic_expr (dump_file, arg, dump_flags);
	      fputc ('\n', dump_file);
	    }

	  SET_DECL_PT_UID (var, DECL_PT_UID (arg));

	  /* Assign value of parameter to newly created variable.  */
	  if ((TREE_CODE (type) == COMPLEX_TYPE
	       || TREE_CODE (type) == VECTOR_TYPE))
	    {
	      /* We need to create a SSA name that will be used for the
		 assignment.  */
	      DECL_GIMPLE_REG_P (arg) = 1;
	      tree tmp = get_or_create_ssa_default_def (cfun, arg);
	      g = gimple_build_assign (var, tmp);
	      gimple_set_location (g, DECL_SOURCE_LOCATION (arg));
	      gimple_seq_add_stmt (&stmts, g);
	    }
	  else
	    {
	      g = gimple_build_assign (var, arg);
	      gimple_set_location (g, DECL_SOURCE_LOCATION (arg));
	      gimple_seq_add_stmt (&stmts, g);
	    }

	  if (target_for_debug_bind (arg))
	    {
	      g = gimple_build_debug_bind (arg, var, NULL);
	      gimple_seq_add_stmt (&stmts, g);
	      clear_value_expr_list.safe_push (arg);
	    }

	  DECL_HAS_VALUE_EXPR_P (arg) = 1;
	  SET_DECL_VALUE_EXPR (arg, var);
	}
    }

  if (!has_any_addressable_param)
    return;

  /* Replace all usages of PARM_DECLs with the newly
     created variable VAR.  */
  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  gimple_stmt_iterator it = gsi_for_stmt (stmt);
	  walk_gimple_stmt (&it, NULL, rewrite_usage_of_param, NULL);
	}
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gphi *phi = dyn_cast<gphi *> (gsi_stmt (gsi));
	  for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
	    {
	      hash_set<tree> visited_nodes;
	      walk_tree (gimple_phi_arg_def_ptr (phi, i),
			 rewrite_usage_of_param, NULL, &visited_nodes);
	    }
	}
    }

  /* Unset value expr for parameters for which we created debug bind
     expressions.  */
  unsigned i;
  tree arg;
  FOR_EACH_VEC_ELT (clear_value_expr_list, i, arg)
    {
      DECL_HAS_VALUE_EXPR_P (arg) = 0;
      SET_DECL_VALUE_EXPR (arg, NULL_TREE);
    }

  /* Insert default assignments at the beginning of the function.  */
  basic_block entry_bb = ENTRY_BLOCK_PTR_FOR_FN (fun);
  entry_bb = split_edge (single_succ_edge (entry_bb));

  gimple_stmt_iterator gsi = gsi_start_bb (entry_bb);
  gsi_insert_seq_before (&gsi, stmts, GSI_NEW_STMT);
}
unsigned int
pass_sanopt::execute (function *fun)
{
  basic_block bb;
  int asan_num_accesses = 0;
  bool contains_asan_mark = false;
  int ret = 0;

  /* Try to remove redundant checks.  */
  if (optimize
      && (flag_sanitize
	  & (SANITIZE_NULL | SANITIZE_ALIGNMENT
	     | SANITIZE_ADDRESS | SANITIZE_VPTR | SANITIZE_POINTER_OVERFLOW)))
    asan_num_accesses = sanopt_optimize (fun, &contains_asan_mark);
  else if (flag_sanitize & SANITIZE_ADDRESS)
    {
      gimple_stmt_iterator gsi;
      FOR_EACH_BB_FN (bb, fun)
	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	  {
	    gimple *stmt = gsi_stmt (gsi);
	    if (gimple_call_internal_p (stmt, IFN_ASAN_CHECK))
	      ++asan_num_accesses;
	    else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
	      contains_asan_mark = true;
	  }
    }

  if (contains_asan_mark)
    {
      sanitize_asan_mark_unpoison ();
      sanitize_asan_mark_poison ();
    }

  if (asan_sanitize_stack_p ())
    sanitize_rewrite_addressable_params (fun);

  bool use_calls = param_asan_instrumentation_with_call_threshold < INT_MAX
    && asan_num_accesses >= param_asan_instrumentation_with_call_threshold;

  hash_map<tree, tree> shadow_vars_mapping;
  bool need_commit_edge_insert = false;
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
	{
	  gimple *stmt = gsi_stmt (gsi);
	  bool no_next = false;

	  if (!is_gimple_call (stmt))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  if (gimple_call_internal_p (stmt))
	    {
	      enum internal_fn ifn = gimple_call_internal_fn (stmt);
	      int this_ret = TODO_cleanup_cfg;
	      switch (ifn)
		{
		case IFN_UBSAN_NULL:
		  no_next = ubsan_expand_null_ifn (&gsi);
		  break;
		case IFN_UBSAN_BOUNDS:
		  no_next = ubsan_expand_bounds_ifn (&gsi);
		  break;
		case IFN_UBSAN_OBJECT_SIZE:
		  no_next = ubsan_expand_objsize_ifn (&gsi);
		  break;
		case IFN_UBSAN_PTR:
		  no_next = ubsan_expand_ptr_ifn (&gsi);
		  break;
		case IFN_UBSAN_VPTR:
		  no_next = ubsan_expand_vptr_ifn (&gsi);
		  break;
		case IFN_ASAN_CHECK:
		  no_next = asan_expand_check_ifn (&gsi, use_calls);
		  break;
		case IFN_ASAN_MARK:
		  no_next = asan_expand_mark_ifn (&gsi);
		  break;
		case IFN_ASAN_POISON:
		  no_next = asan_expand_poison_ifn (&gsi,
						    &need_commit_edge_insert,
						    shadow_vars_mapping);
		  break;
		default:
		  this_ret = 0;
		  break;
		}
	      ret |= this_ret;
	    }
	  else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
	    {
	      tree callee = gimple_call_fndecl (stmt);
	      switch (DECL_FUNCTION_CODE (callee))
		{
		case BUILT_IN_UNREACHABLE:
		  if (sanitize_flags_p (SANITIZE_UNREACHABLE))
		    no_next = ubsan_instrument_unreachable (&gsi);
		  break;
		default:
		  break;
		}
	    }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Expanded: ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	    }

	  if (!no_next)
	    gsi_next (&gsi);
	}
    }

  if (need_commit_edge_insert)
    gsi_commit_edge_inserts ();

  return ret;
}
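
/* Note on USE_CALLS above: with
   --param asan-instrumentation-with-call-threshold=N, a function with N
   or more instrumented memory accesses emits __asan_load<size> and
   __asan_store<size> runtime calls instead of inline shadow-memory
   checks, trading some runtime speed for smaller code.  */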
} // anon namespace

gimple_opt_pass *
make_pass_sanopt (gcc::context *ctxt)
{
  return new pass_sanopt (ctxt);
}