1 /* Miscellaneous SSA utility functions. 2 Copyright (C) 2001-2013 Free Software Foundation, Inc. 3 4 This file is part of GCC. 5 6 GCC is free software; you can redistribute it and/or modify 7 it under the terms of the GNU General Public License as published by 8 the Free Software Foundation; either version 3, or (at your option) 9 any later version. 10 11 GCC is distributed in the hope that it will be useful, 12 but WITHOUT ANY WARRANTY; without even the implied warranty of 13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 GNU General Public License for more details. 15 16 You should have received a copy of the GNU General Public License 17 along with GCC; see the file COPYING3. If not see 18 <http://www.gnu.org/licenses/>. */ 19 20 #include "config.h" 21 #include "system.h" 22 #include "coretypes.h" 23 #include "tm.h" 24 #include "tree.h" 25 #include "flags.h" 26 #include "tm_p.h" 27 #include "target.h" 28 #include "ggc.h" 29 #include "langhooks.h" 30 #include "basic-block.h" 31 #include "function.h" 32 #include "gimple-pretty-print.h" 33 #include "bitmap.h" 34 #include "pointer-set.h" 35 #include "tree-flow.h" 36 #include "gimple.h" 37 #include "tree-inline.h" 38 #include "hashtab.h" 39 #include "tree-pass.h" 40 #include "diagnostic-core.h" 41 #include "cfgloop.h" 42 43 /* Pointer map of variable mappings, keyed by edge. */ 44 static struct pointer_map_t *edge_var_maps; 45 46 47 /* Add a mapping with PHI RESULT and PHI DEF associated with edge E. */ 48 49 void 50 redirect_edge_var_map_add (edge e, tree result, tree def, source_location locus) 51 { 52 void **slot; 53 edge_var_map_vector *head; 54 edge_var_map new_node; 55 56 if (edge_var_maps == NULL) 57 edge_var_maps = pointer_map_create (); 58 59 slot = pointer_map_insert (edge_var_maps, e); 60 head = (edge_var_map_vector *) *slot; 61 if (!head) 62 vec_safe_reserve (head, 5); 63 new_node.def = def; 64 new_node.result = result; 65 new_node.locus = locus; 66 67 vec_safe_push (head, new_node); 68 *slot = head; 69 } 70 71 72 /* Clear the var mappings in edge E. */ 73 74 void 75 redirect_edge_var_map_clear (edge e) 76 { 77 void **slot; 78 edge_var_map_vector *head; 79 80 if (!edge_var_maps) 81 return; 82 83 slot = pointer_map_contains (edge_var_maps, e); 84 85 if (slot) 86 { 87 head = (edge_var_map_vector *) *slot; 88 vec_free (head); 89 *slot = NULL; 90 } 91 } 92 93 94 /* Duplicate the redirected var mappings in OLDE in NEWE. 95 96 Since we can't remove a mapping, let's just duplicate it. This assumes a 97 pointer_map can have multiple edges mapping to the same var_map (many to 98 one mapping), since we don't remove the previous mappings. */ 99 100 void 101 redirect_edge_var_map_dup (edge newe, edge olde) 102 { 103 void **new_slot, **old_slot; 104 edge_var_map_vector *head; 105 106 if (!edge_var_maps) 107 return; 108 109 new_slot = pointer_map_insert (edge_var_maps, newe); 110 old_slot = pointer_map_contains (edge_var_maps, olde); 111 if (!old_slot) 112 return; 113 head = (edge_var_map_vector *) *old_slot; 114 115 edge_var_map_vector *new_head = NULL; 116 if (head) 117 new_head = vec_safe_copy (head); 118 else 119 vec_safe_reserve (new_head, 5); 120 *new_slot = new_head; 121 } 122 123 124 /* Return the variable mappings for a given edge. If there is none, return 125 NULL. */ 126 127 edge_var_map_vector * 128 redirect_edge_var_map_vector (edge e) 129 { 130 void **slot; 131 132 /* Hey, what kind of idiot would... you'd be surprised. 
*/ 133 if (!edge_var_maps) 134 return NULL; 135 136 slot = pointer_map_contains (edge_var_maps, e); 137 if (!slot) 138 return NULL; 139 140 return (edge_var_map_vector *) *slot; 141 } 142 143 /* Used by redirect_edge_var_map_destroy to free all memory. */ 144 145 static bool 146 free_var_map_entry (const void *key ATTRIBUTE_UNUSED, 147 void **value, 148 void *data ATTRIBUTE_UNUSED) 149 { 150 edge_var_map_vector *head = (edge_var_map_vector *) *value; 151 vec_free (head); 152 return true; 153 } 154 155 /* Clear the edge variable mappings. */ 156 157 void 158 redirect_edge_var_map_destroy (void) 159 { 160 if (edge_var_maps) 161 { 162 pointer_map_traverse (edge_var_maps, free_var_map_entry, NULL); 163 pointer_map_destroy (edge_var_maps); 164 edge_var_maps = NULL; 165 } 166 } 167 168 169 /* Remove the corresponding arguments from the PHI nodes in E's 170 destination block and redirect it to DEST. Return redirected edge. 171 The list of removed arguments is stored in a vector accessed 172 through edge_var_maps. */ 173 174 edge 175 ssa_redirect_edge (edge e, basic_block dest) 176 { 177 gimple_stmt_iterator gsi; 178 gimple phi; 179 180 redirect_edge_var_map_clear (e); 181 182 /* Remove the appropriate PHI arguments in E's destination block. */ 183 for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi)) 184 { 185 tree def; 186 source_location locus ; 187 188 phi = gsi_stmt (gsi); 189 def = gimple_phi_arg_def (phi, e->dest_idx); 190 locus = gimple_phi_arg_location (phi, e->dest_idx); 191 192 if (def == NULL_TREE) 193 continue; 194 195 redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus); 196 } 197 198 e = redirect_edge_succ_nodup (e, dest); 199 200 return e; 201 } 202 203 204 /* Add PHI arguments queued in PENDING_STMT list on edge E to edge 205 E->dest. */ 206 207 void 208 flush_pending_stmts (edge e) 209 { 210 gimple phi; 211 edge_var_map_vector *v; 212 edge_var_map *vm; 213 int i; 214 gimple_stmt_iterator gsi; 215 216 v = redirect_edge_var_map_vector (e); 217 if (!v) 218 return; 219 220 for (gsi = gsi_start_phis (e->dest), i = 0; 221 !gsi_end_p (gsi) && v->iterate (i, &vm); 222 gsi_next (&gsi), i++) 223 { 224 tree def; 225 226 phi = gsi_stmt (gsi); 227 def = redirect_edge_var_map_def (vm); 228 add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm)); 229 } 230 231 redirect_edge_var_map_clear (e); 232 } 233 234 /* Given a tree for an expression for which we might want to emit 235 locations or values in debug information (generally a variable, but 236 we might deal with other kinds of trees in the future), return the 237 tree that should be used as the variable of a DEBUG_BIND STMT or 238 VAR_LOCATION INSN or NOTE. Return NULL if VAR is not to be tracked. */ 239 240 tree 241 target_for_debug_bind (tree var) 242 { 243 if (!MAY_HAVE_DEBUG_STMTS) 244 return NULL_TREE; 245 246 if (TREE_CODE (var) == SSA_NAME) 247 { 248 var = SSA_NAME_VAR (var); 249 if (var == NULL_TREE) 250 return NULL_TREE; 251 } 252 253 if ((TREE_CODE (var) != VAR_DECL 254 || VAR_DECL_IS_VIRTUAL_OPERAND (var)) 255 && TREE_CODE (var) != PARM_DECL) 256 return NULL_TREE; 257 258 if (DECL_HAS_VALUE_EXPR_P (var)) 259 return target_for_debug_bind (DECL_VALUE_EXPR (var)); 260 261 if (DECL_IGNORED_P (var)) 262 return NULL_TREE; 263 264 /* var-tracking only tracks registers. */ 265 if (!is_gimple_reg_type (TREE_TYPE (var))) 266 return NULL_TREE; 267 268 return var; 269 } 270 271 /* Called via walk_tree, look for SSA_NAMEs that have already been 272 released. 
*/ 273 274 static tree 275 find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_) 276 { 277 struct walk_stmt_info *wi = (struct walk_stmt_info *) data_; 278 279 if (wi && wi->is_lhs) 280 return NULL_TREE; 281 282 if (TREE_CODE (*tp) == SSA_NAME) 283 { 284 if (SSA_NAME_IN_FREE_LIST (*tp)) 285 return *tp; 286 287 *walk_subtrees = 0; 288 } 289 else if (IS_TYPE_OR_DECL_P (*tp)) 290 *walk_subtrees = 0; 291 292 return NULL_TREE; 293 } 294 295 /* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced 296 by other DEBUG stmts, and replace uses of the DEF with the 297 newly-created debug temp. */ 298 299 void 300 insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var) 301 { 302 imm_use_iterator imm_iter; 303 use_operand_p use_p; 304 gimple stmt; 305 gimple def_stmt = NULL; 306 int usecount = 0; 307 tree value = NULL; 308 309 if (!MAY_HAVE_DEBUG_STMTS) 310 return; 311 312 /* If this name has already been registered for replacement, do nothing 313 as anything that uses this name isn't in SSA form. */ 314 if (name_registered_for_update_p (var)) 315 return; 316 317 /* Check whether there are debug stmts that reference this variable and, 318 if there are, decide whether we should use a debug temp. */ 319 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var) 320 { 321 stmt = USE_STMT (use_p); 322 323 if (!gimple_debug_bind_p (stmt)) 324 continue; 325 326 if (usecount++) 327 break; 328 329 if (gimple_debug_bind_get_value (stmt) != var) 330 { 331 /* Count this as an additional use, so as to make sure we 332 use a temp unless VAR's definition has a SINGLE_RHS that 333 can be shared. */ 334 usecount++; 335 break; 336 } 337 } 338 339 if (!usecount) 340 return; 341 342 if (gsi) 343 def_stmt = gsi_stmt (*gsi); 344 else 345 def_stmt = SSA_NAME_DEF_STMT (var); 346 347 /* If we didn't get an insertion point, and the stmt has already 348 been removed, we won't be able to insert the debug bind stmt, so 349 we'll have to drop debug information. */ 350 if (gimple_code (def_stmt) == GIMPLE_PHI) 351 { 352 value = degenerate_phi_result (def_stmt); 353 if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL)) 354 value = NULL; 355 /* error_mark_node is what fixup_noreturn_call changes PHI arguments 356 to. */ 357 else if (value == error_mark_node) 358 value = NULL; 359 } 360 else if (is_gimple_assign (def_stmt)) 361 { 362 bool no_value = false; 363 364 if (!dom_info_available_p (CDI_DOMINATORS)) 365 { 366 struct walk_stmt_info wi; 367 368 memset (&wi, 0, sizeof (wi)); 369 370 /* When removing blocks without following reverse dominance 371 order, we may sometimes encounter SSA_NAMEs that have 372 already been released, referenced in other SSA_DEFs that 373 we're about to release. Consider: 374 375 <bb X>: 376 v_1 = foo; 377 378 <bb Y>: 379 w_2 = v_1 + bar; 380 # DEBUG w => w_2 381 382 If we deleted BB X first, propagating the value of w_2 383 won't do us any good. It's too late to recover their 384 original definition of v_1: when it was deleted, it was 385 only referenced in other DEFs, it couldn't possibly know 386 it should have been retained, and propagating every 387 single DEF just in case it might have to be propagated 388 into a DEBUG STMT would probably be too wasteful. 389 390 When dominator information is not readily available, we 391 check for and accept some loss of debug information. But 392 if it is available, there's no excuse for us to remove 393 blocks in the wrong order, so we don't even check for 394 dead SSA NAMEs. SSA verification shall catch any 395 errors. 
*/ 396 if ((!gsi && !gimple_bb (def_stmt)) 397 || walk_gimple_op (def_stmt, find_released_ssa_name, &wi)) 398 no_value = true; 399 } 400 401 if (!no_value) 402 value = gimple_assign_rhs_to_tree (def_stmt); 403 } 404 405 if (value) 406 { 407 /* If there's a single use of VAR, and VAR is the entire debug 408 expression (usecount would have been incremented again 409 otherwise), and the definition involves only constants and 410 SSA names, then we can propagate VALUE into this single use, 411 avoiding the temp. 412 413 We can also avoid using a temp if VALUE can be shared and 414 propagated into all uses, without generating expressions that 415 wouldn't be valid gimple RHSs. 416 417 Other cases that would require unsharing or non-gimple RHSs 418 are deferred to a debug temp, although we could avoid temps 419 at the expense of duplication of expressions. */ 420 421 if (CONSTANT_CLASS_P (value) 422 || gimple_code (def_stmt) == GIMPLE_PHI 423 || (usecount == 1 424 && (!gimple_assign_single_p (def_stmt) 425 || is_gimple_min_invariant (value))) 426 || is_gimple_reg (value)) 427 ; 428 else 429 { 430 gimple def_temp; 431 tree vexpr = make_node (DEBUG_EXPR_DECL); 432 433 def_temp = gimple_build_debug_bind (vexpr, 434 unshare_expr (value), 435 def_stmt); 436 437 DECL_ARTIFICIAL (vexpr) = 1; 438 TREE_TYPE (vexpr) = TREE_TYPE (value); 439 if (DECL_P (value)) 440 DECL_MODE (vexpr) = DECL_MODE (value); 441 else 442 DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (value)); 443 444 if (gsi) 445 gsi_insert_before (gsi, def_temp, GSI_SAME_STMT); 446 else 447 { 448 gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt); 449 gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT); 450 } 451 452 value = vexpr; 453 } 454 } 455 456 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var) 457 { 458 if (!gimple_debug_bind_p (stmt)) 459 continue; 460 461 if (value) 462 { 463 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter) 464 /* unshare_expr is not needed here. vexpr is either a 465 SINGLE_RHS, that can be safely shared, some other RHS 466 that was unshared when we found it had a single debug 467 use, or a DEBUG_EXPR_DECL, that can be safely 468 shared. */ 469 SET_USE (use_p, unshare_expr (value)); 470 /* If we didn't replace uses with a debug decl fold the 471 resulting expression. Otherwise we end up with invalid IL. */ 472 if (TREE_CODE (value) != DEBUG_EXPR_DECL) 473 { 474 gimple_stmt_iterator gsi = gsi_for_stmt (stmt); 475 fold_stmt_inplace (&gsi); 476 } 477 } 478 else 479 gimple_debug_bind_reset_value (stmt); 480 481 update_stmt (stmt); 482 } 483 } 484 485 486 /* Insert a DEBUG BIND stmt before STMT for each DEF referenced by 487 other DEBUG stmts, and replace uses of the DEF with the 488 newly-created debug temp. */ 489 490 void 491 insert_debug_temps_for_defs (gimple_stmt_iterator *gsi) 492 { 493 gimple stmt; 494 ssa_op_iter op_iter; 495 def_operand_p def_p; 496 497 if (!MAY_HAVE_DEBUG_STMTS) 498 return; 499 500 stmt = gsi_stmt (*gsi); 501 502 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF) 503 { 504 tree var = DEF_FROM_PTR (def_p); 505 506 if (TREE_CODE (var) != SSA_NAME) 507 continue; 508 509 insert_debug_temp_for_var_def (gsi, var); 510 } 511 } 512 513 /* Reset all debug stmts that use SSA_NAME(s) defined in STMT. 
*/ 514 515 void 516 reset_debug_uses (gimple stmt) 517 { 518 ssa_op_iter op_iter; 519 def_operand_p def_p; 520 imm_use_iterator imm_iter; 521 gimple use_stmt; 522 523 if (!MAY_HAVE_DEBUG_STMTS) 524 return; 525 526 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF) 527 { 528 tree var = DEF_FROM_PTR (def_p); 529 530 if (TREE_CODE (var) != SSA_NAME) 531 continue; 532 533 FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var) 534 { 535 if (!gimple_debug_bind_p (use_stmt)) 536 continue; 537 538 gimple_debug_bind_reset_value (use_stmt); 539 update_stmt (use_stmt); 540 } 541 } 542 } 543 544 /* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing 545 dominated stmts before their dominators, so that release_ssa_defs 546 stands a chance of propagating DEFs into debug bind stmts. */ 547 548 void 549 release_defs_bitset (bitmap toremove) 550 { 551 unsigned j; 552 bitmap_iterator bi; 553 554 /* Performing a topological sort is probably overkill, this will 555 most likely run in slightly superlinear time, rather than the 556 pathological quadratic worst case. */ 557 while (!bitmap_empty_p (toremove)) 558 EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi) 559 { 560 bool remove_now = true; 561 tree var = ssa_name (j); 562 gimple stmt; 563 imm_use_iterator uit; 564 565 FOR_EACH_IMM_USE_STMT (stmt, uit, var) 566 { 567 ssa_op_iter dit; 568 def_operand_p def_p; 569 570 /* We can't propagate PHI nodes into debug stmts. */ 571 if (gimple_code (stmt) == GIMPLE_PHI 572 || is_gimple_debug (stmt)) 573 continue; 574 575 /* If we find another definition to remove that uses 576 the one we're looking at, defer the removal of this 577 one, so that it can be propagated into debug stmts 578 after the other is. */ 579 FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF) 580 { 581 tree odef = DEF_FROM_PTR (def_p); 582 583 if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef))) 584 { 585 remove_now = false; 586 break; 587 } 588 } 589 590 if (!remove_now) 591 BREAK_FROM_IMM_USE_STMT (uit); 592 } 593 594 if (remove_now) 595 { 596 gimple def = SSA_NAME_DEF_STMT (var); 597 gimple_stmt_iterator gsi = gsi_for_stmt (def); 598 599 if (gimple_code (def) == GIMPLE_PHI) 600 remove_phi_node (&gsi, true); 601 else 602 { 603 gsi_remove (&gsi, true); 604 release_defs (def); 605 } 606 607 bitmap_clear_bit (toremove, j); 608 } 609 } 610 } 611 612 /* Return true if SSA_NAME is malformed and mark it visited. 613 614 IS_VIRTUAL is true if this SSA_NAME was found inside a virtual 615 operand. 
*/ 616 617 static bool 618 verify_ssa_name (tree ssa_name, bool is_virtual) 619 { 620 if (TREE_CODE (ssa_name) != SSA_NAME) 621 { 622 error ("expected an SSA_NAME object"); 623 return true; 624 } 625 626 if (SSA_NAME_IN_FREE_LIST (ssa_name)) 627 { 628 error ("found an SSA_NAME that had been released into the free pool"); 629 return true; 630 } 631 632 if (SSA_NAME_VAR (ssa_name) != NULL_TREE 633 && TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name))) 634 { 635 error ("type mismatch between an SSA_NAME and its symbol"); 636 return true; 637 } 638 639 if (is_virtual && !virtual_operand_p (ssa_name)) 640 { 641 error ("found a virtual definition for a GIMPLE register"); 642 return true; 643 } 644 645 if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun)) 646 { 647 error ("virtual SSA name for non-VOP decl"); 648 return true; 649 } 650 651 if (!is_virtual && virtual_operand_p (ssa_name)) 652 { 653 error ("found a real definition for a non-register"); 654 return true; 655 } 656 657 if (SSA_NAME_IS_DEFAULT_DEF (ssa_name) 658 && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))) 659 { 660 error ("found a default name with a non-empty defining statement"); 661 return true; 662 } 663 664 return false; 665 } 666 667 668 /* Return true if the definition of SSA_NAME at block BB is malformed. 669 670 STMT is the statement where SSA_NAME is created. 671 672 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME 673 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set, 674 it means that the block in that array slot contains the 675 definition of SSA_NAME. 676 677 IS_VIRTUAL is true if SSA_NAME is created by a VDEF. */ 678 679 static bool 680 verify_def (basic_block bb, basic_block *definition_block, tree ssa_name, 681 gimple stmt, bool is_virtual) 682 { 683 if (verify_ssa_name (ssa_name, is_virtual)) 684 goto err; 685 686 if (SSA_NAME_VAR (ssa_name) 687 && TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL 688 && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name))) 689 { 690 error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set"); 691 goto err; 692 } 693 694 if (definition_block[SSA_NAME_VERSION (ssa_name)]) 695 { 696 error ("SSA_NAME created in two different blocks %i and %i", 697 definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index); 698 goto err; 699 } 700 701 definition_block[SSA_NAME_VERSION (ssa_name)] = bb; 702 703 if (SSA_NAME_DEF_STMT (ssa_name) != stmt) 704 { 705 error ("SSA_NAME_DEF_STMT is wrong"); 706 fprintf (stderr, "Expected definition statement:\n"); 707 print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS); 708 fprintf (stderr, "\nActual definition statement:\n"); 709 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS); 710 goto err; 711 } 712 713 return false; 714 715 err: 716 fprintf (stderr, "while verifying SSA_NAME "); 717 print_generic_expr (stderr, ssa_name, 0); 718 fprintf (stderr, " in statement\n"); 719 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS); 720 721 return true; 722 } 723 724 725 /* Return true if the use of SSA_NAME at statement STMT in block BB is 726 malformed. 727 728 DEF_BB is the block where SSA_NAME was found to be created. 729 730 IDOM contains immediate dominator information for the flowgraph. 731 732 CHECK_ABNORMAL is true if the caller wants to check whether this use 733 is flowing through an abnormal edge (only used when checking PHI 734 arguments). 735 736 If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names 737 that are defined before STMT in basic block BB. 
*/ 738 739 static bool 740 verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p, 741 gimple stmt, bool check_abnormal, bitmap names_defined_in_bb) 742 { 743 bool err = false; 744 tree ssa_name = USE_FROM_PTR (use_p); 745 746 if (!TREE_VISITED (ssa_name)) 747 if (verify_imm_links (stderr, ssa_name)) 748 err = true; 749 750 TREE_VISITED (ssa_name) = 1; 751 752 if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)) 753 && SSA_NAME_IS_DEFAULT_DEF (ssa_name)) 754 ; /* Default definitions have empty statements. Nothing to do. */ 755 else if (!def_bb) 756 { 757 error ("missing definition"); 758 err = true; 759 } 760 else if (bb != def_bb 761 && !dominated_by_p (CDI_DOMINATORS, bb, def_bb)) 762 { 763 error ("definition in block %i does not dominate use in block %i", 764 def_bb->index, bb->index); 765 err = true; 766 } 767 else if (bb == def_bb 768 && names_defined_in_bb != NULL 769 && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name))) 770 { 771 error ("definition in block %i follows the use", def_bb->index); 772 err = true; 773 } 774 775 if (check_abnormal 776 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name)) 777 { 778 error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set"); 779 err = true; 780 } 781 782 /* Make sure the use is in an appropriate list by checking the previous 783 element to make sure it's the same. */ 784 if (use_p->prev == NULL) 785 { 786 error ("no immediate_use list"); 787 err = true; 788 } 789 else 790 { 791 tree listvar; 792 if (use_p->prev->use == NULL) 793 listvar = use_p->prev->loc.ssa_name; 794 else 795 listvar = USE_FROM_PTR (use_p->prev); 796 if (listvar != ssa_name) 797 { 798 error ("wrong immediate use list"); 799 err = true; 800 } 801 } 802 803 if (err) 804 { 805 fprintf (stderr, "for SSA_NAME: "); 806 print_generic_expr (stderr, ssa_name, TDF_VOPS); 807 fprintf (stderr, " in statement:\n"); 808 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS); 809 } 810 811 return err; 812 } 813 814 815 /* Return true if any of the arguments for PHI node PHI at block BB is 816 malformed. 817 818 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME 819 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set, 820 it means that the block in that array slot contains the 821 definition of SSA_NAME. 
*/ 822 823 static bool 824 verify_phi_args (gimple phi, basic_block bb, basic_block *definition_block) 825 { 826 edge e; 827 bool err = false; 828 size_t i, phi_num_args = gimple_phi_num_args (phi); 829 830 if (EDGE_COUNT (bb->preds) != phi_num_args) 831 { 832 error ("incoming edge count does not match number of PHI arguments"); 833 err = true; 834 goto error; 835 } 836 837 for (i = 0; i < phi_num_args; i++) 838 { 839 use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i); 840 tree op = USE_FROM_PTR (op_p); 841 842 e = EDGE_PRED (bb, i); 843 844 if (op == NULL_TREE) 845 { 846 error ("PHI argument is missing for edge %d->%d", 847 e->src->index, 848 e->dest->index); 849 err = true; 850 goto error; 851 } 852 853 if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op)) 854 { 855 error ("PHI argument is not SSA_NAME, or invariant"); 856 err = true; 857 } 858 859 if (TREE_CODE (op) == SSA_NAME) 860 { 861 err = verify_ssa_name (op, virtual_operand_p (gimple_phi_result (phi))); 862 err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)], 863 op_p, phi, e->flags & EDGE_ABNORMAL, NULL); 864 } 865 866 if (TREE_CODE (op) == ADDR_EXPR) 867 { 868 tree base = TREE_OPERAND (op, 0); 869 while (handled_component_p (base)) 870 base = TREE_OPERAND (base, 0); 871 if ((TREE_CODE (base) == VAR_DECL 872 || TREE_CODE (base) == PARM_DECL 873 || TREE_CODE (base) == RESULT_DECL) 874 && !TREE_ADDRESSABLE (base)) 875 { 876 error ("address taken, but ADDRESSABLE bit not set"); 877 err = true; 878 } 879 } 880 881 if (e->dest != bb) 882 { 883 error ("wrong edge %d->%d for PHI argument", 884 e->src->index, e->dest->index); 885 err = true; 886 } 887 888 if (err) 889 { 890 fprintf (stderr, "PHI argument\n"); 891 print_generic_stmt (stderr, op, TDF_VOPS); 892 goto error; 893 } 894 } 895 896 error: 897 if (err) 898 { 899 fprintf (stderr, "for PHI node\n"); 900 print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS); 901 } 902 903 904 return err; 905 } 906 907 908 /* Verify common invariants in the SSA web. 909 TODO: verify the variable annotations. */ 910 911 DEBUG_FUNCTION void 912 verify_ssa (bool check_modified_stmt) 913 { 914 size_t i; 915 basic_block bb; 916 basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names); 917 ssa_op_iter iter; 918 tree op; 919 enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS); 920 bitmap names_defined_in_bb = BITMAP_ALLOC (NULL); 921 922 gcc_assert (!need_ssa_update_p (cfun)); 923 924 timevar_push (TV_TREE_SSA_VERIFY); 925 926 /* Keep track of SSA names present in the IL. */ 927 for (i = 1; i < num_ssa_names; i++) 928 { 929 tree name = ssa_name (i); 930 if (name) 931 { 932 gimple stmt; 933 TREE_VISITED (name) = 0; 934 935 verify_ssa_name (name, virtual_operand_p (name)); 936 937 stmt = SSA_NAME_DEF_STMT (name); 938 if (!gimple_nop_p (stmt)) 939 { 940 basic_block bb = gimple_bb (stmt); 941 verify_def (bb, definition_block, 942 name, stmt, virtual_operand_p (name)); 943 944 } 945 } 946 } 947 948 calculate_dominance_info (CDI_DOMINATORS); 949 950 /* Now verify all the uses and make sure they agree with the definitions 951 found in the previous pass. */ 952 FOR_EACH_BB (bb) 953 { 954 edge e; 955 gimple phi; 956 edge_iterator ei; 957 gimple_stmt_iterator gsi; 958 959 /* Make sure that all edges have a clear 'aux' field. 
*/ 960 FOR_EACH_EDGE (e, ei, bb->preds) 961 { 962 if (e->aux) 963 { 964 error ("AUX pointer initialized for edge %d->%d", e->src->index, 965 e->dest->index); 966 goto err; 967 } 968 } 969 970 /* Verify the arguments for every PHI node in the block. */ 971 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi)) 972 { 973 phi = gsi_stmt (gsi); 974 if (verify_phi_args (phi, bb, definition_block)) 975 goto err; 976 977 bitmap_set_bit (names_defined_in_bb, 978 SSA_NAME_VERSION (gimple_phi_result (phi))); 979 } 980 981 /* Now verify all the uses and vuses in every statement of the block. */ 982 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) 983 { 984 gimple stmt = gsi_stmt (gsi); 985 use_operand_p use_p; 986 987 if (check_modified_stmt && gimple_modified_p (stmt)) 988 { 989 error ("stmt (%p) marked modified after optimization pass: ", 990 (void *)stmt); 991 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS); 992 goto err; 993 } 994 995 if (verify_ssa_operands (stmt)) 996 { 997 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS); 998 goto err; 999 } 1000 1001 if (gimple_debug_bind_p (stmt) 1002 && !gimple_debug_bind_has_value_p (stmt)) 1003 continue; 1004 1005 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE) 1006 { 1007 op = USE_FROM_PTR (use_p); 1008 if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)], 1009 use_p, stmt, false, names_defined_in_bb)) 1010 goto err; 1011 } 1012 1013 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS) 1014 { 1015 if (SSA_NAME_DEF_STMT (op) != stmt) 1016 { 1017 error ("SSA_NAME_DEF_STMT is wrong"); 1018 fprintf (stderr, "Expected definition statement:\n"); 1019 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS); 1020 fprintf (stderr, "\nActual definition statement:\n"); 1021 print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op), 1022 4, TDF_VOPS); 1023 goto err; 1024 } 1025 bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op)); 1026 } 1027 } 1028 1029 bitmap_clear (names_defined_in_bb); 1030 } 1031 1032 free (definition_block); 1033 1034 /* Restore the dominance information to its prior known state, so 1035 that we do not perturb the compiler's subsequent behavior. */ 1036 if (orig_dom_state == DOM_NONE) 1037 free_dominance_info (CDI_DOMINATORS); 1038 else 1039 set_dom_info_availability (CDI_DOMINATORS, orig_dom_state); 1040 1041 BITMAP_FREE (names_defined_in_bb); 1042 timevar_pop (TV_TREE_SSA_VERIFY); 1043 return; 1044 1045 err: 1046 internal_error ("verify_ssa failed"); 1047 } 1048 1049 /* Return true if the uid in both int tree maps are equal. */ 1050 1051 int 1052 int_tree_map_eq (const void *va, const void *vb) 1053 { 1054 const struct int_tree_map *a = (const struct int_tree_map *) va; 1055 const struct int_tree_map *b = (const struct int_tree_map *) vb; 1056 return (a->uid == b->uid); 1057 } 1058 1059 /* Hash a UID in a int_tree_map. */ 1060 1061 unsigned int 1062 int_tree_map_hash (const void *item) 1063 { 1064 return ((const struct int_tree_map *)item)->uid; 1065 } 1066 1067 /* Return true if the DECL_UID in both trees are equal. */ 1068 1069 int 1070 uid_decl_map_eq (const void *va, const void *vb) 1071 { 1072 const_tree a = (const_tree) va; 1073 const_tree b = (const_tree) vb; 1074 return (a->decl_minimal.uid == b->decl_minimal.uid); 1075 } 1076 1077 /* Hash a tree in a uid_decl_map. */ 1078 1079 unsigned int 1080 uid_decl_map_hash (const void *item) 1081 { 1082 return ((const_tree)item)->decl_minimal.uid; 1083 } 1084 1085 /* Return true if the DECL_UID in both trees are equal. 
*/ 1086 1087 static int 1088 uid_ssaname_map_eq (const void *va, const void *vb) 1089 { 1090 const_tree a = (const_tree) va; 1091 const_tree b = (const_tree) vb; 1092 return (a->ssa_name.var->decl_minimal.uid == b->ssa_name.var->decl_minimal.uid); 1093 } 1094 1095 /* Hash a tree in a uid_decl_map. */ 1096 1097 static unsigned int 1098 uid_ssaname_map_hash (const void *item) 1099 { 1100 return ((const_tree)item)->ssa_name.var->decl_minimal.uid; 1101 } 1102 1103 1104 /* Initialize global DFA and SSA structures. */ 1105 1106 void 1107 init_tree_ssa (struct function *fn) 1108 { 1109 fn->gimple_df = ggc_alloc_cleared_gimple_df (); 1110 fn->gimple_df->default_defs = htab_create_ggc (20, uid_ssaname_map_hash, 1111 uid_ssaname_map_eq, NULL); 1112 pt_solution_reset (&fn->gimple_df->escaped); 1113 init_ssanames (fn, 0); 1114 } 1115 1116 /* Do the actions required to initialize internal data structures used 1117 in tree-ssa optimization passes. */ 1118 1119 static unsigned int 1120 execute_init_datastructures (void) 1121 { 1122 /* Allocate hash tables, arrays and other structures. */ 1123 init_tree_ssa (cfun); 1124 return 0; 1125 } 1126 1127 struct gimple_opt_pass pass_init_datastructures = 1128 { 1129 { 1130 GIMPLE_PASS, 1131 "*init_datastructures", /* name */ 1132 OPTGROUP_NONE, /* optinfo_flags */ 1133 NULL, /* gate */ 1134 execute_init_datastructures, /* execute */ 1135 NULL, /* sub */ 1136 NULL, /* next */ 1137 0, /* static_pass_number */ 1138 TV_NONE, /* tv_id */ 1139 PROP_cfg, /* properties_required */ 1140 0, /* properties_provided */ 1141 0, /* properties_destroyed */ 1142 0, /* todo_flags_start */ 1143 0 /* todo_flags_finish */ 1144 } 1145 }; 1146 1147 /* Deallocate memory associated with SSA data structures for FNDECL. */ 1148 1149 void 1150 delete_tree_ssa (void) 1151 { 1152 fini_ssanames (); 1153 1154 /* We no longer maintain the SSA operand cache at this point. */ 1155 if (ssa_operands_active (cfun)) 1156 fini_ssa_operands (); 1157 1158 htab_delete (cfun->gimple_df->default_defs); 1159 cfun->gimple_df->default_defs = NULL; 1160 pt_solution_reset (&cfun->gimple_df->escaped); 1161 if (cfun->gimple_df->decls_to_pointers != NULL) 1162 pointer_map_destroy (cfun->gimple_df->decls_to_pointers); 1163 cfun->gimple_df->decls_to_pointers = NULL; 1164 cfun->gimple_df->modified_noreturn_calls = NULL; 1165 cfun->gimple_df = NULL; 1166 1167 /* We no longer need the edge variable maps. */ 1168 redirect_edge_var_map_destroy (); 1169 } 1170 1171 /* Return true if the conversion from INNER_TYPE to OUTER_TYPE is a 1172 useless type conversion, otherwise return false. 1173 1174 This function implicitly defines the middle-end type system. With 1175 the notion of 'a < b' meaning that useless_type_conversion_p (a, b) 1176 holds and 'a > b' meaning that useless_type_conversion_p (b, a) holds, 1177 the following invariants shall be fulfilled: 1178 1179 1) useless_type_conversion_p is transitive. 1180 If a < b and b < c then a < c. 1181 1182 2) useless_type_conversion_p is not symmetric. 1183 From a < b does not follow a > b. 1184 1185 3) Types define the available set of operations applicable to values. 1186 A type conversion is useless if the operations for the target type 1187 is a subset of the operations for the source type. For example 1188 casts to void* are useless, casts from void* are not (void* can't 1189 be dereferenced or offsetted, but copied, hence its set of operations 1190 is a strict subset of that of all other data pointer types). 
Casts 1191 to const T* are useless (can't be written to), casts from const T* 1192 to T* are not. */ 1193 1194 bool 1195 useless_type_conversion_p (tree outer_type, tree inner_type) 1196 { 1197 /* Do the following before stripping toplevel qualifiers. */ 1198 if (POINTER_TYPE_P (inner_type) 1199 && POINTER_TYPE_P (outer_type)) 1200 { 1201 /* Do not lose casts between pointers to different address spaces. */ 1202 if (TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) 1203 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))) 1204 return false; 1205 } 1206 1207 /* From now on qualifiers on value types do not matter. */ 1208 inner_type = TYPE_MAIN_VARIANT (inner_type); 1209 outer_type = TYPE_MAIN_VARIANT (outer_type); 1210 1211 if (inner_type == outer_type) 1212 return true; 1213 1214 /* If we know the canonical types, compare them. */ 1215 if (TYPE_CANONICAL (inner_type) 1216 && TYPE_CANONICAL (inner_type) == TYPE_CANONICAL (outer_type)) 1217 return true; 1218 1219 /* Changes in machine mode are never useless conversions unless we 1220 deal with aggregate types in which case we defer to later checks. */ 1221 if (TYPE_MODE (inner_type) != TYPE_MODE (outer_type) 1222 && !AGGREGATE_TYPE_P (inner_type)) 1223 return false; 1224 1225 /* If both the inner and outer types are integral types, then the 1226 conversion is not necessary if they have the same mode and 1227 signedness and precision, and both or neither are boolean. */ 1228 if (INTEGRAL_TYPE_P (inner_type) 1229 && INTEGRAL_TYPE_P (outer_type)) 1230 { 1231 /* Preserve changes in signedness or precision. */ 1232 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type) 1233 || TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type)) 1234 return false; 1235 1236 /* Preserve conversions to/from BOOLEAN_TYPE if types are not 1237 of precision one. */ 1238 if (((TREE_CODE (inner_type) == BOOLEAN_TYPE) 1239 != (TREE_CODE (outer_type) == BOOLEAN_TYPE)) 1240 && TYPE_PRECISION (outer_type) != 1) 1241 return false; 1242 1243 /* We don't need to preserve changes in the types minimum or 1244 maximum value in general as these do not generate code 1245 unless the types precisions are different. */ 1246 return true; 1247 } 1248 1249 /* Scalar floating point types with the same mode are compatible. */ 1250 else if (SCALAR_FLOAT_TYPE_P (inner_type) 1251 && SCALAR_FLOAT_TYPE_P (outer_type)) 1252 return true; 1253 1254 /* Fixed point types with the same mode are compatible. */ 1255 else if (FIXED_POINT_TYPE_P (inner_type) 1256 && FIXED_POINT_TYPE_P (outer_type)) 1257 return true; 1258 1259 /* We need to take special care recursing to pointed-to types. */ 1260 else if (POINTER_TYPE_P (inner_type) 1261 && POINTER_TYPE_P (outer_type)) 1262 { 1263 /* Do not lose casts to function pointer types. */ 1264 if ((TREE_CODE (TREE_TYPE (outer_type)) == FUNCTION_TYPE 1265 || TREE_CODE (TREE_TYPE (outer_type)) == METHOD_TYPE) 1266 && !(TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE 1267 || TREE_CODE (TREE_TYPE (inner_type)) == METHOD_TYPE)) 1268 return false; 1269 1270 /* We do not care for const qualification of the pointed-to types 1271 as const qualification has no semantic value to the middle-end. */ 1272 1273 /* Otherwise pointers/references are equivalent. */ 1274 return true; 1275 } 1276 1277 /* Recurse for complex types. 
*/ 1278 else if (TREE_CODE (inner_type) == COMPLEX_TYPE 1279 && TREE_CODE (outer_type) == COMPLEX_TYPE) 1280 return useless_type_conversion_p (TREE_TYPE (outer_type), 1281 TREE_TYPE (inner_type)); 1282 1283 /* Recurse for vector types with the same number of subparts. */ 1284 else if (TREE_CODE (inner_type) == VECTOR_TYPE 1285 && TREE_CODE (outer_type) == VECTOR_TYPE 1286 && TYPE_PRECISION (inner_type) == TYPE_PRECISION (outer_type)) 1287 return useless_type_conversion_p (TREE_TYPE (outer_type), 1288 TREE_TYPE (inner_type)); 1289 1290 else if (TREE_CODE (inner_type) == ARRAY_TYPE 1291 && TREE_CODE (outer_type) == ARRAY_TYPE) 1292 { 1293 /* Preserve string attributes. */ 1294 if (TYPE_STRING_FLAG (inner_type) != TYPE_STRING_FLAG (outer_type)) 1295 return false; 1296 1297 /* Conversions from array types with unknown extent to 1298 array types with known extent are not useless. */ 1299 if (!TYPE_DOMAIN (inner_type) 1300 && TYPE_DOMAIN (outer_type)) 1301 return false; 1302 1303 /* Nor are conversions from array types with non-constant size to 1304 array types with constant size or to different size. */ 1305 if (TYPE_SIZE (outer_type) 1306 && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST 1307 && (!TYPE_SIZE (inner_type) 1308 || TREE_CODE (TYPE_SIZE (inner_type)) != INTEGER_CST 1309 || !tree_int_cst_equal (TYPE_SIZE (outer_type), 1310 TYPE_SIZE (inner_type)))) 1311 return false; 1312 1313 /* Check conversions between arrays with partially known extents. 1314 If the array min/max values are constant they have to match. 1315 Otherwise allow conversions to unknown and variable extents. 1316 In particular this declares conversions that may change the 1317 mode to BLKmode as useless. */ 1318 if (TYPE_DOMAIN (inner_type) 1319 && TYPE_DOMAIN (outer_type) 1320 && TYPE_DOMAIN (inner_type) != TYPE_DOMAIN (outer_type)) 1321 { 1322 tree inner_min = TYPE_MIN_VALUE (TYPE_DOMAIN (inner_type)); 1323 tree outer_min = TYPE_MIN_VALUE (TYPE_DOMAIN (outer_type)); 1324 tree inner_max = TYPE_MAX_VALUE (TYPE_DOMAIN (inner_type)); 1325 tree outer_max = TYPE_MAX_VALUE (TYPE_DOMAIN (outer_type)); 1326 1327 /* After gimplification a variable min/max value carries no 1328 additional information compared to a NULL value. All that 1329 matters has been lowered to be part of the IL. */ 1330 if (inner_min && TREE_CODE (inner_min) != INTEGER_CST) 1331 inner_min = NULL_TREE; 1332 if (outer_min && TREE_CODE (outer_min) != INTEGER_CST) 1333 outer_min = NULL_TREE; 1334 if (inner_max && TREE_CODE (inner_max) != INTEGER_CST) 1335 inner_max = NULL_TREE; 1336 if (outer_max && TREE_CODE (outer_max) != INTEGER_CST) 1337 outer_max = NULL_TREE; 1338 1339 /* Conversions NULL / variable <- cst are useless, but not 1340 the other way around. */ 1341 if (outer_min 1342 && (!inner_min 1343 || !tree_int_cst_equal (inner_min, outer_min))) 1344 return false; 1345 if (outer_max 1346 && (!inner_max 1347 || !tree_int_cst_equal (inner_max, outer_max))) 1348 return false; 1349 } 1350 1351 /* Recurse on the element check. */ 1352 return useless_type_conversion_p (TREE_TYPE (outer_type), 1353 TREE_TYPE (inner_type)); 1354 } 1355 1356 else if ((TREE_CODE (inner_type) == FUNCTION_TYPE 1357 || TREE_CODE (inner_type) == METHOD_TYPE) 1358 && TREE_CODE (inner_type) == TREE_CODE (outer_type)) 1359 { 1360 tree outer_parm, inner_parm; 1361 1362 /* If the return types are not compatible bail out. 
*/ 1363 if (!useless_type_conversion_p (TREE_TYPE (outer_type), 1364 TREE_TYPE (inner_type))) 1365 return false; 1366 1367 /* Method types should belong to a compatible base class. */ 1368 if (TREE_CODE (inner_type) == METHOD_TYPE 1369 && !useless_type_conversion_p (TYPE_METHOD_BASETYPE (outer_type), 1370 TYPE_METHOD_BASETYPE (inner_type))) 1371 return false; 1372 1373 /* A conversion to an unprototyped argument list is ok. */ 1374 if (!prototype_p (outer_type)) 1375 return true; 1376 1377 /* If the unqualified argument types are compatible the conversion 1378 is useless. */ 1379 if (TYPE_ARG_TYPES (outer_type) == TYPE_ARG_TYPES (inner_type)) 1380 return true; 1381 1382 for (outer_parm = TYPE_ARG_TYPES (outer_type), 1383 inner_parm = TYPE_ARG_TYPES (inner_type); 1384 outer_parm && inner_parm; 1385 outer_parm = TREE_CHAIN (outer_parm), 1386 inner_parm = TREE_CHAIN (inner_parm)) 1387 if (!useless_type_conversion_p 1388 (TYPE_MAIN_VARIANT (TREE_VALUE (outer_parm)), 1389 TYPE_MAIN_VARIANT (TREE_VALUE (inner_parm)))) 1390 return false; 1391 1392 /* If there is a mismatch in the number of arguments the functions 1393 are not compatible. */ 1394 if (outer_parm || inner_parm) 1395 return false; 1396 1397 /* Defer to the target if necessary. */ 1398 if (TYPE_ATTRIBUTES (inner_type) || TYPE_ATTRIBUTES (outer_type)) 1399 return comp_type_attributes (outer_type, inner_type) != 0; 1400 1401 return true; 1402 } 1403 1404 /* For aggregates we rely on TYPE_CANONICAL exclusively and require 1405 explicit conversions for types involving to be structurally 1406 compared types. */ 1407 else if (AGGREGATE_TYPE_P (inner_type) 1408 && TREE_CODE (inner_type) == TREE_CODE (outer_type)) 1409 return false; 1410 1411 return false; 1412 } 1413 1414 /* Return true if a conversion from either type of TYPE1 and TYPE2 1415 to the other is not required. Otherwise return false. */ 1416 1417 bool 1418 types_compatible_p (tree type1, tree type2) 1419 { 1420 return (type1 == type2 1421 || (useless_type_conversion_p (type1, type2) 1422 && useless_type_conversion_p (type2, type1))); 1423 } 1424 1425 /* Return true if EXPR is a useless type conversion, otherwise return 1426 false. */ 1427 1428 bool 1429 tree_ssa_useless_type_conversion (tree expr) 1430 { 1431 /* If we have an assignment that merely uses a NOP_EXPR to change 1432 the top of the RHS to the type of the LHS and the type conversion 1433 is "safe", then strip away the type conversion so that we can 1434 enter LHS = RHS into the const_and_copies table. */ 1435 if (CONVERT_EXPR_P (expr) 1436 || TREE_CODE (expr) == VIEW_CONVERT_EXPR 1437 || TREE_CODE (expr) == NON_LVALUE_EXPR) 1438 return useless_type_conversion_p 1439 (TREE_TYPE (expr), 1440 TREE_TYPE (TREE_OPERAND (expr, 0))); 1441 1442 return false; 1443 } 1444 1445 /* Strip conversions from EXP according to 1446 tree_ssa_useless_type_conversion and return the resulting 1447 expression. */ 1448 1449 tree 1450 tree_ssa_strip_useless_type_conversions (tree exp) 1451 { 1452 while (tree_ssa_useless_type_conversion (exp)) 1453 exp = TREE_OPERAND (exp, 0); 1454 return exp; 1455 } 1456 1457 1458 /* Internal helper for walk_use_def_chains. VAR, FN and DATA are as 1459 described in walk_use_def_chains. 1460 1461 VISITED is a pointer set used to mark visited SSA_NAMEs to avoid 1462 infinite loops. We used to have a bitmap for this to just mark 1463 SSA versions we had visited. But non-sparse bitmaps are way too 1464 expensive, while sparse bitmaps may cause quadratic behavior. 
1465 1466 IS_DFS is true if the caller wants to perform a depth-first search 1467 when visiting PHI nodes. A DFS will visit each PHI argument and 1468 call FN after each one. Otherwise, all the arguments are 1469 visited first and then FN is called with each of the visited 1470 arguments in a separate pass. */ 1471 1472 static bool 1473 walk_use_def_chains_1 (tree var, walk_use_def_chains_fn fn, void *data, 1474 struct pointer_set_t *visited, bool is_dfs) 1475 { 1476 gimple def_stmt; 1477 1478 if (pointer_set_insert (visited, var)) 1479 return false; 1480 1481 def_stmt = SSA_NAME_DEF_STMT (var); 1482 1483 if (gimple_code (def_stmt) != GIMPLE_PHI) 1484 { 1485 /* If we reached the end of the use-def chain, call FN. */ 1486 return fn (var, def_stmt, data); 1487 } 1488 else 1489 { 1490 size_t i; 1491 1492 /* When doing a breadth-first search, call FN before following the 1493 use-def links for each argument. */ 1494 if (!is_dfs) 1495 for (i = 0; i < gimple_phi_num_args (def_stmt); i++) 1496 if (fn (gimple_phi_arg_def (def_stmt, i), def_stmt, data)) 1497 return true; 1498 1499 /* Follow use-def links out of each PHI argument. */ 1500 for (i = 0; i < gimple_phi_num_args (def_stmt); i++) 1501 { 1502 tree arg = gimple_phi_arg_def (def_stmt, i); 1503 1504 /* ARG may be NULL for newly introduced PHI nodes. */ 1505 if (arg 1506 && TREE_CODE (arg) == SSA_NAME 1507 && walk_use_def_chains_1 (arg, fn, data, visited, is_dfs)) 1508 return true; 1509 } 1510 1511 /* When doing a depth-first search, call FN after following the 1512 use-def links for each argument. */ 1513 if (is_dfs) 1514 for (i = 0; i < gimple_phi_num_args (def_stmt); i++) 1515 if (fn (gimple_phi_arg_def (def_stmt, i), def_stmt, data)) 1516 return true; 1517 } 1518 1519 return false; 1520 } 1521 1522 1523 1524 /* Walk use-def chains starting at the SSA variable VAR. Call 1525 function FN at each reaching definition found. FN takes three 1526 arguments: VAR, its defining statement (DEF_STMT) and a generic 1527 pointer to whatever state information that FN may want to maintain 1528 (DATA). FN is able to stop the walk by returning true, otherwise 1529 in order to continue the walk, FN should return false. 1530 1531 Note, that if DEF_STMT is a PHI node, the semantics are slightly 1532 different. The first argument to FN is no longer the original 1533 variable VAR, but the PHI argument currently being examined. If FN 1534 wants to get at VAR, it should call PHI_RESULT (PHI). 1535 1536 If IS_DFS is true, this function will: 1537 1538 1- walk the use-def chains for all the PHI arguments, and, 1539 2- call (*FN) (ARG, PHI, DATA) on all the PHI arguments. 1540 1541 If IS_DFS is false, the two steps above are done in reverse order 1542 (i.e., a breadth-first search). */ 1543 1544 void 1545 walk_use_def_chains (tree var, walk_use_def_chains_fn fn, void *data, 1546 bool is_dfs) 1547 { 1548 gimple def_stmt; 1549 1550 gcc_assert (TREE_CODE (var) == SSA_NAME); 1551 1552 def_stmt = SSA_NAME_DEF_STMT (var); 1553 1554 /* We only need to recurse if the reaching definition comes from a PHI 1555 node. */ 1556 if (gimple_code (def_stmt) != GIMPLE_PHI) 1557 (*fn) (var, def_stmt, data); 1558 else 1559 { 1560 struct pointer_set_t *visited = pointer_set_create (); 1561 walk_use_def_chains_1 (var, fn, data, visited, is_dfs); 1562 pointer_set_destroy (visited); 1563 } 1564 } 1565 1566 1567 /* Emit warnings for uninitialized variables. This is done in two passes. 1568 1569 The first pass notices real uses of SSA names with undefined values. 
1570 Such uses are unconditionally uninitialized, and we can be certain that 1571 such a use is a mistake. This pass is run before most optimizations, 1572 so that we catch as many as we can. 1573 1574 The second pass follows PHI nodes to find uses that are potentially 1575 uninitialized. In this case we can't necessarily prove that the use 1576 is really uninitialized. This pass is run after most optimizations, 1577 so that we thread as many jumps and possible, and delete as much dead 1578 code as possible, in order to reduce false positives. We also look 1579 again for plain uninitialized variables, since optimization may have 1580 changed conditionally uninitialized to unconditionally uninitialized. */ 1581 1582 /* Emit a warning for EXPR based on variable VAR at the point in the 1583 program T, an SSA_NAME, is used being uninitialized. The exact 1584 warning text is in MSGID and LOCUS may contain a location or be null. 1585 WC is the warning code. */ 1586 1587 void 1588 warn_uninit (enum opt_code wc, tree t, 1589 tree expr, tree var, const char *gmsgid, void *data) 1590 { 1591 gimple context = (gimple) data; 1592 location_t location, cfun_loc; 1593 expanded_location xloc, floc; 1594 1595 if (!ssa_undefined_value_p (t)) 1596 return; 1597 1598 /* TREE_NO_WARNING either means we already warned, or the front end 1599 wishes to suppress the warning. */ 1600 if ((context 1601 && (gimple_no_warning_p (context) 1602 || (gimple_assign_single_p (context) 1603 && TREE_NO_WARNING (gimple_assign_rhs1 (context))))) 1604 || TREE_NO_WARNING (expr)) 1605 return; 1606 1607 location = (context != NULL && gimple_has_location (context)) 1608 ? gimple_location (context) 1609 : DECL_SOURCE_LOCATION (var); 1610 location = linemap_resolve_location (line_table, location, 1611 LRK_SPELLING_LOCATION, 1612 NULL); 1613 cfun_loc = DECL_SOURCE_LOCATION (cfun->decl); 1614 xloc = expand_location (location); 1615 floc = expand_location (cfun_loc); 1616 if (warning_at (location, wc, gmsgid, expr)) 1617 { 1618 TREE_NO_WARNING (expr) = 1; 1619 1620 if (location == DECL_SOURCE_LOCATION (var)) 1621 return; 1622 if (xloc.file != floc.file 1623 || linemap_location_before_p (line_table, 1624 location, cfun_loc) 1625 || linemap_location_before_p (line_table, 1626 cfun->function_end_locus, 1627 location)) 1628 inform (DECL_SOURCE_LOCATION (var), "%qD was declared here", var); 1629 } 1630 } 1631 1632 unsigned int 1633 warn_uninitialized_vars (bool warn_possibly_uninitialized) 1634 { 1635 gimple_stmt_iterator gsi; 1636 basic_block bb; 1637 1638 FOR_EACH_BB (bb) 1639 { 1640 bool always_executed = dominated_by_p (CDI_POST_DOMINATORS, 1641 single_succ (ENTRY_BLOCK_PTR), bb); 1642 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) 1643 { 1644 gimple stmt = gsi_stmt (gsi); 1645 use_operand_p use_p; 1646 ssa_op_iter op_iter; 1647 tree use; 1648 1649 if (is_gimple_debug (stmt)) 1650 continue; 1651 1652 /* We only do data flow with SSA_NAMEs, so that's all we 1653 can warn about. 
*/ 1654 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, op_iter, SSA_OP_USE) 1655 { 1656 use = USE_FROM_PTR (use_p); 1657 if (always_executed) 1658 warn_uninit (OPT_Wuninitialized, use, 1659 SSA_NAME_VAR (use), SSA_NAME_VAR (use), 1660 "%qD is used uninitialized in this function", 1661 stmt); 1662 else if (warn_possibly_uninitialized) 1663 warn_uninit (OPT_Wmaybe_uninitialized, use, 1664 SSA_NAME_VAR (use), SSA_NAME_VAR (use), 1665 "%qD may be used uninitialized in this function", 1666 stmt); 1667 } 1668 1669 /* For memory the only cheap thing we can do is see if we 1670 have a use of the default def of the virtual operand. 1671 ??? Note that at -O0 we do not have virtual operands. 1672 ??? Not so cheap would be to use the alias oracle via 1673 walk_aliased_vdefs, if we don't find any aliasing vdef 1674 warn as is-used-uninitialized, if we don't find an aliasing 1675 vdef that kills our use (stmt_kills_ref_p), warn as 1676 may-be-used-uninitialized. But this walk is quadratic and 1677 so must be limited which means we would miss warning 1678 opportunities. */ 1679 use = gimple_vuse (stmt); 1680 if (use 1681 && gimple_assign_single_p (stmt) 1682 && !gimple_vdef (stmt) 1683 && SSA_NAME_IS_DEFAULT_DEF (use)) 1684 { 1685 tree rhs = gimple_assign_rhs1 (stmt); 1686 tree base = get_base_address (rhs); 1687 1688 /* Do not warn if it can be initialized outside this function. */ 1689 if (TREE_CODE (base) != VAR_DECL 1690 || DECL_HARD_REGISTER (base) 1691 || is_global_var (base)) 1692 continue; 1693 1694 if (always_executed) 1695 warn_uninit (OPT_Wuninitialized, use, 1696 gimple_assign_rhs1 (stmt), base, 1697 "%qE is used uninitialized in this function", 1698 stmt); 1699 else if (warn_possibly_uninitialized) 1700 warn_uninit (OPT_Wmaybe_uninitialized, use, 1701 gimple_assign_rhs1 (stmt), base, 1702 "%qE may be used uninitialized in this function", 1703 stmt); 1704 } 1705 } 1706 } 1707 1708 return 0; 1709 } 1710 1711 static unsigned int 1712 execute_early_warn_uninitialized (void) 1713 { 1714 /* Currently, this pass runs always but 1715 execute_late_warn_uninitialized only runs with optimization. With 1716 optimization we want to warn about possible uninitialized as late 1717 as possible, thus don't do it here. However, without 1718 optimization we need to warn here about "may be uninitialized". 1719 */ 1720 calculate_dominance_info (CDI_POST_DOMINATORS); 1721 1722 warn_uninitialized_vars (/*warn_possibly_uninitialized=*/!optimize); 1723 1724 /* Post-dominator information can not be reliably updated. Free it 1725 after the use. */ 1726 1727 free_dominance_info (CDI_POST_DOMINATORS); 1728 return 0; 1729 } 1730 1731 static bool 1732 gate_warn_uninitialized (void) 1733 { 1734 return warn_uninitialized != 0; 1735 } 1736 1737 struct gimple_opt_pass pass_early_warn_uninitialized = 1738 { 1739 { 1740 GIMPLE_PASS, 1741 "*early_warn_uninitialized", /* name */ 1742 OPTGROUP_NONE, /* optinfo_flags */ 1743 gate_warn_uninitialized, /* gate */ 1744 execute_early_warn_uninitialized, /* execute */ 1745 NULL, /* sub */ 1746 NULL, /* next */ 1747 0, /* static_pass_number */ 1748 TV_TREE_UNINIT, /* tv_id */ 1749 PROP_ssa, /* properties_required */ 1750 0, /* properties_provided */ 1751 0, /* properties_destroyed */ 1752 0, /* todo_flags_start */ 1753 0 /* todo_flags_finish */ 1754 } 1755 }; 1756 1757 1758 /* If necessary, rewrite the base of the reference tree *TP from 1759 a MEM_REF to a plain or converted symbol. 
*/ 1760 1761 static void 1762 maybe_rewrite_mem_ref_base (tree *tp, bitmap suitable_for_renaming) 1763 { 1764 tree sym; 1765 1766 while (handled_component_p (*tp)) 1767 tp = &TREE_OPERAND (*tp, 0); 1768 if (TREE_CODE (*tp) == MEM_REF 1769 && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR 1770 && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0)) 1771 && DECL_P (sym) 1772 && !TREE_ADDRESSABLE (sym) 1773 && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))) 1774 { 1775 if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE 1776 && useless_type_conversion_p (TREE_TYPE (*tp), 1777 TREE_TYPE (TREE_TYPE (sym))) 1778 && multiple_of_p (sizetype, TREE_OPERAND (*tp, 1), 1779 TYPE_SIZE_UNIT (TREE_TYPE (*tp)))) 1780 { 1781 *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym, 1782 TYPE_SIZE (TREE_TYPE (*tp)), 1783 int_const_binop (MULT_EXPR, 1784 bitsize_int (BITS_PER_UNIT), 1785 TREE_OPERAND (*tp, 1))); 1786 } 1787 else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE 1788 && useless_type_conversion_p (TREE_TYPE (*tp), 1789 TREE_TYPE (TREE_TYPE (sym)))) 1790 { 1791 *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1)) 1792 ? REALPART_EXPR : IMAGPART_EXPR, 1793 TREE_TYPE (*tp), sym); 1794 } 1795 else if (integer_zerop (TREE_OPERAND (*tp, 1))) 1796 { 1797 if (!useless_type_conversion_p (TREE_TYPE (*tp), 1798 TREE_TYPE (sym))) 1799 *tp = build1 (VIEW_CONVERT_EXPR, 1800 TREE_TYPE (*tp), sym); 1801 else 1802 *tp = sym; 1803 } 1804 } 1805 } 1806 1807 /* For a tree REF return its base if it is the base of a MEM_REF 1808 that cannot be rewritten into SSA form. Otherwise return NULL_TREE. */ 1809 1810 static tree 1811 non_rewritable_mem_ref_base (tree ref) 1812 { 1813 tree base = ref; 1814 1815 /* A plain decl does not need it set. */ 1816 if (DECL_P (ref)) 1817 return NULL_TREE; 1818 1819 while (handled_component_p (base)) 1820 base = TREE_OPERAND (base, 0); 1821 1822 /* But watch out for MEM_REFs we cannot lower to a 1823 VIEW_CONVERT_EXPR or a BIT_FIELD_REF. */ 1824 if (TREE_CODE (base) == MEM_REF 1825 && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR) 1826 { 1827 tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0); 1828 if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE 1829 || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE) 1830 && useless_type_conversion_p (TREE_TYPE (base), 1831 TREE_TYPE (TREE_TYPE (decl))) 1832 && mem_ref_offset (base).fits_uhwi () 1833 && tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (decl))) 1834 .ugt (mem_ref_offset (base)) 1835 && multiple_of_p (sizetype, TREE_OPERAND (base, 1), 1836 TYPE_SIZE_UNIT (TREE_TYPE (base)))) 1837 return NULL_TREE; 1838 if (DECL_P (decl) 1839 && (!integer_zerop (TREE_OPERAND (base, 1)) 1840 || (DECL_SIZE (decl) 1841 != TYPE_SIZE (TREE_TYPE (base))) 1842 || TREE_THIS_VOLATILE (decl) != TREE_THIS_VOLATILE (base))) 1843 return decl; 1844 } 1845 1846 return NULL_TREE; 1847 } 1848 1849 /* For an lvalue tree LHS return true if it cannot be rewritten into SSA form. 1850 Otherwise return true. */ 1851 1852 static bool 1853 non_rewritable_lvalue_p (tree lhs) 1854 { 1855 /* A plain decl is always rewritable. */ 1856 if (DECL_P (lhs)) 1857 return false; 1858 1859 /* A decl that is wrapped inside a MEM-REF that covers 1860 it full is also rewritable. 1861 ??? The following could be relaxed allowing component 1862 references that do not change the access size. 
*/ 1863 if (TREE_CODE (lhs) == MEM_REF 1864 && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR 1865 && integer_zerop (TREE_OPERAND (lhs, 1))) 1866 { 1867 tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0); 1868 if (DECL_P (decl) 1869 && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs)) 1870 && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs))) 1871 return false; 1872 } 1873 1874 return true; 1875 } 1876 1877 /* When possible, clear TREE_ADDRESSABLE bit or set DECL_GIMPLE_REG_P bit and 1878 mark the variable VAR for conversion into SSA. Return true when updating 1879 stmts is required. */ 1880 1881 static void 1882 maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs, 1883 bitmap suitable_for_renaming) 1884 { 1885 /* Global Variables, result decls cannot be changed. */ 1886 if (is_global_var (var) 1887 || TREE_CODE (var) == RESULT_DECL 1888 || bitmap_bit_p (addresses_taken, DECL_UID (var))) 1889 return; 1890 1891 if (TREE_ADDRESSABLE (var) 1892 /* Do not change TREE_ADDRESSABLE if we need to preserve var as 1893 a non-register. Otherwise we are confused and forget to 1894 add virtual operands for it. */ 1895 && (!is_gimple_reg_type (TREE_TYPE (var)) 1896 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE 1897 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE 1898 || !bitmap_bit_p (not_reg_needs, DECL_UID (var)))) 1899 { 1900 TREE_ADDRESSABLE (var) = 0; 1901 if (is_gimple_reg (var)) 1902 bitmap_set_bit (suitable_for_renaming, DECL_UID (var)); 1903 if (dump_file) 1904 { 1905 fprintf (dump_file, "No longer having address taken: "); 1906 print_generic_expr (dump_file, var, 0); 1907 fprintf (dump_file, "\n"); 1908 } 1909 } 1910 1911 if (!DECL_GIMPLE_REG_P (var) 1912 && !bitmap_bit_p (not_reg_needs, DECL_UID (var)) 1913 && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE 1914 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE) 1915 && !TREE_THIS_VOLATILE (var) 1916 && (TREE_CODE (var) != VAR_DECL || !DECL_HARD_REGISTER (var))) 1917 { 1918 DECL_GIMPLE_REG_P (var) = 1; 1919 bitmap_set_bit (suitable_for_renaming, DECL_UID (var)); 1920 if (dump_file) 1921 { 1922 fprintf (dump_file, "Now a gimple register: "); 1923 print_generic_expr (dump_file, var, 0); 1924 fprintf (dump_file, "\n"); 1925 } 1926 } 1927 } 1928 1929 /* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables. */ 1930 1931 void 1932 execute_update_addresses_taken (void) 1933 { 1934 gimple_stmt_iterator gsi; 1935 basic_block bb; 1936 bitmap addresses_taken = BITMAP_ALLOC (NULL); 1937 bitmap not_reg_needs = BITMAP_ALLOC (NULL); 1938 bitmap suitable_for_renaming = BITMAP_ALLOC (NULL); 1939 tree var; 1940 unsigned i; 1941 1942 timevar_push (TV_ADDRESS_TAKEN); 1943 1944 /* Collect into ADDRESSES_TAKEN all variables whose address is taken within 1945 the function body. */ 1946 FOR_EACH_BB (bb) 1947 { 1948 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) 1949 { 1950 gimple stmt = gsi_stmt (gsi); 1951 enum gimple_code code = gimple_code (stmt); 1952 tree decl; 1953 1954 /* Note all addresses taken by the stmt. */ 1955 gimple_ior_addresses_taken (addresses_taken, stmt); 1956 1957 /* If we have a call or an assignment, see if the lhs contains 1958 a local decl that requires not to be a gimple register. 
/* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables.  */

void
execute_update_addresses_taken (void)
{
  gimple_stmt_iterator gsi;
  basic_block bb;
  bitmap addresses_taken = BITMAP_ALLOC (NULL);
  bitmap not_reg_needs = BITMAP_ALLOC (NULL);
  bitmap suitable_for_renaming = BITMAP_ALLOC (NULL);
  tree var;
  unsigned i;

  timevar_push (TV_ADDRESS_TAKEN);

  /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
     the function body.  */
  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          enum gimple_code code = gimple_code (stmt);
          tree decl;

          /* Note all addresses taken by the stmt.  */
          gimple_ior_addresses_taken (addresses_taken, stmt);

          /* If we have a call or an assignment, see if the lhs contains
             a local decl that must not become a gimple register.  */
          if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
            {
              tree lhs = gimple_get_lhs (stmt);
              if (lhs
                  && TREE_CODE (lhs) != SSA_NAME
                  && non_rewritable_lvalue_p (lhs))
                {
                  decl = get_base_address (lhs);
                  if (DECL_P (decl))
                    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
                }
            }

          if (gimple_assign_single_p (stmt))
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              if ((decl = non_rewritable_mem_ref_base (rhs)))
                bitmap_set_bit (not_reg_needs, DECL_UID (decl));
            }

          else if (code == GIMPLE_CALL)
            {
              for (i = 0; i < gimple_call_num_args (stmt); ++i)
                {
                  tree arg = gimple_call_arg (stmt, i);
                  if ((decl = non_rewritable_mem_ref_base (arg)))
                    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
                }
            }

          else if (code == GIMPLE_ASM)
            {
              for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
                {
                  tree link = gimple_asm_output_op (stmt, i);
                  tree lhs = TREE_VALUE (link);
                  if (TREE_CODE (lhs) != SSA_NAME)
                    {
                      decl = get_base_address (lhs);
                      if (DECL_P (decl)
                          && (non_rewritable_lvalue_p (lhs)
                              /* We cannot move required conversions from
                                 the lhs to the rhs in asm statements, so
                                 require we do not need any.  */
                              || !useless_type_conversion_p
                                    (TREE_TYPE (lhs), TREE_TYPE (decl))))
                        bitmap_set_bit (not_reg_needs, DECL_UID (decl));
                    }
                }
              for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
                {
                  tree link = gimple_asm_input_op (stmt, i);
                  if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
                    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
                }
            }
        }

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          size_t i;
          gimple phi = gsi_stmt (gsi);

          for (i = 0; i < gimple_phi_num_args (phi); i++)
            {
              tree op = PHI_ARG_DEF (phi, i), var;
              if (TREE_CODE (op) == ADDR_EXPR
                  && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
                  && DECL_P (var))
                bitmap_set_bit (addresses_taken, DECL_UID (var));
            }
        }
    }

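  /* To illustrate the collection above (names made up): an assignment
         p_1 = &a;
     records 'a' in ADDRESSES_TAKEN via gimple_ior_addresses_taken, while
     a partial store such as
         REALPART_EXPR <c> = x_2;
     has a non-rewritable lvalue and therefore records 'c' in
     NOT_REG_NEEDS, which keeps 'c' from being turned into a gimple
     register by maybe_optimize_var.  */
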
  /* We cannot iterate over all referenced vars because that can contain
     unused vars from BLOCK trees, which causes code generation differences
     for -g vs. -g0.  */
  for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
    maybe_optimize_var (var, addresses_taken, not_reg_needs,
                        suitable_for_renaming);

  FOR_EACH_VEC_SAFE_ELT (cfun->local_decls, i, var)
    maybe_optimize_var (var, addresses_taken, not_reg_needs,
                        suitable_for_renaming);

  /* Operand caches need to be recomputed for operands referencing the updated
     variables and operands need to be rewritten to expose bare symbols.  */
  if (!bitmap_empty_p (suitable_for_renaming))
    {
      FOR_EACH_BB (bb)
        for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
          {
            gimple stmt = gsi_stmt (gsi);

            /* Re-write MEM_REFs of symbols we want to
               rewrite into SSA form.  */
            if (gimple_assign_single_p (stmt))
              {
                tree lhs = gimple_assign_lhs (stmt);
                tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
                tree sym;

                /* We shouldn't have any fancy wrapping of
                   component-refs on the LHS, but look through
                   VIEW_CONVERT_EXPRs as that is easy.  */
                while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
                  lhs = TREE_OPERAND (lhs, 0);
                if (TREE_CODE (lhs) == MEM_REF
                    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
                    && integer_zerop (TREE_OPERAND (lhs, 1))
                    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
                    && DECL_P (sym)
                    && !TREE_ADDRESSABLE (sym)
                    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
                  lhs = sym;
                else
                  lhs = gimple_assign_lhs (stmt);

                /* Rewrite the RHS and make sure the resulting assignment
                   is validly typed.  */
                maybe_rewrite_mem_ref_base (rhsp, suitable_for_renaming);
                rhs = gimple_assign_rhs1 (stmt);
                if (gimple_assign_lhs (stmt) != lhs
                    && !useless_type_conversion_p (TREE_TYPE (lhs),
                                                   TREE_TYPE (rhs)))
                  rhs = fold_build1 (VIEW_CONVERT_EXPR,
                                     TREE_TYPE (lhs), rhs);

                if (gimple_assign_lhs (stmt) != lhs)
                  gimple_assign_set_lhs (stmt, lhs);

                /* For var ={v} {CLOBBER}; where var lost
                   TREE_ADDRESSABLE just remove the stmt.  */
                if (DECL_P (lhs)
                    && TREE_CLOBBER_P (rhs)
                    && bitmap_bit_p (suitable_for_renaming, DECL_UID (lhs)))
                  {
                    unlink_stmt_vdef (stmt);
                    gsi_remove (&gsi, true);
                    release_defs (stmt);
                    continue;
                  }

                if (gimple_assign_rhs1 (stmt) != rhs)
                  {
                    gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
                    gimple_assign_set_rhs_from_tree (&gsi, rhs);
                  }
              }

            else if (gimple_code (stmt) == GIMPLE_CALL)
              {
                unsigned i;
                for (i = 0; i < gimple_call_num_args (stmt); ++i)
                  {
                    tree *argp = gimple_call_arg_ptr (stmt, i);
                    maybe_rewrite_mem_ref_base (argp, suitable_for_renaming);
                  }
              }

            else if (gimple_code (stmt) == GIMPLE_ASM)
              {
                unsigned i;
                for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
                  {
                    tree link = gimple_asm_output_op (stmt, i);
                    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
                                                suitable_for_renaming);
                  }
                for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
                  {
                    tree link = gimple_asm_input_op (stmt, i);
                    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
                                                suitable_for_renaming);
                  }
              }

            else if (gimple_debug_bind_p (stmt)
                     && gimple_debug_bind_has_value_p (stmt))
              {
                tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
                tree decl;
                maybe_rewrite_mem_ref_base (valuep, suitable_for_renaming);
                decl = non_rewritable_mem_ref_base (*valuep);
                if (decl
                    && bitmap_bit_p (suitable_for_renaming, DECL_UID (decl)))
                  gimple_debug_bind_reset_value (stmt);
              }

            if (gimple_references_memory_p (stmt)
                || is_gimple_debug (stmt))
              update_stmt (stmt);

            gsi_next (&gsi);
          }

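      /* To illustrate the rewriting above (names made up): once 'a' has
         lost TREE_ADDRESSABLE and is in SUITABLE_FOR_RENAMING, a store
             MEM[(int *)&a] = x_3;
         becomes
             a = x_3;
         with a VIEW_CONVERT_EXPR wrapped around the rhs if the types
         disagree, and a clobber
             a ={v} {CLOBBER};
         is removed altogether.  */
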
      /* Update SSA form here; we are called as a non-pass as well.  */
      if (number_of_loops () > 1 && loops_state_satisfies_p (LOOP_CLOSED_SSA))
        rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
      else
        update_ssa (TODO_update_ssa);
    }

  BITMAP_FREE (not_reg_needs);
  BITMAP_FREE (addresses_taken);
  BITMAP_FREE (suitable_for_renaming);
  timevar_pop (TV_ADDRESS_TAKEN);
}

struct gimple_opt_pass pass_update_address_taken =
{
 {
  GIMPLE_PASS,
  "addressables",			/* name */
  OPTGROUP_NONE,			/* optinfo_flags */
  NULL,					/* gate */
  NULL,					/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_ADDRESS_TAKEN,			/* tv_id */
  PROP_ssa,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_update_address_taken		/* todo_flags_finish */
 }
};