/* SSA operands management for trees.
   Copyright (C) 2003-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "timevar.h"
#include "dumpfile.h"
#include "ggc.h"
#include "langhooks.h"
#include "diagnostic-core.h"


/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines, which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 4 of these routines, each representing one of the
   4 types of operands:  Defs, Uses, Virtual Uses, and Virtual May Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
   operand vector for VUSE, then the new vector will also be modified
   such that it contains 'a_5' rather than 'a'.  */
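/* For orientation only: consumers of this cache never touch the build_*
   vectors below; they walk the finished per-stmt operand lists through
   the operand iterators.  A minimal, illustrative (not verbatim) fragment,
   assuming STMT already has up-to-date operands:

	use_operand_p use_p;
	ssa_op_iter iter;

	FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
	  print_generic_expr (stderr, USE_FROM_PTR (use_p), TDF_SLIM);

   The iterators visit exactly the use_optype_d nodes that
   finalize_ssa_uses attaches to the statement below.  */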

/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use 0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def (1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops (1 << 1)

/* Operand is an implicit reference.  This is used to distinguish
   explicit assignments in the form of MODIFY_EXPR from
   clobbering sites like function calls or ASM_EXPRs.  */
#define opf_implicit (1 << 2)

/* Operand is in a place where address-taken does not imply addressable.  */
#define opf_non_addressable (1 << 3)

/* Operand is in a place where opf_non_addressable does not apply.  */
#define opf_not_non_addressable (1 << 4)

/* Array for building all the use operands.  */
static vec<tree> build_uses;

/* The built VDEF operand.  */
static tree build_vdef;

/* The built VUSE operand.  */
static tree build_vuse;

/* Bitmap obstack for our datastructures that need to survive across
   compilations of multiple functions.  */
static bitmap_obstack operands_bitmap_obstack;

static void get_expr_operands (gimple, tree *, int);

/* Number of functions with initialized ssa_operands.  */
static int n_initialized = 0;


/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (struct function *fun)
{
  if (fun == NULL)
    return false;

  return fun->gimple_df && gimple_ssa_operands (fun)->ops_active;
}


/* Create the VOP variable, an artificial global variable to act as a
   representative of all of the virtual operands FUD chain.  */

static void
create_vop_var (struct function *fn)
{
  tree global_var;

  gcc_assert (fn->gimple_df->vop == NULL_TREE);

  global_var = build_decl (BUILTINS_LOCATION, VAR_DECL,
			   get_identifier (".MEM"),
			   void_type_node);
  DECL_ARTIFICIAL (global_var) = 1;
  TREE_READONLY (global_var) = 0;
  DECL_EXTERNAL (global_var) = 1;
  TREE_STATIC (global_var) = 1;
  TREE_USED (global_var) = 1;
  DECL_CONTEXT (global_var) = NULL_TREE;
  TREE_THIS_VOLATILE (global_var) = 0;
  TREE_ADDRESSABLE (global_var) = 0;
  VAR_DECL_IS_VIRTUAL_OPERAND (global_var) = 1;

  fn->gimple_df->vop = global_var;
}

/* These are the sizes of the operand memory buffer in bytes which gets
   allocated each time more operands space is required.  The final value is
   the amount that is allocated every time after that.
   In 1k we can fit 25 use operands (or 63 def operands) on a host with
   8 byte pointers, which would be 10 statements each with 1 def and 2
   uses.  */

#define OP_SIZE_INIT 0
#define OP_SIZE_1 (1024 - sizeof (void *))
#define OP_SIZE_2 (1024 * 4 - sizeof (void *))
#define OP_SIZE_3 (1024 * 16 - sizeof (void *))
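/* A rough sanity check of the figures in the comment above (an
   illustration, not a guarantee; exact sizes depend on the host ABI):
   with 8-byte pointers, struct use_optype_d is a next pointer plus a
   4-pointer ssa_use_operand_d, i.e. about 40 bytes, so OP_SIZE_1's
   1016 usable bytes hold 1016 / 40 = 25 use operands; a two-pointer
   def operand node (16 bytes) would give 1016 / 16 = 63.  */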

/* Initialize the operand cache routines.  */

void
init_ssa_operands (struct function *fn)
{
  if (!n_initialized++)
    {
      build_uses.create (10);
      build_vuse = NULL_TREE;
      build_vdef = NULL_TREE;
      bitmap_obstack_initialize (&operands_bitmap_obstack);
    }

  gcc_assert (gimple_ssa_operands (fn)->operand_memory == NULL);
  gimple_ssa_operands (fn)->operand_memory_index
    = gimple_ssa_operands (fn)->ssa_operand_mem_size;
  gimple_ssa_operands (fn)->ops_active = true;
  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_INIT;
  create_vop_var (fn);
}


/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;

  if (!--n_initialized)
    {
      build_uses.release ();
      build_vdef = NULL_TREE;
      build_vuse = NULL_TREE;
    }

  gimple_ssa_operands (cfun)->free_uses = NULL;

  while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
    {
      gimple_ssa_operands (cfun)->operand_memory
	= gimple_ssa_operands (cfun)->operand_memory->next;
      ggc_free (ptr);
    }

  gimple_ssa_operands (cfun)->ops_active = false;

  if (!n_initialized)
    bitmap_obstack_release (&operands_bitmap_obstack);

  cfun->gimple_df->vop = NULL_TREE;
}


/* Return memory for an operand of size SIZE.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;

  gcc_assert (size == sizeof (struct use_optype_d));

  if (gimple_ssa_operands (cfun)->operand_memory_index + size
      >= gimple_ssa_operands (cfun)->ssa_operand_mem_size)
    {
      struct ssa_operand_memory_d *ptr;

      switch (gimple_ssa_operands (cfun)->ssa_operand_mem_size)
	{
	case OP_SIZE_INIT:
	  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_1;
	  break;
	case OP_SIZE_1:
	  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_2;
	  break;
	case OP_SIZE_2:
	case OP_SIZE_3:
	  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_3;
	  break;
	default:
	  gcc_unreachable ();
	}

      ptr = ggc_alloc_ssa_operand_memory_d (sizeof (void *)
			+ gimple_ssa_operands (cfun)->ssa_operand_mem_size);

      ptr->next = gimple_ssa_operands (cfun)->operand_memory;
      gimple_ssa_operands (cfun)->operand_memory = ptr;
      gimple_ssa_operands (cfun)->operand_memory_index = 0;
    }

  ptr = &(gimple_ssa_operands (cfun)->operand_memory
	  ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
  gimple_ssa_operands (cfun)->operand_memory_index += size;
  return ptr;
}


/* Allocate a USE operand.  */

static inline struct use_optype_d *
alloc_use (void)
{
  struct use_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_uses)
    {
      ret = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses
	= gimple_ssa_operands (cfun)->free_uses->next;
    }
  else
    ret = (struct use_optype_d *)
	  ssa_operand_alloc (sizeof (struct use_optype_d));
  return ret;
}


/* Adds OP to the list of uses of statement STMT after LAST.  */

static inline use_optype_p
add_use_op (gimple stmt, tree *op, use_optype_p last)
{
  use_optype_p new_use;

  new_use = alloc_use ();
  USE_OP_PTR (new_use)->use = op;
  link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt);
  last->next = new_use;
  new_use->next = NULL;
  return new_use;
}
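/* In addition to going on STMT's own use list, every node created by
   add_use_op is threaded (via link_imm_use_stmt) onto the immediate-use
   list of the SSA name it refers to.  That list is what makes iteration
   such as the following possible (illustrative fragment only):

	use_operand_p use_p;
	imm_use_iterator imm_iter;

	FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ssa_name)
	  ...;

   and it is the structure checked by verify_imm_links and dumped by
   dump_immediate_uses further down in this file.  */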

/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Make build_defs vec of tree *.  */

static inline void
finalize_ssa_defs (gimple stmt)
{
  /* Pre-pend the vdef we may have built.  */
  if (build_vdef != NULL_TREE)
    {
      tree oldvdef = gimple_vdef (stmt);
      if (oldvdef
	  && TREE_CODE (oldvdef) == SSA_NAME)
	oldvdef = SSA_NAME_VAR (oldvdef);
      if (oldvdef != build_vdef)
	gimple_set_vdef (stmt, build_vdef);
    }

  /* Clear and unlink a no longer necessary VDEF.  */
  if (build_vdef == NULL_TREE
      && gimple_vdef (stmt) != NULL_TREE)
    {
      if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (gimple_vdef (stmt));
	}
      gimple_set_vdef (stmt, NULL_TREE);
    }

  /* If we have a non-SSA_NAME VDEF, mark it for renaming.  */
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
    {
      cfun->gimple_df->rename_vops = 1;
      cfun->gimple_df->ssa_renaming_needed = 1;
    }
}


/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses vec of tree *.  */

static inline void
finalize_ssa_uses (gimple stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  /* Pre-pend the VUSE we may have built.  */
  if (build_vuse != NULL_TREE)
    {
      tree oldvuse = gimple_vuse (stmt);
      if (oldvuse
	  && TREE_CODE (oldvuse) == SSA_NAME)
	oldvuse = SSA_NAME_VAR (oldvuse);
      if (oldvuse != (build_vuse != NULL_TREE
		      ? build_vuse : build_vdef))
	gimple_set_vuse (stmt, NULL_TREE);
      build_uses.safe_insert (0, (tree)gimple_vuse_ptr (stmt));
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_use_ops (stmt);

  /* Clear a no longer necessary VUSE.  */
  if (build_vuse == NULL_TREE
      && gimple_vuse (stmt) != NULL_TREE)
    gimple_set_vuse (stmt, NULL_TREE);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (USE_OP_PTR (ptr));
      old_ops->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = old_ops;
    }

  /* If we added a VUSE, make sure to set the operand if it is not already
     present and mark it for renaming.  */
  if (build_vuse != NULL_TREE
      && gimple_vuse (stmt) == NULL_TREE)
    {
      gimple_set_vuse (stmt, gimple_vop (cfun));
      cfun->gimple_df->rename_vops = 1;
      cfun->gimple_df->ssa_renaming_needed = 1;
    }

  /* Now create nodes for all the new nodes.  */
  for (new_i = 0; new_i < build_uses.length (); new_i++)
    {
      tree *op = (tree *) build_uses[new_i];
      last = add_use_op (stmt, op, last);
    }

  /* Now set the stmt's operands.  */
  gimple_set_use_ops (stmt, new_list.next);
}

/* Empty the build arrays for VDEFs, VUSEs and uses.  */

static inline void
cleanup_build_arrays (void)
{
  build_vdef = NULL_TREE;
  build_vuse = NULL_TREE;
  build_uses.truncate (0);
}


/* Finalize all the build vectors, fill the new operands into STMT.  */

static inline void
finalize_ssa_stmt_operands (gimple stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  cleanup_build_arrays ();
}


/* Start the process of building up the operand vectors for a statement.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (build_uses.length () == 0);
  gcc_assert (build_vuse == NULL_TREE);
  gcc_assert (build_vdef == NULL_TREE);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  build_uses.safe_push ((tree) use_p);
}


/* Add VAR to the set of variables that require a VDEF operator.  */

static inline void
append_vdef (tree var)
{
  if (!optimize)
    return;

  gcc_assert ((build_vdef == NULL_TREE
	       || build_vdef == var)
	      && (build_vuse == NULL_TREE
		  || build_vuse == var));

  build_vdef = var;
  build_vuse = var;
}


/* Add VAR to the set of variables that require a VUSE operator.  */

static inline void
append_vuse (tree var)
{
  if (!optimize)
    return;

  gcc_assert (build_vuse == NULL_TREE
	      || build_vuse == var);

  build_vuse = var;
}

/* Add virtual operands for STMT.  FLAGS is as in get_expr_operands.  */

static void
add_virtual_operand (gimple stmt ATTRIBUTE_UNUSED, int flags)
{
  /* Add virtual operands to the stmt, unless the caller has specifically
     requested not to do that (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  gcc_assert (!is_gimple_debug (stmt));

  if (flags & opf_def)
    append_vdef (gimple_vop (cfun));
  else
    append_vuse (gimple_vop (cfun));
}


/* Add *VAR_P to the appropriate operand array for statement STMT.
   FLAGS is as in get_expr_operands.  If *VAR_P is a GIMPLE register,
   it will be added to the statement's real operands, otherwise it is
   added to virtual operands.  */

static void
add_stmt_operand (tree *var_p, gimple stmt, int flags)
{
  tree var = *var_p;

  gcc_assert (SSA_VAR_P (*var_p));

  if (is_gimple_reg (var))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_def)
	;
      else
	append_use (var_p);
      if (DECL_P (*var_p))
	cfun->gimple_df->ssa_renaming_needed = 1;
    }
  else
    {
      /* Mark statements with volatile operands.  */
      if (!(flags & opf_no_vops)
	  && TREE_THIS_VOLATILE (var))
	gimple_set_has_volatile_ops (stmt, true);

      /* The variable is a memory access.  Add virtual operands.  */
      add_virtual_operand (stmt, flags);
    }
}

/* Mark the base address of REF as having its address taken.
   REF may be a single variable whose address has been taken or any
   other valid GIMPLE memory reference (structure reference, array,
   etc).  */

static void
mark_address_taken (tree ref)
{
  tree var;

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var)
    {
      if (DECL_P (var))
	TREE_ADDRESSABLE (var) = 1;
      else if (TREE_CODE (var) == MEM_REF
	       && TREE_CODE (TREE_OPERAND (var, 0)) == ADDR_EXPR
	       && DECL_P (TREE_OPERAND (TREE_OPERAND (var, 0), 0)))
	TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (var, 0), 0)) = 1;
    }
}


/* A subroutine of get_expr_operands to handle MEM_REF.

   STMT is the statement being processed, EXPR is the MEM_REF
   that got us here.

   FLAGS is as in get_expr_operands.  */

static void
get_indirect_ref_operands (gimple stmt, tree expr, int flags)
{
  tree *pptr = &TREE_OPERAND (expr, 0);

  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* Add the VOP.  */
  add_virtual_operand (stmt, flags);

  /* If requested, add a USE operand for the base pointer.  */
  get_expr_operands (stmt, pptr,
		     opf_non_addressable | opf_use
		     | (flags & (opf_no_vops|opf_not_non_addressable)));
}


/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (gimple stmt, tree expr, int flags)
{
  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* First record the real operands.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (stmt, &TMR_INDEX2 (expr), opf_use | (flags & opf_no_vops));

  add_virtual_operand (stmt, flags);
}


/* If STMT is a call that may clobber globals and other symbols that
   escape, add them to the VDEF/VUSE lists for it.  */

static void
maybe_add_call_vops (gimple stmt)
{
  int call_flags = gimple_call_flags (stmt);

  /* If aliases have been computed already, add VDEF or VUSE
     operands for all the symbols that have been found to be
     call-clobbered.  */
  if (!(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.  */
      if (!(call_flags & (ECF_PURE | ECF_CONST)))
	add_virtual_operand (stmt, opf_def);
      else if (!(call_flags & ECF_CONST))
	add_virtual_operand (stmt, opf_use);
    }
}


/* Scan the operands of the GIMPLE_ASM statement STMT.  */

static void
get_asm_expr_operands (gimple stmt)
{
  size_t i, noutputs;
  const char **oconstraints;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  /* Gather all output operands.  */
  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
    {
      tree link = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
			       &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);
      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (stmt, &TREE_VALUE (link),
			 opf_def | opf_not_non_addressable);
    }

  /* Gather all input operands.  */
  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    {
      tree link = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
			      &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (stmt, &TREE_VALUE (link), opf_not_non_addressable);
    }

  /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
  if (gimple_asm_clobbers_memory_p (stmt))
    add_virtual_operand (stmt, opf_def);
}


/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (gimple stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class codeclass;
  tree expr = *expr_p;
  int uflags = opf_use;

  if (expr == NULL)
    return;

  if (is_gimple_debug (stmt))
    uflags |= (flags & opf_no_vops);

  code = TREE_CODE (expr);
  codeclass = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the statement takes its
	 address will be of interest to some passes (e.g. alias
	 resolution).  */
      if ((!(flags & opf_non_addressable)
	   || (flags & opf_not_non_addressable))
	  && !is_gimple_debug (stmt))
	mark_address_taken (TREE_OPERAND (expr, 0));

      /* If the address is invariant, there may be no interesting
	 variable references inside.  */
      if (is_gimple_min_invariant (expr))
	return;

      /* Otherwise, there may be variables referenced inside but there
	 should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find
	 here are ARRAY_REF indices which will always be real operands
	 (GIMPLE does not allow non-registers as array indices).  */
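      /* For instance, for &a.b[i] only 'i' ends up as a real USE; neither
	 'a' nor 'b' gets a VUSE because the object itself is not read
	 here.  See the description of opf_no_vops near the top of this
	 file.  */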
      flags |= opf_no_vops;
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
			 flags | opf_not_non_addressable);
      return;

    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case DEBUG_EXPR_DECL:
      gcc_assert (gimple_debug_bind_p (stmt));
      return;

    case MEM_REF:
      get_indirect_ref_operands (stmt, expr, flags);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

	if (code == COMPONENT_REF)
	  {
	    if (!(flags & opf_no_vops)
		&& TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
	      gimple_set_has_volatile_ops (stmt, true);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
	  }
	else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
	  {
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 3), uflags);
	  }

	return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
      return;

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
	constructor_elt *ce;
	unsigned HOST_WIDE_INT idx;

	/* A volatile constructor is actually TREE_CLOBBER_P, transfer
	   the volatility to the statement, don't use TREE_CLOBBER_P for
	   mirroring the other uses of THIS_VOLATILE in this file.  */
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	for (idx = 0;
	     vec_safe_iterate (CONSTRUCTOR_ELTS (expr), idx, &ce);
	     idx++)
	  get_expr_operands (stmt, &ce->value, uflags);

	return;
      }

    case BIT_FIELD_REF:
      if (!(flags & opf_no_vops)
	  && TREE_THIS_VOLATILE (expr))
	gimple_set_has_volatile_ops (stmt, true);
      /* FALLTHRU */

    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
    case FMA_EXPR:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }

    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case CASE_LABEL_EXPR:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (codeclass == tcc_unary)
	goto do_unary;
      if (codeclass == tcc_binary || codeclass == tcc_comparison)
	goto do_binary;
      if (codeclass == tcc_constant || codeclass == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
#endif
  gcc_unreachable ();
}


/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);
  size_t i, n, start = 0;

  switch (code)
    {
    case GIMPLE_ASM:
      get_asm_expr_operands (stmt);
      break;

    case GIMPLE_TRANSACTION:
      /* The start of a transaction is a memory barrier.  */
      add_virtual_operand (stmt, opf_def | opf_use);
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt)
	  && gimple_debug_bind_has_value_p (stmt))
	get_expr_operands (stmt, gimple_debug_bind_get_value_ptr (stmt),
			   opf_use | opf_no_vops);
      break;

    case GIMPLE_RETURN:
      append_vuse (gimple_vop (cfun));
      goto do_default;

    case GIMPLE_CALL:
      /* Add call-clobbered operands, if needed.  */
      maybe_add_call_vops (stmt);
      /* FALLTHRU */

    case GIMPLE_ASSIGN:
      get_expr_operands (stmt, gimple_op_ptr (stmt, 0), opf_def);
      start = 1;
      /* FALLTHRU */

    default:
    do_default:
      n = gimple_num_ops (stmt);
      for (i = start; i < n; i++)
	get_expr_operands (stmt, gimple_op_ptr (stmt, i), opf_use);
      break;
    }
}


/* Create an operands cache for STMT.  */

static void
build_ssa_operands (gimple stmt)
{
  /* Initially assume that the statement has no volatile operands.  */
  gimple_set_has_volatile_ops (stmt, false);

  start_ssa_stmt_operands ();
  parse_ssa_operands (stmt);
  finalize_ssa_stmt_operands (stmt);
}
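/* For context, a pass normally does not call build_ssa_operands directly.
   After modifying a statement in place it marks the statement modified and
   lets the operand scanner rerun; an illustrative (not verbatim) fragment,
   assuming STMT and NEW_RHS already exist:

	gimple_assign_set_rhs1 (stmt, new_rhs);
	update_stmt (stmt);

   update_stmt eventually reaches update_stmt_operands below, which in
   turn calls build_ssa_operands.  */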
*/ 950 951 DEBUG_FUNCTION bool 952 verify_ssa_operands (gimple stmt) 953 { 954 use_operand_p use_p; 955 def_operand_p def_p; 956 ssa_op_iter iter; 957 unsigned i; 958 tree use, def; 959 bool volatile_p = gimple_has_volatile_ops (stmt); 960 961 /* build_ssa_operands w/o finalizing them. */ 962 gimple_set_has_volatile_ops (stmt, false); 963 start_ssa_stmt_operands (); 964 parse_ssa_operands (stmt); 965 966 /* Now verify the built operands are the same as present in STMT. */ 967 def = gimple_vdef (stmt); 968 if (def 969 && TREE_CODE (def) == SSA_NAME) 970 def = SSA_NAME_VAR (def); 971 if (build_vdef != def) 972 { 973 error ("virtual definition of statement not up-to-date"); 974 return true; 975 } 976 if (gimple_vdef (stmt) 977 && ((def_p = gimple_vdef_op (stmt)) == NULL_DEF_OPERAND_P 978 || DEF_FROM_PTR (def_p) != gimple_vdef (stmt))) 979 { 980 error ("virtual def operand missing for stmt"); 981 return true; 982 } 983 984 use = gimple_vuse (stmt); 985 if (use 986 && TREE_CODE (use) == SSA_NAME) 987 use = SSA_NAME_VAR (use); 988 if (build_vuse != use) 989 { 990 error ("virtual use of statement not up-to-date"); 991 return true; 992 } 993 if (gimple_vuse (stmt) 994 && ((use_p = gimple_vuse_op (stmt)) == NULL_USE_OPERAND_P 995 || USE_FROM_PTR (use_p) != gimple_vuse (stmt))) 996 { 997 error ("virtual use operand missing for stmt"); 998 return true; 999 } 1000 1001 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE) 1002 { 1003 FOR_EACH_VEC_ELT (build_uses, i, use) 1004 { 1005 if (use_p->use == (tree *)use) 1006 { 1007 build_uses[i] = NULL_TREE; 1008 break; 1009 } 1010 } 1011 if (i == build_uses.length ()) 1012 { 1013 error ("excess use operand for stmt"); 1014 debug_generic_expr (USE_FROM_PTR (use_p)); 1015 return true; 1016 } 1017 } 1018 FOR_EACH_VEC_ELT (build_uses, i, use) 1019 if (use != NULL_TREE) 1020 { 1021 error ("use operand missing for stmt"); 1022 debug_generic_expr (*(tree *)use); 1023 return true; 1024 } 1025 1026 if (gimple_has_volatile_ops (stmt) != volatile_p) 1027 { 1028 error ("stmt volatile flag not up-to-date"); 1029 return true; 1030 } 1031 1032 cleanup_build_arrays (); 1033 return false; 1034 } 1035 1036 1037 /* Releases the operands of STMT back to their freelists, and clears 1038 the stmt operand lists. */ 1039 1040 void 1041 free_stmt_operands (gimple stmt) 1042 { 1043 use_optype_p uses = gimple_use_ops (stmt), last_use; 1044 1045 if (uses) 1046 { 1047 for (last_use = uses; last_use->next; last_use = last_use->next) 1048 delink_imm_use (USE_OP_PTR (last_use)); 1049 delink_imm_use (USE_OP_PTR (last_use)); 1050 last_use->next = gimple_ssa_operands (cfun)->free_uses; 1051 gimple_ssa_operands (cfun)->free_uses = uses; 1052 gimple_set_use_ops (stmt, NULL); 1053 } 1054 1055 if (gimple_has_mem_ops (stmt)) 1056 { 1057 gimple_set_vuse (stmt, NULL_TREE); 1058 gimple_set_vdef (stmt, NULL_TREE); 1059 } 1060 } 1061 1062 1063 /* Get the operands of statement STMT. */ 1064 1065 void 1066 update_stmt_operands (gimple stmt) 1067 { 1068 /* If update_stmt_operands is called before SSA is initialized, do 1069 nothing. */ 1070 if (!ssa_operands_active (cfun)) 1071 return; 1072 1073 timevar_push (TV_TREE_OPS); 1074 1075 /* If the stmt is a noreturn call queue it to be processed by 1076 split_bbs_on_noreturn_calls during cfg cleanup. 
  if (is_gimple_call (stmt)
      && gimple_call_noreturn_p (stmt))
    vec_safe_push (MODIFIED_NORETURN_CALLS (cfun), stmt);

  gcc_assert (gimple_modified_p (stmt));
  build_ssa_operands (stmt);
  gimple_set_modified (stmt, false);

  timevar_pop (TV_TREE_OPS);
}


/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is made
   to check the validity of the swap operation.  */

void
swap_tree_operands (gimple stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  /* If the operand cache is active, attempt to preserve the relative
     positions of these two operands in their respective immediate use
     lists by adjusting their use pointer to point to the new
     operand position.  */
  if (ssa_operands_active (cfun) && op0 != op1)
    {
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp0)
	  {
	    use0 = ptr;
	    break;
	  }

      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp1)
	  {
	    use1 = ptr;
	    break;
	  }

      /* And adjust their location to point to the new position of the
	 operand.  */
      if (use0)
	USE_OP_PTR (use0)->use = exp1;
      if (use1)
	USE_OP_PTR (use1)->use = exp0;
    }

  /* Now swap the data.  */
  *exp0 = op1;
  *exp1 = op0;
}


/* Scan the immediate-use list for VAR, making sure it is linked properly.
   Return TRUE if there is a problem and emit an error message to F.  */

DEBUG_FUNCTION bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
	goto error;

      if (ptr->use == NULL)
	goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
	goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
	 problem.  */
      if (count++ > 50000000)
	goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
	goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
	goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
      print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
	   (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}
*/ 1210 1211 void 1212 dump_immediate_uses_for (FILE *file, tree var) 1213 { 1214 imm_use_iterator iter; 1215 use_operand_p use_p; 1216 1217 gcc_assert (var && TREE_CODE (var) == SSA_NAME); 1218 1219 print_generic_expr (file, var, TDF_SLIM); 1220 fprintf (file, " : -->"); 1221 if (has_zero_uses (var)) 1222 fprintf (file, " no uses.\n"); 1223 else 1224 if (has_single_use (var)) 1225 fprintf (file, " single use.\n"); 1226 else 1227 fprintf (file, "%d uses.\n", num_imm_uses (var)); 1228 1229 FOR_EACH_IMM_USE_FAST (use_p, iter, var) 1230 { 1231 if (use_p->loc.stmt == NULL && use_p->use == NULL) 1232 fprintf (file, "***end of stmt iterator marker***\n"); 1233 else 1234 if (!is_gimple_reg (USE_FROM_PTR (use_p))) 1235 print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS); 1236 else 1237 print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM); 1238 } 1239 fprintf(file, "\n"); 1240 } 1241 1242 1243 /* Dump all the immediate uses to FILE. */ 1244 1245 void 1246 dump_immediate_uses (FILE *file) 1247 { 1248 tree var; 1249 unsigned int x; 1250 1251 fprintf (file, "Immediate_uses: \n\n"); 1252 for (x = 1; x < num_ssa_names; x++) 1253 { 1254 var = ssa_name(x); 1255 if (!var) 1256 continue; 1257 dump_immediate_uses_for (file, var); 1258 } 1259 } 1260 1261 1262 /* Dump def-use edges on stderr. */ 1263 1264 DEBUG_FUNCTION void 1265 debug_immediate_uses (void) 1266 { 1267 dump_immediate_uses (stderr); 1268 } 1269 1270 1271 /* Dump def-use edges on stderr. */ 1272 1273 DEBUG_FUNCTION void 1274 debug_immediate_uses_for (tree var) 1275 { 1276 dump_immediate_uses_for (stderr, var); 1277 } 1278 1279 1280 /* Return true if OP, an SSA name or a DECL is a virtual operand. */ 1281 1282 bool 1283 virtual_operand_p (tree op) 1284 { 1285 if (TREE_CODE (op) == SSA_NAME) 1286 { 1287 op = SSA_NAME_VAR (op); 1288 if (!op) 1289 return false; 1290 } 1291 1292 if (TREE_CODE (op) == VAR_DECL) 1293 return VAR_DECL_IS_VIRTUAL_OPERAND (op); 1294 1295 return false; 1296 } 1297 1298 /* Unlink STMTs virtual definition from the IL by propagating its use. */ 1299 1300 void 1301 unlink_stmt_vdef (gimple stmt) 1302 { 1303 use_operand_p use_p; 1304 imm_use_iterator iter; 1305 gimple use_stmt; 1306 tree vdef = gimple_vdef (stmt); 1307 tree vuse = gimple_vuse (stmt); 1308 1309 if (!vdef 1310 || TREE_CODE (vdef) != SSA_NAME) 1311 return; 1312 1313 FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef) 1314 { 1315 FOR_EACH_IMM_USE_ON_STMT (use_p, iter) 1316 SET_USE (use_p, vuse); 1317 } 1318 1319 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)) 1320 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1; 1321 } 1322 1323