/* Data flow functions for trees.
   Copyright (C) 2001-2017 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "langhooks.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-dfa.h"

/* Build and maintain data flow information for trees.  */

/* Counters used to display DFA and SSA statistics.
   Filled in by collect_dfa_stats and printed by dump_dfa_stats.  */
struct dfa_stats_d
{
  long num_defs;	   /* Real DEF operands seen (SSA_OP_DEF).  */
  long num_uses;	   /* Real USE operands seen (SSA_OP_USE).  */
  long num_phis;	   /* PHI nodes walked.  */
  long num_phi_args;	   /* Sum of argument counts over all PHIs.  */
  size_t max_num_phi_args; /* Largest argument count of any single PHI.  */
  long num_vdefs;	   /* Statements carrying a virtual definition.  */
  long num_vuses;	   /* Statements carrying a virtual use.  */
};


/* Local functions.  */
static void collect_dfa_stats (struct dfa_stats_d *);


/*---------------------------------------------------------------------------
			Dataflow analysis (DFA) routines
---------------------------------------------------------------------------*/

/* Renumber all of the gimple stmt uids.
*/ 62 63 void 64 renumber_gimple_stmt_uids (void) 65 { 66 basic_block bb; 67 68 set_gimple_stmt_max_uid (cfun, 0); 69 FOR_ALL_BB_FN (bb, cfun) 70 { 71 gimple_stmt_iterator bsi; 72 for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi)) 73 { 74 gimple *stmt = gsi_stmt (bsi); 75 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun)); 76 } 77 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi)) 78 { 79 gimple *stmt = gsi_stmt (bsi); 80 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun)); 81 } 82 } 83 } 84 85 /* Like renumber_gimple_stmt_uids, but only do work on the basic blocks 86 in BLOCKS, of which there are N_BLOCKS. Also renumbers PHIs. */ 87 88 void 89 renumber_gimple_stmt_uids_in_blocks (basic_block *blocks, int n_blocks) 90 { 91 int i; 92 93 set_gimple_stmt_max_uid (cfun, 0); 94 for (i = 0; i < n_blocks; i++) 95 { 96 basic_block bb = blocks[i]; 97 gimple_stmt_iterator bsi; 98 for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi)) 99 { 100 gimple *stmt = gsi_stmt (bsi); 101 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun)); 102 } 103 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi)) 104 { 105 gimple *stmt = gsi_stmt (bsi); 106 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun)); 107 } 108 } 109 } 110 111 112 113 /*--------------------------------------------------------------------------- 114 Debugging functions 115 ---------------------------------------------------------------------------*/ 116 117 /* Dump variable VAR and its may-aliases to FILE. 
*/ 118 119 void 120 dump_variable (FILE *file, tree var) 121 { 122 if (TREE_CODE (var) == SSA_NAME) 123 { 124 if (POINTER_TYPE_P (TREE_TYPE (var))) 125 dump_points_to_info_for (file, var); 126 var = SSA_NAME_VAR (var); 127 } 128 129 if (var == NULL_TREE) 130 { 131 fprintf (file, "<nil>"); 132 return; 133 } 134 135 print_generic_expr (file, var, dump_flags); 136 137 fprintf (file, ", UID D.%u", (unsigned) DECL_UID (var)); 138 if (DECL_PT_UID (var) != DECL_UID (var)) 139 fprintf (file, ", PT-UID D.%u", (unsigned) DECL_PT_UID (var)); 140 141 fprintf (file, ", "); 142 print_generic_expr (file, TREE_TYPE (var), dump_flags); 143 144 if (TREE_ADDRESSABLE (var)) 145 fprintf (file, ", is addressable"); 146 147 if (is_global_var (var)) 148 fprintf (file, ", is global"); 149 150 if (TREE_THIS_VOLATILE (var)) 151 fprintf (file, ", is volatile"); 152 153 if (cfun && ssa_default_def (cfun, var)) 154 { 155 fprintf (file, ", default def: "); 156 print_generic_expr (file, ssa_default_def (cfun, var), dump_flags); 157 } 158 159 if (DECL_INITIAL (var)) 160 { 161 fprintf (file, ", initial: "); 162 print_generic_expr (file, DECL_INITIAL (var), dump_flags); 163 } 164 165 fprintf (file, "\n"); 166 } 167 168 169 /* Dump variable VAR and its may-aliases to stderr. */ 170 171 DEBUG_FUNCTION void 172 debug_variable (tree var) 173 { 174 dump_variable (stderr, var); 175 } 176 177 178 /* Dump various DFA statistics to FILE. 
*/ 179 180 void 181 dump_dfa_stats (FILE *file) 182 { 183 struct dfa_stats_d dfa_stats; 184 185 unsigned long size, total = 0; 186 const char * const fmt_str = "%-30s%-13s%12s\n"; 187 const char * const fmt_str_1 = "%-30s%13lu%11lu%c\n"; 188 const char * const fmt_str_3 = "%-43s%11lu%c\n"; 189 const char *funcname 190 = lang_hooks.decl_printable_name (current_function_decl, 2); 191 192 collect_dfa_stats (&dfa_stats); 193 194 fprintf (file, "\nDFA Statistics for %s\n\n", funcname); 195 196 fprintf (file, "---------------------------------------------------------\n"); 197 fprintf (file, fmt_str, "", " Number of ", "Memory"); 198 fprintf (file, fmt_str, "", " instances ", "used "); 199 fprintf (file, "---------------------------------------------------------\n"); 200 201 size = dfa_stats.num_uses * sizeof (tree *); 202 total += size; 203 fprintf (file, fmt_str_1, "USE operands", dfa_stats.num_uses, 204 SCALE (size), LABEL (size)); 205 206 size = dfa_stats.num_defs * sizeof (tree *); 207 total += size; 208 fprintf (file, fmt_str_1, "DEF operands", dfa_stats.num_defs, 209 SCALE (size), LABEL (size)); 210 211 size = dfa_stats.num_vuses * sizeof (tree *); 212 total += size; 213 fprintf (file, fmt_str_1, "VUSE operands", dfa_stats.num_vuses, 214 SCALE (size), LABEL (size)); 215 216 size = dfa_stats.num_vdefs * sizeof (tree *); 217 total += size; 218 fprintf (file, fmt_str_1, "VDEF operands", dfa_stats.num_vdefs, 219 SCALE (size), LABEL (size)); 220 221 size = dfa_stats.num_phis * sizeof (struct gphi); 222 total += size; 223 fprintf (file, fmt_str_1, "PHI nodes", dfa_stats.num_phis, 224 SCALE (size), LABEL (size)); 225 226 size = dfa_stats.num_phi_args * sizeof (struct phi_arg_d); 227 total += size; 228 fprintf (file, fmt_str_1, "PHI arguments", dfa_stats.num_phi_args, 229 SCALE (size), LABEL (size)); 230 231 fprintf (file, "---------------------------------------------------------\n"); 232 fprintf (file, fmt_str_3, "Total memory used by DFA/SSA data", SCALE (total), 233 
LABEL (total)); 234 fprintf (file, "---------------------------------------------------------\n"); 235 fprintf (file, "\n"); 236 237 if (dfa_stats.num_phis) 238 fprintf (file, "Average number of arguments per PHI node: %.1f (max: %ld)\n", 239 (float) dfa_stats.num_phi_args / (float) dfa_stats.num_phis, 240 (long) dfa_stats.max_num_phi_args); 241 242 fprintf (file, "\n"); 243 } 244 245 246 /* Dump DFA statistics on stderr. */ 247 248 DEBUG_FUNCTION void 249 debug_dfa_stats (void) 250 { 251 dump_dfa_stats (stderr); 252 } 253 254 255 /* Collect DFA statistics and store them in the structure pointed to by 256 DFA_STATS_P. */ 257 258 static void 259 collect_dfa_stats (struct dfa_stats_d *dfa_stats_p ATTRIBUTE_UNUSED) 260 { 261 basic_block bb; 262 263 gcc_assert (dfa_stats_p); 264 265 memset ((void *)dfa_stats_p, 0, sizeof (struct dfa_stats_d)); 266 267 /* Walk all the statements in the function counting references. */ 268 FOR_EACH_BB_FN (bb, cfun) 269 { 270 for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si); 271 gsi_next (&si)) 272 { 273 gphi *phi = si.phi (); 274 dfa_stats_p->num_phis++; 275 dfa_stats_p->num_phi_args += gimple_phi_num_args (phi); 276 if (gimple_phi_num_args (phi) > dfa_stats_p->max_num_phi_args) 277 dfa_stats_p->max_num_phi_args = gimple_phi_num_args (phi); 278 } 279 280 for (gimple_stmt_iterator si = gsi_start_bb (bb); !gsi_end_p (si); 281 gsi_next (&si)) 282 { 283 gimple *stmt = gsi_stmt (si); 284 dfa_stats_p->num_defs += NUM_SSA_OPERANDS (stmt, SSA_OP_DEF); 285 dfa_stats_p->num_uses += NUM_SSA_OPERANDS (stmt, SSA_OP_USE); 286 dfa_stats_p->num_vdefs += gimple_vdef (stmt) ? 1 : 0; 287 dfa_stats_p->num_vuses += gimple_vuse (stmt) ? 
1 : 0;
	}
    }
}


/*---------------------------------------------------------------------------
			     Miscellaneous helpers
---------------------------------------------------------------------------*/

/* Lookup VAR UID in the default_defs hashtable and return the associated
   variable.  */

tree
ssa_default_def (struct function *fn, tree var)
{
  struct tree_decl_minimal ind;
  struct tree_ssa_name in;
  gcc_assert (VAR_P (var)
	      || TREE_CODE (var) == PARM_DECL
	      || TREE_CODE (var) == RESULT_DECL);

  /* Always NULL_TREE for rtl function dumps.  */
  if (!fn->gimple_df)
    return NULL_TREE;

  /* Fake a minimal SSA_NAME/decl pair on the stack so the table can be
     probed without allocating real tree nodes.  NOTE(review): this relies
     on the default-def table's hash/equality looking only at the
     SSA_NAME_VAR's DECL_UID — confirm against the hasher if changing.  */
  in.var = (tree)&ind;
  ind.uid = DECL_UID (var);
  return DEFAULT_DEFS (fn)->find_with_hash ((tree)&in, DECL_UID (var));
}

/* Insert the pair VAR's UID, DEF into the default_defs hashtable
   of function FN.  */

void
set_ssa_default_def (struct function *fn, tree var, tree def)
{
  struct tree_decl_minimal ind;
  struct tree_ssa_name in;

  gcc_assert (VAR_P (var)
	      || TREE_CODE (var) == PARM_DECL
	      || TREE_CODE (var) == RESULT_DECL);
  /* Same stack-allocated probe key trick as in ssa_default_def.  */
  in.var = (tree)&ind;
  ind.uid = DECL_UID (var);
  if (!def)
    {
      /* A NULL DEF removes any existing default definition for VAR,
	 clearing the default-def flag on the old SSA name.  */
      tree *loc = DEFAULT_DEFS (fn)->find_slot_with_hash ((tree)&in,
							  DECL_UID (var),
							  NO_INSERT);
      if (loc)
	{
	  SSA_NAME_IS_DEFAULT_DEF (*(tree *)loc) = false;
	  DEFAULT_DEFS (fn)->clear_slot (loc);
	}
      return;
    }
  gcc_assert (TREE_CODE (def) == SSA_NAME && SSA_NAME_VAR (def) == var);
  tree *loc = DEFAULT_DEFS (fn)->find_slot_with_hash ((tree)&in,
						      DECL_UID (var), INSERT);

  /* Default definition might be changed by tail call optimization.  */
  if (*loc)
    SSA_NAME_IS_DEFAULT_DEF (*loc) = false;

  /* Mark DEF as the default definition for VAR.  */
  *loc = def;
  SSA_NAME_IS_DEFAULT_DEF (def) = true;
}

/* Retrieve or create a default definition for VAR.
*/

tree
get_or_create_ssa_default_def (struct function *fn, tree var)
{
  tree ddef = ssa_default_def (fn, var);
  if (ddef == NULL_TREE)
    {
      /* No default def yet: make one, defined by an empty (nop)
	 statement, and register it.  */
      ddef = make_ssa_name_fn (fn, var, gimple_build_nop ());
      set_ssa_default_def (fn, var, ddef);
    }
  return ddef;
}


/* If EXP is a handled component reference for a structure, return the
   base variable.  The access range is delimited by bit positions *POFFSET and
   *POFFSET + *PMAX_SIZE.  The access size is *PSIZE bits.  If either
   *PSIZE or *PMAX_SIZE is -1, they could not be determined.  If *PSIZE
   and *PMAX_SIZE are equal, the access is non-variable.  If *PREVERSE is
   true, the storage order of the reference is reversed.  */

tree
get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
			 HOST_WIDE_INT *psize,
			 HOST_WIDE_INT *pmax_size,
			 bool *preverse)
{
  offset_int bitsize = -1;	/* -1 == size unknown.  */
  offset_int maxsize;
  tree size_tree = NULL_TREE;
  offset_int bit_offset = 0;	/* Accumulated bit offset from the base.  */
  bool seen_variable_array_ref = false;

  /* First get the final access size and the storage order from just the
     outermost expression.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    size_tree = TREE_OPERAND (exp, 1);
  else if (!VOID_TYPE_P (TREE_TYPE (exp)))
    {
      machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	bitsize = int (GET_MODE_BITSIZE (mode));
    }
  if (size_tree != NULL_TREE
      && TREE_CODE (size_tree) == INTEGER_CST)
    bitsize = wi::to_offset (size_tree);

  *preverse = reverse_storage_order_for_component_p (exp);

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).
*/
  maxsize = bitsize;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  /* Operand 2 of a BIT_FIELD_REF is the bit position.  */
	  bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);

	    if (this_offset && TREE_CODE (this_offset) == INTEGER_CST)
	      {
		/* Byte offset of the field scaled to bits, plus the
		   sub-byte bit offset of the field.  */
		offset_int woffset = (wi::to_offset (this_offset)
				      << LOG2_BITS_PER_UNIT);
		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
		bit_offset += woffset;

		/* If we had seen a variable array ref already and we just
		   referenced the last field of a struct or a union member
		   then we have to adjust maxsize by the padding at the end
		   of our field.  */
		if (seen_variable_array_ref)
		  {
		    tree stype = TREE_TYPE (TREE_OPERAND (exp, 0));
		    tree next = DECL_CHAIN (field);
		    /* Skip non-field entities on the chain to find the
		       next actual field, if any.  */
		    while (next && TREE_CODE (next) != FIELD_DECL)
		      next = DECL_CHAIN (next);
		    if (!next
			|| TREE_CODE (stype) != RECORD_TYPE)
		      {
			tree fsize = DECL_SIZE_UNIT (field);
			tree ssize = TYPE_SIZE_UNIT (stype);
			if (fsize == NULL
			    || TREE_CODE (fsize) != INTEGER_CST
			    || ssize == NULL
			    || TREE_CODE (ssize) != INTEGER_CST)
			  maxsize = -1;
			else if (maxsize != -1)
			  {
			    /* Grow maxsize by the trailing padding:
			       container size minus field end.  */
			    offset_int tem = (wi::to_offset (ssize)
					      - wi::to_offset (fsize));
			    tem <<= LOG2_BITS_PER_UNIT;
			    tem -= woffset;
			    maxsize += tem;
			  }
		      }
		    /* An component ref with an adjacent field up in the
		       structure hierarchy constrains the size of any variable
		       array ref lower in the access hierarchy.  */
		    else
		      seen_variable_array_ref = false;
		  }
	      }
	    else
	      {
		tree csize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
		/* We need to adjust maxsize to the whole structure bitsize.
But we can subtract any constant offset seen so far,
		   because that would get us out of the structure otherwise.  */
		if (maxsize != -1
		    && csize
		    && TREE_CODE (csize) == INTEGER_CST)
		  maxsize = wi::to_offset (csize) - bit_offset;
		else
		  maxsize = -1;
	      }
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound, unit_size;

	    /* If the resulting bit-offset is constant, track it.  */
	    if (TREE_CODE (index) == INTEGER_CST
		&& (low_bound = array_ref_low_bound (exp),
		    TREE_CODE (low_bound) == INTEGER_CST)
		&& (unit_size = array_ref_element_size (exp),
		    TREE_CODE (unit_size) == INTEGER_CST))
	      {
		/* (index - low_bound) * unit_size, sign-extended at the
		   index type's precision, then scaled to bits.  */
		offset_int woffset
		  = wi::sext (wi::to_offset (index) - wi::to_offset (low_bound),
			      TYPE_PRECISION (TREE_TYPE (index)));
		woffset *= wi::to_offset (unit_size);
		woffset <<= LOG2_BITS_PER_UNIT;
		bit_offset += woffset;

		/* An array ref with a constant index up in the structure
		   hierarchy will constrain the size of any variable array ref
		   lower in the access hierarchy.  */
		seen_variable_array_ref = false;
	      }
	    else
	      {
		tree asize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
		/* We need to adjust maxsize to the whole array bitsize.
		   But we can subtract any constant offset seen so far,
		   because that would get us outside of the array otherwise.  */
		if (maxsize != -1
		    && asize
		    && TREE_CODE (asize) == INTEGER_CST)
		  maxsize = wi::to_offset (asize) - bit_offset;
		else
		  maxsize = -1;

		/* Remember that we have seen an array ref with a variable
		   index.  */
		seen_variable_array_ref = true;
	      }
	  }
	  break;

	case REALPART_EXPR:
	  /* The real part lives at offset zero of the complex value.  */
	  break;

	case IMAGPART_EXPR:
	  /* The imaginary part follows one element size after the real
	     part.  */
	  bit_offset += bitsize;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case TARGET_MEM_REF:
	  /* Via the variable index or index2 we can reach the
	     whole object.
Still hand back the decl here.  */
	  if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
	      && (TMR_INDEX (exp) || TMR_INDEX2 (exp)))
	    {
	      exp = TREE_OPERAND (TMR_BASE (exp), 0);
	      bit_offset = 0;
	      maxsize = -1;
	      goto done;
	    }
	  /* Fallthru.  */
	case MEM_REF:
	  /* We need to deal with variable arrays ending structures such as
	     struct { int length; int a[1]; } x;           x.a[d]
	     struct { struct { int a; int b; } a[1]; } x;  x.a[d].a
	     struct { struct { int a[1]; } a[1]; } x;      x.a[0][d], x.a[d][0]
	     struct { int len; union { int a[1]; struct X x; } u; } x; x.u.a[d]
	     where we do not know maxsize for variable index accesses to
	     the array.  The simplest way to conservatively deal with this
	     is to punt in the case that offset + maxsize reaches the
	     base type boundary.  This needs to include possible trailing
	     padding that is there for alignment purposes.  */
	  if (seen_variable_array_ref
	      && maxsize != -1
	      && (TYPE_SIZE (TREE_TYPE (exp)) == NULL_TREE
		  || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
		  || (bit_offset + maxsize
		      == wi::to_offset (TYPE_SIZE (TREE_TYPE (exp))))))
	    maxsize = -1;

	  /* Hand back the decl for MEM[&decl, off].
*/
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
	    {
	      if (integer_zerop (TREE_OPERAND (exp, 1)))
		exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	      else
		{
		  /* Fold the constant MEM_REF offset into bit_offset,
		     but only if the result still fits a shwi.  */
		  offset_int off = mem_ref_offset (exp);
		  off <<= LOG2_BITS_PER_UNIT;
		  off += bit_offset;
		  if (wi::fits_shwi_p (off))
		    {
		      bit_offset = off;
		      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
		    }
		}
	    }
	  goto done;

	default:
	  goto done;
	}

      /* Peel the handled component and continue with its base.  */
      exp = TREE_OPERAND (exp, 0);
    }

 done:
  /* Size unknown or negative: report everything as undetermined.  */
  if (!wi::fits_shwi_p (bitsize) || wi::neg_p (bitsize))
    {
      *poffset = 0;
      *psize = -1;
      *pmax_size = -1;

      return exp;
    }

  *psize = bitsize.to_shwi ();

  if (!wi::fits_shwi_p (bit_offset))
    {
      *poffset = 0;
      *pmax_size = -1;

      return exp;
    }

  /* In case of a decl or constant base object we can do better.  */

  if (DECL_P (exp))
    {
      if (VAR_P (exp)
	  && ((flag_unconstrained_commons && DECL_COMMON (exp))
	      || (DECL_EXTERNAL (exp) && seen_variable_array_ref)))
	{
	  tree sz_tree = TYPE_SIZE (TREE_TYPE (exp));
	  /* If size is unknown, or we have read to the end, assume there
	     may be more to the structure than we are told.  */
	  if (TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
	      || (seen_variable_array_ref
		  && (sz_tree == NULL_TREE
		      || TREE_CODE (sz_tree) != INTEGER_CST
		      || (bit_offset + maxsize == wi::to_offset (sz_tree)))))
	    maxsize = -1;
	}
      /* If maxsize is unknown adjust it according to the size of the
         base decl.  */
      else if (maxsize == -1
	       && DECL_SIZE (exp)
	       && TREE_CODE (DECL_SIZE (exp)) == INTEGER_CST)
	maxsize = wi::to_offset (DECL_SIZE (exp)) - bit_offset;
    }
  else if (CONSTANT_CLASS_P (exp))
    {
      /* If maxsize is unknown adjust it according to the size of the
         base type constant.
*/
      if (maxsize == -1
	  && TYPE_SIZE (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
	maxsize = (wi::to_offset (TYPE_SIZE (TREE_TYPE (exp)))
		   - bit_offset);
    }

  /* ??? Due to negative offsets in ARRAY_REF we can end up with
     negative bit_offset here.  We might want to store a zero offset
     in this case.  */
  *poffset = bit_offset.to_shwi ();
  if (!wi::fits_shwi_p (maxsize) || wi::neg_p (maxsize))
    *pmax_size = -1;
  else
    *pmax_size = maxsize.to_shwi ();

  return exp;
}

/* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
   denotes the starting address of the memory access EXP.
   Returns NULL_TREE if the offset is not constant or any component
   is not BITS_PER_UNIT-aligned.
   VALUEIZE if non-NULL is used to valueize SSA names.  It should return
   its argument or a constant if the argument is known to be constant.  */

tree
get_addr_base_and_unit_offset_1 (tree exp, HOST_WIDE_INT *poffset,
				 tree (*valueize) (tree))
{
  /* Accumulated byte offset from the ultimate base object.  */
  HOST_WIDE_INT byte_offset = 0;

  /* Compute cumulative byte-offset for nested component-refs and array-refs,
     and find the ultimate containing object.
*/
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  {
	    /* Only byte-aligned bit-field positions can be expressed as
	       a unit offset; punt otherwise.  */
	    HOST_WIDE_INT this_off = TREE_INT_CST_LOW (TREE_OPERAND (exp, 2));
	    if (this_off % BITS_PER_UNIT)
	      return NULL_TREE;
	    byte_offset += this_off / BITS_PER_UNIT;
	  }
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);
	    HOST_WIDE_INT hthis_offset;

	    /* Require a constant, byte-aligned field position.  */
	    if (!this_offset
		|| TREE_CODE (this_offset) != INTEGER_CST
		|| (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
		    % BITS_PER_UNIT))
	      return NULL_TREE;

	    hthis_offset = TREE_INT_CST_LOW (this_offset);
	    hthis_offset += (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
			     / BITS_PER_UNIT);
	    byte_offset += hthis_offset;
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound, unit_size;

	    if (valueize
		&& TREE_CODE (index) == SSA_NAME)
	      index = (*valueize) (index);

	    /* If the resulting bit-offset is constant, track it.  */
	    if (TREE_CODE (index) == INTEGER_CST
		&& (low_bound = array_ref_low_bound (exp),
		    TREE_CODE (low_bound) == INTEGER_CST)
		&& (unit_size = array_ref_element_size (exp),
		    TREE_CODE (unit_size) == INTEGER_CST))
	      {
		offset_int woffset
		  = wi::sext (wi::to_offset (index) - wi::to_offset (low_bound),
			      TYPE_PRECISION (TREE_TYPE (index)));
		woffset *= wi::to_offset (unit_size);
		byte_offset += woffset.to_shwi ();
	      }
	    else
	      return NULL_TREE;
	  }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  /* The imaginary part follows one element after the real part.  */
	  byte_offset += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (exp)));
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case MEM_REF:
	  {
	    tree base = TREE_OPERAND (exp, 0);
	    if (valueize
		&& TREE_CODE (base) == SSA_NAME)
	      base = (*valueize) (base);

	    /* Hand back the decl for MEM[&decl, off].
*/
	    if (TREE_CODE (base) == ADDR_EXPR)
	      {
		if (!integer_zerop (TREE_OPERAND (exp, 1)))
		  {
		    /* Fold the constant MEM_REF offset into the running
		       byte offset.  */
		    offset_int off = mem_ref_offset (exp);
		    byte_offset += off.to_short_addr ();
		  }
		exp = TREE_OPERAND (base, 0);
	      }
	    goto done;
	  }

	case TARGET_MEM_REF:
	  {
	    tree base = TREE_OPERAND (exp, 0);
	    if (valueize
		&& TREE_CODE (base) == SSA_NAME)
	      base = (*valueize) (base);

	    /* Hand back the decl for MEM[&decl, off].  */
	    if (TREE_CODE (base) == ADDR_EXPR)
	      {
		/* Variable index parts make the offset non-constant.  */
		if (TMR_INDEX (exp) || TMR_INDEX2 (exp))
		  return NULL_TREE;
		if (!integer_zerop (TMR_OFFSET (exp)))
		  {
		    offset_int off = mem_ref_offset (exp);
		    byte_offset += off.to_short_addr ();
		  }
		exp = TREE_OPERAND (base, 0);
	      }
	    goto done;
	  }

	default:
	  goto done;
	}

      /* Peel the handled component and continue with its base.  */
      exp = TREE_OPERAND (exp, 0);
    }
done:

  *poffset = byte_offset;
  return exp;
}

/* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
   denotes the starting address of the memory access EXP.
   Returns NULL_TREE if the offset is not constant or any component
   is not BITS_PER_UNIT-aligned.  */

tree
get_addr_base_and_unit_offset (tree exp, HOST_WIDE_INT *poffset)
{
  /* Same as the _1 worker, with no SSA-name valueization.  */
  return get_addr_base_and_unit_offset_1 (exp, poffset, NULL);
}

/* Returns true if STMT references an SSA_NAME that has
   SSA_NAME_OCCURS_IN_ABNORMAL_PHI set, otherwise false.  */

bool
stmt_references_abnormal_ssa_name (gimple *stmt)
{
  ssa_op_iter oi;
  use_operand_p use_p;

  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
    {
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p)))
	return true;
    }

  return false;
}

/* If STMT takes any abnormal PHI values as input, replace them with
   local copies.
*/

void
replace_abnormal_ssa_names (gimple *stmt)
{
  ssa_op_iter oi;
  use_operand_p use_p;

  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
    {
      tree op = USE_FROM_PTR (use_p);
      if (TREE_CODE (op) == SSA_NAME && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op))
	{
	  /* Insert NEW_NAME = OP right before STMT and make STMT use
	     the copy instead of the abnormal name.  */
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  tree new_name = make_ssa_name (TREE_TYPE (op));
	  gassign *assign = gimple_build_assign (new_name, op);
	  gsi_insert_before (&gsi, assign, GSI_SAME_STMT);
	  SET_USE (use_p, new_name);
	}
    }
}

/* Pair of tree and a sorting index, for dump_enumerated_decls.  */
struct GTY(()) numbered_tree
{
  tree t;	/* The declaration.  */
  int num;	/* Sequence number of its first discovery.  */
};


/* Compare two declarations references by their DECL_UID / sequence number.
   Called via qsort.  */

static int
compare_decls_by_uid (const void *pa, const void *pb)
{
  const numbered_tree *nt_a = ((const numbered_tree *)pa);
  const numbered_tree *nt_b = ((const numbered_tree *)pb);

  /* Primary key is DECL_UID; ties are broken by discovery order so the
     sort is deterministic.  */
  if (DECL_UID (nt_a->t) != DECL_UID (nt_b->t))
    return DECL_UID (nt_a->t) - DECL_UID (nt_b->t);
  return nt_a->num - nt_b->num;
}

/* Called via walk_gimple_stmt / walk_gimple_op by dump_enumerated_decls.  */
static tree
dump_enumerated_decls_push (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  vec<numbered_tree> *list = (vec<numbered_tree> *) wi->info;
  numbered_tree nt;

  if (!DECL_P (*tp))
    return NULL_TREE;
  /* Record the decl together with its discovery index, and do not walk
     into the decl itself.  */
  nt.t = *tp;
  nt.num = list->length ();
  list->safe_push (nt);
  *walk_subtrees = 0;
  return NULL_TREE;
}

/* Find all the declarations used by the current function, sort them by uid,
   and emit the sorted list.  Each declaration is tagged with a sequence
   number indicating when it was found during statement / tree walking,
   so that TDF_NOUID comparisons of anonymous declarations are still
   meaningful.
Where a declaration was encountered more than once, we
   emit only the sequence number of the first encounter.
   FILE is the dump file where to output the list and FLAGS is as in
   print_generic_expr.  */
void
dump_enumerated_decls (FILE *file, int flags)
{
  basic_block bb;
  struct walk_stmt_info wi;
  auto_vec<numbered_tree, 40> decls;

  /* Collect every decl referenced by a non-debug statement, tagged with
     its discovery index.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = (void *) &decls;
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	if (!is_gimple_debug (gsi_stmt (gsi)))
	  walk_gimple_stmt (&gsi, NULL, dump_enumerated_decls_push, &wi);
    }

  /* Sort by DECL_UID (ties broken by discovery order) and print each
     decl once, at its first occurrence.  */
  decls.qsort (compare_decls_by_uid);
  if (decls.length () != 0)
    {
      fprintf (file, "Declarations used by %s, sorted by DECL_UID:\n",
	       current_function_name ());

      tree prev = NULL_TREE;
      unsigned i;
      numbered_tree *nt;
      FOR_EACH_VEC_ELT (decls, i, nt)
	{
	  if (nt->t == prev)
	    continue;
	  fprintf (file, "%d: ", nt->num);
	  print_generic_decl (file, nt->t, flags);
	  fprintf (file, "\n");
	  prev = nt->t;
	}
    }
}