/* Data flow functions for trees.
   Copyright (C) 2001-2020 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "langhooks.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-dfa.h"

/* Build and maintain data flow information for trees.  */

/* Counters used to display DFA and SSA statistics.  */
struct dfa_stats_d
{
  long num_defs;
  long num_uses;
  long num_phis;
  long num_phi_args;
  size_t max_num_phi_args;
  long num_vdefs;
  long num_vuses;
};


/* Local functions.  */
static void collect_dfa_stats (struct dfa_stats_d *);


/*---------------------------------------------------------------------------
			Dataflow analysis (DFA) routines
---------------------------------------------------------------------------*/

/* Renumber all of the gimple stmt uids.  */

void
renumber_gimple_stmt_uids (struct function *fun)
{
  basic_block bb;

  set_gimple_stmt_max_uid (fun, 0);
  FOR_ALL_BB_FN (bb, fun)
    {
      gimple_stmt_iterator bsi;
      for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
	{
	  gimple *stmt = gsi_stmt (bsi);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fun));
	}
      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
	{
	  gimple *stmt = gsi_stmt (bsi);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fun));
	}
    }
}

/* Like renumber_gimple_stmt_uids, but only do work on the basic blocks
   in BLOCKS, of which there are N_BLOCKS.  Also renumbers PHIs.  */

void
renumber_gimple_stmt_uids_in_blocks (basic_block *blocks, int n_blocks)
{
  int i;

  set_gimple_stmt_max_uid (cfun, 0);
  for (i = 0; i < n_blocks; i++)
    {
      basic_block bb = blocks[i];
      gimple_stmt_iterator bsi;
      for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
	{
	  gimple *stmt = gsi_stmt (bsi);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}
      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
	{
	  gimple *stmt = gsi_stmt (bsi);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}
    }
}
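
/* Illustrative note (not part of the original sources): a pass that needs a
   cheap "comes before" test within a basic block can renumber UIDs first
   and then compare them, along the lines of

     renumber_gimple_stmt_uids (cfun);
     ...
     if (gimple_uid (stmt_a) < gimple_uid (stmt_b))
       ...  STMT_A was assigned the smaller uid, i.e. it was visited
	    earlier in the renumbered walk of its block ...

   STMT_A and STMT_B are hypothetical names used only for this sketch.  */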


/*---------------------------------------------------------------------------
			     Debugging functions
---------------------------------------------------------------------------*/

/* Dump variable VAR and its may-aliases to FILE.  */

void
dump_variable (FILE *file, tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      if (POINTER_TYPE_P (TREE_TYPE (var)))
	dump_points_to_info_for (file, var);
      var = SSA_NAME_VAR (var);
    }

  if (var == NULL_TREE)
    {
      fprintf (file, "<nil>");
      return;
    }

  print_generic_expr (file, var, dump_flags);

  fprintf (file, ", UID D.%u", (unsigned) DECL_UID (var));
  if (DECL_PT_UID (var) != DECL_UID (var))
    fprintf (file, ", PT-UID D.%u", (unsigned) DECL_PT_UID (var));

  fprintf (file, ", ");
  print_generic_expr (file, TREE_TYPE (var), dump_flags);

  if (TREE_ADDRESSABLE (var))
    fprintf (file, ", is addressable");

  if (is_global_var (var))
    fprintf (file, ", is global");

  if (TREE_THIS_VOLATILE (var))
    fprintf (file, ", is volatile");

  if (cfun && ssa_default_def (cfun, var))
    {
      fprintf (file, ", default def: ");
      print_generic_expr (file, ssa_default_def (cfun, var), dump_flags);
    }

  if (DECL_INITIAL (var))
    {
      fprintf (file, ", initial: ");
      print_generic_expr (file, DECL_INITIAL (var), dump_flags);
    }

  fprintf (file, "\n");
}


/* Dump variable VAR and its may-aliases to stderr.  */

DEBUG_FUNCTION void
debug_variable (tree var)
{
  dump_variable (stderr, var);
}
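
/* Illustrative note (not part of the original sources): DEBUG_FUNCTION
   helpers such as debug_variable are meant to be invoked by hand from a
   debugger session, e.g.

     (gdb) call debug_variable (var)

   which prints the same information dump_variable would emit, but to
   stderr.  */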


/* Dump various DFA statistics to FILE.  */

void
dump_dfa_stats (FILE *file)
{
  struct dfa_stats_d dfa_stats;

  unsigned long size, total = 0;
  const char * const fmt_str = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13lu" PRsa (11) "\n";
  const char * const fmt_str_3 = "%-43s" PRsa (11) "\n";
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);

  collect_dfa_stats (&dfa_stats);

  fprintf (file, "\nDFA Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", "  Number of  ", "Memory");
  fprintf (file, fmt_str, "", "  instances  ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  size = dfa_stats.num_uses * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "USE operands", dfa_stats.num_uses,
	   SIZE_AMOUNT (size));

  size = dfa_stats.num_defs * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "DEF operands", dfa_stats.num_defs,
	   SIZE_AMOUNT (size));

  size = dfa_stats.num_vuses * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "VUSE operands", dfa_stats.num_vuses,
	   SIZE_AMOUNT (size));

  size = dfa_stats.num_vdefs * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "VDEF operands", dfa_stats.num_vdefs,
	   SIZE_AMOUNT (size));

  size = dfa_stats.num_phis * sizeof (struct gphi);
  total += size;
  fprintf (file, fmt_str_1, "PHI nodes", dfa_stats.num_phis,
	   SIZE_AMOUNT (size));

  size = dfa_stats.num_phi_args * sizeof (struct phi_arg_d);
  total += size;
  fprintf (file, fmt_str_1, "PHI arguments", dfa_stats.num_phi_args,
	   SIZE_AMOUNT (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by DFA/SSA data",
	   SIZE_AMOUNT (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  if (dfa_stats.num_phis)
    fprintf (file, "Average number of arguments per PHI node: %.1f (max: %ld)\n",
	     (float) dfa_stats.num_phi_args / (float) dfa_stats.num_phis,
	     (long) dfa_stats.max_num_phi_args);

  fprintf (file, "\n");
}


/* Dump DFA statistics on stderr.  */

DEBUG_FUNCTION void
debug_dfa_stats (void)
{
  dump_dfa_stats (stderr);
}


/* Collect DFA statistics and store them in the structure pointed to by
   DFA_STATS_P.  */

static void
collect_dfa_stats (struct dfa_stats_d *dfa_stats_p ATTRIBUTE_UNUSED)
{
  basic_block bb;

  gcc_assert (dfa_stats_p);

  memset ((void *)dfa_stats_p, 0, sizeof (struct dfa_stats_d));

  /* Walk all the statements in the function counting references.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
	   gsi_next (&si))
	{
	  gphi *phi = si.phi ();
	  dfa_stats_p->num_phis++;
	  dfa_stats_p->num_phi_args += gimple_phi_num_args (phi);
	  if (gimple_phi_num_args (phi) > dfa_stats_p->max_num_phi_args)
	    dfa_stats_p->max_num_phi_args = gimple_phi_num_args (phi);
	}

      for (gimple_stmt_iterator si = gsi_start_bb (bb); !gsi_end_p (si);
	   gsi_next (&si))
	{
	  gimple *stmt = gsi_stmt (si);
	  dfa_stats_p->num_defs += NUM_SSA_OPERANDS (stmt, SSA_OP_DEF);
	  dfa_stats_p->num_uses += NUM_SSA_OPERANDS (stmt, SSA_OP_USE);
	  dfa_stats_p->num_vdefs += gimple_vdef (stmt) ? 1 : 0;
	  dfa_stats_p->num_vuses += gimple_vuse (stmt) ? 1 : 0;
	}
    }
}


/*---------------------------------------------------------------------------
			     Miscellaneous helpers
---------------------------------------------------------------------------*/

/* Lookup VAR UID in the default_defs hashtable and return the associated
   variable.  */

tree
ssa_default_def (struct function *fn, tree var)
{
  struct tree_decl_minimal ind;
  struct tree_ssa_name in;
  gcc_assert (VAR_P (var)
	      || TREE_CODE (var) == PARM_DECL
	      || TREE_CODE (var) == RESULT_DECL);

  /* Always NULL_TREE for rtl function dumps.  */
  if (!fn->gimple_df)
    return NULL_TREE;

  in.var = (tree)&ind;
  ind.uid = DECL_UID (var);
  return DEFAULT_DEFS (fn)->find_with_hash ((tree)&in, DECL_UID (var));
}

/* Insert the pair VAR's UID, DEF into the default_defs hashtable
   of function FN.  */

void
set_ssa_default_def (struct function *fn, tree var, tree def)
{
  struct tree_decl_minimal ind;
  struct tree_ssa_name in;

  gcc_assert (VAR_P (var)
	      || TREE_CODE (var) == PARM_DECL
	      || TREE_CODE (var) == RESULT_DECL);
  in.var = (tree)&ind;
  ind.uid = DECL_UID (var);
  if (!def)
    {
      tree *loc = DEFAULT_DEFS (fn)->find_slot_with_hash ((tree)&in,
							  DECL_UID (var),
							  NO_INSERT);
      if (loc)
	{
	  SSA_NAME_IS_DEFAULT_DEF (*(tree *)loc) = false;
	  DEFAULT_DEFS (fn)->clear_slot (loc);
	}
      return;
    }
  gcc_assert (TREE_CODE (def) == SSA_NAME && SSA_NAME_VAR (def) == var);
  tree *loc = DEFAULT_DEFS (fn)->find_slot_with_hash ((tree)&in,
						      DECL_UID (var), INSERT);

  /* Default definition might be changed by tail call optimization.  */
  if (*loc)
    SSA_NAME_IS_DEFAULT_DEF (*loc) = false;

  /* Mark DEF as the default definition for VAR.  */
  *loc = def;
  SSA_NAME_IS_DEFAULT_DEF (def) = true;
}

/* Retrieve or create a default definition for VAR.  */

tree
get_or_create_ssa_default_def (struct function *fn, tree var)
{
  tree ddef = ssa_default_def (fn, var);
  if (ddef == NULL_TREE)
    {
      ddef = make_ssa_name_fn (fn, var, gimple_build_nop ());
      set_ssa_default_def (fn, var, ddef);
    }
  return ddef;
}
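
/* Illustrative sketch (not part of the original sources): a typical caller
   that needs the SSA name holding the incoming value of a parameter can
   combine the helpers above, e.g.

     tree parm = DECL_ARGUMENTS (current_function_decl);
     tree incoming = get_or_create_ssa_default_def (cfun, parm);

   where INCOMING is then an SSA_NAME with SSA_NAME_IS_DEFAULT_DEF set and
   SSA_NAME_VAR equal to PARM.  The variable names are hypothetical.  */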


/* If EXP is a handled component reference for a structure, return the
   base variable.  The access range is delimited by bit positions *POFFSET and
   *POFFSET + *PMAX_SIZE.  The access size is *PSIZE bits.  If either
   *PSIZE or *PMAX_SIZE is -1, they could not be determined.  If *PSIZE
   and *PMAX_SIZE are equal, the access is non-variable.  If *PREVERSE is
   true, the storage order of the reference is reversed.  */

tree
get_ref_base_and_extent (tree exp, poly_int64_pod *poffset,
			 poly_int64_pod *psize,
			 poly_int64_pod *pmax_size,
			 bool *preverse)
{
  poly_offset_int bitsize = -1;
  poly_offset_int maxsize;
  tree size_tree = NULL_TREE;
  poly_offset_int bit_offset = 0;
  bool seen_variable_array_ref = false;

  /* First get the final access size and the storage order from just the
     outermost expression.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    size_tree = TREE_OPERAND (exp, 1);
  else if (!VOID_TYPE_P (TREE_TYPE (exp)))
    {
      machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	bitsize = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE
      && poly_int_tree_p (size_tree))
    bitsize = wi::to_poly_offset (size_tree);

  *preverse = reverse_storage_order_for_component_p (exp);

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  maxsize = bitsize;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  bit_offset += wi::to_poly_offset (TREE_OPERAND (exp, 2));
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);

	    if (this_offset && poly_int_tree_p (this_offset))
	      {
		poly_offset_int woffset = (wi::to_poly_offset (this_offset)
					   << LOG2_BITS_PER_UNIT);
		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
		bit_offset += woffset;

		/* If we had seen a variable array ref already and we just
		   referenced the last field of a struct or a union member
		   then we have to adjust maxsize by the padding at the end
		   of our field.  */
		if (seen_variable_array_ref)
		  {
		    tree stype = TREE_TYPE (TREE_OPERAND (exp, 0));
		    tree next = DECL_CHAIN (field);
		    while (next && TREE_CODE (next) != FIELD_DECL)
		      next = DECL_CHAIN (next);
		    if (!next
			|| TREE_CODE (stype) != RECORD_TYPE)
		      {
			tree fsize = DECL_SIZE_UNIT (field);
			tree ssize = TYPE_SIZE_UNIT (stype);
			if (fsize == NULL
			    || !poly_int_tree_p (fsize)
			    || ssize == NULL
			    || !poly_int_tree_p (ssize))
			  maxsize = -1;
			else if (known_size_p (maxsize))
			  {
			    poly_offset_int tem
			      = (wi::to_poly_offset (ssize)
				 - wi::to_poly_offset (fsize));
			    tem <<= LOG2_BITS_PER_UNIT;
			    tem -= woffset;
			    maxsize += tem;
			  }
		      }
		    /* A component ref with an adjacent field up in the
		       structure hierarchy constrains the size of any variable
		       array ref lower in the access hierarchy.  */
		    else
		      seen_variable_array_ref = false;
		  }
	      }
	    else
	      {
		tree csize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
		/* We need to adjust maxsize to the whole structure bitsize.
		   But we can subtract any constant offset seen so far,
		   because that would get us out of the structure otherwise.  */
		if (known_size_p (maxsize)
		    && csize
		    && poly_int_tree_p (csize))
		  maxsize = wi::to_poly_offset (csize) - bit_offset;
		else
		  maxsize = -1;
	      }
	  }
	  break;
	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound, unit_size;

	    /* If the resulting bit-offset is constant, track it.  */
	    if (poly_int_tree_p (index)
		&& (low_bound = array_ref_low_bound (exp),
		    poly_int_tree_p (low_bound))
		&& (unit_size = array_ref_element_size (exp),
		    TREE_CODE (unit_size) == INTEGER_CST))
	      {
		poly_offset_int woffset
		  = wi::sext (wi::to_poly_offset (index)
			      - wi::to_poly_offset (low_bound),
			      TYPE_PRECISION (sizetype));
		woffset *= wi::to_offset (unit_size);
		woffset <<= LOG2_BITS_PER_UNIT;
		bit_offset += woffset;

		/* An array ref with a constant index up in the structure
		   hierarchy will constrain the size of any variable array ref
		   lower in the access hierarchy.  */
		seen_variable_array_ref = false;
	      }
	    else
	      {
		tree asize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
		/* We need to adjust maxsize to the whole array bitsize.
		   But we can subtract any constant offset seen so far,
		   because that would get us outside of the array otherwise.  */
		if (known_size_p (maxsize)
		    && asize
		    && poly_int_tree_p (asize))
		  maxsize = wi::to_poly_offset (asize) - bit_offset;
		else
		  maxsize = -1;

		/* Remember that we have seen an array ref with a variable
		   index.  */
		seen_variable_array_ref = true;

		wide_int min, max;
		if (TREE_CODE (index) == SSA_NAME
		    && (low_bound = array_ref_low_bound (exp),
			poly_int_tree_p (low_bound))
		    && (unit_size = array_ref_element_size (exp),
			TREE_CODE (unit_size) == INTEGER_CST)
		    && get_range_info (index, &min, &max) == VR_RANGE)
		  {
		    poly_offset_int lbound = wi::to_poly_offset (low_bound);
		    /* Try to constrain maxsize with range information.  */
		    offset_int omax
		      = offset_int::from (max, TYPE_SIGN (TREE_TYPE (index)));
		    if (known_lt (lbound, omax))
		      {
			poly_offset_int rmaxsize;
			rmaxsize = (omax - lbound + 1)
			    * wi::to_offset (unit_size) << LOG2_BITS_PER_UNIT;
			if (!known_size_p (maxsize)
			    || known_lt (rmaxsize, maxsize))
			  {
			    /* If we know an upper bound below the declared
			       one this is no longer variable.  */
			    if (known_size_p (maxsize))
			      seen_variable_array_ref = false;
			    maxsize = rmaxsize;
			  }
		      }
		    /* Try to adjust bit_offset with range information.  */
		    offset_int omin
		      = offset_int::from (min, TYPE_SIGN (TREE_TYPE (index)));
		    if (known_le (lbound, omin))
		      {
			poly_offset_int woffset
			  = wi::sext (omin - lbound,
				      TYPE_PRECISION (sizetype));
			woffset *= wi::to_offset (unit_size);
			woffset <<= LOG2_BITS_PER_UNIT;
			bit_offset += woffset;
			if (known_size_p (maxsize))
			  maxsize -= woffset;
		      }
		  }
	      }
	  }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  bit_offset += bitsize;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case TARGET_MEM_REF:
	  /* Via the variable index or index2 we can reach the
	     whole object.  Still hand back the decl here.  */
	  if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
	      && (TMR_INDEX (exp) || TMR_INDEX2 (exp)))
	    {
	      exp = TREE_OPERAND (TMR_BASE (exp), 0);
	      bit_offset = 0;
	      maxsize = -1;
	      goto done;
	    }
	  /* Fallthru.  */
	case MEM_REF:
	  /* We need to deal with variable arrays ending structures such as
	       struct { int length; int a[1]; } x;           x.a[d]
	       struct { struct { int a; int b; } a[1]; } x;  x.a[d].a
	       struct { struct { int a[1]; } a[1]; } x;      x.a[0][d], x.a[d][0]
	       struct { int len; union { int a[1]; struct X x; } u; } x; x.u.a[d]
	     where we do not know maxsize for variable index accesses to
	     the array.  The simplest way to conservatively deal with this
	     is to punt in the case that offset + maxsize reaches the
	     base type boundary.  This needs to include possible trailing
	     padding that is there for alignment purposes.  */
	  if (seen_variable_array_ref
	      && known_size_p (maxsize)
	      && (TYPE_SIZE (TREE_TYPE (exp)) == NULL_TREE
		  || !poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp)))
		  || (maybe_eq
		      (bit_offset + maxsize,
		       wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp)))))))
	    maxsize = -1;

	  /* Hand back the decl for MEM[&decl, off].  */
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
	    {
	      if (integer_zerop (TREE_OPERAND (exp, 1)))
		exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	      else
		{
		  poly_offset_int off = mem_ref_offset (exp);
		  off <<= LOG2_BITS_PER_UNIT;
		  off += bit_offset;
		  poly_int64 off_hwi;
		  if (off.to_shwi (&off_hwi))
		    {
		      bit_offset = off_hwi;
		      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
		    }
		}
	    }
	  goto done;

	default:
	  goto done;
	}

      exp = TREE_OPERAND (exp, 0);
    }

 done:
  if (!bitsize.to_shwi (psize) || maybe_lt (*psize, 0))
    {
      *poffset = 0;
      *psize = -1;
      *pmax_size = -1;

      return exp;
    }

  /* ???  Due to negative offsets in ARRAY_REF we can end up with
     negative bit_offset here.  We might want to store a zero offset
     in this case.  */
  if (!bit_offset.to_shwi (poffset))
    {
      *poffset = 0;
      *pmax_size = -1;

      return exp;
    }

  /* In case of a decl or constant base object we can do better.  */

  if (DECL_P (exp))
    {
      if (VAR_P (exp)
	  && ((flag_unconstrained_commons && DECL_COMMON (exp))
	      || (DECL_EXTERNAL (exp) && seen_variable_array_ref)))
	{
	  tree sz_tree = TYPE_SIZE (TREE_TYPE (exp));
	  /* If size is unknown, or we have read to the end, assume there
	     may be more to the structure than we are told.  */
	  if (TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
	      || (seen_variable_array_ref
		  && (sz_tree == NULL_TREE
		      || !poly_int_tree_p (sz_tree)
		      || maybe_eq (bit_offset + maxsize,
				   wi::to_poly_offset (sz_tree)))))
	    maxsize = -1;
	}
      /* If maxsize is unknown adjust it according to the size of the
	 base decl.  */
      else if (!known_size_p (maxsize)
	       && DECL_SIZE (exp)
	       && poly_int_tree_p (DECL_SIZE (exp)))
	maxsize = wi::to_poly_offset (DECL_SIZE (exp)) - bit_offset;
    }
  else if (CONSTANT_CLASS_P (exp))
    {
      /* If maxsize is unknown adjust it according to the size of the
	 base type constant.  */
      if (!known_size_p (maxsize)
	  && TYPE_SIZE (TREE_TYPE (exp))
	  && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp))))
	maxsize = (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp)))
		   - bit_offset);
    }

  if (!maxsize.to_shwi (pmax_size)
      || maybe_lt (*pmax_size, 0)
      || !endpoint_representable_p (*poffset, *pmax_size))
    *pmax_size = -1;

  /* Punt if *POFFSET + *PSIZE overflows in HOST_WIDE_INT, the callers don't
     check for such overflows individually and assume it works.  */
  if (!endpoint_representable_p (*poffset, *psize))
    {
      *poffset = 0;
      *psize = -1;
      *pmax_size = -1;

      return exp;
    }

  return exp;
}
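
/* Illustrative sketch (not part of the original sources): a caller that
   wants to know whether a reference REF certainly accesses a fixed slice
   of its base object might use the function above as

     poly_int64 offset, size, max_size;
     bool reverse;
     tree base = get_ref_base_and_extent (ref, &offset, &size, &max_size,
					  &reverse);
     if (known_size_p (max_size) && known_eq (size, max_size))
       ...  the access is exactly the SIZE bits at bit OFFSET from BASE ...

   REF is a hypothetical tree used only for this sketch;
   get_ref_base_and_extent_hwi below packages a similar constant-only
   check.  */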

/* Like get_ref_base_and_extent, but for cases in which we only care
   about constant-width accesses at constant offsets.  Return null
   if the access is anything else.  */

tree
get_ref_base_and_extent_hwi (tree exp, HOST_WIDE_INT *poffset,
			     HOST_WIDE_INT *psize, bool *preverse)
{
  poly_int64 offset, size, max_size;
  HOST_WIDE_INT const_offset, const_size;
  bool reverse;
  tree decl = get_ref_base_and_extent (exp, &offset, &size, &max_size,
				       &reverse);
  if (!offset.is_constant (&const_offset)
      || !size.is_constant (&const_size)
      || const_offset < 0
      || !known_size_p (max_size)
      || maybe_ne (max_size, const_size))
    return NULL_TREE;

  *poffset = const_offset;
  *psize = const_size;
  *preverse = reverse;
  return decl;
}

/* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
   denotes the starting address of the memory access EXP.
   Returns NULL_TREE if the offset is not constant or any component
   is not BITS_PER_UNIT-aligned.
   VALUEIZE if non-NULL is used to valueize SSA names.  It should return
   its argument or a constant if the argument is known to be constant.  */

tree
get_addr_base_and_unit_offset_1 (tree exp, poly_int64_pod *poffset,
				 tree (*valueize) (tree))
{
  poly_int64 byte_offset = 0;

  /* Compute cumulative byte-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  {
	    poly_int64 this_byte_offset;
	    poly_uint64 this_bit_offset;
	    if (!poly_int_tree_p (TREE_OPERAND (exp, 2), &this_bit_offset)
		|| !multiple_p (this_bit_offset, BITS_PER_UNIT,
				&this_byte_offset))
	      return NULL_TREE;
	    byte_offset += this_byte_offset;
	  }
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);
	    poly_int64 hthis_offset;

	    if (!this_offset
		|| !poly_int_tree_p (this_offset, &hthis_offset)
		|| (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
		    % BITS_PER_UNIT))
	      return NULL_TREE;

	    hthis_offset += (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
			     / BITS_PER_UNIT);
	    byte_offset += hthis_offset;
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound, unit_size;

	    if (valueize
		&& TREE_CODE (index) == SSA_NAME)
	      index = (*valueize) (index);

	    /* If the resulting bit-offset is constant, track it.  */
	    if (poly_int_tree_p (index)
		&& (low_bound = array_ref_low_bound (exp),
		    poly_int_tree_p (low_bound))
		&& (unit_size = array_ref_element_size (exp),
		    TREE_CODE (unit_size) == INTEGER_CST))
	      {
		poly_offset_int woffset
		  = wi::sext (wi::to_poly_offset (index)
			      - wi::to_poly_offset (low_bound),
			      TYPE_PRECISION (sizetype));
		woffset *= wi::to_offset (unit_size);
		byte_offset += woffset.force_shwi ();
	      }
	    else
	      return NULL_TREE;
	  }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  byte_offset += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (exp)));
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case MEM_REF:
	  {
	    tree base = TREE_OPERAND (exp, 0);
	    if (valueize
		&& TREE_CODE (base) == SSA_NAME)
	      base = (*valueize) (base);

	    /* Hand back the decl for MEM[&decl, off].  */
	    if (TREE_CODE (base) == ADDR_EXPR)
	      {
		if (!integer_zerop (TREE_OPERAND (exp, 1)))
		  {
		    poly_offset_int off = mem_ref_offset (exp);
		    byte_offset += off.force_shwi ();
		  }
		exp = TREE_OPERAND (base, 0);
	      }
	    goto done;
	  }

	case TARGET_MEM_REF:
	  {
	    tree base = TREE_OPERAND (exp, 0);
	    if (valueize
		&& TREE_CODE (base) == SSA_NAME)
	      base = (*valueize) (base);

	    /* Hand back the decl for MEM[&decl, off].  */
	    if (TREE_CODE (base) == ADDR_EXPR)
	      {
		if (TMR_INDEX (exp) || TMR_INDEX2 (exp))
		  return NULL_TREE;
		if (!integer_zerop (TMR_OFFSET (exp)))
		  {
		    poly_offset_int off = mem_ref_offset (exp);
		    byte_offset += off.force_shwi ();
		  }
		exp = TREE_OPERAND (base, 0);
	      }
	    goto done;
	  }

	default:
	  goto done;
	}

      exp = TREE_OPERAND (exp, 0);
    }
done:

  *poffset = byte_offset;
  return exp;
}

/* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
   denotes the starting address of the memory access EXP.
   Returns NULL_TREE if the offset is not constant or any component
   is not BITS_PER_UNIT-aligned.  */

tree
get_addr_base_and_unit_offset (tree exp, poly_int64_pod *poffset)
{
  return get_addr_base_and_unit_offset_1 (exp, poffset, NULL);
}
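
/* Illustrative sketch (not part of the original sources): the unit-offset
   helper above is convenient for deciding whether two lvalues start at the
   same address, e.g.

     poly_int64 off1, off2;
     tree base1 = get_addr_base_and_unit_offset (ref1, &off1);
     tree base2 = get_addr_base_and_unit_offset (ref2, &off2);
     if (base1 && base2
	 && operand_equal_p (base1, base2, 0)
	 && known_eq (off1, off2))
       ...  REF1 and REF2 denote the same starting byte ...

   REF1 and REF2 are hypothetical trees used only for this sketch.  */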

/* Returns true if STMT references an SSA_NAME that has
   SSA_NAME_OCCURS_IN_ABNORMAL_PHI set, otherwise false.  */

bool
stmt_references_abnormal_ssa_name (gimple *stmt)
{
  ssa_op_iter oi;
  use_operand_p use_p;

  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
    {
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p)))
	return true;
    }

  return false;
}

/* If STMT takes any abnormal PHI values as input, replace them with
   local copies.  */

void
replace_abnormal_ssa_names (gimple *stmt)
{
  ssa_op_iter oi;
  use_operand_p use_p;

  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
    {
      tree op = USE_FROM_PTR (use_p);
      if (TREE_CODE (op) == SSA_NAME && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op))
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  tree new_name = make_ssa_name (TREE_TYPE (op));
	  gassign *assign = gimple_build_assign (new_name, op);
	  gsi_insert_before (&gsi, assign, GSI_SAME_STMT);
	  SET_USE (use_p, new_name);
	}
    }
}
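
/* Illustrative note (not part of the original sources): on a statement
   such as

     foo (x_1(ab));

   where x_1(ab) occurs in an abnormal PHI, replace_abnormal_ssa_names
   rewrites the use roughly as

     x_5 = x_1(ab);
     foo (x_5);

   so that the abnormal name is only read by the inserted copy.  The SSA
   version numbers are made up for the example.  */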

/* Pair of tree and a sorting index, for dump_enumerated_decls.  */
struct GTY(()) numbered_tree
{
  tree t;
  int num;
};


/* Compare two declaration references by their DECL_UID / sequence number.
   Called via qsort.  */

static int
compare_decls_by_uid (const void *pa, const void *pb)
{
  const numbered_tree *nt_a = ((const numbered_tree *)pa);
  const numbered_tree *nt_b = ((const numbered_tree *)pb);

  if (DECL_UID (nt_a->t) != DECL_UID (nt_b->t))
    return DECL_UID (nt_a->t) - DECL_UID (nt_b->t);
  return nt_a->num - nt_b->num;
}

/* Called via walk_gimple_stmt / walk_gimple_op by dump_enumerated_decls.  */
static tree
dump_enumerated_decls_push (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  vec<numbered_tree> *list = (vec<numbered_tree> *) wi->info;
  numbered_tree nt;

  if (!DECL_P (*tp))
    return NULL_TREE;
  nt.t = *tp;
  nt.num = list->length ();
  list->safe_push (nt);
  *walk_subtrees = 0;
  return NULL_TREE;
}

/* Find all the declarations used by the current function, sort them by uid,
   and emit the sorted list.  Each declaration is tagged with a sequence
   number indicating when it was found during statement / tree walking,
   so that TDF_NOUID comparisons of anonymous declarations are still
   meaningful.  Where a declaration was encountered more than once, we
   emit only the sequence number of the first encounter.
   FILE is the dump file where to output the list and FLAGS is as in
   print_generic_expr.  */
void
dump_enumerated_decls (FILE *file, dump_flags_t flags)
{
  if (!cfun->cfg)
    return;

  basic_block bb;
  struct walk_stmt_info wi;
  auto_vec<numbered_tree, 40> decl_list;

  memset (&wi, '\0', sizeof (wi));
  wi.info = (void *) &decl_list;
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	if (!is_gimple_debug (gsi_stmt (gsi)))
	  walk_gimple_stmt (&gsi, NULL, dump_enumerated_decls_push, &wi);
    }
  decl_list.qsort (compare_decls_by_uid);
  if (decl_list.length ())
    {
      unsigned ix;
      numbered_tree *ntp;
      tree last = NULL_TREE;

      fprintf (file, "Declarations used by %s, sorted by DECL_UID:\n",
	       current_function_name ());
      FOR_EACH_VEC_ELT (decl_list, ix, ntp)
	{
	  if (ntp->t == last)
	    continue;
	  fprintf (file, "%d: ", ntp->num);
	  print_generic_decl (file, ntp->t, flags);
	  fprintf (file, "\n");
	  last = ntp->t;
	}
    }
}
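
/* Illustrative note (not part of the original sources): for a function
   "foo" the output of dump_enumerated_decls looks along the lines of

     Declarations used by foo, sorted by DECL_UID:
     0: int i;
     3: static int counter;

   where the leading number is the sequence index assigned in
   dump_enumerated_decls_push, not the DECL_UID.  The declarations shown
   here are made up for the example; the exact rendering depends on
   print_generic_decl and FLAGS.  */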