/* Tree based points-to analysis
   Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dberlin@dberlin.org>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "obstack.h"
#include "bitmap.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "tree.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "varray.h"
#include "diagnostic.h"
#include "toplev.h"
#include "gimple.h"
#include "hashtab.h"
#include "function.h"
#include "cgraph.h"
#include "tree-pass.h"
#include "timevar.h"
#include "alloc-pool.h"
#include "splay-tree.h"
#include "params.h"
#include "cgraph.h"
#include "alias.h"
#include "pointer-set.h"

/* The idea behind this analyzer is to generate set constraints from the
   program, then solve the resulting constraints in order to generate the
   points-to sets.

   Set constraints are a way of modeling program analysis problems that
   involve sets.  They consist of an inclusion constraint language,
   describing the variables (each variable is a set) and the operations
   performed on the variables, and a set of rules that derive facts
   from these operations.  To solve a system of set constraints, you derive
   all possible facts under the rules, which gives you the correct sets
   as a consequence.

   See "Efficient Field-sensitive pointer analysis for C" by David
   J. Pearce and Paul H. J. Kelly and Chris Hankin, at
   http://citeseer.ist.psu.edu/pearce04efficient.html

   Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
   of C Code in a Second" by Nevin Heintze and Olivier Tardieu, at
   http://citeseer.ist.psu.edu/heintze01ultrafast.html

   There are three types of real constraint expressions, DEREF,
   ADDRESSOF, and SCALAR.  Each constraint expression consists
   of a constraint type, a variable, and an offset.

   SCALAR is a constraint expression type used to represent x, whether
   it appears on the LHS or the RHS of a statement.
   DEREF is a constraint expression type used to represent *x, whether
   it appears on the LHS or the RHS of a statement.
   ADDRESSOF is a constraint expression used to represent &x, whether
   it appears on the LHS or the RHS of a statement.

   Each pointer variable in the program is assigned an integer id, and
   each field of a structure variable is assigned an integer id as well.

   Structure variables are linked to their list of fields through a "next
   field" in each variable that points to the next field in offset
   order.
   Each variable for a structure field has

     1. "size", that tells the size in bits of that field.
     2. "fullsize", that tells the size in bits of the entire structure.
     3. "offset", that tells the offset in bits from the beginning of the
        structure to this field.

   Thus,
   struct f
   {
     int a;
     int b;
   } foo;
   int *bar;

   looks like

   foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
   foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
   bar -> id 3, size 32, offset 0, fullsize 32, next NULL


   In order to solve the system of set constraints, the following is
   done:

   1. Each constraint variable x has a solution set associated with it,
      Sol(x).

   2. Constraints are separated into direct, copy, and complex.
      Direct constraints are ADDRESSOF constraints that require no extra
      processing, such as P = &Q.
      Copy constraints are those of the form P = Q.
      Complex constraints are all the constraints involving dereferences
      and offsets (including offsetted copies).

   3. All direct constraints of the form P = &Q are processed, such
      that Q is added to Sol(P).

   4. All complex constraints for a given constraint variable are stored in a
      linked list attached to that variable's node.

   5. A directed graph is built out of the copy constraints.  Each
      constraint variable is a node in the graph, and an edge from
      Q to P is added for each copy constraint of the form P = Q.

   6. The graph is then walked, and solution sets are
      propagated along the copy edges, such that an edge from Q to P
      causes Sol(P) <- Sol(P) union Sol(Q).

   7. As we visit each node, all complex constraints associated with
      that node are processed by adding appropriate copy edges to the
      graph, or the appropriate variables to the solution set.

   8. The process of walking the graph is iterated until no solution
      sets change.

   Prior to walking the graph in steps 6 and 7, we perform static
   cycle elimination on the constraint graph, as well
   as off-line variable substitution.

   TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
   on and turned into anything), but isn't.  You can just see what offset
   inside the pointed-to struct it's going to access.

   TODO: Constant bounded arrays can be handled as if they were structs of the
   same number of elements.

   TODO: Modeling heap and incoming pointers becomes much better if we
   add fields to them as we discover them, which we could do.

   TODO: We could handle unions, but to be honest, it's probably not
   worth the pain or slowdown.  */

static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
htab_t heapvar_for_stmt;

static bool use_field_sensitive = true;
static int in_ipa_mode = 0;

/* Used for predecessor bitmaps. */
static bitmap_obstack predbitmap_obstack;

/* Used for points-to sets. */
static bitmap_obstack pta_obstack;

/* Used for oldsolution members of variables. */
static bitmap_obstack oldpta_obstack;

/* Used for per-solver-iteration bitmaps. */
static bitmap_obstack iteration_obstack;

static unsigned int create_variable_info_for (tree, const char *);
typedef struct constraint_graph *constraint_graph_t;
static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);

struct constraint;
typedef struct constraint *constraint_t;

DEF_VEC_P(constraint_t);
DEF_VEC_ALLOC_P(constraint_t,heap);

#define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d)	\
  if (a)						\
    EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)

static struct constraint_stats
{
  unsigned int total_vars;
  unsigned int nonpointer_vars;
  unsigned int unified_vars_static;
  unsigned int unified_vars_dynamic;
  unsigned int iterations;
  unsigned int num_edges;
  unsigned int num_implicit_edges;
  unsigned int points_to_sets_created;
} stats;

struct variable_info
{
  /* ID of this variable  */
  unsigned int id;

  /* True if this is a variable created by the constraint analysis, such as
     heap variables and constraints we had to break up.  */
  unsigned int is_artificial_var : 1;

  /* True if this is a special variable whose solution set should not be
     changed.  */
  unsigned int is_special_var : 1;

  /* True for variables whose size is not known or variable.  */
  unsigned int is_unknown_size_var : 1;

  /* True for (sub-)fields that represent a whole variable.  */
  unsigned int is_full_var : 1;

  /* True if this is a heap variable.  */
  unsigned int is_heap_var : 1;

  /* True if this is a variable tracking a restrict pointer source.  */
  unsigned int is_restrict_var : 1;

  /* True if this field may contain pointers.  */
  unsigned int may_have_pointers : 1;

  /* True if this represents a global variable.  */
  unsigned int is_global_var : 1;

  /* A link to the variable for the next field in this structure.  */
  struct variable_info *next;

  /* Offset of this variable, in bits, from the base variable  */
  unsigned HOST_WIDE_INT offset;

  /* Size of the variable, in bits.  */
  unsigned HOST_WIDE_INT size;

  /* Full size of the base variable, in bits.  */
  unsigned HOST_WIDE_INT fullsize;

  /* Name of this variable  */
  const char *name;

  /* Tree that this variable is associated with.  */
  tree decl;

  /* Points-to set for this variable.  */
  bitmap solution;

  /* Old points-to set for this variable.  */
  bitmap oldsolution;
};
typedef struct variable_info *varinfo_t;

static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
						   unsigned HOST_WIDE_INT);
static varinfo_t lookup_vi_for_tree (tree);
static inline bool type_can_have_subvars (const_tree);

/* Pool of variable info structures.  */
static alloc_pool variable_info_pool;

DEF_VEC_P(varinfo_t);

DEF_VEC_ALLOC_P(varinfo_t, heap);

/* Table of variable info structures for constraint variables.
   Indexed directly by variable info id.  */
static VEC(varinfo_t,heap) *varmap;

/* Return the varmap element N */

static inline varinfo_t
get_varinfo (unsigned int n)
{
  return VEC_index (varinfo_t, varmap, n);
}

/* Static IDs for the special variables.
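   Roughly: NOTHING stands for the empty points-to set, ANYTHING for any
   object whatsoever, READONLY for read-only memory such as string
   constants, ESCAPED for memory that escapes the current function,
   NONLOCAL for memory not local to the current function, CALLUSED for
   memory used by calls in the current function, STOREDANYTHING for
   everything stored through an ANYTHING pointer, and INTEGER for what an
   integer cast to a pointer may point to.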
*/ 291 enum { nothing_id = 0, anything_id = 1, readonly_id = 2, 292 escaped_id = 3, nonlocal_id = 4, callused_id = 5, 293 storedanything_id = 6, integer_id = 7 }; 294 295 struct GTY(()) heapvar_map { 296 struct tree_map map; 297 unsigned HOST_WIDE_INT offset; 298 }; 299 300 static int 301 heapvar_map_eq (const void *p1, const void *p2) 302 { 303 const struct heapvar_map *h1 = (const struct heapvar_map *)p1; 304 const struct heapvar_map *h2 = (const struct heapvar_map *)p2; 305 return (h1->map.base.from == h2->map.base.from 306 && h1->offset == h2->offset); 307 } 308 309 static unsigned int 310 heapvar_map_hash (struct heapvar_map *h) 311 { 312 return iterative_hash_host_wide_int (h->offset, 313 htab_hash_pointer (h->map.base.from)); 314 } 315 316 /* Lookup a heap var for FROM, and return it if we find one. */ 317 318 static tree 319 heapvar_lookup (tree from, unsigned HOST_WIDE_INT offset) 320 { 321 struct heapvar_map *h, in; 322 in.map.base.from = from; 323 in.offset = offset; 324 h = (struct heapvar_map *) htab_find_with_hash (heapvar_for_stmt, &in, 325 heapvar_map_hash (&in)); 326 if (h) 327 return h->map.to; 328 return NULL_TREE; 329 } 330 331 /* Insert a mapping FROM->TO in the heap var for statement 332 hashtable. */ 333 334 static void 335 heapvar_insert (tree from, unsigned HOST_WIDE_INT offset, tree to) 336 { 337 struct heapvar_map *h; 338 void **loc; 339 340 h = GGC_NEW (struct heapvar_map); 341 h->map.base.from = from; 342 h->offset = offset; 343 h->map.hash = heapvar_map_hash (h); 344 h->map.to = to; 345 loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->map.hash, INSERT); 346 gcc_assert (*loc == NULL); 347 *(struct heapvar_map **) loc = h; 348 } 349 350 /* Return a new variable info structure consisting for a variable 351 named NAME, and using constraint graph node NODE. Append it 352 to the vector of variable info structures. */ 353 354 static varinfo_t 355 new_var_info (tree t, const char *name) 356 { 357 unsigned index = VEC_length (varinfo_t, varmap); 358 varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool); 359 360 ret->id = index; 361 ret->name = name; 362 ret->decl = t; 363 /* Vars without decl are artificial and do not have sub-variables. */ 364 ret->is_artificial_var = (t == NULL_TREE); 365 ret->is_special_var = false; 366 ret->is_unknown_size_var = false; 367 ret->is_full_var = (t == NULL_TREE); 368 ret->is_heap_var = false; 369 ret->is_restrict_var = false; 370 ret->may_have_pointers = true; 371 ret->is_global_var = (t == NULL_TREE); 372 if (t && DECL_P (t)) 373 ret->is_global_var = (is_global_var (t) 374 /* We have to treat even local register variables 375 as escape points. */ 376 || (TREE_CODE (t) == VAR_DECL 377 && DECL_HARD_REGISTER (t))); 378 ret->solution = BITMAP_ALLOC (&pta_obstack); 379 ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack); 380 ret->next = NULL; 381 382 VEC_safe_push (varinfo_t, heap, varmap, ret); 383 384 return ret; 385 } 386 387 typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type; 388 389 /* An expression that appears in a constraint. */ 390 391 struct constraint_expr 392 { 393 /* Constraint type. */ 394 constraint_expr_type type; 395 396 /* Variable we are referring to in the constraint. */ 397 unsigned int var; 398 399 /* Offset, in bits, of this constraint from the beginning of 400 variables it ends up referring to. 401 402 IOW, in a deref constraint, we would deref, get the result set, 403 then add OFFSET to each member. */ 404 HOST_WIDE_INT offset; 405 }; 406 407 /* Use 0x8000... as special unknown offset. 
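   This is the value with only the sign bit set (the most negative
   HOST_WIDE_INT), so it can never collide with a real bit offset.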
*/ 408 #define UNKNOWN_OFFSET ((HOST_WIDE_INT)-1 << (HOST_BITS_PER_WIDE_INT-1)) 409 410 typedef struct constraint_expr ce_s; 411 DEF_VEC_O(ce_s); 412 DEF_VEC_ALLOC_O(ce_s, heap); 413 static void get_constraint_for_1 (tree, VEC(ce_s, heap) **, bool, bool); 414 static void get_constraint_for (tree, VEC(ce_s, heap) **); 415 static void get_constraint_for_rhs (tree, VEC(ce_s, heap) **); 416 static void do_deref (VEC (ce_s, heap) **); 417 418 /* Our set constraints are made up of two constraint expressions, one 419 LHS, and one RHS. 420 421 As described in the introduction, our set constraints each represent an 422 operation between set valued variables. 423 */ 424 struct constraint 425 { 426 struct constraint_expr lhs; 427 struct constraint_expr rhs; 428 }; 429 430 /* List of constraints that we use to build the constraint graph from. */ 431 432 static VEC(constraint_t,heap) *constraints; 433 static alloc_pool constraint_pool; 434 435 /* The constraint graph is represented as an array of bitmaps 436 containing successor nodes. */ 437 438 struct constraint_graph 439 { 440 /* Size of this graph, which may be different than the number of 441 nodes in the variable map. */ 442 unsigned int size; 443 444 /* Explicit successors of each node. */ 445 bitmap *succs; 446 447 /* Implicit predecessors of each node (Used for variable 448 substitution). */ 449 bitmap *implicit_preds; 450 451 /* Explicit predecessors of each node (Used for variable substitution). */ 452 bitmap *preds; 453 454 /* Indirect cycle representatives, or -1 if the node has no indirect 455 cycles. */ 456 int *indirect_cycles; 457 458 /* Representative node for a node. rep[a] == a unless the node has 459 been unified. */ 460 unsigned int *rep; 461 462 /* Equivalence class representative for a label. This is used for 463 variable substitution. */ 464 int *eq_rep; 465 466 /* Pointer equivalence label for a node. All nodes with the same 467 pointer equivalence label can be unified together at some point 468 (either during constraint optimization or after the constraint 469 graph is built). */ 470 unsigned int *pe; 471 472 /* Pointer equivalence representative for a label. This is used to 473 handle nodes that are pointer equivalent but not location 474 equivalent. We can unite these once the addressof constraints 475 are transformed into initial points-to sets. */ 476 int *pe_rep; 477 478 /* Pointer equivalence label for each node, used during variable 479 substitution. */ 480 unsigned int *pointer_label; 481 482 /* Location equivalence label for each node, used during location 483 equivalence finding. */ 484 unsigned int *loc_label; 485 486 /* Pointed-by set for each node, used during location equivalence 487 finding. This is pointed-by rather than pointed-to, because it 488 is constructed using the predecessor graph. */ 489 bitmap *pointed_by; 490 491 /* Points to sets for pointer equivalence. This is *not* the actual 492 points-to sets for nodes. */ 493 bitmap *points_to; 494 495 /* Bitmap of nodes where the bit is set if the node is a direct 496 node. Used for variable substitution. */ 497 sbitmap direct_nodes; 498 499 /* Bitmap of nodes where the bit is set if the node is address 500 taken. Used for variable substitution. */ 501 bitmap address_taken; 502 503 /* Vector of complex constraints for each graph node. Complex 504 constraints are those involving dereferences or offsets that are 505 not 0. 
*/ 506 VEC(constraint_t,heap) **complex; 507 }; 508 509 static constraint_graph_t graph; 510 511 /* During variable substitution and the offline version of indirect 512 cycle finding, we create nodes to represent dereferences and 513 address taken constraints. These represent where these start and 514 end. */ 515 #define FIRST_REF_NODE (VEC_length (varinfo_t, varmap)) 516 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1)) 517 518 /* Return the representative node for NODE, if NODE has been unioned 519 with another NODE. 520 This function performs path compression along the way to finding 521 the representative. */ 522 523 static unsigned int 524 find (unsigned int node) 525 { 526 gcc_assert (node < graph->size); 527 if (graph->rep[node] != node) 528 return graph->rep[node] = find (graph->rep[node]); 529 return node; 530 } 531 532 /* Union the TO and FROM nodes to the TO nodes. 533 Note that at some point in the future, we may want to do 534 union-by-rank, in which case we are going to have to return the 535 node we unified to. */ 536 537 static bool 538 unite (unsigned int to, unsigned int from) 539 { 540 gcc_assert (to < graph->size && from < graph->size); 541 if (to != from && graph->rep[from] != to) 542 { 543 graph->rep[from] = to; 544 return true; 545 } 546 return false; 547 } 548 549 /* Create a new constraint consisting of LHS and RHS expressions. */ 550 551 static constraint_t 552 new_constraint (const struct constraint_expr lhs, 553 const struct constraint_expr rhs) 554 { 555 constraint_t ret = (constraint_t) pool_alloc (constraint_pool); 556 ret->lhs = lhs; 557 ret->rhs = rhs; 558 return ret; 559 } 560 561 /* Print out constraint C to FILE. */ 562 563 static void 564 dump_constraint (FILE *file, constraint_t c) 565 { 566 if (c->lhs.type == ADDRESSOF) 567 fprintf (file, "&"); 568 else if (c->lhs.type == DEREF) 569 fprintf (file, "*"); 570 fprintf (file, "%s", get_varinfo (c->lhs.var)->name); 571 if (c->lhs.offset == UNKNOWN_OFFSET) 572 fprintf (file, " + UNKNOWN"); 573 else if (c->lhs.offset != 0) 574 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset); 575 fprintf (file, " = "); 576 if (c->rhs.type == ADDRESSOF) 577 fprintf (file, "&"); 578 else if (c->rhs.type == DEREF) 579 fprintf (file, "*"); 580 fprintf (file, "%s", get_varinfo (c->rhs.var)->name); 581 if (c->rhs.offset == UNKNOWN_OFFSET) 582 fprintf (file, " + UNKNOWN"); 583 else if (c->rhs.offset != 0) 584 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset); 585 fprintf (file, "\n"); 586 } 587 588 589 void debug_constraint (constraint_t); 590 void debug_constraints (void); 591 void debug_constraint_graph (void); 592 void debug_solution_for_var (unsigned int); 593 void debug_sa_points_to_info (void); 594 595 /* Print out constraint C to stderr. */ 596 597 void 598 debug_constraint (constraint_t c) 599 { 600 dump_constraint (stderr, c); 601 } 602 603 /* Print out all constraints to FILE */ 604 605 static void 606 dump_constraints (FILE *file) 607 { 608 int i; 609 constraint_t c; 610 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++) 611 dump_constraint (file, c); 612 } 613 614 /* Print out all constraints to stderr. */ 615 616 void 617 debug_constraints (void) 618 { 619 dump_constraints (stderr); 620 } 621 622 /* Print out to FILE the edge in the constraint graph that is created by 623 constraint c. The edge may have a label, depending on the type of 624 constraint that it represents. 
If complex1, e.g: a = *b, then the label 625 is "=*", if complex2, e.g: *a = b, then the label is "*=", if 626 complex with an offset, e.g: a = b + 8, then the label is "+". 627 Otherwise the edge has no label. */ 628 629 static void 630 dump_constraint_edge (FILE *file, constraint_t c) 631 { 632 if (c->rhs.type != ADDRESSOF) 633 { 634 const char *src = get_varinfo (c->rhs.var)->name; 635 const char *dst = get_varinfo (c->lhs.var)->name; 636 fprintf (file, " \"%s\" -> \"%s\" ", src, dst); 637 /* Due to preprocessing of constraints, instructions like *a = *b are 638 illegal; thus, we do not have to handle such cases. */ 639 if (c->lhs.type == DEREF) 640 fprintf (file, " [ label=\"*=\" ] ;\n"); 641 else if (c->rhs.type == DEREF) 642 fprintf (file, " [ label=\"=*\" ] ;\n"); 643 else 644 { 645 /* We must check the case where the constraint is an offset. 646 In this case, it is treated as a complex constraint. */ 647 if (c->rhs.offset != c->lhs.offset) 648 fprintf (file, " [ label=\"+\" ] ;\n"); 649 else 650 fprintf (file, " ;\n"); 651 } 652 } 653 } 654 655 /* Print the constraint graph in dot format. */ 656 657 static void 658 dump_constraint_graph (FILE *file) 659 { 660 unsigned int i=0, size; 661 constraint_t c; 662 663 /* Only print the graph if it has already been initialized: */ 664 if (!graph) 665 return; 666 667 /* Print the constraints used to produce the constraint graph. The 668 constraints will be printed as comments in the dot file: */ 669 fprintf (file, "\n\n/* Constraints used in the constraint graph:\n"); 670 dump_constraints (file); 671 fprintf (file, "*/\n"); 672 673 /* Prints the header of the dot file: */ 674 fprintf (file, "\n\n// The constraint graph in dot format:\n"); 675 fprintf (file, "strict digraph {\n"); 676 fprintf (file, " node [\n shape = box\n ]\n"); 677 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n"); 678 fprintf (file, "\n // List of nodes in the constraint graph:\n"); 679 680 /* The next lines print the nodes in the graph. In order to get the 681 number of nodes in the graph, we must choose the minimum between the 682 vector VEC (varinfo_t, varmap) and graph->size. If the graph has not 683 yet been initialized, then graph->size == 0, otherwise we must only 684 read nodes that have an entry in VEC (varinfo_t, varmap). */ 685 size = VEC_length (varinfo_t, varmap); 686 size = size < graph->size ? size : graph->size; 687 for (i = 0; i < size; i++) 688 { 689 const char *name = get_varinfo (graph->rep[i])->name; 690 fprintf (file, " \"%s\" ;\n", name); 691 } 692 693 /* Go over the list of constraints printing the edges in the constraint 694 graph. */ 695 fprintf (file, "\n // The constraint edges:\n"); 696 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++) 697 if (c) 698 dump_constraint_edge (file, c); 699 700 /* Prints the tail of the dot file. By now, only the closing bracket. */ 701 fprintf (file, "}\n\n\n"); 702 } 703 704 /* Print out the constraint graph to stderr. 
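   Like the other debug_* routines above, this is intended to be invoked
   by hand from the debugger, e.g. "call debug_constraint_graph ()".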
*/ 705 706 void 707 debug_constraint_graph (void) 708 { 709 dump_constraint_graph (stderr); 710 } 711 712 /* SOLVER FUNCTIONS 713 714 The solver is a simple worklist solver, that works on the following 715 algorithm: 716 717 sbitmap changed_nodes = all zeroes; 718 changed_count = 0; 719 For each node that is not already collapsed: 720 changed_count++; 721 set bit in changed nodes 722 723 while (changed_count > 0) 724 { 725 compute topological ordering for constraint graph 726 727 find and collapse cycles in the constraint graph (updating 728 changed if necessary) 729 730 for each node (n) in the graph in topological order: 731 changed_count--; 732 733 Process each complex constraint associated with the node, 734 updating changed if necessary. 735 736 For each outgoing edge from n, propagate the solution from n to 737 the destination of the edge, updating changed as necessary. 738 739 } */ 740 741 /* Return true if two constraint expressions A and B are equal. */ 742 743 static bool 744 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b) 745 { 746 return a.type == b.type && a.var == b.var && a.offset == b.offset; 747 } 748 749 /* Return true if constraint expression A is less than constraint expression 750 B. This is just arbitrary, but consistent, in order to give them an 751 ordering. */ 752 753 static bool 754 constraint_expr_less (struct constraint_expr a, struct constraint_expr b) 755 { 756 if (a.type == b.type) 757 { 758 if (a.var == b.var) 759 return a.offset < b.offset; 760 else 761 return a.var < b.var; 762 } 763 else 764 return a.type < b.type; 765 } 766 767 /* Return true if constraint A is less than constraint B. This is just 768 arbitrary, but consistent, in order to give them an ordering. */ 769 770 static bool 771 constraint_less (const constraint_t a, const constraint_t b) 772 { 773 if (constraint_expr_less (a->lhs, b->lhs)) 774 return true; 775 else if (constraint_expr_less (b->lhs, a->lhs)) 776 return false; 777 else 778 return constraint_expr_less (a->rhs, b->rhs); 779 } 780 781 /* Return true if two constraints A and B are equal. */ 782 783 static bool 784 constraint_equal (struct constraint a, struct constraint b) 785 { 786 return constraint_expr_equal (a.lhs, b.lhs) 787 && constraint_expr_equal (a.rhs, b.rhs); 788 } 789 790 791 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */ 792 793 static constraint_t 794 constraint_vec_find (VEC(constraint_t,heap) *vec, 795 struct constraint lookfor) 796 { 797 unsigned int place; 798 constraint_t found; 799 800 if (vec == NULL) 801 return NULL; 802 803 place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less); 804 if (place >= VEC_length (constraint_t, vec)) 805 return NULL; 806 found = VEC_index (constraint_t, vec, place); 807 if (!constraint_equal (*found, lookfor)) 808 return NULL; 809 return found; 810 } 811 812 /* Union two constraint vectors, TO and FROM. Put the result in TO. */ 813 814 static void 815 constraint_set_union (VEC(constraint_t,heap) **to, 816 VEC(constraint_t,heap) **from) 817 { 818 int i; 819 constraint_t c; 820 821 for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++) 822 { 823 if (constraint_vec_find (*to, *c) == NULL) 824 { 825 unsigned int place = VEC_lower_bound (constraint_t, *to, c, 826 constraint_less); 827 VEC_safe_insert (constraint_t, heap, *to, place, c); 828 } 829 } 830 } 831 832 /* Expands the solution in SET to all sub-fields of variables included. 833 Union the expanded result into RESULT. 
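   For example, with the struct f from the overview comment, a SET
   containing only foo.b expands to a RESULT containing both foo.a and
   foo.b.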
*/ 834 835 static void 836 solution_set_expand (bitmap result, bitmap set) 837 { 838 bitmap_iterator bi; 839 bitmap vars = NULL; 840 unsigned j; 841 842 /* In a first pass record all variables we need to add all 843 sub-fields off. This avoids quadratic behavior. */ 844 EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi) 845 { 846 varinfo_t v = get_varinfo (j); 847 if (v->is_artificial_var 848 || v->is_full_var) 849 continue; 850 v = lookup_vi_for_tree (v->decl); 851 if (vars == NULL) 852 vars = BITMAP_ALLOC (NULL); 853 bitmap_set_bit (vars, v->id); 854 } 855 856 /* In the second pass now do the addition to the solution and 857 to speed up solving add it to the delta as well. */ 858 if (vars != NULL) 859 { 860 EXECUTE_IF_SET_IN_BITMAP (vars, 0, j, bi) 861 { 862 varinfo_t v = get_varinfo (j); 863 for (; v != NULL; v = v->next) 864 bitmap_set_bit (result, v->id); 865 } 866 BITMAP_FREE (vars); 867 } 868 } 869 870 /* Take a solution set SET, add OFFSET to each member of the set, and 871 overwrite SET with the result when done. */ 872 873 static void 874 solution_set_add (bitmap set, HOST_WIDE_INT offset) 875 { 876 bitmap result = BITMAP_ALLOC (&iteration_obstack); 877 unsigned int i; 878 bitmap_iterator bi; 879 880 /* If the offset is unknown we have to expand the solution to 881 all subfields. */ 882 if (offset == UNKNOWN_OFFSET) 883 { 884 solution_set_expand (set, set); 885 return; 886 } 887 888 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi) 889 { 890 varinfo_t vi = get_varinfo (i); 891 892 /* If this is a variable with just one field just set its bit 893 in the result. */ 894 if (vi->is_artificial_var 895 || vi->is_unknown_size_var 896 || vi->is_full_var) 897 bitmap_set_bit (result, i); 898 else 899 { 900 unsigned HOST_WIDE_INT fieldoffset = vi->offset + offset; 901 902 /* If the offset makes the pointer point to before the 903 variable use offset zero for the field lookup. */ 904 if (offset < 0 905 && fieldoffset > vi->offset) 906 fieldoffset = 0; 907 908 if (offset != 0) 909 vi = first_or_preceding_vi_for_offset (vi, fieldoffset); 910 911 bitmap_set_bit (result, vi->id); 912 /* If the result is not exactly at fieldoffset include the next 913 field as well. See get_constraint_for_ptr_offset for more 914 rationale. */ 915 if (vi->offset != fieldoffset 916 && vi->next != NULL) 917 bitmap_set_bit (result, vi->next->id); 918 } 919 } 920 921 bitmap_copy (set, result); 922 BITMAP_FREE (result); 923 } 924 925 /* Union solution sets TO and FROM, and add INC to each member of FROM in the 926 process. */ 927 928 static bool 929 set_union_with_increment (bitmap to, bitmap from, HOST_WIDE_INT inc) 930 { 931 if (inc == 0) 932 return bitmap_ior_into (to, from); 933 else 934 { 935 bitmap tmp; 936 bool res; 937 938 tmp = BITMAP_ALLOC (&iteration_obstack); 939 bitmap_copy (tmp, from); 940 solution_set_add (tmp, inc); 941 res = bitmap_ior_into (to, tmp); 942 BITMAP_FREE (tmp); 943 return res; 944 } 945 } 946 947 /* Insert constraint C into the list of complex constraints for graph 948 node VAR. */ 949 950 static void 951 insert_into_complex (constraint_graph_t graph, 952 unsigned int var, constraint_t c) 953 { 954 VEC (constraint_t, heap) *complex = graph->complex[var]; 955 unsigned int place = VEC_lower_bound (constraint_t, complex, c, 956 constraint_less); 957 958 /* Only insert constraints that do not already exist. 
*/ 959 if (place >= VEC_length (constraint_t, complex) 960 || !constraint_equal (*c, *VEC_index (constraint_t, complex, place))) 961 VEC_safe_insert (constraint_t, heap, graph->complex[var], place, c); 962 } 963 964 965 /* Condense two variable nodes into a single variable node, by moving 966 all associated info from SRC to TO. */ 967 968 static void 969 merge_node_constraints (constraint_graph_t graph, unsigned int to, 970 unsigned int from) 971 { 972 unsigned int i; 973 constraint_t c; 974 975 gcc_assert (find (from) == to); 976 977 /* Move all complex constraints from src node into to node */ 978 for (i = 0; VEC_iterate (constraint_t, graph->complex[from], i, c); i++) 979 { 980 /* In complex constraints for node src, we may have either 981 a = *src, and *src = a, or an offseted constraint which are 982 always added to the rhs node's constraints. */ 983 984 if (c->rhs.type == DEREF) 985 c->rhs.var = to; 986 else if (c->lhs.type == DEREF) 987 c->lhs.var = to; 988 else 989 c->rhs.var = to; 990 } 991 constraint_set_union (&graph->complex[to], &graph->complex[from]); 992 VEC_free (constraint_t, heap, graph->complex[from]); 993 graph->complex[from] = NULL; 994 } 995 996 997 /* Remove edges involving NODE from GRAPH. */ 998 999 static void 1000 clear_edges_for_node (constraint_graph_t graph, unsigned int node) 1001 { 1002 if (graph->succs[node]) 1003 BITMAP_FREE (graph->succs[node]); 1004 } 1005 1006 /* Merge GRAPH nodes FROM and TO into node TO. */ 1007 1008 static void 1009 merge_graph_nodes (constraint_graph_t graph, unsigned int to, 1010 unsigned int from) 1011 { 1012 if (graph->indirect_cycles[from] != -1) 1013 { 1014 /* If we have indirect cycles with the from node, and we have 1015 none on the to node, the to node has indirect cycles from the 1016 from node now that they are unified. 1017 If indirect cycles exist on both, unify the nodes that they 1018 are in a cycle with, since we know they are in a cycle with 1019 each other. */ 1020 if (graph->indirect_cycles[to] == -1) 1021 graph->indirect_cycles[to] = graph->indirect_cycles[from]; 1022 } 1023 1024 /* Merge all the successor edges. */ 1025 if (graph->succs[from]) 1026 { 1027 if (!graph->succs[to]) 1028 graph->succs[to] = BITMAP_ALLOC (&pta_obstack); 1029 bitmap_ior_into (graph->succs[to], 1030 graph->succs[from]); 1031 } 1032 1033 clear_edges_for_node (graph, from); 1034 } 1035 1036 1037 /* Add an indirect graph edge to GRAPH, going from TO to FROM if 1038 it doesn't exist in the graph already. */ 1039 1040 static void 1041 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to, 1042 unsigned int from) 1043 { 1044 if (to == from) 1045 return; 1046 1047 if (!graph->implicit_preds[to]) 1048 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack); 1049 1050 if (bitmap_set_bit (graph->implicit_preds[to], from)) 1051 stats.num_implicit_edges++; 1052 } 1053 1054 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if 1055 it doesn't exist in the graph already. 1056 Return false if the edge already existed, true otherwise. */ 1057 1058 static void 1059 add_pred_graph_edge (constraint_graph_t graph, unsigned int to, 1060 unsigned int from) 1061 { 1062 if (!graph->preds[to]) 1063 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack); 1064 bitmap_set_bit (graph->preds[to], from); 1065 } 1066 1067 /* Add a graph edge to GRAPH, going from FROM to TO if 1068 it doesn't exist in the graph already. 1069 Return false if the edge already existed, true otherwise. 
*/ 1070 1071 static bool 1072 add_graph_edge (constraint_graph_t graph, unsigned int to, 1073 unsigned int from) 1074 { 1075 if (to == from) 1076 { 1077 return false; 1078 } 1079 else 1080 { 1081 bool r = false; 1082 1083 if (!graph->succs[from]) 1084 graph->succs[from] = BITMAP_ALLOC (&pta_obstack); 1085 if (bitmap_set_bit (graph->succs[from], to)) 1086 { 1087 r = true; 1088 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE) 1089 stats.num_edges++; 1090 } 1091 return r; 1092 } 1093 } 1094 1095 1096 /* Return true if {DEST.SRC} is an existing graph edge in GRAPH. */ 1097 1098 static bool 1099 valid_graph_edge (constraint_graph_t graph, unsigned int src, 1100 unsigned int dest) 1101 { 1102 return (graph->succs[dest] 1103 && bitmap_bit_p (graph->succs[dest], src)); 1104 } 1105 1106 /* Initialize the constraint graph structure to contain SIZE nodes. */ 1107 1108 static void 1109 init_graph (unsigned int size) 1110 { 1111 unsigned int j; 1112 1113 graph = XCNEW (struct constraint_graph); 1114 graph->size = size; 1115 graph->succs = XCNEWVEC (bitmap, graph->size); 1116 graph->indirect_cycles = XNEWVEC (int, graph->size); 1117 graph->rep = XNEWVEC (unsigned int, graph->size); 1118 graph->complex = XCNEWVEC (VEC(constraint_t, heap) *, size); 1119 graph->pe = XCNEWVEC (unsigned int, graph->size); 1120 graph->pe_rep = XNEWVEC (int, graph->size); 1121 1122 for (j = 0; j < graph->size; j++) 1123 { 1124 graph->rep[j] = j; 1125 graph->pe_rep[j] = -1; 1126 graph->indirect_cycles[j] = -1; 1127 } 1128 } 1129 1130 /* Build the constraint graph, adding only predecessor edges right now. */ 1131 1132 static void 1133 build_pred_graph (void) 1134 { 1135 int i; 1136 constraint_t c; 1137 unsigned int j; 1138 1139 graph->implicit_preds = XCNEWVEC (bitmap, graph->size); 1140 graph->preds = XCNEWVEC (bitmap, graph->size); 1141 graph->pointer_label = XCNEWVEC (unsigned int, graph->size); 1142 graph->loc_label = XCNEWVEC (unsigned int, graph->size); 1143 graph->pointed_by = XCNEWVEC (bitmap, graph->size); 1144 graph->points_to = XCNEWVEC (bitmap, graph->size); 1145 graph->eq_rep = XNEWVEC (int, graph->size); 1146 graph->direct_nodes = sbitmap_alloc (graph->size); 1147 graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack); 1148 sbitmap_zero (graph->direct_nodes); 1149 1150 for (j = 0; j < FIRST_REF_NODE; j++) 1151 { 1152 if (!get_varinfo (j)->is_special_var) 1153 SET_BIT (graph->direct_nodes, j); 1154 } 1155 1156 for (j = 0; j < graph->size; j++) 1157 graph->eq_rep[j] = -1; 1158 1159 for (j = 0; j < VEC_length (varinfo_t, varmap); j++) 1160 graph->indirect_cycles[j] = -1; 1161 1162 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++) 1163 { 1164 struct constraint_expr lhs = c->lhs; 1165 struct constraint_expr rhs = c->rhs; 1166 unsigned int lhsvar = lhs.var; 1167 unsigned int rhsvar = rhs.var; 1168 1169 if (lhs.type == DEREF) 1170 { 1171 /* *x = y. 
*/ 1172 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR) 1173 add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar); 1174 } 1175 else if (rhs.type == DEREF) 1176 { 1177 /* x = *y */ 1178 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR) 1179 add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar); 1180 else 1181 RESET_BIT (graph->direct_nodes, lhsvar); 1182 } 1183 else if (rhs.type == ADDRESSOF) 1184 { 1185 varinfo_t v; 1186 1187 /* x = &y */ 1188 if (graph->points_to[lhsvar] == NULL) 1189 graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack); 1190 bitmap_set_bit (graph->points_to[lhsvar], rhsvar); 1191 1192 if (graph->pointed_by[rhsvar] == NULL) 1193 graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack); 1194 bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar); 1195 1196 /* Implicitly, *x = y */ 1197 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar); 1198 1199 /* All related variables are no longer direct nodes. */ 1200 RESET_BIT (graph->direct_nodes, rhsvar); 1201 v = get_varinfo (rhsvar); 1202 if (!v->is_full_var) 1203 { 1204 v = lookup_vi_for_tree (v->decl); 1205 do 1206 { 1207 RESET_BIT (graph->direct_nodes, v->id); 1208 v = v->next; 1209 } 1210 while (v != NULL); 1211 } 1212 bitmap_set_bit (graph->address_taken, rhsvar); 1213 } 1214 else if (lhsvar > anything_id 1215 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0) 1216 { 1217 /* x = y */ 1218 add_pred_graph_edge (graph, lhsvar, rhsvar); 1219 /* Implicitly, *x = *y */ 1220 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, 1221 FIRST_REF_NODE + rhsvar); 1222 } 1223 else if (lhs.offset != 0 || rhs.offset != 0) 1224 { 1225 if (rhs.offset != 0) 1226 RESET_BIT (graph->direct_nodes, lhs.var); 1227 else if (lhs.offset != 0) 1228 RESET_BIT (graph->direct_nodes, rhs.var); 1229 } 1230 } 1231 } 1232 1233 /* Build the constraint graph, adding successor edges. */ 1234 1235 static void 1236 build_succ_graph (void) 1237 { 1238 unsigned i, t; 1239 constraint_t c; 1240 1241 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++) 1242 { 1243 struct constraint_expr lhs; 1244 struct constraint_expr rhs; 1245 unsigned int lhsvar; 1246 unsigned int rhsvar; 1247 1248 if (!c) 1249 continue; 1250 1251 lhs = c->lhs; 1252 rhs = c->rhs; 1253 lhsvar = find (lhs.var); 1254 rhsvar = find (rhs.var); 1255 1256 if (lhs.type == DEREF) 1257 { 1258 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR) 1259 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar); 1260 } 1261 else if (rhs.type == DEREF) 1262 { 1263 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR) 1264 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar); 1265 } 1266 else if (rhs.type == ADDRESSOF) 1267 { 1268 /* x = &y */ 1269 gcc_assert (find (rhs.var) == rhs.var); 1270 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar); 1271 } 1272 else if (lhsvar > anything_id 1273 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0) 1274 { 1275 add_graph_edge (graph, lhsvar, rhsvar); 1276 } 1277 } 1278 1279 /* Add edges from STOREDANYTHING to all non-direct nodes that can 1280 receive pointers. */ 1281 t = find (storedanything_id); 1282 for (i = integer_id + 1; i < FIRST_REF_NODE; ++i) 1283 { 1284 if (!TEST_BIT (graph->direct_nodes, i) 1285 && get_varinfo (i)->may_have_pointers) 1286 add_graph_edge (graph, find (i), t); 1287 } 1288 1289 /* Everything stored to ANYTHING also potentially escapes. 
*/ 1290 add_graph_edge (graph, find (escaped_id), t); 1291 } 1292 1293 1294 /* Changed variables on the last iteration. */ 1295 static unsigned int changed_count; 1296 static sbitmap changed; 1297 1298 /* Strongly Connected Component visitation info. */ 1299 1300 struct scc_info 1301 { 1302 sbitmap visited; 1303 sbitmap deleted; 1304 unsigned int *dfs; 1305 unsigned int *node_mapping; 1306 int current_index; 1307 VEC(unsigned,heap) *scc_stack; 1308 }; 1309 1310 1311 /* Recursive routine to find strongly connected components in GRAPH. 1312 SI is the SCC info to store the information in, and N is the id of current 1313 graph node we are processing. 1314 1315 This is Tarjan's strongly connected component finding algorithm, as 1316 modified by Nuutila to keep only non-root nodes on the stack. 1317 The algorithm can be found in "On finding the strongly connected 1318 connected components in a directed graph" by Esko Nuutila and Eljas 1319 Soisalon-Soininen, in Information Processing Letters volume 49, 1320 number 1, pages 9-14. */ 1321 1322 static void 1323 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n) 1324 { 1325 unsigned int i; 1326 bitmap_iterator bi; 1327 unsigned int my_dfs; 1328 1329 SET_BIT (si->visited, n); 1330 si->dfs[n] = si->current_index ++; 1331 my_dfs = si->dfs[n]; 1332 1333 /* Visit all the successors. */ 1334 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi) 1335 { 1336 unsigned int w; 1337 1338 if (i > LAST_REF_NODE) 1339 break; 1340 1341 w = find (i); 1342 if (TEST_BIT (si->deleted, w)) 1343 continue; 1344 1345 if (!TEST_BIT (si->visited, w)) 1346 scc_visit (graph, si, w); 1347 { 1348 unsigned int t = find (w); 1349 unsigned int nnode = find (n); 1350 gcc_assert (nnode == n); 1351 1352 if (si->dfs[t] < si->dfs[nnode]) 1353 si->dfs[n] = si->dfs[t]; 1354 } 1355 } 1356 1357 /* See if any components have been identified. */ 1358 if (si->dfs[n] == my_dfs) 1359 { 1360 if (VEC_length (unsigned, si->scc_stack) > 0 1361 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs) 1362 { 1363 bitmap scc = BITMAP_ALLOC (NULL); 1364 unsigned int lowest_node; 1365 bitmap_iterator bi; 1366 1367 bitmap_set_bit (scc, n); 1368 1369 while (VEC_length (unsigned, si->scc_stack) != 0 1370 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs) 1371 { 1372 unsigned int w = VEC_pop (unsigned, si->scc_stack); 1373 1374 bitmap_set_bit (scc, w); 1375 } 1376 1377 lowest_node = bitmap_first_set_bit (scc); 1378 gcc_assert (lowest_node < FIRST_REF_NODE); 1379 1380 /* Collapse the SCC nodes into a single node, and mark the 1381 indirect cycles. */ 1382 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi) 1383 { 1384 if (i < FIRST_REF_NODE) 1385 { 1386 if (unite (lowest_node, i)) 1387 unify_nodes (graph, lowest_node, i, false); 1388 } 1389 else 1390 { 1391 unite (lowest_node, i); 1392 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node; 1393 } 1394 } 1395 } 1396 SET_BIT (si->deleted, n); 1397 } 1398 else 1399 VEC_safe_push (unsigned, heap, si->scc_stack, n); 1400 } 1401 1402 /* Unify node FROM into node TO, updating the changed count if 1403 necessary when UPDATE_CHANGED is true. 
*/ 1404 1405 static void 1406 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from, 1407 bool update_changed) 1408 { 1409 1410 gcc_assert (to != from && find (to) == to); 1411 if (dump_file && (dump_flags & TDF_DETAILS)) 1412 fprintf (dump_file, "Unifying %s to %s\n", 1413 get_varinfo (from)->name, 1414 get_varinfo (to)->name); 1415 1416 if (update_changed) 1417 stats.unified_vars_dynamic++; 1418 else 1419 stats.unified_vars_static++; 1420 1421 merge_graph_nodes (graph, to, from); 1422 merge_node_constraints (graph, to, from); 1423 1424 /* Mark TO as changed if FROM was changed. If TO was already marked 1425 as changed, decrease the changed count. */ 1426 1427 if (update_changed && TEST_BIT (changed, from)) 1428 { 1429 RESET_BIT (changed, from); 1430 if (!TEST_BIT (changed, to)) 1431 SET_BIT (changed, to); 1432 else 1433 { 1434 gcc_assert (changed_count > 0); 1435 changed_count--; 1436 } 1437 } 1438 if (get_varinfo (from)->solution) 1439 { 1440 /* If the solution changes because of the merging, we need to mark 1441 the variable as changed. */ 1442 if (bitmap_ior_into (get_varinfo (to)->solution, 1443 get_varinfo (from)->solution)) 1444 { 1445 if (update_changed && !TEST_BIT (changed, to)) 1446 { 1447 SET_BIT (changed, to); 1448 changed_count++; 1449 } 1450 } 1451 1452 BITMAP_FREE (get_varinfo (from)->solution); 1453 BITMAP_FREE (get_varinfo (from)->oldsolution); 1454 1455 if (stats.iterations > 0) 1456 { 1457 BITMAP_FREE (get_varinfo (to)->oldsolution); 1458 get_varinfo (to)->oldsolution = BITMAP_ALLOC (&oldpta_obstack); 1459 } 1460 } 1461 if (valid_graph_edge (graph, to, to)) 1462 { 1463 if (graph->succs[to]) 1464 bitmap_clear_bit (graph->succs[to], to); 1465 } 1466 } 1467 1468 /* Information needed to compute the topological ordering of a graph. */ 1469 1470 struct topo_info 1471 { 1472 /* sbitmap of visited nodes. */ 1473 sbitmap visited; 1474 /* Array that stores the topological order of the graph, *in 1475 reverse*. */ 1476 VEC(unsigned,heap) *topo_order; 1477 }; 1478 1479 1480 /* Initialize and return a topological info structure. */ 1481 1482 static struct topo_info * 1483 init_topo_info (void) 1484 { 1485 size_t size = graph->size; 1486 struct topo_info *ti = XNEW (struct topo_info); 1487 ti->visited = sbitmap_alloc (size); 1488 sbitmap_zero (ti->visited); 1489 ti->topo_order = VEC_alloc (unsigned, heap, 1); 1490 return ti; 1491 } 1492 1493 1494 /* Free the topological sort info pointed to by TI. */ 1495 1496 static void 1497 free_topo_info (struct topo_info *ti) 1498 { 1499 sbitmap_free (ti->visited); 1500 VEC_free (unsigned, heap, ti->topo_order); 1501 free (ti); 1502 } 1503 1504 /* Visit the graph in topological order, and store the order in the 1505 topo_info structure. */ 1506 1507 static void 1508 topo_visit (constraint_graph_t graph, struct topo_info *ti, 1509 unsigned int n) 1510 { 1511 bitmap_iterator bi; 1512 unsigned int j; 1513 1514 SET_BIT (ti->visited, n); 1515 1516 if (graph->succs[n]) 1517 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi) 1518 { 1519 if (!TEST_BIT (ti->visited, j)) 1520 topo_visit (graph, ti, j); 1521 } 1522 1523 VEC_safe_push (unsigned, heap, ti->topo_order, n); 1524 } 1525 1526 /* Process a constraint C that represents x = *(y + off), using DELTA as the 1527 starting solution for y. 
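   For example, for x = *y with Sol(y) = { bar } this adds a copy edge
   from bar to x and unions Sol(bar) into Sol(x).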
*/ 1528 1529 static void 1530 do_sd_constraint (constraint_graph_t graph, constraint_t c, 1531 bitmap delta) 1532 { 1533 unsigned int lhs = c->lhs.var; 1534 bool flag = false; 1535 bitmap sol = get_varinfo (lhs)->solution; 1536 unsigned int j; 1537 bitmap_iterator bi; 1538 HOST_WIDE_INT roffset = c->rhs.offset; 1539 1540 /* Our IL does not allow this. */ 1541 gcc_assert (c->lhs.offset == 0); 1542 1543 /* If the solution of Y contains anything it is good enough to transfer 1544 this to the LHS. */ 1545 if (bitmap_bit_p (delta, anything_id)) 1546 { 1547 flag |= bitmap_set_bit (sol, anything_id); 1548 goto done; 1549 } 1550 1551 /* If we do not know at with offset the rhs is dereferenced compute 1552 the reachability set of DELTA, conservatively assuming it is 1553 dereferenced at all valid offsets. */ 1554 if (roffset == UNKNOWN_OFFSET) 1555 { 1556 solution_set_expand (delta, delta); 1557 /* No further offset processing is necessary. */ 1558 roffset = 0; 1559 } 1560 1561 /* For each variable j in delta (Sol(y)), add 1562 an edge in the graph from j to x, and union Sol(j) into Sol(x). */ 1563 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi) 1564 { 1565 varinfo_t v = get_varinfo (j); 1566 HOST_WIDE_INT fieldoffset = v->offset + roffset; 1567 unsigned int t; 1568 1569 if (v->is_full_var) 1570 fieldoffset = v->offset; 1571 else if (roffset != 0) 1572 v = first_vi_for_offset (v, fieldoffset); 1573 /* If the access is outside of the variable we can ignore it. */ 1574 if (!v) 1575 continue; 1576 1577 do 1578 { 1579 t = find (v->id); 1580 1581 /* Adding edges from the special vars is pointless. 1582 They don't have sets that can change. */ 1583 if (get_varinfo (t)->is_special_var) 1584 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution); 1585 /* Merging the solution from ESCAPED needlessly increases 1586 the set. Use ESCAPED as representative instead. */ 1587 else if (v->id == escaped_id) 1588 flag |= bitmap_set_bit (sol, escaped_id); 1589 else if (add_graph_edge (graph, lhs, t)) 1590 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution); 1591 1592 /* If the variable is not exactly at the requested offset 1593 we have to include the next one. */ 1594 if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset 1595 || v->next == NULL) 1596 break; 1597 1598 v = v->next; 1599 fieldoffset = v->offset; 1600 } 1601 while (1); 1602 } 1603 1604 done: 1605 /* If the LHS solution changed, mark the var as changed. */ 1606 if (flag) 1607 { 1608 get_varinfo (lhs)->solution = sol; 1609 if (!TEST_BIT (changed, lhs)) 1610 { 1611 SET_BIT (changed, lhs); 1612 changed_count++; 1613 } 1614 } 1615 } 1616 1617 /* Process a constraint C that represents *(x + off) = y using DELTA 1618 as the starting solution for x. */ 1619 1620 static void 1621 do_ds_constraint (constraint_t c, bitmap delta) 1622 { 1623 unsigned int rhs = c->rhs.var; 1624 bitmap sol = get_varinfo (rhs)->solution; 1625 unsigned int j; 1626 bitmap_iterator bi; 1627 HOST_WIDE_INT loff = c->lhs.offset; 1628 1629 /* Our IL does not allow this. */ 1630 gcc_assert (c->rhs.offset == 0); 1631 1632 /* If the solution of y contains ANYTHING simply use the ANYTHING 1633 solution. This avoids needlessly increasing the points-to sets. */ 1634 if (bitmap_bit_p (sol, anything_id)) 1635 sol = get_varinfo (find (anything_id))->solution; 1636 1637 /* If the solution for x contains ANYTHING we have to merge the 1638 solution of y into all pointer variables which we do via 1639 STOREDANYTHING. 
*/ 1640 if (bitmap_bit_p (delta, anything_id)) 1641 { 1642 unsigned t = find (storedanything_id); 1643 if (add_graph_edge (graph, t, rhs)) 1644 { 1645 if (bitmap_ior_into (get_varinfo (t)->solution, sol)) 1646 { 1647 if (!TEST_BIT (changed, t)) 1648 { 1649 SET_BIT (changed, t); 1650 changed_count++; 1651 } 1652 } 1653 } 1654 return; 1655 } 1656 1657 /* If we do not know at with offset the rhs is dereferenced compute 1658 the reachability set of DELTA, conservatively assuming it is 1659 dereferenced at all valid offsets. */ 1660 if (loff == UNKNOWN_OFFSET) 1661 { 1662 solution_set_expand (delta, delta); 1663 loff = 0; 1664 } 1665 1666 /* For each member j of delta (Sol(x)), add an edge from y to j and 1667 union Sol(y) into Sol(j) */ 1668 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi) 1669 { 1670 varinfo_t v = get_varinfo (j); 1671 unsigned int t; 1672 HOST_WIDE_INT fieldoffset = v->offset + loff; 1673 1674 /* If v is a global variable then this is an escape point. */ 1675 if (v->is_global_var) 1676 { 1677 t = find (escaped_id); 1678 if (add_graph_edge (graph, t, rhs) 1679 && bitmap_ior_into (get_varinfo (t)->solution, sol) 1680 && !TEST_BIT (changed, t)) 1681 { 1682 SET_BIT (changed, t); 1683 changed_count++; 1684 } 1685 } 1686 1687 if (v->is_special_var) 1688 continue; 1689 1690 if (v->is_full_var) 1691 fieldoffset = v->offset; 1692 else if (loff != 0) 1693 v = first_vi_for_offset (v, fieldoffset); 1694 /* If the access is outside of the variable we can ignore it. */ 1695 if (!v) 1696 continue; 1697 1698 do 1699 { 1700 if (v->may_have_pointers) 1701 { 1702 t = find (v->id); 1703 if (add_graph_edge (graph, t, rhs) 1704 && bitmap_ior_into (get_varinfo (t)->solution, sol) 1705 && !TEST_BIT (changed, t)) 1706 { 1707 SET_BIT (changed, t); 1708 changed_count++; 1709 } 1710 } 1711 1712 /* If the variable is not exactly at the requested offset 1713 we have to include the next one. */ 1714 if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset 1715 || v->next == NULL) 1716 break; 1717 1718 v = v->next; 1719 fieldoffset = v->offset; 1720 } 1721 while (1); 1722 } 1723 } 1724 1725 /* Handle a non-simple (simple meaning requires no iteration), 1726 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */ 1727 1728 static void 1729 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta) 1730 { 1731 if (c->lhs.type == DEREF) 1732 { 1733 if (c->rhs.type == ADDRESSOF) 1734 { 1735 gcc_unreachable(); 1736 } 1737 else 1738 { 1739 /* *x = y */ 1740 do_ds_constraint (c, delta); 1741 } 1742 } 1743 else if (c->rhs.type == DEREF) 1744 { 1745 /* x = *y */ 1746 if (!(get_varinfo (c->lhs.var)->is_special_var)) 1747 do_sd_constraint (graph, c, delta); 1748 } 1749 else 1750 { 1751 bitmap tmp; 1752 bitmap solution; 1753 bool flag = false; 1754 1755 gcc_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR); 1756 solution = get_varinfo (c->rhs.var)->solution; 1757 tmp = get_varinfo (c->lhs.var)->solution; 1758 1759 flag = set_union_with_increment (tmp, solution, c->rhs.offset); 1760 1761 if (flag) 1762 { 1763 get_varinfo (c->lhs.var)->solution = tmp; 1764 if (!TEST_BIT (changed, c->lhs.var)) 1765 { 1766 SET_BIT (changed, c->lhs.var); 1767 changed_count++; 1768 } 1769 } 1770 } 1771 } 1772 1773 /* Initialize and return a new SCC info structure. 
*/ 1774 1775 static struct scc_info * 1776 init_scc_info (size_t size) 1777 { 1778 struct scc_info *si = XNEW (struct scc_info); 1779 size_t i; 1780 1781 si->current_index = 0; 1782 si->visited = sbitmap_alloc (size); 1783 sbitmap_zero (si->visited); 1784 si->deleted = sbitmap_alloc (size); 1785 sbitmap_zero (si->deleted); 1786 si->node_mapping = XNEWVEC (unsigned int, size); 1787 si->dfs = XCNEWVEC (unsigned int, size); 1788 1789 for (i = 0; i < size; i++) 1790 si->node_mapping[i] = i; 1791 1792 si->scc_stack = VEC_alloc (unsigned, heap, 1); 1793 return si; 1794 } 1795 1796 /* Free an SCC info structure pointed to by SI */ 1797 1798 static void 1799 free_scc_info (struct scc_info *si) 1800 { 1801 sbitmap_free (si->visited); 1802 sbitmap_free (si->deleted); 1803 free (si->node_mapping); 1804 free (si->dfs); 1805 VEC_free (unsigned, heap, si->scc_stack); 1806 free (si); 1807 } 1808 1809 1810 /* Find indirect cycles in GRAPH that occur, using strongly connected 1811 components, and note them in the indirect cycles map. 1812 1813 This technique comes from Ben Hardekopf and Calvin Lin, 1814 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of 1815 Lines of Code", submitted to PLDI 2007. */ 1816 1817 static void 1818 find_indirect_cycles (constraint_graph_t graph) 1819 { 1820 unsigned int i; 1821 unsigned int size = graph->size; 1822 struct scc_info *si = init_scc_info (size); 1823 1824 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ ) 1825 if (!TEST_BIT (si->visited, i) && find (i) == i) 1826 scc_visit (graph, si, i); 1827 1828 free_scc_info (si); 1829 } 1830 1831 /* Compute a topological ordering for GRAPH, and store the result in the 1832 topo_info structure TI. */ 1833 1834 static void 1835 compute_topo_order (constraint_graph_t graph, 1836 struct topo_info *ti) 1837 { 1838 unsigned int i; 1839 unsigned int size = graph->size; 1840 1841 for (i = 0; i != size; ++i) 1842 if (!TEST_BIT (ti->visited, i) && find (i) == i) 1843 topo_visit (graph, ti, i); 1844 } 1845 1846 /* Structure used to for hash value numbering of pointer equivalence 1847 classes. */ 1848 1849 typedef struct equiv_class_label 1850 { 1851 hashval_t hashcode; 1852 unsigned int equivalence_class; 1853 bitmap labels; 1854 } *equiv_class_label_t; 1855 typedef const struct equiv_class_label *const_equiv_class_label_t; 1856 1857 /* A hashtable for mapping a bitmap of labels->pointer equivalence 1858 classes. */ 1859 static htab_t pointer_equiv_class_table; 1860 1861 /* A hashtable for mapping a bitmap of labels->location equivalence 1862 classes. */ 1863 static htab_t location_equiv_class_table; 1864 1865 /* Hash function for a equiv_class_label_t */ 1866 1867 static hashval_t 1868 equiv_class_label_hash (const void *p) 1869 { 1870 const_equiv_class_label_t const ecl = (const_equiv_class_label_t) p; 1871 return ecl->hashcode; 1872 } 1873 1874 /* Equality function for two equiv_class_label_t's. */ 1875 1876 static int 1877 equiv_class_label_eq (const void *p1, const void *p2) 1878 { 1879 const_equiv_class_label_t const eql1 = (const_equiv_class_label_t) p1; 1880 const_equiv_class_label_t const eql2 = (const_equiv_class_label_t) p2; 1881 return (eql1->hashcode == eql2->hashcode 1882 && bitmap_equal_p (eql1->labels, eql2->labels)); 1883 } 1884 1885 /* Lookup a equivalence class in TABLE by the bitmap of LABELS it 1886 contains. 
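   Returns 0 if no class with exactly this set of LABELS has been
   recorded; valid equivalence class ids start at 1.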
*/ 1887 1888 static unsigned int 1889 equiv_class_lookup (htab_t table, bitmap labels) 1890 { 1891 void **slot; 1892 struct equiv_class_label ecl; 1893 1894 ecl.labels = labels; 1895 ecl.hashcode = bitmap_hash (labels); 1896 1897 slot = htab_find_slot_with_hash (table, &ecl, 1898 ecl.hashcode, NO_INSERT); 1899 if (!slot) 1900 return 0; 1901 else 1902 return ((equiv_class_label_t) *slot)->equivalence_class; 1903 } 1904 1905 1906 /* Add an equivalence class named EQUIVALENCE_CLASS with labels LABELS 1907 to TABLE. */ 1908 1909 static void 1910 equiv_class_add (htab_t table, unsigned int equivalence_class, 1911 bitmap labels) 1912 { 1913 void **slot; 1914 equiv_class_label_t ecl = XNEW (struct equiv_class_label); 1915 1916 ecl->labels = labels; 1917 ecl->equivalence_class = equivalence_class; 1918 ecl->hashcode = bitmap_hash (labels); 1919 1920 slot = htab_find_slot_with_hash (table, ecl, 1921 ecl->hashcode, INSERT); 1922 gcc_assert (!*slot); 1923 *slot = (void *) ecl; 1924 } 1925 1926 /* Perform offline variable substitution. 1927 1928 This is a worst case quadratic time way of identifying variables 1929 that must have equivalent points-to sets, including those caused by 1930 static cycles, and single entry subgraphs, in the constraint graph. 1931 1932 The technique is described in "Exploiting Pointer and Location 1933 Equivalence to Optimize Pointer Analysis. In the 14th International 1934 Static Analysis Symposium (SAS), August 2007." It is known as the 1935 "HU" algorithm, and is equivalent to value numbering the collapsed 1936 constraint graph including evaluating unions. 1937 1938 The general method of finding equivalence classes is as follows: 1939 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints. 1940 Initialize all non-REF nodes to be direct nodes. 1941 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh 1942 variable} 1943 For each constraint containing the dereference, we also do the same 1944 thing. 1945 1946 We then compute SCC's in the graph and unify nodes in the same SCC, 1947 including pts sets. 1948 1949 For each non-collapsed node x: 1950 Visit all unvisited explicit incoming edges. 1951 Ignoring all non-pointers, set pts(x) = Union of pts(a) for y 1952 where y->x. 1953 Lookup the equivalence class for pts(x). 1954 If we found one, equivalence_class(x) = found class. 1955 Otherwise, equivalence_class(x) = new class, and new_class is 1956 added to the lookup table. 1957 1958 All direct nodes with the same equivalence class can be replaced 1959 with a single representative node. 1960 All unlabeled nodes (label == 0) are not pointers and all edges 1961 involving them can be eliminated. 1962 We perform these optimizations during rewrite_constraints 1963 1964 In addition to pointer equivalence class finding, we also perform 1965 location equivalence class finding. This is the set of variables 1966 that always appear together in points-to sets. We use this to 1967 compress the size of the points-to sets. */ 1968 1969 /* Current maximum pointer equivalence class id. */ 1970 static int pointer_equiv_class; 1971 1972 /* Current maximum location equivalence class id. */ 1973 static int location_equiv_class; 1974 1975 /* Recursive routine to find strongly connected components in GRAPH, 1976 and label it's nodes with DFS numbers. 
*/ 1977 1978 static void 1979 condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n) 1980 { 1981 unsigned int i; 1982 bitmap_iterator bi; 1983 unsigned int my_dfs; 1984 1985 gcc_assert (si->node_mapping[n] == n); 1986 SET_BIT (si->visited, n); 1987 si->dfs[n] = si->current_index ++; 1988 my_dfs = si->dfs[n]; 1989 1990 /* Visit all the successors. */ 1991 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi) 1992 { 1993 unsigned int w = si->node_mapping[i]; 1994 1995 if (TEST_BIT (si->deleted, w)) 1996 continue; 1997 1998 if (!TEST_BIT (si->visited, w)) 1999 condense_visit (graph, si, w); 2000 { 2001 unsigned int t = si->node_mapping[w]; 2002 unsigned int nnode = si->node_mapping[n]; 2003 gcc_assert (nnode == n); 2004 2005 if (si->dfs[t] < si->dfs[nnode]) 2006 si->dfs[n] = si->dfs[t]; 2007 } 2008 } 2009 2010 /* Visit all the implicit predecessors. */ 2011 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi) 2012 { 2013 unsigned int w = si->node_mapping[i]; 2014 2015 if (TEST_BIT (si->deleted, w)) 2016 continue; 2017 2018 if (!TEST_BIT (si->visited, w)) 2019 condense_visit (graph, si, w); 2020 { 2021 unsigned int t = si->node_mapping[w]; 2022 unsigned int nnode = si->node_mapping[n]; 2023 gcc_assert (nnode == n); 2024 2025 if (si->dfs[t] < si->dfs[nnode]) 2026 si->dfs[n] = si->dfs[t]; 2027 } 2028 } 2029 2030 /* See if any components have been identified. */ 2031 if (si->dfs[n] == my_dfs) 2032 { 2033 while (VEC_length (unsigned, si->scc_stack) != 0 2034 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs) 2035 { 2036 unsigned int w = VEC_pop (unsigned, si->scc_stack); 2037 si->node_mapping[w] = n; 2038 2039 if (!TEST_BIT (graph->direct_nodes, w)) 2040 RESET_BIT (graph->direct_nodes, n); 2041 2042 /* Unify our nodes. */ 2043 if (graph->preds[w]) 2044 { 2045 if (!graph->preds[n]) 2046 graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack); 2047 bitmap_ior_into (graph->preds[n], graph->preds[w]); 2048 } 2049 if (graph->implicit_preds[w]) 2050 { 2051 if (!graph->implicit_preds[n]) 2052 graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack); 2053 bitmap_ior_into (graph->implicit_preds[n], 2054 graph->implicit_preds[w]); 2055 } 2056 if (graph->points_to[w]) 2057 { 2058 if (!graph->points_to[n]) 2059 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack); 2060 bitmap_ior_into (graph->points_to[n], 2061 graph->points_to[w]); 2062 } 2063 } 2064 SET_BIT (si->deleted, n); 2065 } 2066 else 2067 VEC_safe_push (unsigned, heap, si->scc_stack, n); 2068 } 2069 2070 /* Label pointer equivalences. */ 2071 2072 static void 2073 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n) 2074 { 2075 unsigned int i; 2076 bitmap_iterator bi; 2077 SET_BIT (si->visited, n); 2078 2079 if (!graph->points_to[n]) 2080 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack); 2081 2082 /* Label and union our incoming edges's points to sets. */ 2083 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi) 2084 { 2085 unsigned int w = si->node_mapping[i]; 2086 if (!TEST_BIT (si->visited, w)) 2087 label_visit (graph, si, w); 2088 2089 /* Skip unused edges */ 2090 if (w == n || graph->pointer_label[w] == 0) 2091 continue; 2092 2093 if (graph->points_to[w]) 2094 bitmap_ior_into(graph->points_to[n], graph->points_to[w]); 2095 } 2096 /* Indirect nodes get fresh variables. 
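
   To see why, consider (an illustrative source fragment only):

     void
     f (int **p, int **q, int *x)
     {
       *p = x;     // the REF node standing for *p is indirect
       *q = x;     // the REF node standing for *q is indirect
     }

   The two REF nodes can receive identical sets of incoming labels
   offline, yet at solve time p and q may point to different objects, so
   collapsing them would be wrong.  Adding the unique FIRST_REF_NODE + n
   bit to every indirect node keeps its label set distinct from that of
   any other indirect node.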
*/ 2097 if (!TEST_BIT (graph->direct_nodes, n)) 2098 bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n); 2099 2100 if (!bitmap_empty_p (graph->points_to[n])) 2101 { 2102 unsigned int label = equiv_class_lookup (pointer_equiv_class_table, 2103 graph->points_to[n]); 2104 if (!label) 2105 { 2106 label = pointer_equiv_class++; 2107 equiv_class_add (pointer_equiv_class_table, 2108 label, graph->points_to[n]); 2109 } 2110 graph->pointer_label[n] = label; 2111 } 2112 } 2113 2114 /* Perform offline variable substitution, discovering equivalence 2115 classes, and eliminating non-pointer variables. */ 2116 2117 static struct scc_info * 2118 perform_var_substitution (constraint_graph_t graph) 2119 { 2120 unsigned int i; 2121 unsigned int size = graph->size; 2122 struct scc_info *si = init_scc_info (size); 2123 2124 bitmap_obstack_initialize (&iteration_obstack); 2125 pointer_equiv_class_table = htab_create (511, equiv_class_label_hash, 2126 equiv_class_label_eq, free); 2127 location_equiv_class_table = htab_create (511, equiv_class_label_hash, 2128 equiv_class_label_eq, free); 2129 pointer_equiv_class = 1; 2130 location_equiv_class = 1; 2131 2132 /* Condense the nodes, which means to find SCC's, count incoming 2133 predecessors, and unite nodes in SCC's. */ 2134 for (i = 0; i < FIRST_REF_NODE; i++) 2135 if (!TEST_BIT (si->visited, si->node_mapping[i])) 2136 condense_visit (graph, si, si->node_mapping[i]); 2137 2138 sbitmap_zero (si->visited); 2139 /* Actually the label the nodes for pointer equivalences */ 2140 for (i = 0; i < FIRST_REF_NODE; i++) 2141 if (!TEST_BIT (si->visited, si->node_mapping[i])) 2142 label_visit (graph, si, si->node_mapping[i]); 2143 2144 /* Calculate location equivalence labels. */ 2145 for (i = 0; i < FIRST_REF_NODE; i++) 2146 { 2147 bitmap pointed_by; 2148 bitmap_iterator bi; 2149 unsigned int j; 2150 unsigned int label; 2151 2152 if (!graph->pointed_by[i]) 2153 continue; 2154 pointed_by = BITMAP_ALLOC (&iteration_obstack); 2155 2156 /* Translate the pointed-by mapping for pointer equivalence 2157 labels. */ 2158 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi) 2159 { 2160 bitmap_set_bit (pointed_by, 2161 graph->pointer_label[si->node_mapping[j]]); 2162 } 2163 /* The original pointed_by is now dead. */ 2164 BITMAP_FREE (graph->pointed_by[i]); 2165 2166 /* Look up the location equivalence label if one exists, or make 2167 one otherwise. */ 2168 label = equiv_class_lookup (location_equiv_class_table, 2169 pointed_by); 2170 if (label == 0) 2171 { 2172 label = location_equiv_class++; 2173 equiv_class_add (location_equiv_class_table, 2174 label, pointed_by); 2175 } 2176 else 2177 { 2178 if (dump_file && (dump_flags & TDF_DETAILS)) 2179 fprintf (dump_file, "Found location equivalence for node %s\n", 2180 get_varinfo (i)->name); 2181 BITMAP_FREE (pointed_by); 2182 } 2183 graph->loc_label[i] = label; 2184 2185 } 2186 2187 if (dump_file && (dump_flags & TDF_DETAILS)) 2188 for (i = 0; i < FIRST_REF_NODE; i++) 2189 { 2190 bool direct_node = TEST_BIT (graph->direct_nodes, i); 2191 fprintf (dump_file, 2192 "Equivalence classes for %s node id %d:%s are pointer: %d" 2193 ", location:%d\n", 2194 direct_node ? "Direct node" : "Indirect node", i, 2195 get_varinfo (i)->name, 2196 graph->pointer_label[si->node_mapping[i]], 2197 graph->loc_label[si->node_mapping[i]]); 2198 } 2199 2200 /* Quickly eliminate our non-pointer variables. 
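
   That is, variables whose pointer label stayed 0 because nothing
   address-like ever flows into them, roughly as in

     int
     count_nonzero (int *p, int n)
     {
       int i, c = 0;                // i and c never receive anything
       for (i = 0; i < n; i++)      // address-like, so their label sets
         if (p[i] != 0)             // stay empty and every constraint edge
           c++;                     // touching their nodes can be removed
       return c;
     }

   (Source-level illustration only; the loop below works on constraint
   graph nodes.)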
*/ 2201 2202 for (i = 0; i < FIRST_REF_NODE; i++) 2203 { 2204 unsigned int node = si->node_mapping[i]; 2205 2206 if (graph->pointer_label[node] == 0) 2207 { 2208 if (dump_file && (dump_flags & TDF_DETAILS)) 2209 fprintf (dump_file, 2210 "%s is a non-pointer variable, eliminating edges.\n", 2211 get_varinfo (node)->name); 2212 stats.nonpointer_vars++; 2213 clear_edges_for_node (graph, node); 2214 } 2215 } 2216 2217 return si; 2218 } 2219 2220 /* Free information that was only necessary for variable 2221 substitution. */ 2222 2223 static void 2224 free_var_substitution_info (struct scc_info *si) 2225 { 2226 free_scc_info (si); 2227 free (graph->pointer_label); 2228 free (graph->loc_label); 2229 free (graph->pointed_by); 2230 free (graph->points_to); 2231 free (graph->eq_rep); 2232 sbitmap_free (graph->direct_nodes); 2233 htab_delete (pointer_equiv_class_table); 2234 htab_delete (location_equiv_class_table); 2235 bitmap_obstack_release (&iteration_obstack); 2236 } 2237 2238 /* Return an existing node that is equivalent to NODE, which has 2239 equivalence class LABEL, if one exists. Return NODE otherwise. */ 2240 2241 static unsigned int 2242 find_equivalent_node (constraint_graph_t graph, 2243 unsigned int node, unsigned int label) 2244 { 2245 /* If the address version of this variable is unused, we can 2246 substitute it for anything else with the same label. 2247 Otherwise, we know the pointers are equivalent, but not the 2248 locations, and we can unite them later. */ 2249 2250 if (!bitmap_bit_p (graph->address_taken, node)) 2251 { 2252 gcc_assert (label < graph->size); 2253 2254 if (graph->eq_rep[label] != -1) 2255 { 2256 /* Unify the two variables since we know they are equivalent. */ 2257 if (unite (graph->eq_rep[label], node)) 2258 unify_nodes (graph, graph->eq_rep[label], node, false); 2259 return graph->eq_rep[label]; 2260 } 2261 else 2262 { 2263 graph->eq_rep[label] = node; 2264 graph->pe_rep[label] = node; 2265 } 2266 } 2267 else 2268 { 2269 gcc_assert (label < graph->size); 2270 graph->pe[node] = label; 2271 if (graph->pe_rep[label] == -1) 2272 graph->pe_rep[label] = node; 2273 } 2274 2275 return node; 2276 } 2277 2278 /* Unite pointer equivalent but not location equivalent nodes in 2279 GRAPH. This may only be performed once variable substitution is 2280 finished. */ 2281 2282 static void 2283 unite_pointer_equivalences (constraint_graph_t graph) 2284 { 2285 unsigned int i; 2286 2287 /* Go through the pointer equivalences and unite them to their 2288 representative, if they aren't already. */ 2289 for (i = 0; i < FIRST_REF_NODE; i++) 2290 { 2291 unsigned int label = graph->pe[i]; 2292 if (label) 2293 { 2294 int label_rep = graph->pe_rep[label]; 2295 2296 if (label_rep == -1) 2297 continue; 2298 2299 label_rep = find (label_rep); 2300 if (label_rep >= 0 && unite (label_rep, find (i))) 2301 unify_nodes (graph, label_rep, i, false); 2302 } 2303 } 2304 } 2305 2306 /* Move complex constraints to the GRAPH nodes they belong to. 
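
   As a sketch of the placement rule, using the constraint notation from
   the file header:

     *p = q           attached to p's node (lhs is DEREF)
     q = *p           attached to p's node (rhs is DEREF)
     q = r + OFFSET   attached to r's node (copy with nonzero offset)
     q = r, q = &a    not complex: a plain copy edge, resp. part of the
                      initial solution

   i.e. a complex constraint is stored with the node whose points-to set
   must be consulted each time the constraint is re-evaluated during
   solving.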
*/ 2307 2308 static void 2309 move_complex_constraints (constraint_graph_t graph) 2310 { 2311 int i; 2312 constraint_t c; 2313 2314 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++) 2315 { 2316 if (c) 2317 { 2318 struct constraint_expr lhs = c->lhs; 2319 struct constraint_expr rhs = c->rhs; 2320 2321 if (lhs.type == DEREF) 2322 { 2323 insert_into_complex (graph, lhs.var, c); 2324 } 2325 else if (rhs.type == DEREF) 2326 { 2327 if (!(get_varinfo (lhs.var)->is_special_var)) 2328 insert_into_complex (graph, rhs.var, c); 2329 } 2330 else if (rhs.type != ADDRESSOF && lhs.var > anything_id 2331 && (lhs.offset != 0 || rhs.offset != 0)) 2332 { 2333 insert_into_complex (graph, rhs.var, c); 2334 } 2335 } 2336 } 2337 } 2338 2339 2340 /* Optimize and rewrite complex constraints while performing 2341 collapsing of equivalent nodes. SI is the SCC_INFO that is the 2342 result of perform_variable_substitution. */ 2343 2344 static void 2345 rewrite_constraints (constraint_graph_t graph, 2346 struct scc_info *si) 2347 { 2348 int i; 2349 unsigned int j; 2350 constraint_t c; 2351 2352 for (j = 0; j < graph->size; j++) 2353 gcc_assert (find (j) == j); 2354 2355 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++) 2356 { 2357 struct constraint_expr lhs = c->lhs; 2358 struct constraint_expr rhs = c->rhs; 2359 unsigned int lhsvar = find (lhs.var); 2360 unsigned int rhsvar = find (rhs.var); 2361 unsigned int lhsnode, rhsnode; 2362 unsigned int lhslabel, rhslabel; 2363 2364 lhsnode = si->node_mapping[lhsvar]; 2365 rhsnode = si->node_mapping[rhsvar]; 2366 lhslabel = graph->pointer_label[lhsnode]; 2367 rhslabel = graph->pointer_label[rhsnode]; 2368 2369 /* See if it is really a non-pointer variable, and if so, ignore 2370 the constraint. */ 2371 if (lhslabel == 0) 2372 { 2373 if (dump_file && (dump_flags & TDF_DETAILS)) 2374 { 2375 2376 fprintf (dump_file, "%s is a non-pointer variable," 2377 "ignoring constraint:", 2378 get_varinfo (lhs.var)->name); 2379 dump_constraint (dump_file, c); 2380 } 2381 VEC_replace (constraint_t, constraints, i, NULL); 2382 continue; 2383 } 2384 2385 if (rhslabel == 0) 2386 { 2387 if (dump_file && (dump_flags & TDF_DETAILS)) 2388 { 2389 2390 fprintf (dump_file, "%s is a non-pointer variable," 2391 "ignoring constraint:", 2392 get_varinfo (rhs.var)->name); 2393 dump_constraint (dump_file, c); 2394 } 2395 VEC_replace (constraint_t, constraints, i, NULL); 2396 continue; 2397 } 2398 2399 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel); 2400 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel); 2401 c->lhs.var = lhsvar; 2402 c->rhs.var = rhsvar; 2403 2404 } 2405 } 2406 2407 /* Eliminate indirect cycles involving NODE. Return true if NODE was 2408 part of an SCC, false otherwise. */ 2409 2410 static bool 2411 eliminate_indirect_cycles (unsigned int node) 2412 { 2413 if (graph->indirect_cycles[node] != -1 2414 && !bitmap_empty_p (get_varinfo (node)->solution)) 2415 { 2416 unsigned int i; 2417 VEC(unsigned,heap) *queue = NULL; 2418 int queuepos; 2419 unsigned int to = find (graph->indirect_cycles[node]); 2420 bitmap_iterator bi; 2421 2422 /* We can't touch the solution set and call unify_nodes 2423 at the same time, because unify_nodes is going to do 2424 bitmap unions into it. 
*/ 2425 2426 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi) 2427 { 2428 if (find (i) == i && i != to) 2429 { 2430 if (unite (to, i)) 2431 VEC_safe_push (unsigned, heap, queue, i); 2432 } 2433 } 2434 2435 for (queuepos = 0; 2436 VEC_iterate (unsigned, queue, queuepos, i); 2437 queuepos++) 2438 { 2439 unify_nodes (graph, to, i, true); 2440 } 2441 VEC_free (unsigned, heap, queue); 2442 return true; 2443 } 2444 return false; 2445 } 2446 2447 /* Solve the constraint graph GRAPH using our worklist solver. 2448 This is based on the PW* family of solvers from the "Efficient Field 2449 Sensitive Pointer Analysis for C" paper. 2450 It works by iterating over all the graph nodes, processing the complex 2451 constraints and propagating the copy constraints, until everything stops 2452 changed. This corresponds to steps 6-8 in the solving list given above. */ 2453 2454 static void 2455 solve_graph (constraint_graph_t graph) 2456 { 2457 unsigned int size = graph->size; 2458 unsigned int i; 2459 bitmap pts; 2460 2461 changed_count = 0; 2462 changed = sbitmap_alloc (size); 2463 sbitmap_zero (changed); 2464 2465 /* Mark all initial non-collapsed nodes as changed. */ 2466 for (i = 0; i < size; i++) 2467 { 2468 varinfo_t ivi = get_varinfo (i); 2469 if (find (i) == i && !bitmap_empty_p (ivi->solution) 2470 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i])) 2471 || VEC_length (constraint_t, graph->complex[i]) > 0)) 2472 { 2473 SET_BIT (changed, i); 2474 changed_count++; 2475 } 2476 } 2477 2478 /* Allocate a bitmap to be used to store the changed bits. */ 2479 pts = BITMAP_ALLOC (&pta_obstack); 2480 2481 while (changed_count > 0) 2482 { 2483 unsigned int i; 2484 struct topo_info *ti = init_topo_info (); 2485 stats.iterations++; 2486 2487 bitmap_obstack_initialize (&iteration_obstack); 2488 2489 compute_topo_order (graph, ti); 2490 2491 while (VEC_length (unsigned, ti->topo_order) != 0) 2492 { 2493 2494 i = VEC_pop (unsigned, ti->topo_order); 2495 2496 /* If this variable is not a representative, skip it. */ 2497 if (find (i) != i) 2498 continue; 2499 2500 /* In certain indirect cycle cases, we may merge this 2501 variable to another. */ 2502 if (eliminate_indirect_cycles (i) && find (i) != i) 2503 continue; 2504 2505 /* If the node has changed, we need to process the 2506 complex constraints and outgoing edges again. */ 2507 if (TEST_BIT (changed, i)) 2508 { 2509 unsigned int j; 2510 constraint_t c; 2511 bitmap solution; 2512 VEC(constraint_t,heap) *complex = graph->complex[i]; 2513 bool solution_empty; 2514 2515 RESET_BIT (changed, i); 2516 changed_count--; 2517 2518 /* Compute the changed set of solution bits. */ 2519 bitmap_and_compl (pts, get_varinfo (i)->solution, 2520 get_varinfo (i)->oldsolution); 2521 2522 if (bitmap_empty_p (pts)) 2523 continue; 2524 2525 bitmap_ior_into (get_varinfo (i)->oldsolution, pts); 2526 2527 solution = get_varinfo (i)->solution; 2528 solution_empty = bitmap_empty_p (solution); 2529 2530 /* Process the complex constraints */ 2531 for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++) 2532 { 2533 /* XXX: This is going to unsort the constraints in 2534 some cases, which will occasionally add duplicate 2535 constraints during unification. This does not 2536 affect correctness. */ 2537 c->lhs.var = find (c->lhs.var); 2538 c->rhs.var = find (c->rhs.var); 2539 2540 /* The only complex constraint that can change our 2541 solution to non-empty, given an empty solution, 2542 is a constraint where the lhs side is receiving 2543 some set from elsewhere. 
*/ 2544 if (!solution_empty || c->lhs.type != DEREF) 2545 do_complex_constraint (graph, c, pts); 2546 } 2547 2548 solution_empty = bitmap_empty_p (solution); 2549 2550 if (!solution_empty) 2551 { 2552 bitmap_iterator bi; 2553 unsigned eff_escaped_id = find (escaped_id); 2554 2555 /* Propagate solution to all successors. */ 2556 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 2557 0, j, bi) 2558 { 2559 bitmap tmp; 2560 bool flag; 2561 2562 unsigned int to = find (j); 2563 tmp = get_varinfo (to)->solution; 2564 flag = false; 2565 2566 /* Don't try to propagate to ourselves. */ 2567 if (to == i) 2568 continue; 2569 2570 /* If we propagate from ESCAPED use ESCAPED as 2571 placeholder. */ 2572 if (i == eff_escaped_id) 2573 flag = bitmap_set_bit (tmp, escaped_id); 2574 else 2575 flag = set_union_with_increment (tmp, pts, 0); 2576 2577 if (flag) 2578 { 2579 get_varinfo (to)->solution = tmp; 2580 if (!TEST_BIT (changed, to)) 2581 { 2582 SET_BIT (changed, to); 2583 changed_count++; 2584 } 2585 } 2586 } 2587 } 2588 } 2589 } 2590 free_topo_info (ti); 2591 bitmap_obstack_release (&iteration_obstack); 2592 } 2593 2594 BITMAP_FREE (pts); 2595 sbitmap_free (changed); 2596 bitmap_obstack_release (&oldpta_obstack); 2597 } 2598 2599 /* Map from trees to variable infos. */ 2600 static struct pointer_map_t *vi_for_tree; 2601 2602 2603 /* Insert ID as the variable id for tree T in the vi_for_tree map. */ 2604 2605 static void 2606 insert_vi_for_tree (tree t, varinfo_t vi) 2607 { 2608 void **slot = pointer_map_insert (vi_for_tree, t); 2609 gcc_assert (vi); 2610 gcc_assert (*slot == NULL); 2611 *slot = vi; 2612 } 2613 2614 /* Find the variable info for tree T in VI_FOR_TREE. If T does not 2615 exist in the map, return NULL, otherwise, return the varinfo we found. */ 2616 2617 static varinfo_t 2618 lookup_vi_for_tree (tree t) 2619 { 2620 void **slot = pointer_map_contains (vi_for_tree, t); 2621 if (slot == NULL) 2622 return NULL; 2623 2624 return (varinfo_t) *slot; 2625 } 2626 2627 /* Return a printable name for DECL */ 2628 2629 static const char * 2630 alias_get_name (tree decl) 2631 { 2632 const char *res = get_name (decl); 2633 char *temp; 2634 int num_printed = 0; 2635 2636 if (res != NULL) 2637 return res; 2638 2639 res = "NULL"; 2640 if (!dump_file) 2641 return res; 2642 2643 if (TREE_CODE (decl) == SSA_NAME) 2644 { 2645 num_printed = asprintf (&temp, "%s_%u", 2646 alias_get_name (SSA_NAME_VAR (decl)), 2647 SSA_NAME_VERSION (decl)); 2648 } 2649 else if (DECL_P (decl)) 2650 { 2651 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl)); 2652 } 2653 if (num_printed > 0) 2654 { 2655 res = ggc_strdup (temp); 2656 free (temp); 2657 } 2658 return res; 2659 } 2660 2661 /* Find the variable id for tree T in the map. 2662 If T doesn't exist in the map, create an entry for it and return it. */ 2663 2664 static varinfo_t 2665 get_vi_for_tree (tree t) 2666 { 2667 void **slot = pointer_map_contains (vi_for_tree, t); 2668 if (slot == NULL) 2669 return get_varinfo (create_variable_info_for (t, alias_get_name (t))); 2670 2671 return (varinfo_t) *slot; 2672 } 2673 2674 /* Get a scalar constraint expression for a new temporary variable. 
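
   Typical use, as in process_constraint and do_deref further down, is to
   break a constraint the solver cannot represent directly into two
   simpler ones through a fresh temporary:

     struct constraint_expr tmp = new_scalar_tmp_constraint_exp ("tmp");
     process_constraint (new_constraint (tmp, rhs));   // tmp = <rhs>
     process_constraint (new_constraint (lhs, tmp));   // <lhs> = tmp

   The returned expression is SCALAR with offset 0, and the variable
   behind it is a full variable of unknown size, so field offsets never
   apply to it.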
*/ 2675 2676 static struct constraint_expr 2677 new_scalar_tmp_constraint_exp (const char *name) 2678 { 2679 struct constraint_expr tmp; 2680 varinfo_t vi; 2681 2682 vi = new_var_info (NULL_TREE, name); 2683 vi->offset = 0; 2684 vi->size = -1; 2685 vi->fullsize = -1; 2686 vi->is_full_var = 1; 2687 2688 tmp.var = vi->id; 2689 tmp.type = SCALAR; 2690 tmp.offset = 0; 2691 2692 return tmp; 2693 } 2694 2695 /* Get a constraint expression vector from an SSA_VAR_P node. 2696 If address_p is true, the result will be taken its address of. */ 2697 2698 static void 2699 get_constraint_for_ssa_var (tree t, VEC(ce_s, heap) **results, bool address_p) 2700 { 2701 struct constraint_expr cexpr; 2702 varinfo_t vi; 2703 2704 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */ 2705 gcc_assert (SSA_VAR_P (t) || DECL_P (t)); 2706 2707 /* For parameters, get at the points-to set for the actual parm 2708 decl. */ 2709 if (TREE_CODE (t) == SSA_NAME 2710 && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL 2711 && SSA_NAME_IS_DEFAULT_DEF (t)) 2712 { 2713 get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p); 2714 return; 2715 } 2716 2717 vi = get_vi_for_tree (t); 2718 cexpr.var = vi->id; 2719 cexpr.type = SCALAR; 2720 cexpr.offset = 0; 2721 /* If we determine the result is "anything", and we know this is readonly, 2722 say it points to readonly memory instead. */ 2723 if (cexpr.var == anything_id && TREE_READONLY (t)) 2724 { 2725 gcc_unreachable (); 2726 cexpr.type = ADDRESSOF; 2727 cexpr.var = readonly_id; 2728 } 2729 2730 /* If we are not taking the address of the constraint expr, add all 2731 sub-fiels of the variable as well. */ 2732 if (!address_p 2733 && !vi->is_full_var) 2734 { 2735 for (; vi; vi = vi->next) 2736 { 2737 cexpr.var = vi->id; 2738 VEC_safe_push (ce_s, heap, *results, &cexpr); 2739 } 2740 return; 2741 } 2742 2743 VEC_safe_push (ce_s, heap, *results, &cexpr); 2744 } 2745 2746 /* Process constraint T, performing various simplifications and then 2747 adding it to our list of overall constraints. */ 2748 2749 static void 2750 process_constraint (constraint_t t) 2751 { 2752 struct constraint_expr rhs = t->rhs; 2753 struct constraint_expr lhs = t->lhs; 2754 2755 gcc_assert (rhs.var < VEC_length (varinfo_t, varmap)); 2756 gcc_assert (lhs.var < VEC_length (varinfo_t, varmap)); 2757 2758 /* If we didn't get any useful constraint from the lhs we get 2759 &ANYTHING as fallback from get_constraint_for. Deal with 2760 it here by turning it into *ANYTHING. */ 2761 if (lhs.type == ADDRESSOF 2762 && lhs.var == anything_id) 2763 lhs.type = DEREF; 2764 2765 /* ADDRESSOF on the lhs is invalid. 
*/ 2766 gcc_assert (lhs.type != ADDRESSOF); 2767 2768 /* This can happen in our IR with things like n->a = *p */ 2769 if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id) 2770 { 2771 /* Split into tmp = *rhs, *lhs = tmp */ 2772 struct constraint_expr tmplhs; 2773 tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp"); 2774 process_constraint (new_constraint (tmplhs, rhs)); 2775 process_constraint (new_constraint (lhs, tmplhs)); 2776 } 2777 else if (rhs.type == ADDRESSOF && lhs.type == DEREF) 2778 { 2779 /* Split into tmp = &rhs, *lhs = tmp */ 2780 struct constraint_expr tmplhs; 2781 tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp"); 2782 process_constraint (new_constraint (tmplhs, rhs)); 2783 process_constraint (new_constraint (lhs, tmplhs)); 2784 } 2785 else 2786 { 2787 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0); 2788 VEC_safe_push (constraint_t, heap, constraints, t); 2789 } 2790 } 2791 2792 /* Return the position, in bits, of FIELD_DECL from the beginning of its 2793 structure. */ 2794 2795 static HOST_WIDE_INT 2796 bitpos_of_field (const tree fdecl) 2797 { 2798 2799 if (!host_integerp (DECL_FIELD_OFFSET (fdecl), 0) 2800 || !host_integerp (DECL_FIELD_BIT_OFFSET (fdecl), 0)) 2801 return -1; 2802 2803 return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT 2804 + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fdecl))); 2805 } 2806 2807 2808 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the 2809 resulting constraint expressions in *RESULTS. */ 2810 2811 static void 2812 get_constraint_for_ptr_offset (tree ptr, tree offset, 2813 VEC (ce_s, heap) **results) 2814 { 2815 struct constraint_expr c; 2816 unsigned int j, n; 2817 HOST_WIDE_INT rhsunitoffset, rhsoffset; 2818 2819 /* If we do not do field-sensitive PTA adding offsets to pointers 2820 does not change the points-to solution. */ 2821 if (!use_field_sensitive) 2822 { 2823 get_constraint_for_rhs (ptr, results); 2824 return; 2825 } 2826 2827 /* If the offset is not a non-negative integer constant that fits 2828 in a HOST_WIDE_INT, we have to fall back to a conservative 2829 solution which includes all sub-fields of all pointed-to 2830 variables of ptr. */ 2831 if (offset == NULL_TREE 2832 || !host_integerp (offset, 0)) 2833 rhsoffset = UNKNOWN_OFFSET; 2834 else 2835 { 2836 /* Make sure the bit-offset also fits. */ 2837 rhsunitoffset = TREE_INT_CST_LOW (offset); 2838 rhsoffset = rhsunitoffset * BITS_PER_UNIT; 2839 if (rhsunitoffset != rhsoffset / BITS_PER_UNIT) 2840 rhsoffset = UNKNOWN_OFFSET; 2841 } 2842 2843 get_constraint_for_rhs (ptr, results); 2844 if (rhsoffset == 0) 2845 return; 2846 2847 /* As we are eventually appending to the solution do not use 2848 VEC_iterate here. */ 2849 n = VEC_length (ce_s, *results); 2850 for (j = 0; j < n; j++) 2851 { 2852 varinfo_t curr; 2853 c = *VEC_index (ce_s, *results, j); 2854 curr = get_varinfo (c.var); 2855 2856 if (c.type == ADDRESSOF 2857 /* If this varinfo represents a full variable just use it. */ 2858 && curr->is_full_var) 2859 c.offset = 0; 2860 else if (c.type == ADDRESSOF 2861 /* If we do not know the offset add all subfields. 
*/ 2862 && rhsoffset == UNKNOWN_OFFSET) 2863 { 2864 varinfo_t temp = lookup_vi_for_tree (curr->decl); 2865 do 2866 { 2867 struct constraint_expr c2; 2868 c2.var = temp->id; 2869 c2.type = ADDRESSOF; 2870 c2.offset = 0; 2871 if (c2.var != c.var) 2872 VEC_safe_push (ce_s, heap, *results, &c2); 2873 temp = temp->next; 2874 } 2875 while (temp); 2876 } 2877 else if (c.type == ADDRESSOF) 2878 { 2879 varinfo_t temp; 2880 unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset; 2881 2882 /* Search the sub-field which overlaps with the 2883 pointed-to offset. If the result is outside of the variable 2884 we have to provide a conservative result, as the variable is 2885 still reachable from the resulting pointer (even though it 2886 technically cannot point to anything). The last and first 2887 sub-fields are such conservative results. 2888 ??? If we always had a sub-field for &object + 1 then 2889 we could represent this in a more precise way. */ 2890 if (rhsoffset < 0 2891 && curr->offset < offset) 2892 offset = 0; 2893 temp = first_or_preceding_vi_for_offset (curr, offset); 2894 2895 /* If the found variable is not exactly at the pointed to 2896 result, we have to include the next variable in the 2897 solution as well. Otherwise two increments by offset / 2 2898 do not result in the same or a conservative superset 2899 solution. */ 2900 if (temp->offset != offset 2901 && temp->next != NULL) 2902 { 2903 struct constraint_expr c2; 2904 c2.var = temp->next->id; 2905 c2.type = ADDRESSOF; 2906 c2.offset = 0; 2907 VEC_safe_push (ce_s, heap, *results, &c2); 2908 } 2909 c.var = temp->id; 2910 c.offset = 0; 2911 } 2912 else 2913 c.offset = rhsoffset; 2914 2915 VEC_replace (ce_s, *results, j, &c); 2916 } 2917 } 2918 2919 2920 /* Given a COMPONENT_REF T, return the constraint_expr vector for it. 2921 If address_p is true the result will be taken its address of. 2922 If lhs_p is true then the constraint expression is assumed to be used 2923 as the lhs. */ 2924 2925 static void 2926 get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results, 2927 bool address_p, bool lhs_p) 2928 { 2929 tree orig_t = t; 2930 HOST_WIDE_INT bitsize = -1; 2931 HOST_WIDE_INT bitmaxsize = -1; 2932 HOST_WIDE_INT bitpos; 2933 tree forzero; 2934 struct constraint_expr *result; 2935 2936 /* Some people like to do cute things like take the address of 2937 &0->a.b */ 2938 forzero = t; 2939 while (handled_component_p (forzero) 2940 || INDIRECT_REF_P (forzero)) 2941 forzero = TREE_OPERAND (forzero, 0); 2942 2943 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero)) 2944 { 2945 struct constraint_expr temp; 2946 2947 temp.offset = 0; 2948 temp.var = integer_id; 2949 temp.type = SCALAR; 2950 VEC_safe_push (ce_s, heap, *results, &temp); 2951 return; 2952 } 2953 2954 /* Handle type-punning through unions. If we are extracting a pointer 2955 from a union via a possibly type-punning access that pointer 2956 points to anything, similar to a conversion of an integer to 2957 a pointer. 
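
   For example (illustrative only):

     union u { int *p; long l; } x;
     ...
     q = x.p;     // possibly type-punned read from a union member:
                  // q is treated like (int *) x.l would be, i.e. it
                  // may point to ANYTHING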
*/ 2958 if (!lhs_p) 2959 { 2960 tree u; 2961 for (u = t; 2962 TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF; 2963 u = TREE_OPERAND (u, 0)) 2964 if (TREE_CODE (u) == COMPONENT_REF 2965 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE) 2966 { 2967 struct constraint_expr temp; 2968 2969 temp.offset = 0; 2970 temp.var = anything_id; 2971 temp.type = ADDRESSOF; 2972 VEC_safe_push (ce_s, heap, *results, &temp); 2973 return; 2974 } 2975 } 2976 2977 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize); 2978 2979 /* Pretend to take the address of the base, we'll take care of 2980 adding the required subset of sub-fields below. */ 2981 get_constraint_for_1 (t, results, true, lhs_p); 2982 gcc_assert (VEC_length (ce_s, *results) == 1); 2983 result = VEC_last (ce_s, *results); 2984 2985 if (result->type == SCALAR 2986 && get_varinfo (result->var)->is_full_var) 2987 /* For single-field vars do not bother about the offset. */ 2988 result->offset = 0; 2989 else if (result->type == SCALAR) 2990 { 2991 /* In languages like C, you can access one past the end of an 2992 array. You aren't allowed to dereference it, so we can 2993 ignore this constraint. When we handle pointer subtraction, 2994 we may have to do something cute here. */ 2995 2996 if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result->var)->fullsize 2997 && bitmaxsize != 0) 2998 { 2999 /* It's also not true that the constraint will actually start at the 3000 right offset, it may start in some padding. We only care about 3001 setting the constraint to the first actual field it touches, so 3002 walk to find it. */ 3003 struct constraint_expr cexpr = *result; 3004 varinfo_t curr; 3005 VEC_pop (ce_s, *results); 3006 cexpr.offset = 0; 3007 for (curr = get_varinfo (cexpr.var); curr; curr = curr->next) 3008 { 3009 if (ranges_overlap_p (curr->offset, curr->size, 3010 bitpos, bitmaxsize)) 3011 { 3012 cexpr.var = curr->id; 3013 VEC_safe_push (ce_s, heap, *results, &cexpr); 3014 if (address_p) 3015 break; 3016 } 3017 } 3018 /* If we are going to take the address of this field then 3019 to be able to compute reachability correctly add at least 3020 the last field of the variable. */ 3021 if (address_p 3022 && VEC_length (ce_s, *results) == 0) 3023 { 3024 curr = get_varinfo (cexpr.var); 3025 while (curr->next != NULL) 3026 curr = curr->next; 3027 cexpr.var = curr->id; 3028 VEC_safe_push (ce_s, heap, *results, &cexpr); 3029 } 3030 else 3031 /* Assert that we found *some* field there. The user couldn't be 3032 accessing *only* padding. */ 3033 /* Still the user could access one past the end of an array 3034 embedded in a struct resulting in accessing *only* padding. */ 3035 gcc_assert (VEC_length (ce_s, *results) >= 1 3036 || ref_contains_array_ref (orig_t)); 3037 } 3038 else if (bitmaxsize == 0) 3039 { 3040 if (dump_file && (dump_flags & TDF_DETAILS)) 3041 fprintf (dump_file, "Access to zero-sized part of variable," 3042 "ignoring\n"); 3043 } 3044 else 3045 if (dump_file && (dump_flags & TDF_DETAILS)) 3046 fprintf (dump_file, "Access to past the end of variable, ignoring\n"); 3047 } 3048 else if (result->type == DEREF) 3049 { 3050 /* If we do not know exactly where the access goes say so. Note 3051 that only for non-structure accesses we know that we access 3052 at most one subfiled of any variable. 
*/ 3053 if (bitpos == -1 3054 || bitsize != bitmaxsize 3055 || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))) 3056 result->offset = UNKNOWN_OFFSET; 3057 else 3058 result->offset = bitpos; 3059 } 3060 else if (result->type == ADDRESSOF) 3061 { 3062 /* We can end up here for component references on a 3063 VIEW_CONVERT_EXPR <>(&foobar). */ 3064 result->type = SCALAR; 3065 result->var = anything_id; 3066 result->offset = 0; 3067 } 3068 else 3069 gcc_unreachable (); 3070 } 3071 3072 3073 /* Dereference the constraint expression CONS, and return the result. 3074 DEREF (ADDRESSOF) = SCALAR 3075 DEREF (SCALAR) = DEREF 3076 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp)) 3077 This is needed so that we can handle dereferencing DEREF constraints. */ 3078 3079 static void 3080 do_deref (VEC (ce_s, heap) **constraints) 3081 { 3082 struct constraint_expr *c; 3083 unsigned int i = 0; 3084 3085 for (i = 0; VEC_iterate (ce_s, *constraints, i, c); i++) 3086 { 3087 if (c->type == SCALAR) 3088 c->type = DEREF; 3089 else if (c->type == ADDRESSOF) 3090 c->type = SCALAR; 3091 else if (c->type == DEREF) 3092 { 3093 struct constraint_expr tmplhs; 3094 tmplhs = new_scalar_tmp_constraint_exp ("dereftmp"); 3095 process_constraint (new_constraint (tmplhs, *c)); 3096 c->var = tmplhs.var; 3097 } 3098 else 3099 gcc_unreachable (); 3100 } 3101 } 3102 3103 /* Given a tree T, return the constraint expression for taking the 3104 address of it. */ 3105 3106 static void 3107 get_constraint_for_address_of (tree t, VEC (ce_s, heap) **results) 3108 { 3109 struct constraint_expr *c; 3110 unsigned int i; 3111 3112 get_constraint_for_1 (t, results, true, true); 3113 3114 for (i = 0; VEC_iterate (ce_s, *results, i, c); i++) 3115 { 3116 if (c->type == DEREF) 3117 c->type = SCALAR; 3118 else 3119 c->type = ADDRESSOF; 3120 } 3121 } 3122 3123 /* Given a tree T, return the constraint expression for it. */ 3124 3125 static void 3126 get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p, 3127 bool lhs_p) 3128 { 3129 struct constraint_expr temp; 3130 3131 /* x = integer is all glommed to a single variable, which doesn't 3132 point to anything by itself. That is, of course, unless it is an 3133 integer constant being treated as a pointer, in which case, we 3134 will return that this is really the addressof anything. This 3135 happens below, since it will fall into the default case. The only 3136 case we know something about an integer treated like a pointer is 3137 when it is the NULL pointer, and then we just say it points to 3138 NULL. 3139 3140 Do not do that if -fno-delete-null-pointer-checks though, because 3141 in that case *NULL does not fail, so it _should_ alias *anything. 3142 It is not worth adding a new option or renaming the existing one, 3143 since this case is relatively obscure. */ 3144 if ((TREE_CODE (t) == INTEGER_CST 3145 && integer_zerop (t)) 3146 /* The only valid CONSTRUCTORs in gimple with pointer typed 3147 elements are zero-initializer. But in IPA mode we also 3148 process global initializers, so verify at least. */ 3149 || (TREE_CODE (t) == CONSTRUCTOR 3150 && CONSTRUCTOR_NELTS (t) == 0)) 3151 { 3152 if (flag_delete_null_pointer_checks) 3153 temp.var = nothing_id; 3154 else 3155 temp.var = nonlocal_id; 3156 temp.type = ADDRESSOF; 3157 temp.offset = 0; 3158 VEC_safe_push (ce_s, heap, *results, &temp); 3159 return; 3160 } 3161 3162 /* String constants are read-only. 
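
   So, for example (illustrative only):

     int *p = 0;              // p points to NOTHING, or to NONLOCAL when
                              // -fno-delete-null-pointer-checks is given
     const char *s = "abc";   // s points to the special READONLY variable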
*/ 3163 if (TREE_CODE (t) == STRING_CST) 3164 { 3165 temp.var = readonly_id; 3166 temp.type = SCALAR; 3167 temp.offset = 0; 3168 VEC_safe_push (ce_s, heap, *results, &temp); 3169 return; 3170 } 3171 3172 switch (TREE_CODE_CLASS (TREE_CODE (t))) 3173 { 3174 case tcc_expression: 3175 { 3176 switch (TREE_CODE (t)) 3177 { 3178 case ADDR_EXPR: 3179 get_constraint_for_address_of (TREE_OPERAND (t, 0), results); 3180 return; 3181 default:; 3182 } 3183 break; 3184 } 3185 case tcc_reference: 3186 { 3187 switch (TREE_CODE (t)) 3188 { 3189 case INDIRECT_REF: 3190 { 3191 struct constraint_expr cs; 3192 varinfo_t vi, curr; 3193 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p, 3194 lhs_p); 3195 do_deref (results); 3196 3197 /* If we are not taking the address then make sure to process 3198 all subvariables we might access. */ 3199 if (address_p) 3200 return; 3201 3202 cs = *VEC_last (ce_s, *results); 3203 if (cs.type == DEREF 3204 && type_can_have_subvars (TREE_TYPE (t))) 3205 { 3206 /* For dereferences this means we have to defer it 3207 to solving time. */ 3208 VEC_last (ce_s, *results)->offset = UNKNOWN_OFFSET; 3209 return; 3210 } 3211 if (cs.type != SCALAR) 3212 return; 3213 3214 vi = get_varinfo (cs.var); 3215 curr = vi->next; 3216 if (!vi->is_full_var 3217 && curr) 3218 { 3219 unsigned HOST_WIDE_INT size; 3220 if (host_integerp (TYPE_SIZE (TREE_TYPE (t)), 1)) 3221 size = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (t))); 3222 else 3223 size = -1; 3224 for (; curr; curr = curr->next) 3225 { 3226 if (curr->offset - vi->offset < size) 3227 { 3228 cs.var = curr->id; 3229 VEC_safe_push (ce_s, heap, *results, &cs); 3230 } 3231 else 3232 break; 3233 } 3234 } 3235 return; 3236 } 3237 case ARRAY_REF: 3238 case ARRAY_RANGE_REF: 3239 case COMPONENT_REF: 3240 get_constraint_for_component_ref (t, results, address_p, lhs_p); 3241 return; 3242 case VIEW_CONVERT_EXPR: 3243 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p, 3244 lhs_p); 3245 return; 3246 /* We are missing handling for TARGET_MEM_REF here. */ 3247 default:; 3248 } 3249 break; 3250 } 3251 case tcc_exceptional: 3252 { 3253 switch (TREE_CODE (t)) 3254 { 3255 case SSA_NAME: 3256 { 3257 get_constraint_for_ssa_var (t, results, address_p); 3258 return; 3259 } 3260 default:; 3261 } 3262 break; 3263 } 3264 case tcc_declaration: 3265 { 3266 get_constraint_for_ssa_var (t, results, address_p); 3267 return; 3268 } 3269 case tcc_constant: 3270 { 3271 /* We cannot refer to automatic variables through constants. */ 3272 temp.type = ADDRESSOF; 3273 temp.var = nonlocal_id; 3274 temp.offset = 0; 3275 VEC_safe_push (ce_s, heap, *results, &temp); 3276 return; 3277 } 3278 default:; 3279 } 3280 3281 /* The default fallback is a constraint from anything. */ 3282 temp.type = ADDRESSOF; 3283 temp.var = anything_id; 3284 temp.offset = 0; 3285 VEC_safe_push (ce_s, heap, *results, &temp); 3286 } 3287 3288 /* Given a gimple tree T, return the constraint expression vector for it. */ 3289 3290 static void 3291 get_constraint_for (tree t, VEC (ce_s, heap) **results) 3292 { 3293 gcc_assert (VEC_length (ce_s, *results) == 0); 3294 3295 get_constraint_for_1 (t, results, false, true); 3296 } 3297 3298 /* Given a gimple tree T, return the constraint expression vector for it 3299 to be used as the rhs of a constraint. 
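
   As a rough illustration of what the walk in get_constraint_for_1
   produces for a few kinds of trees (offsets omitted):

     x       ->  { SCALAR x }
     *p      ->  { DEREF p }
     &x      ->  { ADDRESSOF x }
     "str"   ->  { SCALAR READONLY }
     s.f     ->  { SCALAR <subvariable of s for field f> }
     42      ->  { ADDRESSOF NONLOCAL }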
*/ 3300 3301 static void 3302 get_constraint_for_rhs (tree t, VEC (ce_s, heap) **results) 3303 { 3304 gcc_assert (VEC_length (ce_s, *results) == 0); 3305 3306 get_constraint_for_1 (t, results, false, false); 3307 } 3308 3309 3310 /* Efficiently generates constraints from all entries in *RHSC to all 3311 entries in *LHSC. */ 3312 3313 static void 3314 process_all_all_constraints (VEC (ce_s, heap) *lhsc, VEC (ce_s, heap) *rhsc) 3315 { 3316 struct constraint_expr *lhsp, *rhsp; 3317 unsigned i, j; 3318 3319 if (VEC_length (ce_s, lhsc) <= 1 3320 || VEC_length (ce_s, rhsc) <= 1) 3321 { 3322 for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i) 3323 for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); ++j) 3324 process_constraint (new_constraint (*lhsp, *rhsp)); 3325 } 3326 else 3327 { 3328 struct constraint_expr tmp; 3329 tmp = new_scalar_tmp_constraint_exp ("allalltmp"); 3330 for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); ++i) 3331 process_constraint (new_constraint (tmp, *rhsp)); 3332 for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i) 3333 process_constraint (new_constraint (*lhsp, tmp)); 3334 } 3335 } 3336 3337 /* Handle aggregate copies by expanding into copies of the respective 3338 fields of the structures. */ 3339 3340 static void 3341 do_structure_copy (tree lhsop, tree rhsop) 3342 { 3343 struct constraint_expr *lhsp, *rhsp; 3344 VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL; 3345 unsigned j; 3346 3347 get_constraint_for (lhsop, &lhsc); 3348 get_constraint_for_rhs (rhsop, &rhsc); 3349 lhsp = VEC_index (ce_s, lhsc, 0); 3350 rhsp = VEC_index (ce_s, rhsc, 0); 3351 if (lhsp->type == DEREF 3352 || (lhsp->type == ADDRESSOF && lhsp->var == anything_id) 3353 || rhsp->type == DEREF) 3354 process_all_all_constraints (lhsc, rhsc); 3355 else if (lhsp->type == SCALAR 3356 && (rhsp->type == SCALAR 3357 || rhsp->type == ADDRESSOF)) 3358 { 3359 HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset; 3360 HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset; 3361 unsigned k = 0; 3362 get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize); 3363 get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize); 3364 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp);) 3365 { 3366 varinfo_t lhsv, rhsv; 3367 rhsp = VEC_index (ce_s, rhsc, k); 3368 lhsv = get_varinfo (lhsp->var); 3369 rhsv = get_varinfo (rhsp->var); 3370 if (lhsv->may_have_pointers 3371 && ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size, 3372 rhsv->offset + lhsoffset, rhsv->size)) 3373 process_constraint (new_constraint (*lhsp, *rhsp)); 3374 if (lhsv->offset + rhsoffset + lhsv->size 3375 > rhsv->offset + lhsoffset + rhsv->size) 3376 { 3377 ++k; 3378 if (k >= VEC_length (ce_s, rhsc)) 3379 break; 3380 } 3381 else 3382 ++j; 3383 } 3384 } 3385 else 3386 gcc_unreachable (); 3387 3388 VEC_free (ce_s, heap, lhsc); 3389 VEC_free (ce_s, heap, rhsc); 3390 } 3391 3392 /* Create constraints ID = { rhsc }. */ 3393 3394 static void 3395 make_constraints_to (unsigned id, VEC(ce_s, heap) *rhsc) 3396 { 3397 struct constraint_expr *c; 3398 struct constraint_expr includes; 3399 unsigned int j; 3400 3401 includes.var = id; 3402 includes.offset = 0; 3403 includes.type = SCALAR; 3404 3405 for (j = 0; VEC_iterate (ce_s, rhsc, j, c); j++) 3406 process_constraint (new_constraint (includes, *c)); 3407 } 3408 3409 /* Create a constraint ID = OP. 
*/ 3410 3411 static void 3412 make_constraint_to (unsigned id, tree op) 3413 { 3414 VEC(ce_s, heap) *rhsc = NULL; 3415 get_constraint_for_rhs (op, &rhsc); 3416 make_constraints_to (id, rhsc); 3417 VEC_free (ce_s, heap, rhsc); 3418 } 3419 3420 /* Create a constraint ID = &FROM. */ 3421 3422 static void 3423 make_constraint_from (varinfo_t vi, int from) 3424 { 3425 struct constraint_expr lhs, rhs; 3426 3427 lhs.var = vi->id; 3428 lhs.offset = 0; 3429 lhs.type = SCALAR; 3430 3431 rhs.var = from; 3432 rhs.offset = 0; 3433 rhs.type = ADDRESSOF; 3434 process_constraint (new_constraint (lhs, rhs)); 3435 } 3436 3437 /* Create a constraint ID = FROM. */ 3438 3439 static void 3440 make_copy_constraint (varinfo_t vi, int from) 3441 { 3442 struct constraint_expr lhs, rhs; 3443 3444 lhs.var = vi->id; 3445 lhs.offset = 0; 3446 lhs.type = SCALAR; 3447 3448 rhs.var = from; 3449 rhs.offset = 0; 3450 rhs.type = SCALAR; 3451 process_constraint (new_constraint (lhs, rhs)); 3452 } 3453 3454 /* Make constraints necessary to make OP escape. */ 3455 3456 static void 3457 make_escape_constraint (tree op) 3458 { 3459 make_constraint_to (escaped_id, op); 3460 } 3461 3462 /* Create a new artificial heap variable with NAME and make a 3463 constraint from it to LHS. Return the created variable. */ 3464 3465 static varinfo_t 3466 make_constraint_from_heapvar (varinfo_t lhs, const char *name) 3467 { 3468 varinfo_t vi; 3469 tree heapvar = heapvar_lookup (lhs->decl, lhs->offset); 3470 3471 if (heapvar == NULL_TREE) 3472 { 3473 var_ann_t ann; 3474 heapvar = create_tmp_var_raw (ptr_type_node, name); 3475 DECL_EXTERNAL (heapvar) = 1; 3476 3477 heapvar_insert (lhs->decl, lhs->offset, heapvar); 3478 3479 ann = get_var_ann (heapvar); 3480 ann->is_heapvar = 1; 3481 } 3482 3483 /* For global vars we need to add a heapvar to the list of referenced 3484 vars of a different function than it was created for originally. */ 3485 if (gimple_referenced_vars (cfun)) 3486 add_referenced_var (heapvar); 3487 3488 vi = new_var_info (heapvar, name); 3489 vi->is_artificial_var = true; 3490 vi->is_heap_var = true; 3491 vi->is_unknown_size_var = true; 3492 vi->offset = 0; 3493 vi->fullsize = ~0; 3494 vi->size = ~0; 3495 vi->is_full_var = true; 3496 insert_vi_for_tree (heapvar, vi); 3497 3498 make_constraint_from (lhs, vi->id); 3499 3500 return vi; 3501 } 3502 3503 /* Create a new artificial heap variable with NAME and make a 3504 constraint from it to LHS. Set flags according to a tag used 3505 for tracking restrict pointers. */ 3506 3507 static void 3508 make_constraint_from_restrict (varinfo_t lhs, const char *name) 3509 { 3510 varinfo_t vi; 3511 vi = make_constraint_from_heapvar (lhs, name); 3512 vi->is_restrict_var = 1; 3513 vi->is_global_var = 0; 3514 vi->is_special_var = 1; 3515 vi->may_have_pointers = 0; 3516 } 3517 3518 /* For non-IPA mode, generate constraints necessary for a call on the 3519 RHS. */ 3520 3521 static void 3522 handle_rhs_call (gimple stmt, VEC(ce_s, heap) **results) 3523 { 3524 struct constraint_expr rhsc; 3525 unsigned i; 3526 3527 for (i = 0; i < gimple_call_num_args (stmt); ++i) 3528 { 3529 tree arg = gimple_call_arg (stmt, i); 3530 3531 /* Find those pointers being passed, and make sure they end up 3532 pointing to anything. */ 3533 make_escape_constraint (arg); 3534 } 3535 3536 /* The static chain escapes as well. */ 3537 if (gimple_call_chain (stmt)) 3538 make_escape_constraint (gimple_call_chain (stmt)); 3539 3540 /* And if we applied NRV the address of the return slot escapes as well. 
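
   For example (sketch):

     struct big x = foo ();   // with the return slot optimization the
                              // callee is handed &x, so x is added to
                              // what ESCAPED may point to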
*/ 3541 if (gimple_call_return_slot_opt_p (stmt) 3542 && gimple_call_lhs (stmt) != NULL_TREE 3543 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt)))) 3544 { 3545 VEC(ce_s, heap) *tmpc = NULL; 3546 struct constraint_expr lhsc, *c; 3547 get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc); 3548 lhsc.var = escaped_id; 3549 lhsc.offset = 0; 3550 lhsc.type = SCALAR; 3551 for (i = 0; VEC_iterate (ce_s, tmpc, i, c); ++i) 3552 process_constraint (new_constraint (lhsc, *c)); 3553 VEC_free(ce_s, heap, tmpc); 3554 } 3555 3556 /* Regular functions return nonlocal memory. */ 3557 rhsc.var = nonlocal_id; 3558 rhsc.offset = 0; 3559 rhsc.type = SCALAR; 3560 VEC_safe_push (ce_s, heap, *results, &rhsc); 3561 } 3562 3563 /* For non-IPA mode, generate constraints necessary for a call 3564 that returns a pointer and assigns it to LHS. This simply makes 3565 the LHS point to global and escaped variables. */ 3566 3567 static void 3568 handle_lhs_call (tree lhs, int flags, VEC(ce_s, heap) *rhsc, tree fndecl) 3569 { 3570 VEC(ce_s, heap) *lhsc = NULL; 3571 3572 get_constraint_for (lhs, &lhsc); 3573 3574 if (flags & ECF_MALLOC) 3575 { 3576 varinfo_t vi; 3577 vi = make_constraint_from_heapvar (get_vi_for_tree (lhs), "HEAP"); 3578 /* We delay marking allocated storage global until we know if 3579 it escapes. */ 3580 DECL_EXTERNAL (vi->decl) = 0; 3581 vi->is_global_var = 0; 3582 /* If this is not a real malloc call assume the memory was 3583 initialized and thus may point to global memory. All 3584 builtin functions with the malloc attribute behave in a sane way. */ 3585 if (!fndecl 3586 || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL) 3587 make_constraint_from (vi, nonlocal_id); 3588 } 3589 else if (VEC_length (ce_s, rhsc) > 0) 3590 { 3591 /* If the store is to a global decl make sure to 3592 add proper escape constraints. */ 3593 lhs = get_base_address (lhs); 3594 if (lhs 3595 && DECL_P (lhs) 3596 && is_global_var (lhs)) 3597 { 3598 struct constraint_expr tmpc; 3599 tmpc.var = escaped_id; 3600 tmpc.offset = 0; 3601 tmpc.type = SCALAR; 3602 VEC_safe_push (ce_s, heap, lhsc, &tmpc); 3603 } 3604 process_all_all_constraints (lhsc, rhsc); 3605 } 3606 VEC_free (ce_s, heap, lhsc); 3607 } 3608 3609 /* For non-IPA mode, generate constraints necessary for a call of a 3610 const function that returns a pointer in the statement STMT. */ 3611 3612 static void 3613 handle_const_call (gimple stmt, VEC(ce_s, heap) **results) 3614 { 3615 struct constraint_expr rhsc; 3616 unsigned int k; 3617 3618 /* Treat nested const functions the same as pure functions as far 3619 as the static chain is concerned. */ 3620 if (gimple_call_chain (stmt)) 3621 { 3622 make_constraint_to (callused_id, gimple_call_chain (stmt)); 3623 rhsc.var = callused_id; 3624 rhsc.offset = 0; 3625 rhsc.type = SCALAR; 3626 VEC_safe_push (ce_s, heap, *results, &rhsc); 3627 } 3628 3629 /* May return arguments. */ 3630 for (k = 0; k < gimple_call_num_args (stmt); ++k) 3631 { 3632 tree arg = gimple_call_arg (stmt, k); 3633 VEC(ce_s, heap) *argc = NULL; 3634 unsigned i; 3635 struct constraint_expr *argp; 3636 get_constraint_for_rhs (arg, &argc); 3637 for (i = 0; VEC_iterate (ce_s, argc, i, argp); ++i) 3638 VEC_safe_push (ce_s, heap, *results, argp); 3639 VEC_free(ce_s, heap, argc); 3640 } 3641 3642 /* May return addresses of globals. 
*/ 3643 rhsc.var = nonlocal_id; 3644 rhsc.offset = 0; 3645 rhsc.type = ADDRESSOF; 3646 VEC_safe_push (ce_s, heap, *results, &rhsc); 3647 } 3648 3649 /* For non-IPA mode, generate constraints necessary for a call to a 3650 pure function in statement STMT. */ 3651 3652 static void 3653 handle_pure_call (gimple stmt, VEC(ce_s, heap) **results) 3654 { 3655 struct constraint_expr rhsc; 3656 unsigned i; 3657 bool need_callused = false; 3658 3659 /* Memory reached from pointer arguments is call-used. */ 3660 for (i = 0; i < gimple_call_num_args (stmt); ++i) 3661 { 3662 tree arg = gimple_call_arg (stmt, i); 3663 make_constraint_to (callused_id, arg); 3664 need_callused = true; 3665 } 3666 3667 /* The static chain is used as well. */ 3668 if (gimple_call_chain (stmt)) 3669 { 3670 make_constraint_to (callused_id, gimple_call_chain (stmt)); 3671 need_callused = true; 3672 } 3673 3674 /* Pure functions may return callused and nonlocal memory. */ 3675 if (need_callused) 3676 { 3677 rhsc.var = callused_id; 3678 rhsc.offset = 0; 3679 rhsc.type = SCALAR; 3680 VEC_safe_push (ce_s, heap, *results, &rhsc); 3681 } 3682 rhsc.var = nonlocal_id; 3683 rhsc.offset = 0; 3684 rhsc.type = SCALAR; 3685 VEC_safe_push (ce_s, heap, *results, &rhsc); 3686 } 3687 3688 /* Walk statement T setting up aliasing constraints according to the 3689 references found in T. This function is the main part of the 3690 constraint builder. AI points to auxiliary alias information used 3691 when building alias sets and computing alias grouping heuristics. */ 3692 3693 static void 3694 find_func_aliases (gimple origt) 3695 { 3696 gimple t = origt; 3697 VEC(ce_s, heap) *lhsc = NULL; 3698 VEC(ce_s, heap) *rhsc = NULL; 3699 struct constraint_expr *c; 3700 3701 /* Now build constraints expressions. */ 3702 if (gimple_code (t) == GIMPLE_PHI) 3703 { 3704 size_t i; 3705 unsigned int j; 3706 3707 /* For a phi node, assign all the arguments to 3708 the result. */ 3709 get_constraint_for (gimple_phi_result (t), &lhsc); 3710 for (i = 0; i < gimple_phi_num_args (t); i++) 3711 { 3712 tree strippedrhs = PHI_ARG_DEF (t, i); 3713 3714 STRIP_NOPS (strippedrhs); 3715 get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc); 3716 3717 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++) 3718 { 3719 struct constraint_expr *c2; 3720 while (VEC_length (ce_s, rhsc) > 0) 3721 { 3722 c2 = VEC_last (ce_s, rhsc); 3723 process_constraint (new_constraint (*c, *c2)); 3724 VEC_pop (ce_s, rhsc); 3725 } 3726 } 3727 } 3728 } 3729 /* In IPA mode, we need to generate constraints to pass call 3730 arguments through their calls. There are two cases, 3731 either a GIMPLE_CALL returning a value, or just a plain 3732 GIMPLE_CALL when we are not. 3733 3734 In non-ipa mode, we need to generate constraints for each 3735 pointer passed by address. */ 3736 else if (is_gimple_call (t)) 3737 { 3738 tree fndecl = gimple_call_fndecl (t); 3739 if (fndecl != NULL_TREE 3740 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL) 3741 /* ??? All builtins that are handled here need to be handled 3742 in the alias-oracle query functions explicitly! */ 3743 switch (DECL_FUNCTION_CODE (fndecl)) 3744 { 3745 /* All the following functions return a pointer to the same object 3746 as their first argument points to. The functions do not add 3747 to the ESCAPED solution. The functions make the first argument 3748 pointed to memory point to what the second argument pointed to 3749 memory points to. 
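
   For example, for

     res = __builtin_memcpy (dest, src, n);

   the code below generates, roughly,

     res = dest      // the returned pointer aliases the destination
     *dest = *src    // whatever the source memory pointed to, the
                     // destination memory may point to afterwards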
*/ 3750 case BUILT_IN_STRCPY: 3751 case BUILT_IN_STRNCPY: 3752 case BUILT_IN_BCOPY: 3753 case BUILT_IN_MEMCPY: 3754 case BUILT_IN_MEMMOVE: 3755 case BUILT_IN_MEMPCPY: 3756 case BUILT_IN_STPCPY: 3757 case BUILT_IN_STPNCPY: 3758 case BUILT_IN_STRCAT: 3759 case BUILT_IN_STRNCAT: 3760 { 3761 tree res = gimple_call_lhs (t); 3762 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl) 3763 == BUILT_IN_BCOPY ? 1 : 0)); 3764 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl) 3765 == BUILT_IN_BCOPY ? 0 : 1)); 3766 if (res != NULL_TREE) 3767 { 3768 get_constraint_for (res, &lhsc); 3769 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY 3770 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY 3771 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY) 3772 get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc); 3773 else 3774 get_constraint_for (dest, &rhsc); 3775 process_all_all_constraints (lhsc, rhsc); 3776 VEC_free (ce_s, heap, lhsc); 3777 VEC_free (ce_s, heap, rhsc); 3778 } 3779 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc); 3780 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc); 3781 do_deref (&lhsc); 3782 do_deref (&rhsc); 3783 process_all_all_constraints (lhsc, rhsc); 3784 VEC_free (ce_s, heap, lhsc); 3785 VEC_free (ce_s, heap, rhsc); 3786 return; 3787 } 3788 case BUILT_IN_MEMSET: 3789 { 3790 tree res = gimple_call_lhs (t); 3791 tree dest = gimple_call_arg (t, 0); 3792 unsigned i; 3793 ce_s *lhsp; 3794 struct constraint_expr ac; 3795 if (res != NULL_TREE) 3796 { 3797 get_constraint_for (res, &lhsc); 3798 get_constraint_for (dest, &rhsc); 3799 process_all_all_constraints (lhsc, rhsc); 3800 VEC_free (ce_s, heap, lhsc); 3801 VEC_free (ce_s, heap, rhsc); 3802 } 3803 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc); 3804 do_deref (&lhsc); 3805 if (flag_delete_null_pointer_checks 3806 && integer_zerop (gimple_call_arg (t, 1))) 3807 { 3808 ac.type = ADDRESSOF; 3809 ac.var = nothing_id; 3810 } 3811 else 3812 { 3813 ac.type = SCALAR; 3814 ac.var = integer_id; 3815 } 3816 ac.offset = 0; 3817 for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i) 3818 process_constraint (new_constraint (*lhsp, ac)); 3819 VEC_free (ce_s, heap, lhsc); 3820 return; 3821 } 3822 /* All the following functions do not return pointers, do not 3823 modify the points-to sets of memory reachable from their 3824 arguments and do not add to the ESCAPED solution. */ 3825 case BUILT_IN_SINCOS: 3826 case BUILT_IN_SINCOSF: 3827 case BUILT_IN_SINCOSL: 3828 case BUILT_IN_FREXP: 3829 case BUILT_IN_FREXPF: 3830 case BUILT_IN_FREXPL: 3831 case BUILT_IN_GAMMA_R: 3832 case BUILT_IN_GAMMAF_R: 3833 case BUILT_IN_GAMMAL_R: 3834 case BUILT_IN_LGAMMA_R: 3835 case BUILT_IN_LGAMMAF_R: 3836 case BUILT_IN_LGAMMAL_R: 3837 case BUILT_IN_MODF: 3838 case BUILT_IN_MODFF: 3839 case BUILT_IN_MODFL: 3840 case BUILT_IN_REMQUO: 3841 case BUILT_IN_REMQUOF: 3842 case BUILT_IN_REMQUOL: 3843 case BUILT_IN_FREE: 3844 return; 3845 /* printf-style functions may have hooks to set pointers to 3846 point to somewhere into the generated string. Leave them 3847 for a later excercise... */ 3848 default: 3849 /* Fallthru to general call handling. */; 3850 } 3851 if (!in_ipa_mode 3852 || (fndecl 3853 && !lookup_vi_for_tree (fndecl))) 3854 { 3855 VEC(ce_s, heap) *rhsc = NULL; 3856 int flags = gimple_call_flags (t); 3857 3858 /* Const functions can return their arguments and addresses 3859 of global memory but not of escaped memory. 
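
   For example (sketch):

     int *f (int *) __attribute__ ((__const__));
     ...
     p = f (q);     // p may point to what q points to or to NONLOCAL
                    // memory, but the call adds nothing to ESCAPED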
*/ 3860 if (flags & (ECF_CONST|ECF_NOVOPS)) 3861 { 3862 if (gimple_call_lhs (t)) 3863 handle_const_call (t, &rhsc); 3864 } 3865 /* Pure functions can return addresses in and of memory 3866 reachable from their arguments, but they are not an escape 3867 point for reachable memory of their arguments. */ 3868 else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE)) 3869 handle_pure_call (t, &rhsc); 3870 else 3871 handle_rhs_call (t, &rhsc); 3872 if (gimple_call_lhs (t)) 3873 handle_lhs_call (gimple_call_lhs (t), flags, rhsc, fndecl); 3874 VEC_free (ce_s, heap, rhsc); 3875 } 3876 else 3877 { 3878 tree lhsop; 3879 varinfo_t fi; 3880 int i = 1; 3881 size_t j; 3882 tree decl; 3883 3884 lhsop = gimple_call_lhs (t); 3885 decl = gimple_call_fndecl (t); 3886 3887 /* If we can directly resolve the function being called, do so. 3888 Otherwise, it must be some sort of indirect expression that 3889 we should still be able to handle. */ 3890 if (decl) 3891 fi = get_vi_for_tree (decl); 3892 else 3893 { 3894 decl = gimple_call_fn (t); 3895 fi = get_vi_for_tree (decl); 3896 } 3897 3898 /* Assign all the passed arguments to the appropriate incoming 3899 parameters of the function. */ 3900 for (j = 0; j < gimple_call_num_args (t); j++) 3901 { 3902 struct constraint_expr lhs ; 3903 struct constraint_expr *rhsp; 3904 tree arg = gimple_call_arg (t, j); 3905 3906 get_constraint_for_rhs (arg, &rhsc); 3907 if (TREE_CODE (decl) != FUNCTION_DECL) 3908 { 3909 lhs.type = DEREF; 3910 lhs.var = fi->id; 3911 lhs.offset = i; 3912 } 3913 else 3914 { 3915 lhs.type = SCALAR; 3916 lhs.var = first_vi_for_offset (fi, i)->id; 3917 lhs.offset = 0; 3918 } 3919 while (VEC_length (ce_s, rhsc) != 0) 3920 { 3921 rhsp = VEC_last (ce_s, rhsc); 3922 process_constraint (new_constraint (lhs, *rhsp)); 3923 VEC_pop (ce_s, rhsc); 3924 } 3925 i++; 3926 } 3927 3928 /* If we are returning a value, assign it to the result. */ 3929 if (lhsop) 3930 { 3931 struct constraint_expr rhs; 3932 struct constraint_expr *lhsp; 3933 unsigned int j = 0; 3934 3935 get_constraint_for (lhsop, &lhsc); 3936 if (TREE_CODE (decl) != FUNCTION_DECL) 3937 { 3938 rhs.type = DEREF; 3939 rhs.var = fi->id; 3940 rhs.offset = i; 3941 } 3942 else 3943 { 3944 rhs.type = SCALAR; 3945 rhs.var = first_vi_for_offset (fi, i)->id; 3946 rhs.offset = 0; 3947 } 3948 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++) 3949 process_constraint (new_constraint (*lhsp, rhs)); 3950 } 3951 } 3952 } 3953 /* Otherwise, just a regular assignment statement. Only care about 3954 operations with pointer result, others are dealt with as escape 3955 points if they have pointer operands. */ 3956 else if (is_gimple_assign (t)) 3957 { 3958 /* Otherwise, just a regular assignment statement. */ 3959 tree lhsop = gimple_assign_lhs (t); 3960 tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL; 3961 3962 if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop))) 3963 do_structure_copy (lhsop, rhsop); 3964 else 3965 { 3966 enum tree_code code = gimple_assign_rhs_code (t); 3967 3968 get_constraint_for (lhsop, &lhsc); 3969 3970 if (code == POINTER_PLUS_EXPR) 3971 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t), 3972 gimple_assign_rhs2 (t), &rhsc); 3973 else if (code == BIT_AND_EXPR 3974 && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST) 3975 { 3976 /* Aligning a pointer via a BIT_AND_EXPR is offsetting 3977 the pointer. Handle it by offsetting it by UNKNOWN. 
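
   For example (sketch):

     q = (int *) ((uintptr_t) p & ~(uintptr_t) 15);

   still points into whatever p points into, only at an offset we do not
   track, so it is handled as q = p + UNKNOWN.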
  /* Otherwise, just a regular assignment statement.  Only care about
     operations with pointer result, others are dealt with as escape
     points if they have pointer operands.  */
  else if (is_gimple_assign (t))
    {
      tree lhsop = gimple_assign_lhs (t);
      tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;

      if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
	do_structure_copy (lhsop, rhsop);
      else
	{
	  enum tree_code code = gimple_assign_rhs_code (t);

	  get_constraint_for (lhsop, &lhsc);

	  if (code == POINTER_PLUS_EXPR)
	    get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
					   gimple_assign_rhs2 (t), &rhsc);
	  else if (code == BIT_AND_EXPR
		   && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
	    {
	      /* Aligning a pointer via a BIT_AND_EXPR is offsetting
		 the pointer.  Handle it by offsetting it by UNKNOWN.  */
	      get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
					     NULL_TREE, &rhsc);
	    }
	  else if ((CONVERT_EXPR_CODE_P (code)
		    && !(POINTER_TYPE_P (gimple_expr_type (t))
			 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
		   || gimple_assign_single_p (t))
	    get_constraint_for_rhs (rhsop, &rhsc);
	  else if (truth_value_p (code))
	    /* Truth value results are not pointer (parts).  Or at least
	       a very unreasonable obfuscation of one.  */
	    ;
	  else
	    {
	      /* All other operations are merges.  */
	      VEC (ce_s, heap) *tmp = NULL;
	      struct constraint_expr *rhsp;
	      unsigned i, j;
	      get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
	      for (i = 2; i < gimple_num_ops (t); ++i)
		{
		  get_constraint_for_rhs (gimple_op (t, i), &tmp);
		  for (j = 0; VEC_iterate (ce_s, tmp, j, rhsp); ++j)
		    VEC_safe_push (ce_s, heap, rhsc, rhsp);
		  VEC_truncate (ce_s, tmp, 0);
		}
	      VEC_free (ce_s, heap, tmp);
	    }
	  process_all_all_constraints (lhsc, rhsc);
	}
      /* If there is a store to a global variable the rhs escapes.  */
      if ((lhsop = get_base_address (lhsop)) != NULL_TREE
	  && DECL_P (lhsop)
	  && is_global_var (lhsop))
	make_escape_constraint (rhsop);
    }
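
  /* Illustrative sketch of the assignment handling above: for

       q = p + i;         (POINTER_PLUS_EXPR)
       r = (char *) q;    (pointer-preserving conversion)

     it produces copy constraints along the lines of

       q = p (possibly at a field offset, or UNKNOWN if I is not constant)
       r = q

     and a store of P into a global variable additionally makes P
     escape.  */
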
  /* Handle escapes through return.  */
  else if (gimple_code (t) == GIMPLE_RETURN
	   && gimple_return_retval (t) != NULL_TREE)
    {
      make_escape_constraint (gimple_return_retval (t));
    }
  /* Handle asms conservatively by adding escape constraints to everything.  */
  else if (gimple_code (t) == GIMPLE_ASM)
    {
      unsigned i, noutputs;
      const char **oconstraints;
      const char *constraint;
      bool allows_mem, allows_reg, is_inout;

      noutputs = gimple_asm_noutputs (t);
      oconstraints = XALLOCAVEC (const char *, noutputs);

      for (i = 0; i < noutputs; ++i)
	{
	  tree link = gimple_asm_output_op (t, i);
	  tree op = TREE_VALUE (link);

	  constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	  oconstraints[i] = constraint;
	  parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
				   &allows_reg, &is_inout);

	  /* A memory constraint makes the address of the operand escape.  */
	  if (!allows_reg && allows_mem)
	    make_escape_constraint (build_fold_addr_expr (op));

	  /* The asm may read global memory, so outputs may point to
	     any global memory.  */
	  if (op)
	    {
	      VEC(ce_s, heap) *lhsc = NULL;
	      struct constraint_expr rhsc, *lhsp;
	      unsigned j;
	      get_constraint_for (op, &lhsc);
	      rhsc.var = nonlocal_id;
	      rhsc.offset = 0;
	      rhsc.type = SCALAR;
	      for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
		process_constraint (new_constraint (*lhsp, rhsc));
	      VEC_free (ce_s, heap, lhsc);
	    }
	}
      for (i = 0; i < gimple_asm_ninputs (t); ++i)
	{
	  tree link = gimple_asm_input_op (t, i);
	  tree op = TREE_VALUE (link);

	  constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));

	  parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
				  &allows_mem, &allows_reg);

	  /* A memory constraint makes the address of the operand escape.  */
	  if (!allows_reg && allows_mem)
	    make_escape_constraint (build_fold_addr_expr (op));
	  /* Strictly we'd only need the constraint to ESCAPED if
	     the asm clobbers memory, otherwise using CALLUSED
	     would be enough.  */
	  else if (op)
	    make_escape_constraint (op);
	}
    }

  VEC_free (ce_s, heap, rhsc);
  VEC_free (ce_s, heap, lhsc);
}


/* Find the first varinfo in the same variable as START that overlaps with
   OFFSET.  Return NULL if we can't find one.  */

static varinfo_t
first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
{
  /* If the offset is outside of the variable, bail out.  */
  if (offset >= start->fullsize)
    return NULL;

  /* If we cannot reach offset from start, lookup the first field
     and start from there.  */
  if (start->offset > offset)
    start = lookup_vi_for_tree (start->decl);

  while (start)
    {
      /* We may not find a variable in the field list with the actual
	 offset when we have glommed a structure to a variable.
	 In that case, however, offset should still be within the size
	 of the variable.  */
      if (offset >= start->offset
	  && (offset - start->offset) < start->size)
	return start;

      start = start->next;
    }

  return NULL;
}

/* Find the first varinfo in the same variable as START that overlaps with
   OFFSET.  If there is no such varinfo the varinfo directly preceding
   OFFSET is returned.  */

static varinfo_t
first_or_preceding_vi_for_offset (varinfo_t start,
				  unsigned HOST_WIDE_INT offset)
{
  /* If we cannot reach offset from start, lookup the first field
     and start from there.  */
  if (start->offset > offset)
    start = lookup_vi_for_tree (start->decl);

  /* We may not find a variable in the field list with the actual
     offset when we have glommed a structure to a variable.
     In that case, however, offset should still be within the size
     of the variable.
     If we got beyond the offset we look for, return the field
     directly preceding offset which may be the last field.  */
  while (start->next
	 && offset >= start->offset
	 && !((offset - start->offset) < start->size))
    start = start->next;

  return start;
}


/* Insert the varinfo FIELD into the field list for BASE, at the front
   of the list.  */

static void
insert_into_field_list (varinfo_t base, varinfo_t field)
{
  varinfo_t prev = base;
  varinfo_t curr = base->next;

  field->next = curr;
  prev->next = field;
}

/* Insert the varinfo FIELD into the field list for BASE, ordered by
   offset.  */

static void
insert_into_field_list_sorted (varinfo_t base, varinfo_t field)
{
  varinfo_t prev = base;
  varinfo_t curr = base->next;

  if (curr == NULL)
    {
      prev->next = field;
      field->next = NULL;
    }
  else
    {
      while (curr)
	{
	  if (field->offset <= curr->offset)
	    break;
	  prev = curr;
	  curr = curr->next;
	}
      field->next = prev->next;
      prev->next = field;
    }
}
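
/* Illustrative sketch (hypothetical variable): a field-sensitive

     struct S { int *a; int b; int *c; } s;

   is represented as a chain of varinfos linked by ->next, covering
   bit ranges like [0, 32), [32, 64) and [64, 96) on a 32-bit target,
   and first_vi_for_offset walks that chain to map a bit offset back to
   the field varinfo covering it.  */
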
/* This structure is used during pushing fields onto the fieldstack
   to track the offset of the field, since bitpos_of_field gives it
   relative to its immediate containing type, and we want it relative
   to the ultimate containing object.  */

struct fieldoff
{
  /* Offset from the base of the base containing object to this field.  */
  HOST_WIDE_INT offset;

  /* Size, in bits, of the field.  */
  unsigned HOST_WIDE_INT size;

  unsigned has_unknown_size : 1;

  unsigned must_have_pointers : 1;

  unsigned may_have_pointers : 1;

  unsigned only_restrict_pointers : 1;
};
typedef struct fieldoff fieldoff_s;

DEF_VEC_O(fieldoff_s);
DEF_VEC_ALLOC_O(fieldoff_s,heap);

/* qsort comparison function for two fieldoff's PA and PB.  */

static int
fieldoff_compare (const void *pa, const void *pb)
{
  const fieldoff_s *foa = (const fieldoff_s *)pa;
  const fieldoff_s *fob = (const fieldoff_s *)pb;
  unsigned HOST_WIDE_INT foasize, fobsize;

  if (foa->offset < fob->offset)
    return -1;
  else if (foa->offset > fob->offset)
    return 1;

  foasize = foa->size;
  fobsize = fob->size;
  if (foasize < fobsize)
    return -1;
  else if (foasize > fobsize)
    return 1;
  return 0;
}

/* Sort a fieldstack according to the field offset and sizes.  */
static void
sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
{
  qsort (VEC_address (fieldoff_s, fieldstack),
	 VEC_length (fieldoff_s, fieldstack),
	 sizeof (fieldoff_s),
	 fieldoff_compare);
}

/* Return true if T is a type that can have subvars.  */

static inline bool
type_can_have_subvars (const_tree t)
{
  /* Aggregates without overlapping fields can have subvars.  */
  return TREE_CODE (t) == RECORD_TYPE;
}

/* Return true if V is a tree that we can have subvars for.
   Normally, this is any aggregate type.  Also complex
   types which are not gimple registers can have subvars.  */

static inline bool
var_can_have_subvars (const_tree v)
{
  /* Volatile variables should never have subvars.  */
  if (TREE_THIS_VOLATILE (v))
    return false;

  /* Non decls or memory tags can never have subvars.  */
  if (!DECL_P (v))
    return false;

  return type_can_have_subvars (TREE_TYPE (v));
}

/* Return true if T is a type that does contain pointers.  */

static bool
type_must_have_pointers (tree type)
{
  if (POINTER_TYPE_P (type))
    return true;

  if (TREE_CODE (type) == ARRAY_TYPE)
    return type_must_have_pointers (TREE_TYPE (type));

  /* A function or method can have pointers as arguments, so track
     those separately.  */
  if (TREE_CODE (type) == FUNCTION_TYPE
      || TREE_CODE (type) == METHOD_TYPE)
    return true;

  return false;
}

static bool
field_must_have_pointers (tree t)
{
  return type_must_have_pointers (TREE_TYPE (t));
}
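
/* Illustrative note: only non-volatile RECORD_TYPE decls get subvars,
   and a field "must have pointers" when its type is (an array of)
   pointer, function or method type.  For a hypothetical

     struct T { int n; void (*fn) (void); } t;

   this yields two fieldoffs, with only the one for FN marked
   must_have_pointers.  */
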
/* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
   the fields of TYPE onto fieldstack, recording their offsets along
   the way.

   OFFSET is used to keep track of the offset in this entire
   structure, rather than just the immediately containing structure.
   Returns the number of fields pushed.  */

static int
push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
			     HOST_WIDE_INT offset)
{
  tree field;
  int count = 0;

  if (TREE_CODE (type) != RECORD_TYPE)
    return 0;

  /* If the vector of fields is growing too big, bail out early.
     Callers check for VEC_length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, so
     make sure that check fails.  */
  if (VEC_length (fieldoff_s, *fieldstack) > MAX_FIELDS_FOR_FIELD_SENSITIVE)
    return 0;

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      {
	bool push = false;
	int pushed = 0;
	HOST_WIDE_INT foff = bitpos_of_field (field);

	if (!var_can_have_subvars (field)
	    || TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
	    || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE)
	  push = true;
	else if (!(pushed = push_fields_onto_fieldstack
		   (TREE_TYPE (field), fieldstack, offset + foff))
		 && (DECL_SIZE (field)
		     && !integer_zerop (DECL_SIZE (field))))
	  /* Empty structures may have actual size, like in C++.  So
	     if we didn't push any subfields and the size is nonzero,
	     push the field onto the stack.  */
	  push = true;

	if (push)
	  {
	    fieldoff_s *pair = NULL;
	    bool has_unknown_size = false;
	    bool must_have_pointers_p;

	    if (!VEC_empty (fieldoff_s, *fieldstack))
	      pair = VEC_last (fieldoff_s, *fieldstack);

	    if (!pair
		&& offset + foff != 0)
	      {
		pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
		pair->offset = 0;
		pair->size = offset + foff;
		pair->has_unknown_size = false;
		pair->must_have_pointers = false;
		pair->may_have_pointers = false;
		pair->only_restrict_pointers = false;
	      }

	    if (!DECL_SIZE (field)
		|| !host_integerp (DECL_SIZE (field), 1))
	      has_unknown_size = true;

	    /* If adjacent fields do not contain pointers merge them.  */
	    must_have_pointers_p = field_must_have_pointers (field);
	    if (pair
		&& !has_unknown_size
		&& !must_have_pointers_p
		&& !pair->must_have_pointers
		&& !pair->has_unknown_size
		&& pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
	      {
		pair->size += TREE_INT_CST_LOW (DECL_SIZE (field));
	      }
	    else
	      {
		pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
		pair->offset = offset + foff;
		pair->has_unknown_size = has_unknown_size;
		if (!has_unknown_size)
		  pair->size = TREE_INT_CST_LOW (DECL_SIZE (field));
		else
		  pair->size = -1;
		pair->must_have_pointers = must_have_pointers_p;
		pair->may_have_pointers = true;
		pair->only_restrict_pointers
		  = (!has_unknown_size
		     && POINTER_TYPE_P (TREE_TYPE (field))
		     && TYPE_RESTRICT (TREE_TYPE (field)));
		count++;
	      }
	  }
	else
	  count += pushed;
      }

  return count;
}
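
/* Illustrative sketch (hypothetical types): for

     struct inner { char buf[8]; };
     struct outer { struct inner i; int *p; } o;

   the recursion above flattens O into a fieldstack of roughly two
   entries, one for the 64 bits of BUF with must_have_pointers clear and
   one for the pointer field P (size depending on the target) with
   must_have_pointers set; create_variable_info_for below then creates
   one varinfo per remaining entry.  */
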
/* Count the number of arguments DECL has, and set IS_VARARGS to true
   if it is a varargs function.  */

static unsigned int
count_num_arguments (tree decl, bool *is_varargs)
{
  unsigned int num = 0;
  tree t;

  /* Capture named arguments for K&R functions.  They do not
     have a prototype and thus no TYPE_ARG_TYPES.  */
  for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
    ++num;

  /* Check if the function has variadic arguments.  */
  for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
    if (TREE_VALUE (t) == void_type_node)
      break;
  if (!t)
    *is_varargs = true;

  return num;
}

/* Create a function info node for DECL, using NAME, and return the index
   of the variable we've created for the function.  */

static unsigned int
create_function_info_for (tree decl, const char *name)
{
  varinfo_t vi;
  tree arg;
  unsigned int i;
  bool is_varargs = false;

  /* Create the variable info.  */

  vi = new_var_info (decl, name);
  vi->offset = 0;
  vi->size = 1;
  vi->fullsize = count_num_arguments (decl, &is_varargs) + 1;
  insert_vi_for_tree (vi->decl, vi);

  stats.total_vars++;

  /* If it's varargs, we don't know how many arguments it has, so we
     can't do much.  */
  if (is_varargs)
    {
      vi->fullsize = ~0;
      vi->size = ~0;
      vi->is_unknown_size_var = true;
      return vi->id;
    }

  arg = DECL_ARGUMENTS (decl);

  /* Set up variables for each argument.  */
  for (i = 1; i < vi->fullsize; i++)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      tree argdecl = decl;

      if (arg)
	argdecl = arg;

      asprintf (&tempname, "%s.arg%d", name, i-1);
      newname = ggc_strdup (tempname);
      free (tempname);

      argvi = new_var_info (argdecl, newname);
      argvi->offset = i;
      argvi->size = 1;
      argvi->is_full_var = true;
      argvi->fullsize = vi->fullsize;
      insert_into_field_list_sorted (vi, argvi);
      stats.total_vars++;
      if (arg)
	{
	  insert_vi_for_tree (arg, argvi);
	  arg = TREE_CHAIN (arg);
	}
    }

  /* Create a variable for the return var.  */
  if (DECL_RESULT (decl) != NULL
      || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    {
      varinfo_t resultvi;
      const char *newname;
      char *tempname;
      tree resultdecl = decl;

      vi->fullsize++;

      if (DECL_RESULT (decl))
	resultdecl = DECL_RESULT (decl);

      asprintf (&tempname, "%s.result", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      resultvi = new_var_info (resultdecl, newname);
      resultvi->offset = i;
      resultvi->size = 1;
      resultvi->fullsize = vi->fullsize;
      resultvi->is_full_var = true;
      insert_into_field_list_sorted (vi, resultvi);
      stats.total_vars++;
      if (DECL_RESULT (decl))
	insert_vi_for_tree (DECL_RESULT (decl), resultvi);
    }

  return vi->id;
}
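
/* Illustrative sketch: for a function

     int *foo (int *a, int *b);

   this creates the chain foo (offset 0), foo.arg0 (offset 1),
   foo.arg1 (offset 2) and foo.result (offset 3); the IPA call handling
   in find_func_aliases addresses these parameter and result slots by
   exactly those offsets.  */
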
/* Return true if FIELDSTACK contains fields that overlap.
   FIELDSTACK is assumed to be sorted by offset.  */

static bool
check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
{
  fieldoff_s *fo = NULL;
  unsigned int i;
  HOST_WIDE_INT lastoffset = -1;

  for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
    {
      if (fo->offset == lastoffset)
	return true;
      lastoffset = fo->offset;
    }
  return false;
}

/* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
   This will also create any varinfo structures necessary for fields
   of DECL.  */

static unsigned int
create_variable_info_for (tree decl, const char *name)
{
  varinfo_t vi;
  tree decl_type = TREE_TYPE (decl);
  tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
  VEC (fieldoff_s,heap) *fieldstack = NULL;

  if (var_can_have_subvars (decl) && use_field_sensitive)
    push_fields_onto_fieldstack (decl_type, &fieldstack, 0);

  /* If the variable doesn't have subvars, we may end up needing to
     sort the field list and create fake variables for all the
     fields.  */
  vi = new_var_info (decl, name);
  vi->offset = 0;
  vi->may_have_pointers = true;
  if (!declsize
      || !host_integerp (declsize, 1))
    {
      vi->is_unknown_size_var = true;
      vi->fullsize = ~0;
      vi->size = ~0;
    }
  else
    {
      vi->fullsize = TREE_INT_CST_LOW (declsize);
      vi->size = vi->fullsize;
    }

  insert_vi_for_tree (vi->decl, vi);
  if (vi->is_global_var
      && (!flag_whole_program || !in_ipa_mode)
      && vi->may_have_pointers)
    {
      if (POINTER_TYPE_P (TREE_TYPE (decl))
	  && TYPE_RESTRICT (TREE_TYPE (decl)))
	make_constraint_from_restrict (vi, "GLOBAL_RESTRICT");
      make_copy_constraint (vi, nonlocal_id);
    }

  stats.total_vars++;
  if (use_field_sensitive
      && !vi->is_unknown_size_var
      && var_can_have_subvars (decl)
      && VEC_length (fieldoff_s, fieldstack) > 1
      && VEC_length (fieldoff_s, fieldstack) <= MAX_FIELDS_FOR_FIELD_SENSITIVE)
    {
      fieldoff_s *fo = NULL;
      bool notokay = false;
      unsigned int i;

      for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
	{
	  if (fo->has_unknown_size
	      || fo->offset < 0)
	    {
	      notokay = true;
	      break;
	    }
	}

      /* We can't sort them if we have a field with a variable sized type,
	 which will make notokay = true.  In that case, we are going to return
	 without creating varinfos for the fields anyway, so sorting them is a
	 waste to boot.  */
      if (!notokay)
	{
	  sort_fieldstack (fieldstack);
	  /* Due to some C++ FE issues, like PR 22488, we might end up with
	     what appear to be overlapping fields even though they,
	     in reality, do not overlap.  Until the C++ FE is fixed,
	     we will simply disable field-sensitivity for these cases.  */
	  notokay = check_for_overlaps (fieldstack);
	}


      if (VEC_length (fieldoff_s, fieldstack) != 0)
	fo = VEC_index (fieldoff_s, fieldstack, 0);

      if (fo == NULL || notokay)
	{
	  vi->is_unknown_size_var = 1;
	  vi->fullsize = ~0;
	  vi->size = ~0;
	  vi->is_full_var = true;
	  VEC_free (fieldoff_s, heap, fieldstack);
	  return vi->id;
	}

      vi->size = fo->size;
      vi->offset = fo->offset;
      vi->may_have_pointers = fo->may_have_pointers;
      if (vi->is_global_var
	  && (!flag_whole_program || !in_ipa_mode)
	  && vi->may_have_pointers)
	{
	  if (fo->only_restrict_pointers)
	    make_constraint_from_restrict (vi, "GLOBAL_RESTRICT");
	}
      for (i = VEC_length (fieldoff_s, fieldstack) - 1;
	   i >= 1 && VEC_iterate (fieldoff_s, fieldstack, i, fo);
	   i--)
	{
	  varinfo_t newvi;
	  const char *newname = "NULL";
	  char *tempname;

	  if (dump_file)
	    {
	      asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC
			"+" HOST_WIDE_INT_PRINT_DEC,
			vi->name, fo->offset, fo->size);
	      newname = ggc_strdup (tempname);
	      free (tempname);
	    }
	  newvi = new_var_info (decl, newname);
	  newvi->offset = fo->offset;
	  newvi->size = fo->size;
	  newvi->fullsize = vi->fullsize;
	  newvi->may_have_pointers = fo->may_have_pointers;
	  insert_into_field_list (vi, newvi);
	  if ((newvi->is_global_var || TREE_CODE (decl) == PARM_DECL)
	      && newvi->may_have_pointers)
	    {
	      if (fo->only_restrict_pointers)
		make_constraint_from_restrict (newvi, "GLOBAL_RESTRICT");
	      if (newvi->is_global_var && !in_ipa_mode)
		make_copy_constraint (newvi, nonlocal_id);
	    }

	  stats.total_vars++;
	}
    }
  else
    vi->is_full_var = true;

  VEC_free (fieldoff_s, heap, fieldstack);

  return vi->id;
}

/* Print out the points-to solution for VAR to FILE.  */

static void
dump_solution_for_var (FILE *file, unsigned int var)
{
  varinfo_t vi = get_varinfo (var);
  unsigned int i;
  bitmap_iterator bi;

  if (find (var) != var)
    {
      varinfo_t vipt = get_varinfo (find (var));
      fprintf (file, "%s = same as %s\n", vi->name, vipt->name);
    }
  else
    {
      fprintf (file, "%s = { ", vi->name);
      EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
	{
	  fprintf (file, "%s ", get_varinfo (i)->name);
	}
      fprintf (file, "}\n");
    }
}

/* Print the points-to solution for VAR to stdout.  */

void
debug_solution_for_var (unsigned int var)
{
  dump_solution_for_var (stdout, var);
}

/* Create varinfo structures for all of the variables in the
   function for intraprocedural mode.  */

static void
intra_create_variable_infos (void)
{
  tree t;

  /* For each incoming pointer argument arg, create the constraint ARG
     = NONLOCAL or a dummy variable if flag_argument_noalias is set.  */
  for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
    {
      varinfo_t p;

      /* For restrict qualified pointers to objects passed by
	 reference build a real representative for the pointed-to object.  */
      if (DECL_BY_REFERENCE (t)
	  && POINTER_TYPE_P (TREE_TYPE (t))
	  && TYPE_RESTRICT (TREE_TYPE (t)))
	{
	  struct constraint_expr lhsc, rhsc;
	  varinfo_t vi;
	  tree heapvar = heapvar_lookup (t, 0);
	  if (heapvar == NULL_TREE)
	    {
	      var_ann_t ann;
	      heapvar = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (t)),
					    "PARM_NOALIAS");
	      DECL_EXTERNAL (heapvar) = 1;
	      heapvar_insert (t, 0, heapvar);
	      ann = get_var_ann (heapvar);
	      ann->is_heapvar = 1;
	    }
	  if (gimple_referenced_vars (cfun))
	    add_referenced_var (heapvar);
	  lhsc.var = get_vi_for_tree (t)->id;
	  lhsc.type = SCALAR;
	  lhsc.offset = 0;
	  rhsc.var = (vi = get_vi_for_tree (heapvar))->id;
	  rhsc.type = ADDRESSOF;
	  rhsc.offset = 0;
	  process_constraint (new_constraint (lhsc, rhsc));
	  vi->is_restrict_var = 1;
	  continue;
	}

      for (p = get_vi_for_tree (t); p; p = p->next)
	if (p->may_have_pointers)
	  make_constraint_from (p, nonlocal_id);
      if (POINTER_TYPE_P (TREE_TYPE (t))
	  && TYPE_RESTRICT (TREE_TYPE (t)))
	make_constraint_from_restrict (get_vi_for_tree (t), "PARM_RESTRICT");
    }
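
  /* Illustrative sketch: for a function with the signature

       void bar (int *p, struct S *q);

     the loop above emits the constraint P = NONLOCAL (and likewise for Q
     and for every field varinfo of a structure parameter), i.e. incoming
     pointers may point to any nonlocal memory.  Restrict-qualified
     by-reference parameters instead get a fresh PARM_NOALIAS heap
     variable to point to, as handled earlier in the loop.  */
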
  /* Add a constraint for a result decl that is passed by reference.  */
  if (DECL_RESULT (cfun->decl)
      && DECL_BY_REFERENCE (DECL_RESULT (cfun->decl)))
    {
      varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (cfun->decl));

      for (p = result_vi; p; p = p->next)
	make_constraint_from (p, nonlocal_id);
    }

  /* Add a constraint for the incoming static chain parameter.  */
  if (cfun->static_chain_decl != NULL_TREE)
    {
      varinfo_t p, chain_vi = get_vi_for_tree (cfun->static_chain_decl);

      for (p = chain_vi; p; p = p->next)
	make_constraint_from (p, nonlocal_id);
    }
}

/* Structure used to put solution bitmaps in a hashtable so they can
   be shared among variables with the same points-to set.  */

typedef struct shared_bitmap_info
{
  bitmap pt_vars;
  hashval_t hashcode;
} *shared_bitmap_info_t;
typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;

static htab_t shared_bitmap_table;

/* Hash function for a shared_bitmap_info_t.  */

static hashval_t
shared_bitmap_hash (const void *p)
{
  const_shared_bitmap_info_t const bi = (const_shared_bitmap_info_t) p;
  return bi->hashcode;
}

/* Equality function for two shared_bitmap_info_t's.  */

static int
shared_bitmap_eq (const void *p1, const void *p2)
{
  const_shared_bitmap_info_t const sbi1 = (const_shared_bitmap_info_t) p1;
  const_shared_bitmap_info_t const sbi2 = (const_shared_bitmap_info_t) p2;
  return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
}

/* Lookup a bitmap in the shared bitmap hashtable, and return an already
   existing instance if there is one, NULL otherwise.  */

static bitmap
shared_bitmap_lookup (bitmap pt_vars)
{
  void **slot;
  struct shared_bitmap_info sbi;

  sbi.pt_vars = pt_vars;
  sbi.hashcode = bitmap_hash (pt_vars);

  slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
				   sbi.hashcode, NO_INSERT);
  if (!slot)
    return NULL;
  else
    return ((shared_bitmap_info_t) *slot)->pt_vars;
}


/* Add a bitmap to the shared bitmap hashtable.  */

static void
shared_bitmap_add (bitmap pt_vars)
{
  void **slot;
  shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);

  sbi->pt_vars = pt_vars;
  sbi->hashcode = bitmap_hash (pt_vars);

  slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
				   sbi->hashcode, INSERT);
  gcc_assert (!*slot);
  *slot = (void *) sbi;
}


/* Set bits in INTO corresponding to the variable uids in solution set FROM.  */

static void
set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt)
{
  unsigned int i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* The only artificial variables that are allowed in a may-alias
	 set are heap variables.  */
      if (vi->is_artificial_var && !vi->is_heap_var)
	continue;

      if (TREE_CODE (vi->decl) == VAR_DECL
	  || TREE_CODE (vi->decl) == PARM_DECL
	  || TREE_CODE (vi->decl) == RESULT_DECL)
	{
	  /* Add the decl to the points-to set.  Note that the points-to
	     set contains global variables.  */
	  bitmap_set_bit (into, DECL_UID (vi->decl));
	  if (vi->is_global_var)
	    pt->vars_contains_global = true;
	}
    }
}
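
/* Illustrative note: many pointers end up with identical points-to sets,
   so the final bitmaps are shared through shared_bitmap_table;
   find_what_var_points_to below builds a candidate bitmap, then either
   reuses an equal existing one or registers the candidate for future
   sharing.  */
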
/* Compute the points-to solution *PT for the variable VI.  */

static void
find_what_var_points_to (varinfo_t orig_vi, struct pt_solution *pt)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap finished_solution;
  bitmap result;
  varinfo_t vi;

  memset (pt, 0, sizeof (struct pt_solution));

  /* This variable may have been collapsed, let's get the real
     variable.  */
  vi = get_varinfo (find (orig_vi->id));

  /* Translate artificial variables into SSA_NAME_PTR_INFO
     attributes.  */
  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      if (vi->is_artificial_var)
	{
	  if (vi->id == nothing_id)
	    pt->null = 1;
	  else if (vi->id == escaped_id)
	    pt->escaped = 1;
	  else if (vi->id == callused_id)
	    gcc_unreachable ();
	  else if (vi->id == nonlocal_id)
	    pt->nonlocal = 1;
	  else if (vi->is_heap_var)
	    /* We represent heapvars in the points-to set properly.  */
	    ;
	  else if (vi->id == readonly_id)
	    /* Nobody cares.  */
	    ;
	  else if (vi->id == anything_id
		   || vi->id == integer_id)
	    pt->anything = 1;
	}
      if (vi->is_restrict_var)
	pt->vars_contains_restrict = true;
    }

  /* Instead of doing extra work, simply do not create
     elaborate points-to information for pt_anything pointers.  */
  if (pt->anything
      && (orig_vi->is_artificial_var
	  || !pt->vars_contains_restrict))
    return;

  /* Share the final set of variables when possible.  */
  finished_solution = BITMAP_GGC_ALLOC ();
  stats.points_to_sets_created++;

  set_uids_in_ptset (finished_solution, vi->solution, pt);
  result = shared_bitmap_lookup (finished_solution);
  if (!result)
    {
      shared_bitmap_add (finished_solution);
      pt->vars = finished_solution;
    }
  else
    {
      pt->vars = result;
      bitmap_clear (finished_solution);
    }
}

/* Given a pointer variable P, fill in its points-to set.  */

static void
find_what_p_points_to (tree p)
{
  struct ptr_info_def *pi;
  tree lookup_p = p;
  varinfo_t vi;

  /* For parameters, get at the points-to set for the actual parm
     decl.  */
  if (TREE_CODE (p) == SSA_NAME
      && TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
      && SSA_NAME_IS_DEFAULT_DEF (p))
    lookup_p = SSA_NAME_VAR (p);

  vi = lookup_vi_for_tree (lookup_p);
  if (!vi)
    return;

  pi = get_ptr_info (p);
  find_what_var_points_to (vi, &pi->pt);
}
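
/* Illustrative sketch: a solved constraint solution such as

     p = { NONLOCAL ESCAPED x y }

   is translated into a pt_solution with nonlocal and escaped set and
   vars = { DECL_UID (x), DECL_UID (y) }; a solution containing ANYTHING
   (or INTEGER) just sets pt->anything and skips building the bitmap.  */
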
/* Query statistics for points-to solutions.  */

static struct {
  unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
  unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
  unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
  unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
} pta_stats;

void
dump_pta_stats (FILE *s)
{
  fprintf (s, "\nPTA query stats:\n");
  fprintf (s, " pt_solution_includes: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   pta_stats.pt_solution_includes_no_alias,
	   pta_stats.pt_solution_includes_no_alias
	   + pta_stats.pt_solution_includes_may_alias);
  fprintf (s, " pt_solutions_intersect: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   pta_stats.pt_solutions_intersect_no_alias,
	   pta_stats.pt_solutions_intersect_no_alias
	   + pta_stats.pt_solutions_intersect_may_alias);
}


/* Reset the points-to solution *PT to a conservative default
   (point to anything).  */

void
pt_solution_reset (struct pt_solution *pt)
{
  memset (pt, 0, sizeof (struct pt_solution));
  pt->anything = true;
}

/* Set the points-to solution *PT to point only to the variables
   in VARS.  */

void
pt_solution_set (struct pt_solution *pt, bitmap vars)
{
  bitmap_iterator bi;
  unsigned i;

  memset (pt, 0, sizeof (struct pt_solution));
  pt->vars = vars;
  EXECUTE_IF_SET_IN_BITMAP (vars, 0, i, bi)
    {
      tree var = referenced_var_lookup (i);
      if (is_global_var (var))
	{
	  pt->vars_contains_global = true;
	  break;
	}
    }
}

/* Return true if the points-to solution *PT is empty.  */

static bool
pt_solution_empty_p (struct pt_solution *pt)
{
  if (pt->anything
      || pt->nonlocal)
    return false;

  if (pt->vars
      && !bitmap_empty_p (pt->vars))
    return false;

  /* If the solution includes ESCAPED, check if that is empty.  */
  if (pt->escaped
      && !pt_solution_empty_p (&cfun->gimple_df->escaped))
    return false;

  return true;
}

/* Return true if the points-to solution *PT includes global memory.  */

bool
pt_solution_includes_global (struct pt_solution *pt)
{
  if (pt->anything
      || pt->nonlocal
      || pt->vars_contains_global)
    return true;

  if (pt->escaped)
    return pt_solution_includes_global (&cfun->gimple_df->escaped);

  return false;
}
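
/* Illustrative note: the inclusion test below answers "may the pointer
   described by PT point to DECL?" conservatively, in stages: an anything
   solution includes every decl, a nonlocal solution includes every
   global decl, an explicit vars bitmap is tested by DECL_UID, and an
   escaped solution defers to the function-wide ESCAPED set.  */
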
/* Return true if the points-to solution *PT includes the variable
   declaration DECL.  */

static bool
pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
{
  if (pt->anything)
    return true;

  if (pt->nonlocal
      && is_global_var (decl))
    return true;

  if (pt->vars
      && bitmap_bit_p (pt->vars, DECL_UID (decl)))
    return true;

  /* If the solution includes ESCAPED, check it.  */
  if (pt->escaped
      && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
    return true;

  return false;
}

bool
pt_solution_includes (struct pt_solution *pt, const_tree decl)
{
  bool res = pt_solution_includes_1 (pt, decl);
  if (res)
    ++pta_stats.pt_solution_includes_may_alias;
  else
    ++pta_stats.pt_solution_includes_no_alias;
  return res;
}

/* Return true if both points-to solutions PT1 and PT2 have a non-empty
   intersection.  */

static bool
pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
{
  if (pt1->anything || pt2->anything)
    return true;

  /* If either points to unknown global memory and the other points to
     any global memory they alias.  */
  if ((pt1->nonlocal
       && (pt2->nonlocal
	   || pt2->vars_contains_global))
      || (pt2->nonlocal
	  && pt1->vars_contains_global))
    return true;

  /* Check the escaped solution if required.  */
  if ((pt1->escaped || pt2->escaped)
      && !pt_solution_empty_p (&cfun->gimple_df->escaped))
    {
      /* If both point to escaped memory and that solution
	 is not empty they alias.  */
      if (pt1->escaped && pt2->escaped)
	return true;

      /* If either points to escaped memory see if the escaped solution
	 intersects with the other.  */
      if ((pt1->escaped
	   && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt2))
	  || (pt2->escaped
	      && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt1)))
	return true;
    }

  /* Now both pointers alias if their points-to solution intersects.  */
  return (pt1->vars
	  && pt2->vars
	  && bitmap_intersect_p (pt1->vars, pt2->vars));
}

bool
pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
{
  bool res = pt_solutions_intersect_1 (pt1, pt2);
  if (res)
    ++pta_stats.pt_solutions_intersect_may_alias;
  else
    ++pta_stats.pt_solutions_intersect_no_alias;
  return res;
}

/* Return true if both points-to solutions PT1 and PT2 for two restrict
   qualified pointers are possibly based on the same pointer.  */

bool
pt_solutions_same_restrict_base (struct pt_solution *pt1,
				 struct pt_solution *pt2)
{
  /* If we deal with points-to solutions of two restrict qualified
     pointers solely rely on the pointed-to variable bitmap intersection.
     For two pointers that are based on each other the bitmaps will
     intersect.  */
  if (pt1->vars_contains_restrict
      && pt2->vars_contains_restrict)
    {
      gcc_assert (pt1->vars && pt2->vars);
      return bitmap_intersect_p (pt1->vars, pt2->vars);
    }

  return true;
}
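
/* Illustrative sketch: two indirect accesses *p and *q can be
   disambiguated when pt_solutions_intersect returns false for their
   solutions, e.g. (with x, y and z local variables)

     p = { x }  q = { y z }       -> no alias
     p = { x }  q = { NONLOCAL }  -> no alias, x is not global
     p = { ESCAPED }  q = { x }   -> depends on the ESCAPED solution

   which is roughly how the alias oracle consumes these queries.  */
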
/* Dump points-to information to OUTFILE.  */

static void
dump_sa_points_to_info (FILE *outfile)
{
  unsigned int i;

  fprintf (outfile, "\nPoints-to sets\n\n");

  if (dump_flags & TDF_STATS)
    {
      fprintf (outfile, "Stats:\n");
      fprintf (outfile, "Total vars: %d\n", stats.total_vars);
      fprintf (outfile, "Non-pointer vars: %d\n",
	       stats.nonpointer_vars);
      fprintf (outfile, "Statically unified vars: %d\n",
	       stats.unified_vars_static);
      fprintf (outfile, "Dynamically unified vars: %d\n",
	       stats.unified_vars_dynamic);
      fprintf (outfile, "Iterations: %d\n", stats.iterations);
      fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
      fprintf (outfile, "Number of implicit edges: %d\n",
	       stats.num_implicit_edges);
    }

  for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
    dump_solution_for_var (outfile, i);
}


/* Debug points-to information to stderr.  */

void
debug_sa_points_to_info (void)
{
  dump_sa_points_to_info (stderr);
}


/* Initialize the always-existing constraint variables for NULL,
   ANYTHING, READONLY, and INTEGER.  */

static void
init_base_vars (void)
{
  struct constraint_expr lhs, rhs;
  varinfo_t var_anything;
  varinfo_t var_nothing;
  varinfo_t var_readonly;
  varinfo_t var_escaped;
  varinfo_t var_nonlocal;
  varinfo_t var_callused;
  varinfo_t var_storedanything;
  varinfo_t var_integer;

  /* Create the NULL variable, used to represent that a variable points
     to NULL.  */
  var_nothing = new_var_info (NULL_TREE, "NULL");
  gcc_assert (var_nothing->id == nothing_id);
  var_nothing->is_artificial_var = 1;
  var_nothing->offset = 0;
  var_nothing->size = ~0;
  var_nothing->fullsize = ~0;
  var_nothing->is_special_var = 1;

  /* Create the ANYTHING variable, used to represent that a variable
     points to some unknown piece of memory.  */
  var_anything = new_var_info (NULL_TREE, "ANYTHING");
  gcc_assert (var_anything->id == anything_id);
  var_anything->is_artificial_var = 1;
  var_anything->size = ~0;
  var_anything->offset = 0;
  var_anything->next = NULL;
  var_anything->fullsize = ~0;
  var_anything->is_special_var = 1;

  /* Anything points to anything.  This makes deref constraints just
     work in the presence of linked list and other p = *p type loops,
     by saying that *ANYTHING = ANYTHING.  */
  lhs.type = SCALAR;
  lhs.var = anything_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;

  /* This specifically does not use process_constraint because
     process_constraint ignores all anything = anything constraints, since all
     but this one are redundant.  */
  VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));

  /* Create the READONLY variable, used to represent that a variable
     points to readonly memory.  */
  var_readonly = new_var_info (NULL_TREE, "READONLY");
  gcc_assert (var_readonly->id == readonly_id);
  var_readonly->is_artificial_var = 1;
  var_readonly->offset = 0;
  var_readonly->size = ~0;
  var_readonly->fullsize = ~0;
  var_readonly->next = NULL;
  var_readonly->is_special_var = 1;

  /* Readonly memory points to anything, in order to make deref
     easier.
     In reality, it points to anything the particular readonly
     variable can point to, but we don't track this separately.  */
  lhs.type = SCALAR;
  lhs.var = readonly_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = readonly_id;  /* FIXME */
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* Create the ESCAPED variable, used to represent the set of escaped
     memory.  */
  var_escaped = new_var_info (NULL_TREE, "ESCAPED");
  gcc_assert (var_escaped->id == escaped_id);
  var_escaped->is_artificial_var = 1;
  var_escaped->offset = 0;
  var_escaped->size = ~0;
  var_escaped->fullsize = ~0;
  var_escaped->is_special_var = 0;

  /* Create the NONLOCAL variable, used to represent the set of nonlocal
     memory.  */
  var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL");
  gcc_assert (var_nonlocal->id == nonlocal_id);
  var_nonlocal->is_artificial_var = 1;
  var_nonlocal->offset = 0;
  var_nonlocal->size = ~0;
  var_nonlocal->fullsize = ~0;
  var_nonlocal->is_special_var = 1;

  /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc.  */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = DEREF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
     whole variable escapes.  */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = escaped_id;
  rhs.offset = UNKNOWN_OFFSET;
  process_constraint (new_constraint (lhs, rhs));

  /* *ESCAPED = NONLOCAL.  This is true because we have to assume
     everything pointed to by escaped points to what global memory can
     point to.  */
  lhs.type = DEREF;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED.  This is true because
     global memory may point to global memory and escaped memory.  */
  lhs.type = SCALAR;
  lhs.var = nonlocal_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
  rhs.type = ADDRESSOF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* Create the CALLUSED variable, used to represent the set of call-used
     memory.  */
  var_callused = new_var_info (NULL_TREE, "CALLUSED");
  gcc_assert (var_callused->id == callused_id);
  var_callused->is_artificial_var = 1;
  var_callused->offset = 0;
  var_callused->size = ~0;
  var_callused->fullsize = ~0;
  var_callused->is_special_var = 0;

  /* CALLUSED = *CALLUSED, because call-used is may-deref'd at calls, etc.  */
  lhs.type = SCALAR;
  lhs.var = callused_id;
  lhs.offset = 0;
  rhs.type = DEREF;
  rhs.var = callused_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* CALLUSED = CALLUSED + UNKNOWN, because if a sub-field is call-used the
     whole variable is call-used.  */
  lhs.type = SCALAR;
  lhs.var = callused_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = callused_id;
  rhs.offset = UNKNOWN_OFFSET;
  process_constraint (new_constraint (lhs, rhs));

  /* Create the STOREDANYTHING variable, used to represent the set of
     variables stored to *ANYTHING.  */
  var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING");
  gcc_assert (var_storedanything->id == storedanything_id);
  var_storedanything->is_artificial_var = 1;
  var_storedanything->offset = 0;
  var_storedanything->size = ~0;
  var_storedanything->fullsize = ~0;
  var_storedanything->is_special_var = 0;

  /* Create the INTEGER variable, used to represent that a variable points
     to what an INTEGER "points to".  */
  var_integer = new_var_info (NULL_TREE, "INTEGER");
  gcc_assert (var_integer->id == integer_id);
  var_integer->is_artificial_var = 1;
  var_integer->size = ~0;
  var_integer->fullsize = ~0;
  var_integer->offset = 0;
  var_integer->next = NULL;
  var_integer->is_special_var = 1;

  /* INTEGER = ANYTHING, because we don't know where a dereference of
     a random integer will point to.  */
  lhs.type = SCALAR;
  lhs.var = integer_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
}

/* Initialize things necessary to perform PTA.  */

static void
init_alias_vars (void)
{
  use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);

  bitmap_obstack_initialize (&pta_obstack);
  bitmap_obstack_initialize (&oldpta_obstack);
  bitmap_obstack_initialize (&predbitmap_obstack);

  constraint_pool = create_alloc_pool ("Constraint pool",
				       sizeof (struct constraint), 30);
  variable_info_pool = create_alloc_pool ("Variable info pool",
					  sizeof (struct variable_info), 30);
  constraints = VEC_alloc (constraint_t, heap, 8);
  varmap = VEC_alloc (varinfo_t, heap, 8);
  vi_for_tree = pointer_map_create ();

  memset (&stats, 0, sizeof (stats));
  shared_bitmap_table = htab_create (511, shared_bitmap_hash,
				     shared_bitmap_eq, free);
  init_base_vars ();
}

/* Remove the REF and ADDRESS edges from GRAPH, as well as all the
   predecessor edges.  */

static void
remove_preds_and_fake_succs (constraint_graph_t graph)
{
  unsigned int i;

  /* Clear the implicit ref and address nodes from the successor
     lists.  */
  for (i = 0; i < FIRST_REF_NODE; i++)
    {
      if (graph->succs[i])
	bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
			    FIRST_REF_NODE * 2);
    }

  /* Free the successor list for the non-ref nodes.  */
  for (i = FIRST_REF_NODE; i < graph->size; i++)
    {
      if (graph->succs[i])
	BITMAP_FREE (graph->succs[i]);
    }

  /* Now reallocate the successor list to its real size, and blow away
     the predecessor bitmaps.  */
  graph->size = VEC_length (varinfo_t, varmap);
  graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);

  free (graph->implicit_preds);
  graph->implicit_preds = NULL;
  free (graph->preds);
  graph->preds = NULL;
  bitmap_obstack_release (&predbitmap_obstack);
}

/* Initialize the heapvar for statement mapping.  */

static void
init_alias_heapvars (void)
{
  if (!heapvar_for_stmt)
    heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, heapvar_map_eq,
					NULL);
}

/* Delete the heapvar for statement mapping.  */

void
delete_alias_heapvars (void)
{
  if (heapvar_for_stmt)
    htab_delete (heapvar_for_stmt);
  heapvar_for_stmt = NULL;
}

/* Solve the constraint set.  */

static void
solve_constraints (void)
{
  struct scc_info *si;

  if (dump_file)
    {
      fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
      dump_constraints (dump_file);
    }

  if (dump_file)
    fprintf (dump_file,
	     "\nCollapsing static cycles and doing variable "
	     "substitution\n");

  init_graph (VEC_length (varinfo_t, varmap) * 2);

  if (dump_file)
    fprintf (dump_file, "Building predecessor graph\n");
  build_pred_graph ();

  if (dump_file)
    fprintf (dump_file, "Detecting pointer and location "
	     "equivalences\n");
  si = perform_var_substitution (graph);

  if (dump_file)
    fprintf (dump_file, "Rewriting constraints and unifying "
	     "variables\n");
  rewrite_constraints (graph, si);

  build_succ_graph ();
  free_var_substitution_info (si);

  if (dump_file && (dump_flags & TDF_GRAPH))
    dump_constraint_graph (dump_file);

  move_complex_constraints (graph);

  if (dump_file)
    fprintf (dump_file, "Uniting pointer but not location equivalent "
	     "variables\n");
  unite_pointer_equivalences (graph);

  if (dump_file)
    fprintf (dump_file, "Finding indirect cycles\n");
  find_indirect_cycles (graph);

  /* Implicit nodes and predecessors are no longer necessary at this
     point.  */
  remove_preds_and_fake_succs (graph);

  if (dump_file)
    fprintf (dump_file, "Solving graph\n");

  solve_graph (graph);

  if (dump_file)
    dump_sa_points_to_info (dump_file);
}

/* Create points-to sets for the current function.  See the comments
   at the start of the file for an algorithmic overview.  */

static void
compute_points_to_sets (void)
{
  basic_block bb;
  unsigned i;
  varinfo_t vi;

  timevar_push (TV_TREE_PTA);

  init_alias_vars ();
  init_alias_heapvars ();

  intra_create_variable_infos ();

  /* Now walk all statements and derive aliases.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple phi = gsi_stmt (gsi);

	  if (is_gimple_reg (gimple_phi_result (phi)))
	    find_func_aliases (phi);
	}

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  find_func_aliases (stmt);
	}
    }

  /* From the constraints compute the points-to sets.  */
  solve_constraints ();

  /* Compute the points-to sets for ESCAPED and CALLUSED used for
     call-clobber analysis.  */
  find_what_var_points_to (get_varinfo (escaped_id),
			   &cfun->gimple_df->escaped);
  find_what_var_points_to (get_varinfo (callused_id),
			   &cfun->gimple_df->callused);

  /* Make sure the ESCAPED solution (which is used as placeholder in
     other solutions) does not reference itself.  This simplifies
     points-to solution queries.  */
  cfun->gimple_df->escaped.escaped = 0;

  /* Mark escaped HEAP variables as global.  */
  for (i = 0; VEC_iterate (varinfo_t, varmap, i, vi); ++i)
    if (vi->is_heap_var
	&& !vi->is_restrict_var
	&& !vi->is_global_var)
      DECL_EXTERNAL (vi->decl) = vi->is_global_var
	= pt_solution_includes (&cfun->gimple_df->escaped, vi->decl);

  /* Compute the points-to sets for pointer SSA_NAMEs.  */
  for (i = 0; i < num_ssa_names; ++i)
    {
      tree ptr = ssa_name (i);
      if (ptr
	  && POINTER_TYPE_P (TREE_TYPE (ptr)))
	find_what_p_points_to (ptr);
    }

  timevar_pop (TV_TREE_PTA);
}


/* Delete created points-to sets.  */

static void
delete_points_to_sets (void)
{
  unsigned int i;

  htab_delete (shared_bitmap_table);
  if (dump_file && (dump_flags & TDF_STATS))
    fprintf (dump_file, "Points to sets created:%d\n",
	     stats.points_to_sets_created);

  pointer_map_destroy (vi_for_tree);
  bitmap_obstack_release (&pta_obstack);
  VEC_free (constraint_t, heap, constraints);

  for (i = 0; i < graph->size; i++)
    VEC_free (constraint_t, heap, graph->complex[i]);
  free (graph->complex);

  free (graph->rep);
  free (graph->succs);
  free (graph->pe);
  free (graph->pe_rep);
  free (graph->indirect_cycles);
  free (graph);

  VEC_free (varinfo_t, heap, varmap);
  free_alloc_pool (variable_info_pool);
  free_alloc_pool (constraint_pool);
}


/* Compute points-to information for every SSA_NAME pointer in the
   current function and compute the transitive closure of escaped
   variables to re-initialize the call-clobber states of local variables.  */

unsigned int
compute_may_aliases (void)
{
  /* For each pointer P_i, determine the sets of variables that P_i may
     point-to.  Compute the reachability set of escaped and call-used
     variables.  */
  compute_points_to_sets ();

  /* Debugging dumps.  */
  if (dump_file)
    {
      dump_alias_info (dump_file);

      if (dump_flags & TDF_DETAILS)
	dump_referenced_vars (dump_file);
    }

  /* Deallocate memory used by aliasing data structures and the internal
     points-to solution.  */
  delete_points_to_sets ();

  gcc_assert (!need_ssa_update_p (cfun));

  return 0;
}

static bool
gate_tree_pta (void)
{
  return flag_tree_pta;
}

/* A dummy pass to cause points-to information to be computed via
   TODO_rebuild_alias.  */

struct gimple_opt_pass pass_build_alias =
{
 {
  GIMPLE_PASS,
  "alias",			/* name */
  gate_tree_pta,		/* gate */
  NULL,				/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_NONE,			/* tv_id */
  PROP_cfg | PROP_ssa,		/* properties_required */
  0,				/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_rebuild_alias | TODO_dump_func  /* todo_flags_finish */
 }
};

/* A dummy pass to cause points-to information to be computed via
   TODO_rebuild_alias.  */

struct gimple_opt_pass pass_build_ealias =
{
 {
  GIMPLE_PASS,
  "ealias",			/* name */
  gate_tree_pta,		/* gate */
  NULL,				/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_NONE,			/* tv_id */
  PROP_cfg | PROP_ssa,		/* properties_required */
  0,				/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_rebuild_alias | TODO_dump_func  /* todo_flags_finish */
 }
};


/* Return true if we should execute IPA PTA.  */
static bool
gate_ipa_pta (void)
{
  return (optimize
	  && flag_ipa_pta
	  /* Don't bother doing anything if the program has errors.  */
	  && !(errorcount || sorrycount));
}

/* Execute the driver for IPA PTA.  */
static unsigned int
ipa_pta_execute (void)
{
  struct cgraph_node *node;

  in_ipa_mode = 1;

  init_alias_heapvars ();
  init_alias_vars ();

  /* Build the constraints.  */
  for (node = cgraph_nodes; node; node = node->next)
    {
      /* Nodes without a body are not interesting.  Especially do not
	 visit clones at this point for now - we get duplicate decls
	 there for inline clones at least.  */
      if (!gimple_has_body_p (node->decl)
	  || node->clone_of)
	continue;

      /* It does not make sense to have graph edges into or out of
	 externally visible functions.  There is no extra information
	 we can gather from them.  */
      if (node->local.externally_visible)
	continue;

      create_function_info_for (node->decl,
				cgraph_node_name (node));
    }

  for (node = cgraph_nodes; node; node = node->next)
    {
      struct function *func;
      basic_block bb;
      tree old_func_decl;

      /* Nodes without a body are not interesting.  */
      if (!gimple_has_body_p (node->decl)
	  || node->clone_of)
	continue;

      if (dump_file)
	fprintf (dump_file,
		 "Generating constraints for %s\n",
		 cgraph_node_name (node));

      func = DECL_STRUCT_FUNCTION (node->decl);
      old_func_decl = current_function_decl;
      push_cfun (func);
      current_function_decl = node->decl;

      /* For externally visible functions use local constraints for
	 their arguments.  For local functions we see all callers
	 and thus do not need initial constraints for parameters.  */
      if (node->local.externally_visible)
	intra_create_variable_infos ();

      /* Build constraints for the function body.  */
      FOR_EACH_BB_FN (bb, func)
	{
	  gimple_stmt_iterator gsi;

	  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gimple phi = gsi_stmt (gsi);

	      if (is_gimple_reg (gimple_phi_result (phi)))
		find_func_aliases (phi);
	    }

	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);

	      find_func_aliases (stmt);
	    }
	}

      current_function_decl = old_func_decl;
      pop_cfun ();
    }

  /* From the constraints compute the points-to sets.  */
  solve_constraints ();

  delete_points_to_sets ();

  in_ipa_mode = 0;

  return 0;
}

struct simple_ipa_opt_pass pass_ipa_pta =
{
 {
  SIMPLE_IPA_PASS,
  "pta",			/* name */
  gate_ipa_pta,			/* gate */
  ipa_pta_execute,		/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_IPA_PTA,			/* tv_id */
  0,				/* properties_required */
  0,				/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_update_ssa		/* todo_flags_finish */
 }
};


#include "gt-tree-ssa-structalias.h"