/* Utilities for ipa analysis.
   Copyright (C) 2005-2013 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "dumpfile.h"
#include "langhooks.h"
#include "pointer-set.h"
#include "splay-tree.h"
#include "ggc.h"
#include "ipa-utils.h"
#include "ipa-reference.h"
#include "gimple.h"
#include "cgraph.h"
#include "flags.h"
#include "diagnostic.h"

/* Debugging function for postorder and inorder code.  NOTE is a string
   that is printed before the nodes are printed.  ORDER is an array of
   cgraph_nodes that has COUNT useful nodes in it.  */

void
ipa_print_order (FILE* out,
                 const char * note,
                 struct cgraph_node** order,
                 int count)
{
  int i;
  fprintf (out, "\n\n ordered call graph: %s\n", note);

  for (i = count - 1; i >= 0; i--)
    dump_cgraph_node (out, order[i]);
  fprintf (out, "\n");
  fflush (out);
}


struct searchc_env {
  struct cgraph_node **stack;
  int stack_size;
  struct cgraph_node **result;
  int order_pos;
  splay_tree nodes_marked_new;
  bool reduce;
  bool allow_overwritable;
  int count;
};

/* This is an implementation of Tarjan's strongly connected region
   finder as reprinted in Aho, Hopcroft and Ullman's The Design and
   Analysis of Computer Algorithms (1974), pages 192-193.  This version
   has been customized for cgraph_nodes.  The env parameter is because
   it is recursive and there are no nested functions here.  This
   function should only be called from itself or
   ipa_reduced_postorder.  ENV is a stack env and would be
   unnecessary if C had nested functions.  V is the node to start
   searching from.  */
static void
searchc (struct searchc_env* env, struct cgraph_node *v,
         bool (*ignore_edge) (struct cgraph_edge *))
{
  struct cgraph_edge *edge;
  struct ipa_dfs_info *v_info = (struct ipa_dfs_info *) v->symbol.aux;

  /* Mark node as old.  */
  v_info->new_node = false;
  splay_tree_remove (env->nodes_marked_new, v->uid);

  v_info->dfn_number = env->count;
  v_info->low_link = env->count;
  env->count++;
  env->stack[(env->stack_size)++] = v;
  v_info->on_stack = true;

  for (edge = v->callees; edge; edge = edge->next_callee)
    {
      struct ipa_dfs_info * w_info;
      enum availability avail;
      struct cgraph_node *w = cgraph_function_or_thunk_node (edge->callee, &avail);

      if (!w || (ignore_edge && ignore_edge (edge)))
        continue;

      if (w->symbol.aux
          && (avail > AVAIL_OVERWRITABLE
              || (env->allow_overwritable && avail == AVAIL_OVERWRITABLE)))
        {
          w_info = (struct ipa_dfs_info *) w->symbol.aux;
          if (w_info->new_node)
            {
              searchc (env, w, ignore_edge);
              v_info->low_link =
                (v_info->low_link < w_info->low_link) ?
                v_info->low_link : w_info->low_link;
            }
          else
            if ((w_info->dfn_number < v_info->dfn_number)
                && (w_info->on_stack))
              v_info->low_link =
                (w_info->dfn_number < v_info->low_link) ?
                w_info->dfn_number : v_info->low_link;
        }
    }


  if (v_info->low_link == v_info->dfn_number)
    {
      struct cgraph_node *last = NULL;
      struct cgraph_node *x;
      struct ipa_dfs_info *x_info;
      do {
        x = env->stack[--(env->stack_size)];
        x_info = (struct ipa_dfs_info *) x->symbol.aux;
        x_info->on_stack = false;
        x_info->scc_no = v_info->dfn_number;

        if (env->reduce)
          {
            x_info->next_cycle = last;
            last = x;
          }
        else
          env->result[env->order_pos++] = x;
      }
      while (v != x);
      if (env->reduce)
        env->result[env->order_pos++] = v;
    }
}

/* Topsort the call graph by caller relation.  Put the result in ORDER.

   The REDUCE flag is true if you want the cycles reduced to single nodes.
   You can use ipa_get_nodes_in_cycle to obtain a vector containing all real
   call graph nodes in a reduced node.

   Set ALLOW_OVERWRITABLE if nodes with such availability should be included.
   IGNORE_EDGE, if non-NULL is a hook that may make some edges insignificant
   for the topological sort.  */

int
ipa_reduced_postorder (struct cgraph_node **order,
                       bool reduce, bool allow_overwritable,
                       bool (*ignore_edge) (struct cgraph_edge *))
{
  struct cgraph_node *node;
  struct searchc_env env;
  splay_tree_node result;
  env.stack = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
  env.stack_size = 0;
  env.result = order;
  env.order_pos = 0;
  env.nodes_marked_new = splay_tree_new (splay_tree_compare_ints, 0, 0);
  env.count = 1;
  env.reduce = reduce;
  env.allow_overwritable = allow_overwritable;

  FOR_EACH_DEFINED_FUNCTION (node)
    {
      enum availability avail = cgraph_function_body_availability (node);

      if (avail > AVAIL_OVERWRITABLE
          || (allow_overwritable
              && (avail == AVAIL_OVERWRITABLE)))
        {
          /* Reuse the info if it is already there.  */
          struct ipa_dfs_info *info = (struct ipa_dfs_info *) node->symbol.aux;
          if (!info)
            info = XCNEW (struct ipa_dfs_info);
          info->new_node = true;
          info->on_stack = false;
          info->next_cycle = NULL;
          node->symbol.aux = info;

          splay_tree_insert (env.nodes_marked_new,
                             (splay_tree_key)node->uid,
                             (splay_tree_value)node);
        }
      else
        node->symbol.aux = NULL;
    }
  result = splay_tree_min (env.nodes_marked_new);
  while (result)
    {
      node = (struct cgraph_node *)result->value;
      searchc (&env, node, ignore_edge);
      result = splay_tree_min (env.nodes_marked_new);
    }
  splay_tree_delete (env.nodes_marked_new);
  free (env.stack);

  return env.order_pos;
}

/* Deallocate all ipa_dfs_info structures pointed to by the aux pointer of call
   graph nodes.  */

void
ipa_free_postorder_info (void)
{
  struct cgraph_node *node;
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      /* Get rid of the aux information.  */
      if (node->symbol.aux)
        {
          free (node->symbol.aux);
          node->symbol.aux = NULL;
        }
    }
}
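/* A typical driver for the two functions above, sketched after the way
   the IPA propagation passes use them (process_node below is a
   hypothetical, pass-specific hook, not something defined here):

      struct cgraph_node **order
        = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
      int i, order_pos = ipa_reduced_postorder (order, true, false, NULL);

      for (i = 0; i < order_pos; i++)
        process_node (order[i]);

      ipa_free_postorder_info ();
      free (order);

   With REDUCE set, every strongly connected component is represented by
   a single node in ORDER; its remaining members can be retrieved with
   ipa_get_nodes_in_cycle below.  */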
/* Get the set of nodes for the cycle in the reduced call graph starting
   from NODE.  */

vec<cgraph_node_ptr>
ipa_get_nodes_in_cycle (struct cgraph_node *node)
{
  vec<cgraph_node_ptr> v = vNULL;
  struct ipa_dfs_info *node_dfs_info;
  while (node)
    {
      v.safe_push (node);
      node_dfs_info = (struct ipa_dfs_info *) node->symbol.aux;
      node = node_dfs_info->next_cycle;
    }
  return v;
}
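/* For example (a sketch; propagate_to stands for arbitrary pass-specific
   work), visiting every member of the cycle represented by NODE from a
   reduced postorder looks like:

      int ix;
      struct cgraph_node *w;
      vec<cgraph_node_ptr> cycle_nodes = ipa_get_nodes_in_cycle (node);

      FOR_EACH_VEC_ELT (cycle_nodes, ix, w)
        propagate_to (w);
      cycle_nodes.release ();

   The vector starts with NODE itself, continues with the members chained
   through ipa_dfs_info::next_cycle, and must be released by the caller.  */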
struct postorder_stack
{
  struct cgraph_node *node;
  struct cgraph_edge *edge;
  int ref;
};

/* Fill array order with all nodes with output flag set in the reverse
   topological order.  Return the number of elements in the array.
   FIXME: While walking, consider aliases, too.  */

int
ipa_reverse_postorder (struct cgraph_node **order)
{
  struct cgraph_node *node, *node2;
  int stack_size = 0;
  int order_pos = 0;
  struct cgraph_edge *edge;
  int pass;
  struct ipa_ref *ref;

  struct postorder_stack *stack =
    XCNEWVEC (struct postorder_stack, cgraph_n_nodes);

  /* We have to deal with cycles nicely, so use a depth first traversal
     output algorithm.  Ignore the fact that some functions won't need
     to be output and put them into order as well, so we get dependencies
     right through inline functions.  */
  FOR_EACH_FUNCTION (node)
    node->symbol.aux = NULL;
  for (pass = 0; pass < 2; pass++)
    FOR_EACH_FUNCTION (node)
      if (!node->symbol.aux
          && (pass
              || (!node->symbol.address_taken
                  && !node->global.inlined_to
                  && !node->alias && !node->thunk.thunk_p
                  && !cgraph_only_called_directly_p (node))))
        {
          stack_size = 0;
          stack[stack_size].node = node;
          stack[stack_size].edge = node->callers;
          stack[stack_size].ref = 0;
          node->symbol.aux = (void *)(size_t)1;
          while (stack_size >= 0)
            {
              while (true)
                {
                  node2 = NULL;
                  while (stack[stack_size].edge && !node2)
                    {
                      edge = stack[stack_size].edge;
                      node2 = edge->caller;
                      stack[stack_size].edge = edge->next_caller;
                      /* Break possible cycles involving always-inline
                         functions by ignoring edges from always-inline
                         functions to non-always-inline functions.  */
                      if (DECL_DISREGARD_INLINE_LIMITS (edge->caller->symbol.decl)
                          && !DECL_DISREGARD_INLINE_LIMITS
                              (cgraph_function_node (edge->callee, NULL)->symbol.decl))
                        node2 = NULL;
                    }
                  for (; ipa_ref_list_referring_iterate (&stack[stack_size].node->symbol.ref_list,
                                                         stack[stack_size].ref,
                                                         ref) && !node2;
                       stack[stack_size].ref++)
                    {
                      if (ref->use == IPA_REF_ALIAS)
                        node2 = ipa_ref_referring_node (ref);
                    }
                  if (!node2)
                    break;
                  if (!node2->symbol.aux)
                    {
                      stack[++stack_size].node = node2;
                      stack[stack_size].edge = node2->callers;
                      stack[stack_size].ref = 0;
                      node2->symbol.aux = (void *)(size_t)1;
                    }
                }
              order[order_pos++] = stack[stack_size--].node;
            }
        }
  free (stack);
  FOR_EACH_FUNCTION (node)
    node->symbol.aux = NULL;
  return order_pos;
}



/* Given a memory reference T, will return the variable at the bottom
   of the access.  Unlike get_base_address, this will recurse through
   INDIRECT_REFS.  */

tree
get_base_var (tree t)
{
  while (!SSA_VAR_P (t)
         && (!CONSTANT_CLASS_P (t))
         && TREE_CODE (t) != LABEL_DECL
         && TREE_CODE (t) != FUNCTION_DECL
         && TREE_CODE (t) != CONST_DECL
         && TREE_CODE (t) != CONSTRUCTOR)
    {
      t = TREE_OPERAND (t, 0);
    }
  return t;
}
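/* For instance (illustration only), for a reference such as p->a.b[i],
   i.e. roughly

      ARRAY_REF (COMPONENT_REF (COMPONENT_REF (MEM_REF (p, 0), a), b), i)

   get_base_var keeps taking operand 0 and returns P (an SSA name or
   pointer decl), whereas get_base_address would stop at the MEM_REF.  */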
/* Create a new cgraph node set.  */

cgraph_node_set
cgraph_node_set_new (void)
{
  cgraph_node_set new_node_set;

  new_node_set = XCNEW (struct cgraph_node_set_def);
  new_node_set->map = pointer_map_create ();
  new_node_set->nodes.create (0);
  return new_node_set;
}


/* Add cgraph_node NODE to cgraph_node_set SET.  */

void
cgraph_node_set_add (cgraph_node_set set, struct cgraph_node *node)
{
  void **slot;

  slot = pointer_map_insert (set->map, node);

  if (*slot)
    {
      int index = (size_t) *slot - 1;
      gcc_checking_assert ((set->nodes[index]
                            == node));
      return;
    }

  *slot = (void *)(size_t) (set->nodes.length () + 1);

  /* Insert into node vector.  */
  set->nodes.safe_push (node);
}


/* Remove cgraph_node NODE from cgraph_node_set SET.  */

void
cgraph_node_set_remove (cgraph_node_set set, struct cgraph_node *node)
{
  void **slot, **last_slot;
  int index;
  struct cgraph_node *last_node;

  slot = pointer_map_contains (set->map, node);
  if (slot == NULL || !*slot)
    return;

  index = (size_t) *slot - 1;
  gcc_checking_assert (set->nodes[index]
                       == node);

  /* Remove from vector.  We do this by swapping node with the last element
     of the vector.  */
  last_node = set->nodes.pop ();
  if (last_node != node)
    {
      last_slot = pointer_map_contains (set->map, last_node);
      gcc_checking_assert (last_slot && *last_slot);
      *last_slot = (void *)(size_t) (index + 1);

      /* Move the last element to the original spot of NODE.  */
      set->nodes[index] = last_node;
    }

  /* Remove element from hash table.  */
  *slot = NULL;
}


/* Find NODE in SET and return an iterator to it if found.  A null iterator
   is returned if NODE is not in SET.  */

cgraph_node_set_iterator
cgraph_node_set_find (cgraph_node_set set, struct cgraph_node *node)
{
  void **slot;
  cgraph_node_set_iterator csi;

  slot = pointer_map_contains (set->map, node);
  if (slot == NULL || !*slot)
    csi.index = (unsigned) ~0;
  else
    csi.index = (size_t)*slot - 1;
  csi.set = set;

  return csi;
}


/* Dump content of SET to file F.  */

void
dump_cgraph_node_set (FILE *f, cgraph_node_set set)
{
  cgraph_node_set_iterator iter;

  for (iter = csi_start (set); !csi_end_p (iter); csi_next (&iter))
    {
      struct cgraph_node *node = csi_node (iter);
      fprintf (f, " %s/%i", cgraph_node_name (node), node->uid);
    }
  fprintf (f, "\n");
}


/* Dump content of SET to stderr.  */

DEBUG_FUNCTION void
debug_cgraph_node_set (cgraph_node_set set)
{
  dump_cgraph_node_set (stderr, set);
}


/* Free cgraph node set.  */

void
free_cgraph_node_set (cgraph_node_set set)
{
  set->nodes.release ();
  pointer_map_destroy (set->map);
  free (set);
}
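/* A minimal sketch of how the node set API above fits together (the
   varpool_node_set functions below follow the same pattern;
   process_node is a hypothetical placeholder for client code):

      cgraph_node_set set = cgraph_node_set_new ();
      cgraph_node_set_add (set, node);

      if (!csi_end_p (cgraph_node_set_find (set, node)))
        process_node (node);

      cgraph_node_set_remove (set, node);
      free_cgraph_node_set (set);

   Membership tests go through the pointer map, while iteration with
   csi_start/csi_next walks the vector; removal moves the last element
   of the vector into the vacated slot, so iteration order is insertion
   order only as long as nothing has been removed.  */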
/* Create a new varpool node set.  */

varpool_node_set
varpool_node_set_new (void)
{
  varpool_node_set new_node_set;

  new_node_set = XCNEW (struct varpool_node_set_def);
  new_node_set->map = pointer_map_create ();
  new_node_set->nodes.create (0);
  return new_node_set;
}


/* Add varpool_node NODE to varpool_node_set SET.  */

void
varpool_node_set_add (varpool_node_set set, struct varpool_node *node)
{
  void **slot;

  slot = pointer_map_insert (set->map, node);

  if (*slot)
    {
      int index = (size_t) *slot - 1;
      gcc_checking_assert ((set->nodes[index]
                            == node));
      return;
    }

  *slot = (void *)(size_t) (set->nodes.length () + 1);

  /* Insert into node vector.  */
  set->nodes.safe_push (node);
}


/* Remove varpool_node NODE from varpool_node_set SET.  */

void
varpool_node_set_remove (varpool_node_set set, struct varpool_node *node)
{
  void **slot, **last_slot;
  int index;
  struct varpool_node *last_node;

  slot = pointer_map_contains (set->map, node);
  if (slot == NULL || !*slot)
    return;

  index = (size_t) *slot - 1;
  gcc_checking_assert (set->nodes[index]
                       == node);

  /* Remove from vector.  We do this by swapping node with the last element
     of the vector.  */
  last_node = set->nodes.pop ();
  if (last_node != node)
    {
      last_slot = pointer_map_contains (set->map, last_node);
      gcc_checking_assert (last_slot && *last_slot);
      *last_slot = (void *)(size_t) (index + 1);

      /* Move the last element to the original spot of NODE.  */
      set->nodes[index] = last_node;
    }

  /* Remove element from hash table.  */
  *slot = NULL;
}


/* Find NODE in SET and return an iterator to it if found.  A null iterator
   is returned if NODE is not in SET.  */

varpool_node_set_iterator
varpool_node_set_find (varpool_node_set set, struct varpool_node *node)
{
  void **slot;
  varpool_node_set_iterator vsi;

  slot = pointer_map_contains (set->map, node);
  if (slot == NULL || !*slot)
    vsi.index = (unsigned) ~0;
  else
    vsi.index = (size_t)*slot - 1;
  vsi.set = set;

  return vsi;
}


/* Dump content of SET to file F.  */

void
dump_varpool_node_set (FILE *f, varpool_node_set set)
{
  varpool_node_set_iterator iter;

  for (iter = vsi_start (set); !vsi_end_p (iter); vsi_next (&iter))
    {
      struct varpool_node *node = vsi_node (iter);
      fprintf (f, " %s", varpool_node_name (node));
    }
  fprintf (f, "\n");
}


/* Free varpool node set.  */

void
free_varpool_node_set (varpool_node_set set)
{
  set->nodes.release ();
  pointer_map_destroy (set->map);
  free (set);
}


/* Dump content of SET to stderr.  */

DEBUG_FUNCTION void
debug_varpool_node_set (varpool_node_set set)
{
  dump_varpool_node_set (stderr, set);
}