/* Callgraph based analysis of static variables.
   Copyright (C) 2004-2017 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file marks functions as being either const (TREE_READONLY) or
   pure (DECL_PURE_P).  It can also set a variant of these that
   are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).

   This must be run after inlining decisions have been made since
   otherwise the local sets will not contain information that is
   consistent with the post-inlined state.  The global sets are not
   prone to this problem since they are by definition transitive.  */

/* The code in this module is called by the ipa pass manager.  It
   should be one of the later passes since its information is used by
   the rest of the compilation.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "calls.h"
#include "cfganal.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-niter.h"
#include "langhooks.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "intl.h"
#include "opts.h"

/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};

const char *pure_const_names[3] = {"const", "pure", "neither"};

/* Holder for the const_state.  There is one of these per function
   decl.  */
struct funct_state_d
{
  /* See above.  */
  enum pure_const_state_e pure_const_state;
  /* What the user set here; we can always be sure about this.  */
  enum pure_const_state_e state_previously_known;
  bool looping_previously_known;

  /* True if the function could possibly loop forever.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to cse pure and const
     calls, it is not legal to have dce get rid of the call if there
     is a possibility that the call could loop forever since this is
     a behavioral change.  */
  bool looping;

  bool can_throw;

  /* If function can call free, munmap or otherwise make previously
     non-trapping memory accesses trapping.  */
  bool can_free;
};
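/* As a quick illustration of the lattice (an editorial sketch, not part
   of GCC itself), consider three toy functions, assuming no other side
   effects:

     int sq (int x) { return x * x; }    // const: depends only on args
     int g;
     int get_g (void) { return g; }      // pure: reads global memory
                                         // but never writes it
     void set_g (int x) { g = x; }       // neither: writes global memory

   A const or pure function that may not terminate, such as
   "int spin (void) { for (;;); }", additionally has LOOPING set, which
   keeps dce from removing calls to it.  */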
/* State used when we know nothing about function.  */
static struct funct_state_d varying_state
   = { IPA_NEITHER, IPA_NEITHER, true, true, true, true };


typedef struct funct_state_d * funct_state;

/* The storage of the funct_state is abstracted because there is the
   possibility that it may be desirable to move this to the cgraph
   local info.  */

/* Array, indexed by cgraph node uid, of function states.  */

static vec<funct_state> funct_state_vec;

static bool gate_pure_const (void);

namespace {

const pass_data pass_data_ipa_pure_const =
{
  IPA_PASS, /* type */
  "pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_pure_const : public ipa_opt_pass_d
{
public:
  pass_ipa_pure_const(gcc::context *ctxt);

  /* opt_pass methods: */
  bool gate (function *) { return gate_pure_const (); }
  unsigned int execute (function *fun);

  void register_hooks (void);

private:
  bool init_p;

  /* Holders of ipa cgraph hooks: */
  struct cgraph_node_hook_list *function_insertion_hook_holder;
  struct cgraph_2node_hook_list *node_duplication_hook_holder;
  struct cgraph_node_hook_list *node_removal_hook_holder;

}; // class pass_ipa_pure_const

} // anon namespace

/* Try to guess if function body will always be visible to compiler
   when compiling the call and whether compiler will be able
   to propagate the information by itself.  */

static bool
function_always_visible_to_compiler_p (tree decl)
{
  return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl));
}

/* Emit suggestion about attribute ATTRIB_NAME for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  The diagnostic is
   controlled by OPTION.  WARNED_ABOUT is a hash_set<tree> unique for
   OPTION; this function may initialize it, and it is always returned
   by the function.  */

static hash_set<tree> *
suggest_attribute (int option, tree decl, bool known_finite,
                   hash_set<tree> *warned_about,
                   const char * attrib_name)
{
  if (!option_enabled (option, &global_options))
    return warned_about;
  if (TREE_THIS_VOLATILE (decl)
      || (known_finite && function_always_visible_to_compiler_p (decl)))
    return warned_about;

  if (!warned_about)
    warned_about = new hash_set<tree>;
  if (warned_about->contains (decl))
    return warned_about;
  warned_about->add (decl);
  warning_at (DECL_SOURCE_LOCATION (decl),
              option,
              known_finite
              ? G_("function might be candidate for attribute %qs")
              : G_("function might be candidate for attribute %qs"
                   " if it is known to return normally"), attrib_name);
  return warned_about;
}

/* Emit suggestion about __attribute__((pure)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_pure (tree decl, bool known_finite)
{
  static hash_set<tree> *warned_about;

  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
                         known_finite, warned_about, "pure");
}
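/* For instance (an editorial example, not GCC source), compiling

     int g;
     int get_g (void) { return g; }

   with -O2 -Wsuggest-attribute=pure is expected to emit "function
   might be candidate for attribute 'pure'": the function only reads
   global memory, is public (so not always visible to the compiler),
   and is known to return normally.  */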
/* Emit suggestion about __attribute__((const)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_const (tree decl, bool known_finite)
{
  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
                         known_finite, warned_about, "const");
}

static void
warn_function_noreturn (tree decl)
{
  tree original_decl = decl;

  cgraph_node *node = cgraph_node::get (decl);
  if (node->instrumentation_clone)
    decl = node->instrumented_version->decl;

  static hash_set<tree> *warned_about;
  if (!lang_hooks.missing_noreturn_ok_p (decl)
      && targetm.warn_func_return (decl))
    warned_about
      = suggest_attribute (OPT_Wsuggest_attribute_noreturn, original_decl,
                           true, warned_about, "noreturn");
}

/* Return true if we have a function state for NODE.  */

static inline bool
has_function_state (struct cgraph_node *node)
{
  if (!funct_state_vec.exists ()
      || funct_state_vec.length () <= (unsigned int) node->uid)
    return false;
  return funct_state_vec[node->uid] != NULL;
}

/* Return the function state from NODE.  */

static inline funct_state
get_function_state (struct cgraph_node *node)
{
  if (!funct_state_vec.exists ()
      || funct_state_vec.length () <= (unsigned int) node->uid
      || !funct_state_vec[node->uid])
    /* We might want to put correct previously_known state into varying.  */
    return &varying_state;
  return funct_state_vec[node->uid];
}

/* Set the function state S for NODE.  */

static inline void
set_function_state (struct cgraph_node *node, funct_state s)
{
  if (!funct_state_vec.exists ()
      || funct_state_vec.length () <= (unsigned int) node->uid)
    funct_state_vec.safe_grow_cleared (node->uid + 1);

  /* If funct_state_vec already contains a funct_state, we have to release
     it before it is going to be overwritten.  */
  if (funct_state_vec[node->uid] != NULL
      && funct_state_vec[node->uid] != &varying_state)
    free (funct_state_vec[node->uid]);

  funct_state_vec[node->uid] = s;
}

/* Check to see if the use (or definition when CHECKING_WRITE is true)
   of variable T is legal in a function that is either pure or const.  */

static inline void
check_decl (funct_state local,
            tree t, bool checking_write, bool ipa)
{
  /* Do not want to do anything with volatile except mark any
     function that uses one to be not const or pure.  */
  if (TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Volatile operand is not const/pure\n");
      return;
    }

  /* Do not care about a local automatic that is not static.  */
  if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
    return;

  /* If the variable has the "used" attribute, treat it as if it had
     been touched by the devil.  */
  if (DECL_PRESERVE_P (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Used static/global variable is not const/pure\n");
      return;
    }

  /* In IPA mode we are not interested in checking actual loads and stores;
     they will be processed at propagation time using ipa_ref.  */
  if (ipa)
    return;

  /* Since we have dealt with the locals and params cases above, if we
     are CHECKING_WRITE, this cannot be a pure or constant
     function.  */
  if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    static/global memory write is not const/pure\n");
      return;
    }

  if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
    {
      /* Readonly reads are safe.  */
      if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
        return; /* Read of a constant, do not change the function state.  */
      else
        {
          if (dump_file)
            fprintf (dump_file, "    global memory read is not const\n");
          /* Just a regular read.  */
          if (local->pure_const_state == IPA_CONST)
            local->pure_const_state = IPA_PURE;
        }
    }
  else
    {
      /* Compilation level statics can be read if they are readonly
         variables.  */
      if (TREE_READONLY (t))
        return;

      if (dump_file)
        fprintf (dump_file, "    static memory read is not const\n");
      /* Just a regular read.  */
      if (local->pure_const_state == IPA_CONST)
        local->pure_const_state = IPA_PURE;
    }
}
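/* To sketch the transitions above (editorial example, not GCC source):
   in a function whose state starts as IPA_CONST,

     static const int k = 42;
     extern int g;
     static int s;

     ... = k;   // read of a readonly static: state stays IPA_CONST
     ... = g;   // read of a non-readonly global: state drops to IPA_PURE
     s = 1;     // write to static memory: state drops to IPA_NEITHER  */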
/* Check to see if the use (or definition when CHECKING_WRITE is true)
   of variable T is legal in a function that is either pure or const.  */

static inline void
check_op (funct_state local, tree t, bool checking_write)
{
  t = get_base_address (t);
  if (t && TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Volatile indirect ref is not const/pure\n");
      return;
    }
  else if (t
           && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
           && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
           && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
    {
      if (dump_file)
        fprintf (dump_file, "    Indirect ref to local memory is OK\n");
      return;
    }
  else if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Indirect ref write is not const/pure\n");
      return;
    }
  else
    {
      if (dump_file)
        fprintf (dump_file, "    Indirect ref read is not const\n");
      if (local->pure_const_state == IPA_CONST)
        local->pure_const_state = IPA_PURE;
    }
}

/* Compute state based on ECF FLAGS and store to STATE and LOOPING.  */

static void
state_from_flags (enum pure_const_state_e *state, bool *looping,
                  int flags, bool cannot_lead_to_return)
{
  *looping = false;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    {
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " looping");
    }
  if (flags & ECF_CONST)
    {
      *state = IPA_CONST;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " const\n");
    }
  else if (flags & ECF_PURE)
    {
      *state = IPA_PURE;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " pure\n");
    }
  else if (cannot_lead_to_return)
    {
      *state = IPA_PURE;
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " ignoring side effects->pure looping\n");
    }
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " neither\n");
      *state = IPA_NEITHER;
      *looping = true;
    }
}

/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store the better
   of the two variants into STATE and LOOPING.  Be sure to merge
   looping correctly.  IPA_NEITHER functions have looping 0 even if
   they don't have to return.  */

static inline void
better_state (enum pure_const_state_e *state, bool *looping,
              enum pure_const_state_e state2, bool looping2)
{
  if (state2 < *state)
    {
      if (*state == IPA_NEITHER)
        *looping = looping2;
      else
        *looping = MIN (*looping, looping2);
      *state = state2;
    }
  else if (state2 != IPA_NEITHER)
    *looping = MIN (*looping, looping2);
}

/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store the worse
   of the two variants into STATE and LOOPING.  FROM and TO are the
   symbols of the caller and the callee, used to decide whether the
   callee binds to its current definition.  */

static inline void
worse_state (enum pure_const_state_e *state, bool *looping,
             enum pure_const_state_e state2, bool looping2,
             struct symtab_node *from,
             struct symtab_node *to)
{
  /* Consider function:

     bool a(int *p)
     {
       return *p==*p;
     }

     During early optimization we will turn this into:

     bool a(int *p)
     {
       return true;
     }

     Now this function will be detected as CONST; however, when
     interposed it may end up being just pure.  We must always assume
     the worst scenario here.  */
  if (*state == IPA_CONST && state2 == IPA_CONST
      && to && !TREE_READONLY (to->decl) && !to->binds_to_current_def_p (from))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Dropping state to PURE because call to %s may not "
                 "bind to current def.\n", to->name ());
      state2 = IPA_PURE;
    }
  *state = MAX (*state, state2);
  *looping = MAX (*looping, looping2);
}
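/* A small worked example of the two merges above (editorial note): if
   a function body analyzes as IPA_PURE/looping but its known
   attributes say IPA_CONST/non-looping, better_state keeps
   IPA_CONST/non-looping, since the previously known state wins.
   Conversely, when a caller that is so far IPA_CONST/non-looping
   merges in a callee that is IPA_PURE/looping, worse_state yields
   IPA_PURE/looping: a function can never be better than the calls it
   contains.  */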
/* Recognize special cases of builtins that are by themselves not pure
   or const but a function using them is.  */
static bool
special_builtin_state (enum pure_const_state_e *state, bool *looping,
                       tree callee)
{
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_RETURN:
      case BUILT_IN_UNREACHABLE:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_UNWIND_RESUME:
      case BUILT_IN_CXA_END_CLEANUP:
      case BUILT_IN_EH_COPY_VALUES:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_APPLY:
      case BUILT_IN_APPLY_ARGS:
      case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
      case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
        *looping = false;
        *state = IPA_CONST;
        return true;
      case BUILT_IN_PREFETCH:
        *looping = true;
        *state = IPA_CONST;
        return true;
      default:
        break;
      }
  return false;
}
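/* For instance (editorial sketch), alloca is not const by itself, but
   a function that only uses it for scratch space still can be:

     int f (int n)
     {
       char *buf = __builtin_alloca (n);   // special-cased above
       buf[0] = 1;                         // store to local memory only
       return n;                           // result depends only on args
     }  */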
/* Check the parameters of the function call CALL to see if
   there are any references in the parameters that are not allowed for
   pure or const functions.  Also check to see if this is either an
   indirect call, a call outside the compilation unit, or a call with
   special attributes that may also affect the purity.  */

static void
check_call (funct_state local, gcall *call, bool ipa)
{
  int flags = gimple_call_flags (call);
  tree callee_t = gimple_call_fndecl (call);
  bool possibly_throws = stmt_could_throw_p (call);
  bool possibly_throws_externally = (possibly_throws
                                     && stmt_can_throw_external (call));

  if (possibly_throws)
    {
      unsigned int i;
      for (i = 0; i < gimple_num_ops (call); i++)
        if (gimple_op (call, i)
            && tree_could_throw_p (gimple_op (call, i)))
          {
            if (possibly_throws && cfun->can_throw_non_call_exceptions)
              {
                if (dump_file)
                  fprintf (dump_file, "    operand can throw; looping\n");
                local->looping = true;
              }
            if (possibly_throws_externally)
              {
                if (dump_file)
                  fprintf (dump_file, "    operand can throw externally\n");
                local->can_throw = true;
              }
          }
    }

  /* The const and pure flags are set by a variety of places in the
     compiler (including here).  If someone has already set the flags
     for the callee, (such as for some of the builtins) we will use
     them, otherwise we will compute our own information.

     Const and pure functions have less clobber effects than other
     functions so we process these first.  Otherwise if it is a call
     outside the compilation unit or an indirect call we punt.  This
     leaves local calls which will be processed by following the call
     graph.  */
  if (callee_t)
    {
      enum pure_const_state_e call_state;
      bool call_looping;

      if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
          && !nonfreeing_call_p (call))
        local->can_free = true;

      if (special_builtin_state (&call_state, &call_looping, callee_t))
        {
          worse_state (&local->pure_const_state, &local->looping,
                       call_state, call_looping,
                       NULL, NULL);
          return;
        }
      /* When bad things happen to bad functions, they cannot be const
         or pure.  */
      if (setjmp_call_p (callee_t))
        {
          if (dump_file)
            fprintf (dump_file, "    setjmp is not const/pure\n");
          local->looping = true;
          local->pure_const_state = IPA_NEITHER;
        }

      if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (callee_t))
          {
          case BUILT_IN_LONGJMP:
          case BUILT_IN_NONLOCAL_GOTO:
            if (dump_file)
              fprintf (dump_file,
                       "    longjmp and nonlocal goto are not const/pure\n");
            local->pure_const_state = IPA_NEITHER;
            local->looping = true;
            break;
          default:
            break;
          }
    }
  else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
    local->can_free = true;

  /* When not in IPA mode, we can still handle self recursion.  */
  if (!ipa && callee_t
      && recursive_call_p (current_function_decl, callee_t))
    {
      if (dump_file)
        fprintf (dump_file, "    Recursive call can loop.\n");
      local->looping = true;
    }
  /* Either the callee is unknown or we are doing local analysis.
     Look to see if there are any bits available for the callee (such as by
     declaration or because it is a builtin) and process solely on the basis
     of those bits.  Handle internal calls always, those calls don't have
     corresponding cgraph edges and thus aren't processed during
     the propagation.  */
  else if (!ipa || gimple_call_internal_p (call))
    {
      enum pure_const_state_e call_state;
      bool call_looping;
      if (possibly_throws && cfun->can_throw_non_call_exceptions)
        {
          if (dump_file)
            fprintf (dump_file, "    can throw; looping\n");
          local->looping = true;
        }
      if (possibly_throws_externally)
        {
          if (dump_file)
            {
              fprintf (dump_file, "    can throw externally to lp %i\n",
                       lookup_stmt_eh_lp (call));
              if (callee_t)
                fprintf (dump_file, "     callee:%s\n",
                         IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
            }
          local->can_throw = true;
        }
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "    checking flags for call:");
      state_from_flags (&call_state, &call_looping, flags,
                        ((flags & (ECF_NORETURN | ECF_NOTHROW))
                         == (ECF_NORETURN | ECF_NOTHROW))
                        || (!flag_exceptions && (flags & ECF_NORETURN)));
      worse_state (&local->pure_const_state, &local->looping,
                   call_state, call_looping, NULL, NULL);
    }
  /* Direct function calls are handled by IPA propagation.  */
}
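/* Two concrete cases of the above (editorial sketch): a call to setjmp
   drops the whole function to IPA_NEITHER and sets looping, while in
   local (non-IPA) mode a self-recursive call only sets LOOPING:

     jmp_buf env;
     int f (void) { return setjmp (env); }    // neither, looping
     int fib (int n)                          // can still be const,
     { return n < 2 ? n                       // but looping
                    : fib (n - 1) + fib (n - 2); }  */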
/* Wrapper around check_decl for loads in local mode.  */

static bool
check_load (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, false);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in local mode.  */

static bool
check_store (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, false);
  else
    check_op ((funct_state)data, op, true);
  return false;
}

/* Wrapper around check_decl for loads in ipa mode.  */

static bool
check_ipa_load (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, true);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in ipa mode.  */

static bool
check_ipa_store (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, true);
  else
    check_op ((funct_state)data, op, true);
  return false;
}

/* Look at the statement pointed to by GSIP and figure out what
   interesting side effects it has.  */
static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple *stmt = gsi_stmt (*gsip);

  if (is_gimple_debug (stmt))
    return;

  /* Do consider clobbers as side effects before IPA, so that we would
     rather inline C++ destructors and keep clobber semantics than
     eliminate them.

     TODO: We may get smarter during early optimizations on these and let
     functions containing only clobbers be optimized more.  This is a
     common case of C++ destructors.  */

  if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, "  scanning: ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  if (gimple_has_volatile_ops (stmt)
      && !gimple_clobber_p (stmt))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Volatile stmt is not const/pure\n");
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local,
                            ipa ? check_ipa_load : check_load,
                            ipa ? check_ipa_store : check_store);

  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (stmt))
    {
      if (cfun->can_throw_non_call_exceptions)
        {
          if (dump_file)
            fprintf (dump_file, "    can throw; looping\n");
          local->looping = true;
        }
      if (stmt_can_throw_external (stmt))
        {
          if (dump_file)
            fprintf (dump_file, "    can throw externally\n");
          local->can_throw = true;
        }
      else
        if (dump_file)
          fprintf (dump_file, "    can throw\n");
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, as_a <gcall *> (stmt), ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
        /* Target of long jump.  */
        {
          if (dump_file)
            fprintf (dump_file, "    nonlocal label is not const/pure\n");
          local->pure_const_state = IPA_NEITHER;
        }
      break;
    case GIMPLE_ASM:
      if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
        {
          if (dump_file)
            fprintf (dump_file, "    memory asm clobber is not const/pure\n");
          /* Abandon all hope, ye who enter here.  */
          local->pure_const_state = IPA_NEITHER;
          local->can_free = true;
        }
      if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
        {
          if (dump_file)
            fprintf (dump_file, "    volatile is not const/pure\n");
          /* Abandon all hope, ye who enter here.  */
          local->pure_const_state = IPA_NEITHER;
          local->looping = true;
          local->can_free = true;
        }
      return;
    default:
      break;
    }
}
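/* As an illustration (editorial), the GIMPLE_ASM handling above means
   that a statement such as

     __asm__ volatile ("" : : : "memory");

   drops the containing function to IPA_NEITHER: the "memory" clobber
   alone abandons const/pure and sets CAN_FREE, and the volatile
   qualifier conservatively sets LOOPING and CAN_FREE as well.  */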
/* This is the main routine for finding the reference patterns for
   global variables within a function FN.  */

static funct_state
analyze_function (struct cgraph_node *fn, bool ipa)
{
  tree decl = fn->decl;
  funct_state l;
  basic_block this_block;

  l = XCNEW (struct funct_state_d);
  l->pure_const_state = IPA_CONST;
  l->state_previously_known = IPA_NEITHER;
  l->looping_previously_known = true;
  l->looping = false;
  l->can_throw = false;
  l->can_free = false;
  state_from_flags (&l->state_previously_known, &l->looping_previously_known,
                    flags_from_decl_or_type (fn->decl),
                    fn->cannot_return_p ());

  if (fn->thunk.thunk_p || fn->alias)
    {
      /* Thunk gets propagated through, so nothing interesting happens.  */
      gcc_assert (ipa);
      if (fn->thunk.thunk_p && fn->thunk.virtual_offset_p)
        l->pure_const_state = IPA_NEITHER;
      return l;
    }

  if (dump_file)
    {
      fprintf (dump_file, "\n\n local analysis of %s\n ",
               fn->name ());
    }

  push_cfun (DECL_STRUCT_FUNCTION (decl));

  FOR_EACH_BB_FN (this_block, cfun)
    {
      gimple_stmt_iterator gsi;
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      for (gsi = gsi_start_bb (this_block);
           !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          check_stmt (&gsi, l, ipa);
          if (l->pure_const_state == IPA_NEITHER
              && l->looping
              && l->can_throw
              && l->can_free)
            goto end;
        }
    }

end:
  if (l->pure_const_state != IPA_NEITHER)
    {
      /* Const functions cannot have back edges (an indication of a
         possible infinite loop side effect).  */
      if (mark_dfs_back_edges ())
        {
          /* Preheaders are needed for SCEV to work.
             Simple latches and recorded exits improve the chances that
             the loop will be proved to be finite in testcases such as
             loop-15.c and loop-24.c.  */
          loop_optimizer_init (LOOPS_HAVE_PREHEADERS
                               | LOOPS_HAVE_SIMPLE_LATCHES
                               | LOOPS_HAVE_RECORDED_EXITS);
          if (dump_file && (dump_flags & TDF_DETAILS))
            flow_loops_dump (dump_file, NULL, 0);
          if (mark_irreducible_loops ())
            {
              if (dump_file)
                fprintf (dump_file, "    has irreducible loops\n");
              l->looping = true;
            }
          else
            {
              struct loop *loop;
              scev_initialize ();
              FOR_EACH_LOOP (loop, 0)
                if (!finite_loop_p (loop))
                  {
                    if (dump_file)
                      fprintf (dump_file, "    cannot prove finiteness of "
                               "loop %i\n", loop->num);
                    l->looping = true;
                    break;
                  }
              scev_finalize ();
            }
          loop_optimizer_finalize ();
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "    checking previously known:");

  better_state (&l->pure_const_state, &l->looping,
                l->state_previously_known,
                l->looping_previously_known);
  if (TREE_NOTHROW (decl))
    l->can_throw = false;

  pop_cfun ();
  if (dump_file)
    {
      if (l->looping)
        fprintf (dump_file, "Function is locally looping.\n");
      if (l->can_throw)
        fprintf (dump_file, "Function is locally throwing.\n");
      if (l->pure_const_state == IPA_CONST)
        fprintf (dump_file, "Function is locally const.\n");
      if (l->pure_const_state == IPA_PURE)
        fprintf (dump_file, "Function is locally pure.\n");
      if (l->can_free)
        fprintf (dump_file, "Function can locally free.\n");
    }
  return l;
}
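/* For example (editorial sketch), with the loop analysis above a
   counted loop such as

     int sum (int n)
     {
       int s = 0;
       for (int i = 0; i < n; i++)
         s += i;
       return s;
     }

   can typically be proved finite by SCEV, so SUM stays non-looping,
   whereas a pointer chase like "while (p) p = p->next;" usually
   cannot, and the function is conservatively marked looping.  */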
/* Called when a new function is inserted into the callgraph late.  */
static void
add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  /* There are some shared nodes, in particular the initializers on
     static declarations.  We do not need to scan them more than once
     since all we would be interested in are the address-of
     operations.  */
  if (opt_for_fn (node->decl, flag_ipa_pure_const))
    set_function_state (node, analyze_function (node, true));
}

/* Called when a new clone is inserted into the callgraph late.  */

static void
duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
                     void *data ATTRIBUTE_UNUSED)
{
  if (has_function_state (src))
    {
      funct_state l = XNEW (struct funct_state_d);
      gcc_assert (!has_function_state (dst));
      memcpy (l, get_function_state (src), sizeof (*l));
      set_function_state (dst, l);
    }
}

/* Called when a node is removed from the callgraph late.  */

static void
remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (has_function_state (node))
    set_function_state (node, NULL);
}


void
pass_ipa_pure_const::
register_hooks (void)
{
  if (init_p)
    return;

  init_p = true;

  node_removal_hook_holder =
      symtab->add_cgraph_removal_hook (&remove_node_data, NULL);
  node_duplication_hook_holder =
      symtab->add_cgraph_duplication_hook (&duplicate_node_data, NULL);
  function_insertion_hook_holder =
      symtab->add_cgraph_insertion_hook (&add_new_function, NULL);
}
/* Analyze each function in the cgraph to see if it is locally PURE or
   CONST.  */

static void
pure_const_generate_summary (void)
{
  struct cgraph_node *node;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  /* Process all of the functions.

     We process AVAIL_INTERPOSABLE functions.  We cannot use the results
     by default, but the info can be used at LTO with -fwhole-program or
     when the function got cloned and the clone is AVAILABLE.  */

  FOR_EACH_DEFINED_FUNCTION (node)
    if (opt_for_fn (node->decl, flag_ipa_pure_const))
      set_function_state (node, analyze_function (node, true));
}


/* Serialize the ipa info for lto.  */

static void
pure_const_write_summary (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_ipa_pure_const);
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && has_function_state (node))
        count++;
    }

  streamer_write_uhwi_stream (ob->main_stream, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && has_function_state (node))
        {
          struct bitpack_d bp;
          funct_state fs;
          int node_ref;
          lto_symtab_encoder_t encoder;

          fs = get_function_state (node);

          encoder = ob->decl_state->symtab_node_encoder;
          node_ref = lto_symtab_encoder_encode (encoder, node);
          streamer_write_uhwi_stream (ob->main_stream, node_ref);

          /* Note that the flags must be unpacked on the read side in
             the same order in which they are packed here.  */
          bp = bitpack_create (ob->main_stream);
          bp_pack_value (&bp, fs->pure_const_state, 2);
          bp_pack_value (&bp, fs->state_previously_known, 2);
          bp_pack_value (&bp, fs->looping_previously_known, 1);
          bp_pack_value (&bp, fs->looping, 1);
          bp_pack_value (&bp, fs->can_throw, 1);
          bp_pack_value (&bp, fs->can_free, 1);
          streamer_write_bitpack (&bp);
        }
    }

  lto_destroy_simple_output_block (ob);
}
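/* The per-node bitpack layout produced above (an editorial summary of
   the bp_pack_value calls, assuming the usual low-to-high packing of
   bitpack_d):

     2 bits  pure_const_state
     2 bits  state_previously_known
     1 bit   looping_previously_known
     1 bit   looping
     1 bit   can_throw
     1 bit   can_free

   pure_const_read_summary below unpacks the same fields in the same
   order.  */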
/* Deserialize the ipa info for lto.  */

static void
pure_const_read_summary (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib
        = lto_create_simple_input_block (file_data,
                                         LTO_section_ipa_pure_const,
                                         &data, &len);
      if (ib)
        {
          unsigned int i;
          unsigned int count = streamer_read_uhwi (ib);

          for (i = 0; i < count; i++)
            {
              unsigned int index;
              struct cgraph_node *node;
              struct bitpack_d bp;
              funct_state fs;
              lto_symtab_encoder_t encoder;

              fs = XCNEW (struct funct_state_d);
              index = streamer_read_uhwi (ib);
              encoder = file_data->symtab_node_encoder;
              node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
                                                                        index));
              set_function_state (node, fs);

              /* Note that the flags are unpacked here in the same
                 order in which pure_const_write_summary packed them.  */
              bp = streamer_read_bitpack (ib);
              fs->pure_const_state
                = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
              fs->state_previously_known
                = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
              fs->looping_previously_known = bp_unpack_value (&bp, 1);
              fs->looping = bp_unpack_value (&bp, 1);
              fs->can_throw = bp_unpack_value (&bp, 1);
              fs->can_free = bp_unpack_value (&bp, 1);
              if (dump_file)
                {
                  int flags = flags_from_decl_or_type (node->decl);
                  fprintf (dump_file, "Read info for %s/%i ",
                           node->name (),
                           node->order);
                  if (flags & ECF_CONST)
                    fprintf (dump_file, " const");
                  if (flags & ECF_PURE)
                    fprintf (dump_file, " pure");
                  if (flags & ECF_NOTHROW)
                    fprintf (dump_file, " nothrow");
                  fprintf (dump_file, "\n  pure const state: %s\n",
                           pure_const_names[fs->pure_const_state]);
                  fprintf (dump_file, "  previously known state: %s\n",
                           pure_const_names[fs->state_previously_known]);
                  if (fs->looping)
                    fprintf (dump_file, "  function is locally looping\n");
                  if (fs->looping_previously_known)
                    fprintf (dump_file, "  function is previously known looping\n");
                  if (fs->can_throw)
                    fprintf (dump_file, "  function is locally throwing\n");
                  if (fs->can_free)
                    fprintf (dump_file, "  function can locally free\n");
                }
            }

          lto_destroy_simple_input_block (file_data,
                                          LTO_section_ipa_pure_const,
                                          ib, data, len);
        }
    }
}

/* We only propagate across edges that can throw externally and whose
   callee is not interposable.  */

static bool
ignore_edge_for_nothrow (struct cgraph_edge *e)
{
  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
    return true;

  enum availability avail;
  cgraph_node *n = e->callee->function_or_virtual_thunk_symbol (&avail,
                                                                e->caller);
  if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (n->decl))
    return true;
  return opt_for_fn (e->callee->decl, flag_non_call_exceptions)
         && !e->callee->binds_to_current_def_p (e->caller);
}

/* Return true if NODE is a self-recursive function.
   Indirectly recursive functions appear as non-trivial strongly
   connected components, so we need to care about self-recursion
   only.  */

static bool
self_recursive_p (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->function_symbol () == node)
      return true;
  return false;
}

/* Return true if N is a cdtor that is not const or pure.  In this case
   we may need to remove the function, now unreachable, after it is
   marked const/pure.  */

static bool
cdtor_p (cgraph_node *n, void *)
{
  if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
    return ((!TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl))
            || DECL_LOOPING_CONST_OR_PURE_P (n->decl));
  return false;
}

/* We only propagate across edges with a non-interposable callee.  */

static bool
ignore_edge_for_pure_const (struct cgraph_edge *e)
{
  enum availability avail;
  e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
  return (avail <= AVAIL_INTERPOSABLE);
}
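/* A small editorial example of the propagation below (prototypes
   omitted): given

     static int a (int x) { return x ? b (x - 1) : 0; }
     static int b (int x) { return a (x) + c (x); }
     static int c (int x) { return x * x; }    // local analysis: const

   A and B form one strongly connected component.  The worst state over
   the members and their callees (the const C) is still const, so both
   become const; and because the component is a recursive cycle, it is
   conservatively marked looping.  */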
/* Produce transitive closure over the callgraph and compute pure/const
   attributes.  */

static bool
propagate_pure_const (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;
  bool remove_p = false;
  bool has_cdtor;

  order_pos = ipa_reduced_postorder (order, true, false,
                                     ignore_edge_for_pure_const);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      enum pure_const_state_e pure_const_state = IPA_CONST;
      bool looping = false;
      int count = 0;
      node = order[i];

      if (node->alias)
        continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Starting cycle\n");

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && pure_const_state != IPA_NEITHER)
        {
          struct cgraph_edge *e;
          struct cgraph_edge *ie;
          int i;
          struct ipa_ref *ref = NULL;

          funct_state w_l = get_function_state (w);
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "  Visiting %s/%i state:%s looping %i\n",
                     w->name (),
                     w->order,
                     pure_const_names[w_l->pure_const_state],
                     w_l->looping);

          /* First merge in function body properties.
             We are safe to pass NULL as FROM and TO because we will take
             care of possible interposition when walking callees.  */
          worse_state (&pure_const_state, &looping,
                       w_l->pure_const_state, w_l->looping,
                       NULL, NULL);
          if (pure_const_state == IPA_NEITHER)
            break;

          count++;

          /* We consider recursive cycles as possibly infinite.
             This might be relaxed since infinite recursion leads to stack
             overflow.  */
          if (count > 1)
            looping = true;

          /* Now walk the edges and merge in callee properties.  */
          for (e = w->callees; e && pure_const_state != IPA_NEITHER;
               e = e->next_callee)
            {
              enum availability avail;
              struct cgraph_node *y = e->callee->
                                function_or_virtual_thunk_symbol (&avail,
                                                                  e->caller);
              enum pure_const_state_e edge_state = IPA_CONST;
              bool edge_looping = false;

              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file,
                           "    Call to %s/%i",
                           e->callee->name (),
                           e->callee->order);
                }
              if (avail > AVAIL_INTERPOSABLE)
                {
                  funct_state y_l = get_function_state (y);
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file,
                               " state:%s looping:%i\n",
                               pure_const_names[y_l->pure_const_state],
                               y_l->looping);
                    }
                  if (y_l->pure_const_state > IPA_PURE
                      && e->cannot_lead_to_return_p ())
                    {
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        fprintf (dump_file,
                                 "        Ignoring side effects"
                                 " -> pure, looping\n");
                      edge_state = IPA_PURE;
                      edge_looping = true;
                    }
                  else
                    {
                      edge_state = y_l->pure_const_state;
                      edge_looping = y_l->looping;
                    }
                }
              else if (special_builtin_state (&edge_state, &edge_looping,
                                              y->decl))
                ;
              else
                state_from_flags (&edge_state, &edge_looping,
                                  flags_from_decl_or_type (y->decl),
                                  e->cannot_lead_to_return_p ());

              /* Merge the results with what we already know.  */
              better_state (&edge_state, &edge_looping,
                            w_l->state_previously_known,
                            w_l->looping_previously_known);
              worse_state (&pure_const_state, &looping,
                           edge_state, edge_looping, e->caller, e->callee);
              if (pure_const_state == IPA_NEITHER)
                break;
            }

          /* Now process the indirect calls.  */
          for (ie = w->indirect_calls;
               ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
            {
              enum pure_const_state_e edge_state = IPA_CONST;
              bool edge_looping = false;

              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "    Indirect call");
              state_from_flags (&edge_state, &edge_looping,
                                ie->indirect_info->ecf_flags,
                                ie->cannot_lead_to_return_p ());
              /* Merge the results with what we already know.  */
              better_state (&edge_state, &edge_looping,
                            w_l->state_previously_known,
                            w_l->looping_previously_known);
              worse_state (&pure_const_state, &looping,
                           edge_state, edge_looping, NULL, NULL);
              if (pure_const_state == IPA_NEITHER)
                break;
            }

          /* And finally all loads and stores.  */
          for (i = 0; w->iterate_reference (i, ref)
               && pure_const_state != IPA_NEITHER; i++)
            {
              enum pure_const_state_e ref_state = IPA_CONST;
              bool ref_looping = false;
              switch (ref->use)
                {
                case IPA_REF_LOAD:
                  /* Readonly reads are safe.  */
                  if (TREE_READONLY (ref->referred->decl))
                    break;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "    nonreadonly global var read\n");
                  ref_state = IPA_PURE;
                  break;
                case IPA_REF_STORE:
                  if (ref->cannot_lead_to_return ())
                    break;
                  ref_state = IPA_NEITHER;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "    global var write\n");
                  break;
                case IPA_REF_ADDR:
                case IPA_REF_CHKP:
                  break;
                default:
                  gcc_unreachable ();
                }
              better_state (&ref_state, &ref_looping,
                            w_l->state_previously_known,
                            w_l->looping_previously_known);
              worse_state (&pure_const_state, &looping,
                           ref_state, ref_looping, NULL, NULL);
              if (pure_const_state == IPA_NEITHER)
                break;
            }
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Result %s looping %i\n",
                 pure_const_names [pure_const_state],
                 looping);

      /* Find the worst state of can_free for any node in the cycle.  */
      bool can_free = false;
      w = node;
      while (w && !can_free)
        {
          struct cgraph_edge *e;
          funct_state w_l = get_function_state (w);

          if (w_l->can_free
              || w->get_availability () == AVAIL_INTERPOSABLE
              || w->indirect_calls)
            can_free = true;

          for (e = w->callees; e && !can_free; e = e->next_callee)
            {
              enum availability avail;
              struct cgraph_node *y = e->callee->
                                function_or_virtual_thunk_symbol (&avail,
                                                                  e->caller);

              if (avail > AVAIL_INTERPOSABLE)
                can_free = get_function_state (y)->can_free;
              else
                can_free = true;
            }
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }

      /* Copy back the region's pure_const_state which is shared by
         all nodes in the region.  */
      w = node;
      while (w)
        {
          funct_state w_l = get_function_state (w);
          enum pure_const_state_e this_state = pure_const_state;
          bool this_looping = looping;

          w_l->can_free = can_free;
          w->nonfreeing_fn = !can_free;
          if (!can_free && dump_file)
            fprintf (dump_file, "Function found not to call free: %s\n",
                     w->name ());

          if (w_l->state_previously_known != IPA_NEITHER
              && this_state > w_l->state_previously_known)
            {
              this_state = w_l->state_previously_known;
              if (this_state == IPA_NEITHER)
                this_looping = w_l->looping_previously_known;
            }
          if (!this_looping && self_recursive_p (w))
            this_looping = true;
          if (!w_l->looping_previously_known)
            this_looping = false;

          /* All nodes within a cycle share the same info.  */
          w_l->pure_const_state = this_state;
          w_l->looping = this_looping;

          /* Inline clones share declaration with their offline copies;
             do not modify their declarations since the offline copy may
             be different.  */
          if (!w->global.inlined_to)
            switch (this_state)
              {
              case IPA_CONST:
                if (!TREE_READONLY (w->decl))
                  {
                    warn_function_const (w->decl, !this_looping);
                    if (dump_file)
                      fprintf (dump_file, "Function found to be %sconst: %s\n",
                               this_looping ? "looping " : "",
                               w->name ());
                  }
                /* Turning constructor or destructor to non-looping const/pure
                   enables us to possibly remove the function completely.  */
                if (this_looping)
                  has_cdtor = false;
                else
                  has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
                                                              NULL, true);
                if (w->set_const_flag (true, this_looping))
                  {
                    if (dump_file)
                      fprintf (dump_file,
                               "Declaration updated to be %sconst: %s\n",
                               this_looping ? "looping " : "",
                               w->name ());
                    remove_p |= has_cdtor;
                  }
                break;

              case IPA_PURE:
                if (!DECL_PURE_P (w->decl))
                  {
                    warn_function_pure (w->decl, !this_looping);
                    if (dump_file)
                      fprintf (dump_file, "Function found to be %spure: %s\n",
                               this_looping ? "looping " : "",
                               w->name ());
                  }
                if (this_looping)
                  has_cdtor = false;
                else
                  has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
                                                              NULL, true);
                if (w->set_pure_flag (true, this_looping))
                  {
                    if (dump_file)
                      fprintf (dump_file,
                               "Declaration updated to be %spure: %s\n",
                               this_looping ? "looping " : "",
                               w->name ());
                    remove_p |= has_cdtor;
                  }
                break;

              default:
                break;
              }
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }
    }

  ipa_free_postorder_info ();
  free (order);
  return remove_p;
}
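/* An editorial note on the REMOVE_P result above: if a static
   constructor such as

     __attribute__((constructor)) static void init (void) { }

   is newly discovered to be non-looping const or pure, running it is
   useless, so the caller of this function returns
   TODO_remove_functions to let the pass manager drop the
   now-unreachable body.  */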
/* Produce transitive closure over the callgraph and compute nothrow
   attributes.  */

static void
propagate_nothrow (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;

  order_pos = ipa_reduced_postorder (order, true, false,
                                     ignore_edge_for_nothrow);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      bool can_throw = false;
      node = order[i];

      if (node->alias)
        continue;

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && !can_throw)
        {
          struct cgraph_edge *e, *ie;

          if (!TREE_NOTHROW (w->decl))
            {
              funct_state w_l = get_function_state (w);

              if (w_l->can_throw
                  || w->get_availability () == AVAIL_INTERPOSABLE)
                can_throw = true;

              for (e = w->callees; e && !can_throw; e = e->next_callee)
                {
                  enum availability avail;

                  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
                    continue;

                  struct cgraph_node *y = e->callee->
                                    function_or_virtual_thunk_symbol (&avail,
                                                                      e->caller);

                  /* We can use info about the callee only if we know it
                     cannot be interposed.
                     When the callee is compiled with non-call exceptions,
                     we must also check that the declaration is bound to
                     the current body, as another semantically equivalent
                     body may still throw.  */
                  if (avail <= AVAIL_INTERPOSABLE
                      || (!TREE_NOTHROW (y->decl)
                          && (get_function_state (y)->can_throw
                              || (opt_for_fn (y->decl, flag_non_call_exceptions)
                                  && !e->callee->binds_to_current_def_p (w)))))
                    can_throw = true;
                }
              for (ie = w->indirect_calls; ie && !can_throw;
                   ie = ie->next_callee)
                if (ie->can_throw_external
                    && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
                  can_throw = true;
            }
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }

      /* Copy back the region's can_throw state, which is shared by
         all nodes in the region.  */
      w = node;
      while (w)
        {
          funct_state w_l = get_function_state (w);
          if (!can_throw && !TREE_NOTHROW (w->decl))
            {
              /* Inline clones share declaration with their offline copies;
                 do not modify their declarations since the offline copy may
                 be different.  */
              if (!w->global.inlined_to)
                {
                  w->set_nothrow_flag (true);
                  if (dump_file)
                    fprintf (dump_file, "Function found to be nothrow: %s\n",
                             w->name ());
                }
            }
          else if (can_throw && !TREE_NOTHROW (w->decl))
            w_l->can_throw = true;
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }
    }

  ipa_free_postorder_info ();
  free (order);
}
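/* Editorial example of the nothrow propagation above: with exceptions
   enabled, in

     void leaf (void) { }                // locally cannot throw
     void caller (void) { leaf (); }     // only calls nothrow code

   LEAF is found locally non-throwing, and the walk over CALLER's
   callees then lets CALLER be marked TREE_NOTHROW as well, provided
   LEAF cannot be interposed by a different, throwing definition.  */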
/* Produce the global information by performing a transitive closure
   on the local information that was produced by generate_summary.  */

unsigned int
pass_ipa_pure_const::
execute (function *)
{
  struct cgraph_node *node;
  bool remove_p;

  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
  symtab->remove_cgraph_removal_hook (node_removal_hook_holder);

  /* Nothrow makes more functions not lead to a return, which improves
     later analysis.  */
  propagate_nothrow ();
  remove_p = propagate_pure_const ();

  /* Cleanup.  */
  FOR_EACH_FUNCTION (node)
    if (has_function_state (node))
      free (get_function_state (node));
  funct_state_vec.release ();
  return remove_p ? TODO_remove_functions : 0;
}

static bool
gate_pure_const (void)
{
  return flag_ipa_pure_const || in_lto_p;
}

pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
    : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
                     pure_const_generate_summary, /* generate_summary */
                     pure_const_write_summary, /* write_summary */
                     pure_const_read_summary, /* read_summary */
                     NULL, /* write_optimization_summary */
                     NULL, /* read_optimization_summary */
                     NULL, /* stmt_fixup */
                     0, /* function_transform_todo_flags_start */
                     NULL, /* function_transform */
                     NULL), /* variable_transform */
  init_p(false),
  function_insertion_hook_holder(NULL),
  node_duplication_hook_holder(NULL),
  node_removal_hook_holder(NULL)
{
}

ipa_opt_pass_d *
make_pass_ipa_pure_const (gcc::context *ctxt)
{
  return new pass_ipa_pure_const (ctxt);
}

/* Return true if function should be skipped for local pure const
   analysis.  */

static bool
skip_function_for_local_pure_const (struct cgraph_node *node)
{
  /* Because we do not schedule pass_fixup_cfg over the whole program
     after early optimizations, we must not promote functions that are
     called by already-processed functions.  */

  if (function_called_by_processed_nodes_p ())
    {
      if (dump_file)
        fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
      return true;
    }
  /* Save some work and do not analyze functions which are interposable and
     do not have any non-interposable aliases.  */
  if (node->get_availability () <= AVAIL_INTERPOSABLE
      && !node->has_aliases_p ())
    {
      if (dump_file)
        fprintf (dump_file,
                 "Function is interposable; not analyzing.\n");
      return true;
    }
  return false;
}
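/* For instance (editorial note), an exported function in a shared
   library built with -fPIC, such as

     int get (void) { return 42; }

   typically has availability AVAIL_INTERPOSABLE because ELF semantic
   interposition allows another definition to win at link or load time,
   so the local pass skips it rather than promote it to const on the
   basis of a body that may not be the one that runs.  */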
"looping " : "", 1843 current_function_name ()); 1844 changed = true; 1845 } 1846 break; 1847 1848 case IPA_PURE: 1849 if (!DECL_PURE_P (current_function_decl)) 1850 { 1851 warn_function_pure (current_function_decl, !l->looping); 1852 if (dump_file) 1853 fprintf (dump_file, "Function found to be %spure: %s\n", 1854 l->looping ? "looping " : "", 1855 current_function_name ()); 1856 } 1857 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl) 1858 && !l->looping) 1859 { 1860 if (dump_file) 1861 fprintf (dump_file, "Function found to be non-looping: %s\n", 1862 current_function_name ()); 1863 } 1864 if (!skip && node->set_pure_flag (true, l->looping)) 1865 { 1866 if (dump_file) 1867 fprintf (dump_file, "Declaration updated to be %spure: %s\n", 1868 l->looping ? "looping " : "", 1869 current_function_name ()); 1870 changed = true; 1871 } 1872 break; 1873 1874 default: 1875 break; 1876 } 1877 if (!l->can_throw && !TREE_NOTHROW (current_function_decl)) 1878 { 1879 node->set_nothrow_flag (true); 1880 changed = true; 1881 if (dump_file) 1882 fprintf (dump_file, "Function found to be nothrow: %s\n", 1883 current_function_name ()); 1884 } 1885 free (l); 1886 if (changed) 1887 return execute_fixup_cfg (); 1888 else 1889 return 0; 1890 } 1891 1892 } // anon namespace 1893 1894 gimple_opt_pass * 1895 make_pass_local_pure_const (gcc::context *ctxt) 1896 { 1897 return new pass_local_pure_const (ctxt); 1898 } 1899 1900 /* Emit noreturn warnings. */ 1901 1902 namespace { 1903 1904 const pass_data pass_data_warn_function_noreturn = 1905 { 1906 GIMPLE_PASS, /* type */ 1907 "*warn_function_noreturn", /* name */ 1908 OPTGROUP_NONE, /* optinfo_flags */ 1909 TV_NONE, /* tv_id */ 1910 PROP_cfg, /* properties_required */ 1911 0, /* properties_provided */ 1912 0, /* properties_destroyed */ 1913 0, /* todo_flags_start */ 1914 0, /* todo_flags_finish */ 1915 }; 1916 1917 class pass_warn_function_noreturn : public gimple_opt_pass 1918 { 1919 public: 1920 pass_warn_function_noreturn (gcc::context *ctxt) 1921 : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt) 1922 {} 1923 1924 /* opt_pass methods: */ 1925 virtual bool gate (function *) { return warn_suggest_attribute_noreturn; } 1926 virtual unsigned int execute (function *fun) 1927 { 1928 if (!TREE_THIS_VOLATILE (current_function_decl) 1929 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0) 1930 warn_function_noreturn (current_function_decl); 1931 return 0; 1932 } 1933 1934 }; // class pass_warn_function_noreturn 1935 1936 } // anon namespace 1937 1938 gimple_opt_pass * 1939 make_pass_warn_function_noreturn (gcc::context *ctxt) 1940 { 1941 return new pass_warn_function_noreturn (ctxt); 1942 } 1943 1944 /* Simple local pass for pure const discovery reusing the analysis from 1945 ipa_pure_const. This pass is effective when executed together with 1946 other optimization passes in early optimization pass queue. 
/* Simple local pass for nothrow discovery reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   other optimization passes in the early optimization pass queue.  */

namespace {

const pass_data pass_data_nothrow =
{
  GIMPLE_PASS, /* type */
  "nothrow", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_nothrow : public gimple_opt_pass
{
public:
  pass_nothrow (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_nothrow, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_nothrow (m_ctxt); }
  virtual bool gate (function *) { return optimize; }
  virtual unsigned int execute (function *);

}; // class pass_nothrow

unsigned int
pass_nothrow::execute (function *)
{
  struct cgraph_node *node;
  basic_block this_block;

  if (TREE_NOTHROW (current_function_decl))
    return 0;

  node = cgraph_node::get (current_function_decl);

  /* We run during lowering; we cannot really use availability yet.  */
  if (cgraph_node::get (current_function_decl)->get_availability ()
      <= AVAIL_INTERPOSABLE)
    {
      if (dump_file)
        fprintf (dump_file, "Function is interposable;"
                 " not analyzing.\n");
      return 0;
    }

  FOR_EACH_BB_FN (this_block, cfun)
    {
      for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
           !gsi_end_p (gsi);
           gsi_next (&gsi))
        if (stmt_can_throw_external (gsi_stmt (gsi)))
          {
            if (is_gimple_call (gsi_stmt (gsi)))
              {
                tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
                if (callee_t && recursive_call_p (current_function_decl,
                                                  callee_t))
                  continue;
              }

            if (dump_file)
              {
                fprintf (dump_file, "Statement can throw: ");
                print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
              }
            return 0;
          }
    }

  node->set_nothrow_flag (true);

  bool cfg_changed = false;
  if (self_recursive_p (node))
    FOR_EACH_BB_FN (this_block, cfun)
      if (gimple *g = last_stmt (this_block))
        if (is_gimple_call (g))
          {
            tree callee_t = gimple_call_fndecl (g);
            if (callee_t
                && recursive_call_p (current_function_decl, callee_t)
                && maybe_clean_eh_stmt (g)
                && gimple_purge_dead_eh_edges (this_block))
              cfg_changed = true;
          }

  if (dump_file)
    fprintf (dump_file, "Function found to be nothrow: %s\n",
             current_function_name ());
  return cfg_changed ? TODO_cleanup_cfg : 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_nothrow (gcc::context *ctxt)
{
  return new pass_nothrow (ctxt);
}