/* Callgraph based analysis of static variables.
   Copyright (C) 2004-2022 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file marks functions as being either const (TREE_READONLY) or
   pure (DECL_PURE_P).  It can also set a variant of these that
   are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).

   This must be run after inlining decisions have been made since
   otherwise the local sets will not contain information that is
   consistent with the post-inlining state.  The global sets are not
   prone to this problem since they are by definition transitive.  */
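
/* For example,

     int add (int a, int b) { return a + b; }

   can be marked const, while a function that reads global memory, such as

     int get_counter (void) { return counter; }

   can only be pure, and a function that writes to memory or has other
   side effects is neither.  (COUNTER here is a hypothetical global
   variable.)  */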

/* The code in this module is called by the ipa pass manager.  It
   should be one of the later passes since its information is used by
   the rest of the compilation.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "calls.h"
#include "cfganal.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-niter.h"
#include "langhooks.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "intl.h"
#include "opts.h"
#include "ssa.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "ipa-fnsummary.h"
#include "symtab-thunks.h"
#include "dbgcnt.h"

/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};
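
/* The order of the enumerators is significant: propagation only ever
   moves a function down the lattice, and merging two states takes the
   numerically larger (worse) one; see worse_state below.  */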

static const char *pure_const_names[3] = {"const", "pure", "neither"};

enum malloc_state_e
{
  STATE_MALLOC_TOP,
  STATE_MALLOC,
  STATE_MALLOC_BOTTOM
};

static const char *malloc_state_names[] = {"malloc_top", "malloc", "malloc_bottom"};

/* Holder for the const_state.  There is one of these per function
   decl.  */
class funct_state_d
{
public:
  funct_state_d (): pure_const_state (IPA_NEITHER),
    state_previously_known (IPA_NEITHER), looping_previously_known (true),
    looping (true), can_throw (true), can_free (true),
    malloc_state (STATE_MALLOC_BOTTOM) {}

  funct_state_d (const funct_state_d &s): pure_const_state (s.pure_const_state),
    state_previously_known (s.state_previously_known),
    looping_previously_known (s.looping_previously_known),
    looping (s.looping), can_throw (s.can_throw), can_free (s.can_free),
    malloc_state (s.malloc_state) {}

  /* See above.  */
  enum pure_const_state_e pure_const_state;
  /* What the user set here; we can always be sure about this.  */
  enum pure_const_state_e state_previously_known;
  bool looping_previously_known;

  /* True if the function could possibly infinite loop.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to cse pure and const
     calls, it is not legal to have dce get rid of the call if there
     is a possibility that the call could infinite loop since this is
     a behavioral change.  */
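  /* For example, two calls to a const function F may be merged by CSE,
     but a call to F whose result is unused may not be deleted by DCE
     unless F is also known to terminate.  */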
  bool looping;

  bool can_throw;

  /* If function can call free, munmap or otherwise make previously
     non-trapping memory accesses trapping.  */
  bool can_free;

  enum malloc_state_e malloc_state;
};

typedef class funct_state_d * funct_state;

/* The storage of the funct_state is abstracted because there is the
   possibility that it may be desirable to move this to the cgraph
   local info.  */

class funct_state_summary_t:
  public fast_function_summary <funct_state_d *, va_heap>
{
public:
  funct_state_summary_t (symbol_table *symtab):
    fast_function_summary <funct_state_d *, va_heap> (symtab) {}

  virtual void insert (cgraph_node *, funct_state_d *state);
  virtual void duplicate (cgraph_node *src_node, cgraph_node *dst_node,
			  funct_state_d *src_data,
			  funct_state_d *dst_data);
};

static funct_state_summary_t *funct_state_summaries = NULL;

static bool gate_pure_const (void);

namespace {

const pass_data pass_data_ipa_pure_const =
{
  IPA_PASS, /* type */
  "pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_pure_const : public ipa_opt_pass_d
{
public:
  pass_ipa_pure_const(gcc::context *ctxt);

  /* opt_pass methods: */
  bool gate (function *) { return gate_pure_const (); }
  unsigned int execute (function *fun);

  void register_hooks (void);

private:
  bool init_p;
}; // class pass_ipa_pure_const

} // anon namespace

/* Try to guess if the function body will always be visible to the
   compiler when compiling the call and whether the compiler will be
   able to propagate the information by itself.  */

static bool
function_always_visible_to_compiler_p (tree decl)
{
  return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl)
	  || DECL_COMDAT (decl));
}
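
/* For instance, a file-local (static) function is always visible to the
   compiler, while a public function that is neither inline nor COMDAT
   may be interposed at link time, so its body cannot be assumed known
   when compiling its callers.  */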

/* Emit suggestion about attribute ATTRIB_NAME for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  The diagnostic is
   controlled by OPTION.  WARNED_ABOUT is a hash_set<tree> unique for
   OPTION; this function may initialize it, and it is always returned
   by the function.  */

static hash_set<tree> *
suggest_attribute (int option, tree decl, bool known_finite,
		   hash_set<tree> *warned_about,
		   const char * attrib_name)
{
  if (!option_enabled (option, lang_hooks.option_lang_mask (), &global_options))
    return warned_about;
  if (TREE_THIS_VOLATILE (decl)
      || (known_finite && function_always_visible_to_compiler_p (decl)))
    return warned_about;

  if (!warned_about)
    warned_about = new hash_set<tree>;
  if (warned_about->contains (decl))
    return warned_about;
  warned_about->add (decl);
  warning_at (DECL_SOURCE_LOCATION (decl),
	      option,
	      known_finite
	      ? G_("function might be candidate for attribute %qs")
	      : G_("function might be candidate for attribute %qs"
		   " if it is known to return normally"), attrib_name);
  return warned_about;
}

/* Emit suggestion about __attribute__((pure)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_pure (tree decl, bool known_finite)
{
  /* Declaring a void function pure makes no sense and is diagnosed
     by -Wattributes because calling it would have no effect.  */
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    return;

  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
			 known_finite, warned_about, "pure");
}

/* Emit suggestion about __attribute__((const)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_const (tree decl, bool known_finite)
{
  /* Declaring a void function const makes no sense and is diagnosed
     by -Wattributes because calling it would have no effect.  */
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    return;

  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
			 known_finite, warned_about, "const");
}

/* Emit suggestion about __attribute__((malloc)) for DECL.  */

static void
warn_function_malloc (tree decl)
{
  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_malloc, decl,
			 true, warned_about, "malloc");
}

/* Emit suggestion about __attribute__((noreturn)) for DECL.  */

static void
warn_function_noreturn (tree decl)
{
  tree original_decl = decl;

  static hash_set<tree> *warned_about;
  if (!lang_hooks.missing_noreturn_ok_p (decl)
      && targetm.warn_func_return (decl))
    warned_about
      = suggest_attribute (OPT_Wsuggest_attribute_noreturn, original_decl,
			   true, warned_about, "noreturn");
}

void
warn_function_cold (tree decl)
{
  tree original_decl = decl;

  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_cold, original_decl,
			 true, warned_about, "cold");
}

/* Check to see if the use (or definition, when CHECKING_WRITE is true)
   of variable T is legal in a function that is either pure or const.  */

static inline void
check_decl (funct_state local,
	    tree t, bool checking_write, bool ipa)
{
  /* Do not want to do anything with volatile except mark any
     function that uses one to be not const or pure.  */
  if (TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Volatile operand is not const/pure\n");
      return;
    }

  /* Do not care about a local automatic that is not static.  */
  if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
    return;

  /* If the variable has the "used" attribute, treat it as if it had
     been touched by the devil.  */
  if (DECL_PRESERVE_P (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Used static/global variable is not const/pure\n");
      return;
    }

  /* In IPA mode we are not interested in checking actual loads and stores;
     they will be processed at propagation time using ipa_ref.  */
  if (ipa)
    return;

  /* Since we have dealt with the locals and params cases above, if we
     are CHECKING_WRITE, this cannot be a pure or constant
     function.  */
  if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    static/global memory write is not const/pure\n");
      return;
    }

  if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
    {
      /* Readonly reads are safe.  */
      if (TREE_READONLY (t))
	return; /* Read of a constant, do not change the function state.  */
      else
	{
          if (dump_file)
            fprintf (dump_file, "    global memory read is not const\n");
	  /* Just a regular read.  */
	  if (local->pure_const_state == IPA_CONST)
	    local->pure_const_state = IPA_PURE;
	}
    }
  else
    {
      /* Compilation level statics can be read if they are readonly
	 variables.  */
      if (TREE_READONLY (t))
	return;

      if (dump_file)
	fprintf (dump_file, "    static memory read is not const\n");
      /* Just a regular read.  */
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}
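
/* For example, a read of a non-readonly static variable downgrades an
   IPA_CONST function to IPA_PURE, while a write to static or global
   memory, or any volatile access, downgrades it to IPA_NEITHER.  */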


/* Check to see if the use (or definition, when CHECKING_WRITE is true)
   of operand T is legal in a function that is either pure or const.  */

static inline void
check_op (funct_state local, tree t, bool checking_write)
{
  t = get_base_address (t);
  if (t && TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile indirect ref is not const/pure\n");
      return;
    }
  else if (refs_local_or_readonly_memory_p (t))
    {
      if (dump_file)
	fprintf (dump_file, "    Indirect ref to local or readonly "
		 "memory is OK\n");
      return;
    }
  else if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Indirect ref write is not const/pure\n");
      return;
    }
  else
    {
      if (dump_file)
	fprintf (dump_file, "    Indirect ref read is not const\n");
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}

/* Compute the state based on ECF FLAGS and store it to STATE and LOOPING.  */

static void
state_from_flags (enum pure_const_state_e *state, bool *looping,
	          int flags, bool cannot_lead_to_return)
{
  *looping = false;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    {
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " looping\n");
    }
  if (flags & ECF_CONST)
    {
      *state = IPA_CONST;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " const\n");
    }
  else if (flags & ECF_PURE)
    {
      *state = IPA_PURE;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " pure\n");
    }
  else if (cannot_lead_to_return)
    {
      *state = IPA_PURE;
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " ignoring side effects->pure looping\n");
    }
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " neither\n");
      *state = IPA_NEITHER;
      *looping = true;
    }
}
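
/* For instance, ECF_CONST | ECF_LOOPING_CONST_OR_PURE yields IPA_CONST
   with LOOPING set, ECF_PURE alone yields IPA_PURE, and a call with
   none of these flags that also cannot lead to a return is treated as
   looping pure, since its side effects can never become visible.  */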

/* Merge STATE and STATE2 and LOOPING and LOOPING2, storing the better
   of the two variants into STATE and LOOPING.
   Be sure to merge looping correctly.  IPA_NEITHER functions
   have looping 0 even if they don't have to return.  */

static inline void
better_state (enum pure_const_state_e *state, bool *looping,
	      enum pure_const_state_e state2, bool looping2)
{
  if (state2 < *state)
    {
      if (*state == IPA_NEITHER)
	*looping = looping2;
      else
	*looping = MIN (*looping, looping2);
      *state = state2;
    }
  else if (state2 != IPA_NEITHER)
    *looping = MIN (*looping, looping2);
}
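
/* For example, if local analysis concluded IPA_NEITHER but the
   declaration carries the pure attribute (so STATE_PREVIOUSLY_KNOWN is
   IPA_PURE), merging the previously known state here upgrades the
   result to IPA_PURE.  */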

/* Merge STATE and STATE2 and LOOPING and LOOPING2, storing the worse
   of the two variants into STATE and LOOPING.  FROM and TO identify
   the caller and the callee of the call being merged.  */

static inline void
worse_state (enum pure_const_state_e *state, bool *looping,
	     enum pure_const_state_e state2, bool looping2,
	     struct symtab_node *from,
	     struct symtab_node *to)
{
  /* Consider function:

     bool a(int *p)
     {
       return *p==*p;
     }

     During early optimization we will turn this into:

     bool a(int *p)
     {
       return true;
     }

     Now this function will be detected as CONST; however, when interposed
     it may end up being just pure.  We must always assume the worst
     scenario here.  */
  if (*state == IPA_CONST && state2 == IPA_CONST
      && to && !TREE_READONLY (to->decl) && !to->binds_to_current_def_p (from))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Dropping state to PURE because call to %s may not "
		 "bind to current def.\n", to->dump_name ());
      state2 = IPA_PURE;
    }
  *state = MAX (*state, state2);
  *looping = MAX (*looping, looping2);
}

/* Recognize special cases of builtins that are by themselves not const
   but a function using them can be.  */
bool
builtin_safe_for_const_function_p (bool *looping, tree callee)
{
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_RETURN:
      case BUILT_IN_UNREACHABLE:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_UNWIND_RESUME:
      case BUILT_IN_CXA_END_CLEANUP:
      case BUILT_IN_EH_COPY_VALUES:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_APPLY_ARGS:
      case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
      case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
      case BUILT_IN_DWARF_CFA:
      case BUILT_IN_RETURN_ADDRESS:
	*looping = false;
	return true;
      case BUILT_IN_PREFETCH:
	*looping = true;
	return true;
      default:
	break;
      }
  return false;
}

/* Check the parameters of the function call CALL to see if there are
   any references in the parameters that are not allowed for pure or
   const functions.  Also check to see if this is either an indirect
   call, a call outside the compilation unit, or has special attributes
   that may also affect the purity.  */

static void
check_call (funct_state local, gcall *call, bool ipa)
{
  int flags = gimple_call_flags (call);
  tree callee_t = gimple_call_fndecl (call);
  bool possibly_throws = stmt_could_throw_p (cfun, call);
  bool possibly_throws_externally = (possibly_throws
  				     && stmt_can_throw_external (cfun, call));

  if (possibly_throws)
    {
      unsigned int i;
      for (i = 0; i < gimple_num_ops (call); i++)
        if (gimple_op (call, i)
	    && tree_could_throw_p (gimple_op (call, i)))
	  {
	    if (possibly_throws && cfun->can_throw_non_call_exceptions)
	      {
		if (dump_file)
		  fprintf (dump_file, "    operand can throw; looping\n");
		local->looping = true;
	      }
	    if (possibly_throws_externally)
	      {
		if (dump_file)
		  fprintf (dump_file, "    operand can throw externally\n");
		local->can_throw = true;
	      }
	  }
    }

  /* The const and pure flags are set by a variety of places in the
     compiler (including here).  If someone has already set the flags
     for the callee (such as for some of the builtins), we will use
     them, otherwise we will compute our own information.

     Const and pure functions have less clobber effects than other
     functions so we process these first.  Otherwise if it is a call
     outside the compilation unit or an indirect call we punt.  This
     leaves local calls which will be processed by following the call
     graph.  */
  if (callee_t)
    {
      bool call_looping;

      if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
	  && !nonfreeing_call_p (call))
	local->can_free = true;

      if (builtin_safe_for_const_function_p (&call_looping, callee_t))
	{
	  worse_state (&local->pure_const_state, &local->looping,
		       IPA_CONST, call_looping,
		       NULL, NULL);
	  return;
	}
      /* When bad things happen to bad functions, they cannot be const
	 or pure.  */
      if (setjmp_call_p (callee_t))
	{
	  if (dump_file)
	    fprintf (dump_file, "    setjmp is not const/pure\n");
          local->looping = true;
	  local->pure_const_state = IPA_NEITHER;
	}

      if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee_t))
	  {
	  case BUILT_IN_LONGJMP:
	  case BUILT_IN_NONLOCAL_GOTO:
	    if (dump_file)
	      fprintf (dump_file,
		       "    longjmp and nonlocal goto is not const/pure\n");
	    local->pure_const_state = IPA_NEITHER;
	    local->looping = true;
	    break;
	  default:
	    break;
	  }
    }
  else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
    local->can_free = true;

  /* When not in IPA mode, we can still handle self recursion.  */
  if (!ipa && callee_t
      && recursive_call_p (current_function_decl, callee_t))
    {
      if (dump_file)
        fprintf (dump_file, "    Recursive call can loop.\n");
      local->looping = true;
    }
  /* Either the callee is unknown or we are doing local analysis.
     Look to see if there are any bits available for the callee (such as by
     declaration or because it is a builtin) and process solely on the basis
     of those bits.  Always handle internal calls; those calls don't have
     corresponding cgraph edges and thus aren't processed during
     the propagation.  */
  else if (!ipa || gimple_call_internal_p (call))
    {
      enum pure_const_state_e call_state;
      bool call_looping;
      if (possibly_throws && cfun->can_throw_non_call_exceptions)
        {
	  if (dump_file)
	    fprintf (dump_file, "    can throw; looping\n");
          local->looping = true;
	}
      if (possibly_throws_externally)
        {
	  if (dump_file)
	    {
	      fprintf (dump_file, "    can throw externally to lp %i\n",
	      	       lookup_stmt_eh_lp (call));
	      if (callee_t)
		fprintf (dump_file, "     callee:%s\n",
			 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
	    }
          local->can_throw = true;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "    checking flags for call:");
      state_from_flags (&call_state, &call_looping, flags,
			((flags & (ECF_NORETURN | ECF_NOTHROW))
			 == (ECF_NORETURN | ECF_NOTHROW))
			|| (!flag_exceptions && (flags & ECF_NORETURN)));
      worse_state (&local->pure_const_state, &local->looping,
		   call_state, call_looping, NULL, NULL);
    }
  /* Direct function calls are handled by IPA propagation.  */
}

/* Wrapper around check_decl for loads in local mode.  */

static bool
check_load (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, false);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in local mode.  */

static bool
check_store (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, false);
  else
    check_op ((funct_state)data, op, true);
  return false;
}

/* Wrapper around check_decl for loads in ipa mode.  */

static bool
check_ipa_load (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, true);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in ipa mode.  */

static bool
check_ipa_store (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, true);
  else
    check_op ((funct_state)data, op, true);
  return false;
}

/* Examine the statement pointed to by GSIP and figure out what
   interesting side effects it has.  */
static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple *stmt = gsi_stmt (*gsip);

  if (is_gimple_debug (stmt))
    return;

  /* Do consider clobbers as side effects before IPA, so that we would
     rather inline C++ destructors and keep clobber semantics than
     eliminate them.

     Similar logic is in ipa-modref.

     TODO: We may get smarter during early optimizations on these and let
     functions containing only clobbers be optimized more.  This is a common
     case of C++ destructors.  */

  if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, "  scanning: ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  if (gimple_has_volatile_ops (stmt)
      && !gimple_clobber_p (stmt))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile stmt is not const/pure\n");
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local,
			    ipa ? check_ipa_load : check_load,
			    ipa ? check_ipa_store : check_store);

  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (cfun, stmt))
    {
      if (cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw; looping\n");
	  local->looping = true;
	}
      if (stmt_can_throw_external (cfun, stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw externally\n");
	  local->can_throw = true;
	}
      else
	if (dump_file)
	  fprintf (dump_file, "    can throw\n");
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, as_a <gcall *> (stmt), ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
	/* Target of long jump.  */
	{
          if (dump_file)
            fprintf (dump_file, "    nonlocal label is not const/pure\n");
	  local->pure_const_state = IPA_NEITHER;
	}
      break;
    case GIMPLE_ASM:
      if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, "    memory asm clobber is not const/pure\n");
	  /* Abandon all hope, ye who enter here.  */
	  local->pure_const_state = IPA_NEITHER;
	  local->can_free = true;
	}
      if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, "    volatile is not const/pure\n");
	  /* Abandon all hope, ye who enter here.  */
	  local->pure_const_state = IPA_NEITHER;
	  local->looping = true;
	  local->can_free = true;
	}
      return;
    default:
      break;
    }
}

/* Check that RETVAL is used only in STMT and in comparisons against 0.
   RETVAL is the return value of the function and STMT is the return
   statement.  */

static bool
check_retval_uses (tree retval, gimple *stmt)
{
  imm_use_iterator use_iter;
  gimple *use_stmt;

  FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, retval)
    if (gcond *cond = dyn_cast<gcond *> (use_stmt))
      {
	tree op2 = gimple_cond_rhs (cond);
	if (!integer_zerop (op2))
	  return false;
      }
    else if (gassign *ga = dyn_cast<gassign *> (use_stmt))
      {
	enum tree_code code = gimple_assign_rhs_code (ga);
	if (TREE_CODE_CLASS (code) != tcc_comparison)
	  return false;
	if (!integer_zerop (gimple_assign_rhs2 (ga)))
	  return false;
      }
    else if (is_gimple_debug (use_stmt))
      ;
    else if (use_stmt != stmt)
      return false;

  return true;
}

/* malloc_candidate_p() checks if FUN can possibly be annotated with the
   malloc attribute.  Currently this function does a very conservative
   analysis.  FUN is considered to be a candidate if
   1) It returns a value of pointer type.
   2) SSA_NAME_DEF_STMT (return_value) is either a function call or
      a phi, and each element of the phi is either NULL or
      SSA_NAME_DEF_STMT(element) is a function call.
   3) The return value has immediate uses only within comparisons (gcond
      or gassign) and the return stmt (and likewise a phi arg has
      immediate uses only within a comparison or the phi stmt).  */
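
/* For example, a wrapper such as

     void *
     my_alloc (size_t n)
     {
       void *p = malloc (n);
       if (!p)
	 abort ();
       return p;
     }

   qualifies (MY_ALLOC is a hypothetical illustration): the returned
   pointer comes from a call to a function with the malloc attribute,
   and its only other use is a comparison against 0.  */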

#define DUMP_AND_RETURN(reason)  \
{  \
  if (dump_file && (dump_flags & TDF_DETAILS))  \
    fprintf (dump_file, "\n%s is not a malloc candidate, reason: %s\n", \
	     (node->dump_name ()), (reason));  \
  return false;  \
}

static bool
malloc_candidate_p_1 (function *fun, tree retval, gimple *ret_stmt, bool ipa,
		      bitmap visited)
{
  cgraph_node *node = cgraph_node::get_create (fun->decl);
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (retval)))
    return true;

  if (!check_retval_uses (retval, ret_stmt))
    DUMP_AND_RETURN("Return value has uses outside return stmt"
		    " and comparisons against 0.")

  gimple *def = SSA_NAME_DEF_STMT (retval);

  if (gcall *call_stmt = dyn_cast<gcall *> (def))
    {
      tree callee_decl = gimple_call_fndecl (call_stmt);
      if (!callee_decl)
	return false;

      if (!ipa && !DECL_IS_MALLOC (callee_decl))
	DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
			" non-ipa mode.")

      cgraph_edge *cs = node->get_edge (call_stmt);
      if (cs)
	{
	  ipa_call_summary *es = ipa_call_summaries->get_create (cs);
	  es->is_return_callee_uncaptured = true;
	}
    }
  else if (gphi *phi = dyn_cast<gphi *> (def))
    {
      bool all_args_zero = true;
      for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
	{
	  tree arg = gimple_phi_arg_def (phi, i);
	  if (integer_zerop (arg))
	    continue;

	  all_args_zero = false;
	  if (TREE_CODE (arg) != SSA_NAME)
	    DUMP_AND_RETURN ("phi arg is not SSA_NAME.");
	  if (!check_retval_uses (arg, phi))
	    DUMP_AND_RETURN ("phi arg has uses outside phi"
			     " and comparisons against 0.")

	  gimple *arg_def = SSA_NAME_DEF_STMT (arg);
	  if (is_a<gphi *> (arg_def))
	    {
	      if (!malloc_candidate_p_1 (fun, arg, phi, ipa, visited))
		DUMP_AND_RETURN ("nested phi fail")
	      continue;
	    }

	  gcall *call_stmt = dyn_cast<gcall *> (arg_def);
	  if (!call_stmt)
	    DUMP_AND_RETURN ("phi arg is not a call_stmt.")

	  tree callee_decl = gimple_call_fndecl (call_stmt);
	  if (!callee_decl)
	    return false;
	  if (!ipa && !DECL_IS_MALLOC (callee_decl))
	    DUMP_AND_RETURN("callee_decl does not have malloc attribute"
			    " for non-ipa mode.")

	  cgraph_edge *cs = node->get_edge (call_stmt);
	  if (cs)
	    {
	      ipa_call_summary *es = ipa_call_summaries->get_create (cs);
	      es->is_return_callee_uncaptured = true;
	    }
	}

      if (all_args_zero)
	DUMP_AND_RETURN ("Return value is a phi with all args equal to 0.")
    }
  else
    DUMP_AND_RETURN("def_stmt of return value is not a call or phi-stmt.")

  return true;
}

static bool
malloc_candidate_p (function *fun, bool ipa)
{
  basic_block exit_block = EXIT_BLOCK_PTR_FOR_FN (fun);
  edge e;
  edge_iterator ei;
  cgraph_node *node = cgraph_node::get_create (fun->decl);

  if (EDGE_COUNT (exit_block->preds) == 0
      || !flag_delete_null_pointer_checks)
    return false;

  auto_bitmap visited;
  FOR_EACH_EDGE (e, ei, exit_block->preds)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (e->src);
      greturn *ret_stmt = dyn_cast<greturn *> (gsi_stmt (gsi));

      if (!ret_stmt)
	return false;

      tree retval = gimple_return_retval (ret_stmt);
      if (!retval)
	DUMP_AND_RETURN("No return value.")

      if (TREE_CODE (retval) != SSA_NAME
	  || TREE_CODE (TREE_TYPE (retval)) != POINTER_TYPE)
	DUMP_AND_RETURN("Return value is not SSA_NAME or not a pointer type.")

      if (!malloc_candidate_p_1 (fun, retval, ret_stmt, ipa, visited))
	return false;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nFound %s to be candidate for malloc attribute\n",
	     IDENTIFIER_POINTER (DECL_NAME (fun->decl)));
  return true;
}

#undef DUMP_AND_RETURN

/* Return true if the function is known to be finite.  */

bool
finite_function_p ()
{
  /* Const functions cannot have back edges (an indication of a
     possible infinite loop side effect).  */
  bool finite = true;
  if (mark_dfs_back_edges ())
    {
      /* Preheaders are needed for SCEV to work.
	 Simple latches and recorded exits improve the chances that a
	 loop will be proved finite in testcases such as loop-15.c
	 and loop-24.c.  */
      loop_optimizer_init (LOOPS_HAVE_PREHEADERS
			   | LOOPS_HAVE_SIMPLE_LATCHES
			   | LOOPS_HAVE_RECORDED_EXITS);
      if (dump_file && (dump_flags & TDF_DETAILS))
	flow_loops_dump (dump_file, NULL, 0);
      if (mark_irreducible_loops ())
	{
	  if (dump_file)
	    fprintf (dump_file, "    has irreducible loops\n");
	  finite = false;
	}
      else
	{
	  scev_initialize ();
	  for (auto loop : loops_list (cfun, 0))
	    if (!finite_loop_p (loop))
	      {
		if (dump_file)
		  fprintf (dump_file, "    cannot prove finiteness of "
			   "loop %i\n", loop->num);
		finite = false;
		break;
	      }
	  scev_finalize ();
	}
      loop_optimizer_finalize ();
    }
  return finite;
}
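
/* For example, a counted loop such as "for (i = 0; i < n; i++)" can be
   proved finite by the number-of-iterations analysis, whereas
   "for (;;);" leaves FINITE false.  */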

/* This is the main routine for finding the reference patterns for
   global variables within a function FN.  */

static funct_state
analyze_function (struct cgraph_node *fn, bool ipa)
{
  tree decl = fn->decl;
  funct_state l;
  basic_block this_block;

  l = XCNEW (class funct_state_d);
  l->pure_const_state = IPA_CONST;
  l->state_previously_known = IPA_NEITHER;
  l->looping_previously_known = true;
  l->looping = false;
  l->can_throw = false;
  l->can_free = false;
  state_from_flags (&l->state_previously_known, &l->looping_previously_known,
		    flags_from_decl_or_type (fn->decl),
		    fn->cannot_return_p ());

  if (fn->thunk || fn->alias)
    {
      /* Thunks get propagated through, so nothing interesting happens.  */
      gcc_assert (ipa);
      if (fn->thunk && thunk_info::get (fn)->virtual_offset_p)
	l->pure_const_state = IPA_NEITHER;
      return l;
    }

  if (dump_file)
    {
      fprintf (dump_file, "\n\n local analysis of %s\n ",
	       fn->dump_name ());
    }

  push_cfun (DECL_STRUCT_FUNCTION (decl));

  FOR_EACH_BB_FN (this_block, cfun)
    {
      gimple_stmt_iterator gsi;
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      for (gsi = gsi_start_bb (this_block);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  /* A NULL memory access terminates the BB.  These accesses are
	     known to trigger undefined behaviour.  gimple-ssa-isolate-paths
	     turns them into volatile accesses and adds a builtin_trap call
	     which would confuse us otherwise.  */
	  if (infer_nonnull_range_by_dereference (gsi_stmt (gsi),
						  null_pointer_node))
	    {
	      if (dump_file)
		fprintf (dump_file, "  NULL memory access; terminating BB%s\n",
			 flag_non_call_exceptions ? "; looping" : "");
	      if (flag_non_call_exceptions)
		{
		  l->looping = true;
		  if (stmt_can_throw_external (cfun, gsi_stmt (gsi)))
		    {
		      if (dump_file)
			fprintf (dump_file, "    can throw externally\n");
		      l->can_throw = true;
		    }
		}
	      break;
	    }
	  check_stmt (&gsi, l, ipa);
	  if (l->pure_const_state == IPA_NEITHER
	      && l->looping
	      && l->can_throw
	      && l->can_free)
	    goto end;
	}
    }

end:
  if (l->pure_const_state != IPA_NEITHER
      && !l->looping
      && !finite_function_p ())
    l->looping = true;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "    checking previously known:");

  better_state (&l->pure_const_state, &l->looping,
		l->state_previously_known,
		l->looping_previously_known);
  if (TREE_NOTHROW (decl))
    l->can_throw = false;

  l->malloc_state = STATE_MALLOC_BOTTOM;
  if (DECL_IS_MALLOC (decl))
    l->malloc_state = STATE_MALLOC;
  else if (ipa && malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), true))
    l->malloc_state = STATE_MALLOC_TOP;
  else if (malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), false))
    l->malloc_state = STATE_MALLOC;

  pop_cfun ();
  if (dump_file)
    {
      if (l->looping)
        fprintf (dump_file, "Function is locally looping.\n");
      if (l->can_throw)
        fprintf (dump_file, "Function is locally throwing.\n");
      if (l->pure_const_state == IPA_CONST)
        fprintf (dump_file, "Function is locally const.\n");
      if (l->pure_const_state == IPA_PURE)
        fprintf (dump_file, "Function is locally pure.\n");
      if (l->can_free)
	fprintf (dump_file, "Function can locally free.\n");
      if (l->malloc_state == STATE_MALLOC)
	fprintf (dump_file, "Function is locally malloc.\n");
    }
  return l;
}

void
funct_state_summary_t::insert (cgraph_node *node, funct_state_d *state)
{
  /* There are some shared nodes, in particular the initializers on
     static declarations.  We do not need to scan them more than once
     since all we would be interested in are the address-of
     operations.  */
  if (opt_for_fn (node->decl, flag_ipa_pure_const))
    {
      funct_state_d *a = analyze_function (node, true);
      new (state) funct_state_d (*a);
      free (a);
    }
  else
    /* Do not keep stale summaries.  */
    funct_state_summaries->remove (node);
}

/* Called when a new clone is inserted into the callgraph late.  */

void
funct_state_summary_t::duplicate (cgraph_node *, cgraph_node *dst,
				  funct_state_d *src_data,
				  funct_state_d *dst_data)
{
  new (dst_data) funct_state_d (*src_data);
  if (dst_data->malloc_state == STATE_MALLOC
      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (dst->decl))))
    dst_data->malloc_state = STATE_MALLOC_BOTTOM;
}


void
pass_ipa_pure_const::
register_hooks (void)
{
  if (init_p)
    return;

  init_p = true;

  funct_state_summaries = new funct_state_summary_t (symtab);
}


/* Analyze each function in the cgraph to see if it is locally PURE or
   CONST.  */

static void
pure_const_generate_summary (void)
{
  struct cgraph_node *node;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  /* Process all of the functions.

     We process AVAIL_INTERPOSABLE functions.  We cannot use the results
     by default, but the info can be used at LTO with -fwhole-program or
     when a function gets cloned and the clone is AVAILABLE.  */

  FOR_EACH_DEFINED_FUNCTION (node)
    if (opt_for_fn (node->decl, flag_ipa_pure_const))
      {
	funct_state_d *a = analyze_function (node, true);
	new (funct_state_summaries->get_create (node)) funct_state_d (*a);
	free (a);
      }
}


/* Serialize the ipa info for lto.  */

static void
pure_const_write_summary (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_ipa_pure_const);
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && funct_state_summaries->exists (node))
	count++;
    }

  streamer_write_uhwi_stream (ob->main_stream, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      funct_state_d *fs = funct_state_summaries->get (node);
      if (node->definition && fs != NULL)
	{
	  struct bitpack_d bp;
	  int node_ref;
	  lto_symtab_encoder_t encoder;

	  encoder = ob->decl_state->symtab_node_encoder;
	  node_ref = lto_symtab_encoder_encode (encoder, node);
	  streamer_write_uhwi_stream (ob->main_stream, node_ref);

	  /* Note that the flags will need to be read in the opposite
	     order from that in which we are pushing the bitflags into
	     FLAGS.  */
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, fs->pure_const_state, 2);
	  bp_pack_value (&bp, fs->state_previously_known, 2);
	  bp_pack_value (&bp, fs->looping_previously_known, 1);
	  bp_pack_value (&bp, fs->looping, 1);
	  bp_pack_value (&bp, fs->can_throw, 1);
	  bp_pack_value (&bp, fs->can_free, 1);
	  bp_pack_value (&bp, fs->malloc_state, 2);
	  streamer_write_bitpack (&bp);
	}
    }

  lto_destroy_simple_output_block (ob);
}


/* Deserialize the ipa info for lto.  */

static void
pure_const_read_summary (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      class lto_input_block *ib
	= lto_create_simple_input_block (file_data,
					 LTO_section_ipa_pure_const,
					 &data, &len);
      if (ib)
	{
	  unsigned int i;
	  unsigned int count = streamer_read_uhwi (ib);

	  for (i = 0; i < count; i++)
	    {
	      unsigned int index;
	      struct cgraph_node *node;
	      struct bitpack_d bp;
	      funct_state fs;
	      lto_symtab_encoder_t encoder;

	      index = streamer_read_uhwi (ib);
	      encoder = file_data->symtab_node_encoder;
	      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
									index));

	      fs = funct_state_summaries->get_create (node);
	      /* Note that the flags must be read in the opposite
		 order from that in which they were written (the
		 bitflags were pushed into FLAGS).  */
	      bp = streamer_read_bitpack (ib);
	      fs->pure_const_state
			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->state_previously_known
			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->looping_previously_known = bp_unpack_value (&bp, 1);
	      fs->looping = bp_unpack_value (&bp, 1);
	      fs->can_throw = bp_unpack_value (&bp, 1);
	      fs->can_free = bp_unpack_value (&bp, 1);
	      fs->malloc_state
			= (enum malloc_state_e) bp_unpack_value (&bp, 2);

	      if (dump_file)
		{
		  int flags = flags_from_decl_or_type (node->decl);
		  fprintf (dump_file, "Read info for %s ", node->dump_name ());
		  if (flags & ECF_CONST)
		    fprintf (dump_file, " const");
		  if (flags & ECF_PURE)
		    fprintf (dump_file, " pure");
		  if (flags & ECF_NOTHROW)
		    fprintf (dump_file, " nothrow");
		  fprintf (dump_file, "\n  pure const state: %s\n",
			   pure_const_names[fs->pure_const_state]);
		  fprintf (dump_file, "  previously known state: %s\n",
			   pure_const_names[fs->state_previously_known]);
		  if (fs->looping)
		    fprintf (dump_file, "  function is locally looping\n");
		  if (fs->looping_previously_known)
		    fprintf (dump_file, "  function is previously known looping\n");
		  if (fs->can_throw)
		    fprintf (dump_file, "  function is locally throwing\n");
		  if (fs->can_free)
		    fprintf (dump_file, "  function can locally free\n");
		  fprintf (dump_file, "\n malloc state: %s\n",
			   malloc_state_names[fs->malloc_state]);
		}
	    }

	  lto_destroy_simple_input_block (file_data,
					  LTO_section_ipa_pure_const,
					  ib, data, len);
	}
    }
}
1390 
1391 /* We only propagate across edges that can throw externally and their callee
1392    is not interposable.  */
1393 
1394 static bool
ignore_edge_for_nothrow(struct cgraph_edge * e)1395 ignore_edge_for_nothrow (struct cgraph_edge *e)
1396 {
1397   if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1398     return true;
1399 
1400   enum availability avail;
1401   cgraph_node *ultimate_target
1402     = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
1403   if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (ultimate_target->decl))
1404     return true;
1405   return ((opt_for_fn (e->callee->decl, flag_non_call_exceptions)
1406 	   && !e->callee->binds_to_current_def_p (e->caller))
1407 	  || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
1408 	  || !opt_for_fn (ultimate_target->decl, flag_ipa_pure_const));
1409 }
1410 
1411 /* Return true if NODE is self recursive function.
1412    Indirectly recursive functions appears as non-trivial strongly
1413    connected components, so we need to care about self recursion
1414    only.  */
1415 
1416 static bool
self_recursive_p(struct cgraph_node * node)1417 self_recursive_p (struct cgraph_node *node)
1418 {
1419   struct cgraph_edge *e;
1420   for (e = node->callees; e; e = e->next_callee)
1421     if (e->callee->function_symbol () == node)
1422       return true;
1423   return false;
1424 }
1425 
1426 /* Return true if N is cdtor that is not const or pure.  In this case we may
1427    need to remove unreachable function if it is marked const/pure.  */
1428 
1429 static bool
cdtor_p(cgraph_node * n,void *)1430 cdtor_p (cgraph_node *n, void *)
1431 {
1432   if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
1433     return ((!TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl))
1434 	    || DECL_LOOPING_CONST_OR_PURE_P (n->decl));
1435   return false;
1436 }
1437 
1438 /* Skip edges from and to nodes without ipa_pure_const enabled.
1439    Ignore not available symbols.  */
1440 
1441 static bool
ignore_edge_for_pure_const(struct cgraph_edge * e)1442 ignore_edge_for_pure_const (struct cgraph_edge *e)
1443 {
1444   enum availability avail;
1445   cgraph_node *ultimate_target
1446     = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
1447 
1448   return (avail <= AVAIL_INTERPOSABLE
1449 	  || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
1450 	  || !opt_for_fn (ultimate_target->decl,
1451 			  flag_ipa_pure_const));
1452 }
1453 
1454 /* Return true if function should be skipped for local pure const analysis.  */
1455 
1456 static bool
skip_function_for_local_pure_const(struct cgraph_node * node)1457 skip_function_for_local_pure_const (struct cgraph_node *node)
1458 {
1459   /* Because we do not schedule pass_fixup_cfg over whole program after early
1460      optimizations we must not promote functions that are called by already
1461      processed functions.  */
1462 
1463   if (function_called_by_processed_nodes_p ())
1464     {
1465       if (dump_file)
1466 	fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
1467       return true;
1468     }
1469   /* Save some work and do not analyze functions which are interposable and
1470      do not have any non-interposable aliases.  */
1471   if (node->get_availability () <= AVAIL_INTERPOSABLE
1472       && !flag_lto
1473       && !node->has_aliases_p ())
1474     {
1475       if (dump_file)
1476 	fprintf (dump_file,
1477 		 "Function is interposable; not analyzing.\n");
1478       return true;
1479     }
1480   return false;
1481 }
1482 
1483 /* Make function const and output warning.  If LOCAL is true,
1484    return true if anything changed.  Otherwise return true if
1485    we may have introduced removale ctors.  */
1486 
1487 bool
ipa_make_function_const(struct cgraph_node * node,bool looping,bool local)1488 ipa_make_function_const (struct cgraph_node *node, bool looping, bool local)
1489 {
1490   bool cdtor = false;
1491 
1492   if (TREE_READONLY (node->decl)
1493       && (looping || !DECL_LOOPING_CONST_OR_PURE_P (node->decl)))
1494     return false;
1495   warn_function_const (node->decl, !looping);
1496   if (local && skip_function_for_local_pure_const (node))
1497     return false;
1498   if (dump_file)
1499     fprintf (dump_file, "Function found to be %sconst: %s\n",
1500 	     looping ? "looping " : "",
1501 	     node->dump_name ());
1502   if (!local && !looping)
1503     cdtor = node->call_for_symbol_and_aliases (cdtor_p, NULL, true);
1504   if (!dbg_cnt (ipa_attr))
1505     return false;
1506   if (node->set_const_flag (true, looping))
1507     {
1508       if (dump_file)
1509 	fprintf (dump_file,
1510 		 "Declaration updated to be %sconst: %s\n",
1511 		 looping ? "looping " : "",
1512 		 node->dump_name ());
1513       if (local)
1514 	return true;
1515       return cdtor;
1516     }
1517   return false;
1518 }
1519 
1520 /* Make function const and output warning.  If LOCAL is true,
1521    return true if anything changed.  Otherwise return true if
1522    we may have introduced removale ctors.  */
1523 
1524 bool
ipa_make_function_pure(struct cgraph_node * node,bool looping,bool local)1525 ipa_make_function_pure (struct cgraph_node *node, bool looping, bool local)
1526 {
1527   bool cdtor = false;
1528 
1529   if (TREE_READONLY (node->decl)
1530       || (DECL_PURE_P (node->decl)
1531 	  && (looping || !DECL_LOOPING_CONST_OR_PURE_P (node->decl))))
1532     return false;
1533   warn_function_pure (node->decl, !looping);
1534   if (local && skip_function_for_local_pure_const (node))
1535     return false;
1536   if (dump_file)
1537     fprintf (dump_file, "Function found to be %spure: %s\n",
1538 	     looping ? "looping " : "",
1539 	     node->dump_name ());
1540   if (!local && !looping)
1541     cdtor = node->call_for_symbol_and_aliases (cdtor_p, NULL, true);
1542   if (!dbg_cnt (ipa_attr))
1543     return false;
1544   if (node->set_pure_flag (true, looping))
1545     {
1546       if (dump_file)
1547 	fprintf (dump_file,
1548 		 "Declaration updated to be %spure: %s\n",
1549 		 looping ? "looping " : "",
1550 		 node->dump_name ());
1551       if (local)
1552 	return true;
1553       return cdtor;
1554     }
1555   return false;
1556 }
1557 
1558 /* Produce transitive closure over the callgraph and compute pure/const
1559    attributes.  */
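
/* For example, two mutually recursive functions form one non-trivial
   strongly connected component; both receive the same propagated state,
   and the cycle itself makes them looping (see COUNT > 1 below).  */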
1560 
1561 static bool
propagate_pure_const(void)1562 propagate_pure_const (void)
1563 {
1564   struct cgraph_node *node;
1565   struct cgraph_node *w;
1566   struct cgraph_node **order =
1567     XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1568   int order_pos;
1569   int i;
1570   struct ipa_dfs_info * w_info;
1571   bool remove_p = false;
1572 
1573   order_pos = ipa_reduced_postorder (order, true,
1574 				     ignore_edge_for_pure_const);
1575   if (dump_file)
1576     {
1577       cgraph_node::dump_cgraph (dump_file);
1578       ipa_print_order (dump_file, "reduced", order, order_pos);
1579     }
1580 
1581   /* Propagate the local information through the call graph to produce
1582      the global information.  All the nodes within a cycle will have
1583      the same info so we collapse cycles first.  Then we can do the
1584      propagation in one pass from the leaves to the roots.  */
1585   for (i = 0; i < order_pos; i++ )
1586     {
1587       enum pure_const_state_e pure_const_state = IPA_CONST;
1588       bool looping = false;
1589       int count = 0;
1590       node = order[i];
1591 
1592       if (node->alias)
1593 	continue;
1594 
1595       if (dump_file && (dump_flags & TDF_DETAILS))
1596 	fprintf (dump_file, "Starting cycle\n");
1597 
1598       /* Find the worst state for any node in the cycle.  */
1599       w = node;
1600       while (w && pure_const_state != IPA_NEITHER)
1601 	{
1602 	  struct cgraph_edge *e;
1603 	  struct cgraph_edge *ie;
1604 	  int i;
1605 	  struct ipa_ref *ref = NULL;
1606 
1607 	  funct_state w_l = funct_state_summaries->get_create (w);
1608 	  if (dump_file && (dump_flags & TDF_DETAILS))
1609 	    fprintf (dump_file, "  Visiting %s state:%s looping %i\n",
1610 		     w->dump_name (),
1611 		     pure_const_names[w_l->pure_const_state],
1612 		     w_l->looping);
1613 
1614 	  /* First merge in function body properties.
1615 	     We are safe to pass NULL as FROM and TO because we will take care
1616 	     of possible interposition when walking callees.  */
1617 	  worse_state (&pure_const_state, &looping,
1618 		       w_l->pure_const_state, w_l->looping,
1619 		       NULL, NULL);
1620 	  if (pure_const_state == IPA_NEITHER)
1621 	    break;
1622 
1623 	  count++;
1624 
1625 	  /* We consider recursive cycles as possibly infinite.
1626 	     This might be relaxed since infinite recursion leads to stack
1627 	     overflow.  */
1628 	  if (count > 1)
1629 	    looping = true;
1630 
1631 	  /* Now walk the edges and merge in callee properties.  */
1632 	  for (e = w->callees; e && pure_const_state != IPA_NEITHER;
1633 	       e = e->next_callee)
1634 	    {
1635 	      enum availability avail;
1636 	      struct cgraph_node *y = e->callee->
1637 				function_or_virtual_thunk_symbol (&avail,
1638 								  e->caller);
1639 	      enum pure_const_state_e edge_state = IPA_CONST;
1640 	      bool edge_looping = false;
1641 
1642 	      if (e->recursive_p ())
1643 		looping = true;
1644 
1645 	      if (dump_file && (dump_flags & TDF_DETAILS))
1646 		{
1647 		  fprintf (dump_file, "    Call to %s",
1648 			   e->callee->dump_name ());
1649 		}
1650 	      if (avail > AVAIL_INTERPOSABLE)
1651 		{
1652 		  funct_state y_l = funct_state_summaries->get_create (y);
1653 
1654 		  if (dump_file && (dump_flags & TDF_DETAILS))
1655 		    {
1656 		      fprintf (dump_file,
1657 			       " state:%s looping:%i\n",
1658 			       pure_const_names[y_l->pure_const_state],
1659 			       y_l->looping);
1660 		    }
1661 		  if (y_l->pure_const_state > IPA_PURE
1662 		      && e->cannot_lead_to_return_p ())
1663 		    {
1664 		      if (dump_file && (dump_flags & TDF_DETAILS))
1665 			fprintf (dump_file,
1666 				 "        Ignoring side effects"
1667 				 " -> pure, looping\n");
1668 		      edge_state = IPA_PURE;
1669 		      edge_looping = true;
1670 		    }
1671 		  else
1672 		    {
1673 		      edge_state = y_l->pure_const_state;
1674 		      edge_looping = y_l->looping;
1675 		    }
1676 		}
1677 	      else if (builtin_safe_for_const_function_p (&edge_looping,
1678 							   y->decl))
1679 		edge_state = IPA_CONST;
1680 	      else
1681 		state_from_flags (&edge_state, &edge_looping,
1682 				  flags_from_decl_or_type (y->decl),
1683 				  e->cannot_lead_to_return_p ());
1684 
1685 	      /* Merge the results with what we already know.  */
1686 	      better_state (&edge_state, &edge_looping,
1687 			    w_l->state_previously_known,
1688 			    w_l->looping_previously_known);
1689 	      worse_state (&pure_const_state, &looping,
1690 			   edge_state, edge_looping, e->caller, e->callee);
1691 	      if (pure_const_state == IPA_NEITHER)
1692 	        break;
1693 	    }
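	  /* Order of the merge above (illustrative summary): better_state
	     first clips the callee's effect to what is already known
	     about W from its declaration, e.g. a call into an IPA_NEITHER
	     body from a function declared __attribute__ ((pure)) is
	     clipped to IPA_PURE, and only then does worse_state fold the
	     result into the running state of the whole cycle.  */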
1694 
1695 	  /* Now process the indirect calls.  */
1696           for (ie = w->indirect_calls;
1697 	       ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
1698 	    {
1699 	      enum pure_const_state_e edge_state = IPA_CONST;
1700 	      bool edge_looping = false;
1701 
1702 	      if (dump_file && (dump_flags & TDF_DETAILS))
1703 		fprintf (dump_file, "    Indirect call");
1704 	      state_from_flags (&edge_state, &edge_looping,
1705 			        ie->indirect_info->ecf_flags,
1706 				ie->cannot_lead_to_return_p ());
1707 	      /* Merge the results with what we already know.  */
1708 	      better_state (&edge_state, &edge_looping,
1709 			    w_l->state_previously_known,
1710 			    w_l->looping_previously_known);
1711 	      worse_state (&pure_const_state, &looping,
1712 			   edge_state, edge_looping, NULL, NULL);
1713 	      if (pure_const_state == IPA_NEITHER)
1714 	        break;
1715 	    }
1716 
1717 	  /* And finally all loads and stores.  */
1718 	  for (i = 0; w->iterate_reference (i, ref)
1719 	       && pure_const_state != IPA_NEITHER; i++)
1720 	    {
1721 	      enum pure_const_state_e ref_state = IPA_CONST;
1722 	      bool ref_looping = false;
1723 	      switch (ref->use)
1724 		{
1725 		case IPA_REF_LOAD:
1726 		  /* readonly reads are safe.  */
1727 		  if (TREE_READONLY (ref->referred->decl))
1728 		    break;
1729 		  if (dump_file && (dump_flags & TDF_DETAILS))
1730 		    fprintf (dump_file, "    nonreadonly global var read\n");
1731 		  ref_state = IPA_PURE;
1732 		  break;
1733 		case IPA_REF_STORE:
1734 		  if (ref->cannot_lead_to_return ())
1735 		    break;
1736 		  ref_state = IPA_NEITHER;
1737 		  if (dump_file && (dump_flags & TDF_DETAILS))
1738 		    fprintf (dump_file, "    global var write\n");
1739 		  break;
1740 		case IPA_REF_ADDR:
1741 		  break;
1742 		default:
1743 		  gcc_unreachable ();
1744 		}
1745 	      better_state (&ref_state, &ref_looping,
1746 			    w_l->state_previously_known,
1747 			    w_l->looping_previously_known);
1748 	      worse_state (&pure_const_state, &looping,
1749 			   ref_state, ref_looping, NULL, NULL);
1750 	      if (pure_const_state == IPA_NEITHER)
1751 		break;
1752 	    }
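	  /* Reference effects in brief (illustrative): loading from a
	     non-readonly global such as "extern int n; ... = n;" keeps
	     the function at most IPA_PURE, a store like "n = 1;" forces
	     IPA_NEITHER, and merely taking the address "&n" is free.  */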
1753 	  w_info = (struct ipa_dfs_info *) w->aux;
1754 	  w = w_info->next_cycle;
1755 	}
1756       if (dump_file && (dump_flags & TDF_DETAILS))
1757 	fprintf (dump_file, "Result %s looping %i\n",
1758 		 pure_const_names [pure_const_state],
1759 		 looping);
1760 
1761       /* Find the worst state of can_free for any node in the cycle.  */
1762       bool can_free = false;
1763       w = node;
1764       while (w && !can_free)
1765 	{
1766 	  struct cgraph_edge *e;
1767 	  funct_state w_l = funct_state_summaries->get (w);
1768 
1769 	  if (w_l->can_free
1770 	      || w->get_availability () == AVAIL_INTERPOSABLE
1771 	      || w->indirect_calls)
1772 	    can_free = true;
1773 
1774 	  for (e = w->callees; e && !can_free; e = e->next_callee)
1775 	    {
1776 	      enum availability avail;
1777 	      struct cgraph_node *y = e->callee->
1778 				function_or_virtual_thunk_symbol (&avail,
1779 								  e->caller);
1780 
1781 	      if (avail > AVAIL_INTERPOSABLE)
1782 		can_free = funct_state_summaries->get (y)->can_free;
1783 	      else
1784 		can_free = true;
1785 	    }
1786 	  w_info = (struct ipa_dfs_info *) w->aux;
1787 	  w = w_info->next_cycle;
1788 	}
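      /* Illustrative consequence: if any member of the cycle calls free,
	 makes an indirect call, or calls an interposable function that
	 might free, the whole region is treated as possibly freeing and
	 nonfreeing_fn is cleared on every member below.  */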
1789 
1790       /* Copy back the region's pure_const_state, which is shared by
1791 	 all nodes in the region.  */
1792       w = node;
1793       while (w)
1794 	{
1795 	  funct_state w_l = funct_state_summaries->get (w);
1796 	  enum pure_const_state_e this_state = pure_const_state;
1797 	  bool this_looping = looping;
1798 
1799 	  w_l->can_free = can_free;
1800 	  w->nonfreeing_fn = !can_free;
1801 	  if (!can_free && dump_file)
1802 	    fprintf (dump_file, "Function found not to call free: %s\n",
1803 		     w->dump_name ());
1804 
1805 	  if (w_l->state_previously_known != IPA_NEITHER
1806 	      && this_state > w_l->state_previously_known)
1807 	    {
1808 	      if (this_state == IPA_NEITHER)
1809 		this_looping = w_l->looping_previously_known;
1810 	      this_state = w_l->state_previously_known;
1811 	    }
1812 	  if (!this_looping && self_recursive_p (w))
1813 	    this_looping = true;
1814 	  if (!w_l->looping_previously_known)
1815 	    this_looping = false;
1816 
1817 	  /* All nodes within a cycle share the same info.  */
1818 	  w_l->pure_const_state = this_state;
1819 	  w_l->looping = this_looping;
1820 
1821 	  /* Inline clones share declaration with their offline copies;
1822 	     do not modify their declarations since the offline copy may
1823 	     be different.  */
1824 	  if (!w->inlined_to)
1825 	    switch (this_state)
1826 	      {
1827 	      case IPA_CONST:
1828 		remove_p |= ipa_make_function_const (w, this_looping, false);
1829 		break;
1830 
1831 	      case IPA_PURE:
1832 		remove_p |= ipa_make_function_pure (w, this_looping, false);
1833 		break;
1834 
1835 	      default:
1836 		break;
1837 	      }
1838 	  w_info = (struct ipa_dfs_info *) w->aux;
1839 	  w = w_info->next_cycle;
1840 	}
1841     }
1842 
1843   ipa_free_postorder_info ();
1844   free (order);
1845   return remove_p;
1846 }
1847 
1848 /* Produce transitive closure over the callgraph and compute nothrow
1849    attributes.  */
1850 
1851 static void
1852 propagate_nothrow (void)
1853 {
1854   struct cgraph_node *node;
1855   struct cgraph_node *w;
1856   struct cgraph_node **order =
1857     XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1858   int order_pos;
1859   int i;
1860   struct ipa_dfs_info * w_info;
1861 
1862   order_pos = ipa_reduced_postorder (order, true,
1863 				     ignore_edge_for_nothrow);
1864   if (dump_file)
1865     {
1866       cgraph_node::dump_cgraph (dump_file);
1867       ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
1868     }
1869 
1870   /* Propagate the local information through the call graph to produce
1871      the global information.  All the nodes within a cycle will have
1872      the same info so we collapse cycles first.  Then we can do the
1873      propagation in one pass from the leaves to the roots.  */
1874   for (i = 0; i < order_pos; i++)
1875     {
1876       bool can_throw = false;
1877       node = order[i];
1878 
1879       if (node->alias)
1880 	continue;
1881 
1882       /* Find the worst state for any node in the cycle.  */
1883       w = node;
1884       while (w && !can_throw)
1885 	{
1886 	  struct cgraph_edge *e, *ie;
1887 
1888 	  if (!TREE_NOTHROW (w->decl))
1889 	    {
1890 	      funct_state w_l = funct_state_summaries->get_create (w);
1891 
1892 	      if (w_l->can_throw
1893 		  || w->get_availability () == AVAIL_INTERPOSABLE)
1894 		can_throw = true;
1895 
1896 	      for (e = w->callees; e && !can_throw; e = e->next_callee)
1897 		{
1898 		  enum availability avail;
1899 
1900 		  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1901 		    continue;
1902 
1903 		  struct cgraph_node *y = e->callee->
1904 				   function_or_virtual_thunk_symbol (&avail,
1905 								     e->caller);
1906 
1907 		  /* We can use info about the callee only if we know it
1908 		     cannot be interposed.
1909 		     When callee is compiled with non-call exceptions we also
1910 		     must check that the declaration is bound to current
1911 		     body as other semantically equivalent body may still
1912 		     throw.  */
1913 		  if (avail <= AVAIL_INTERPOSABLE
1914 		      || (!TREE_NOTHROW (y->decl)
1915 			  && (funct_state_summaries->get_create (y)->can_throw
1916 			      || (opt_for_fn (y->decl, flag_non_call_exceptions)
1917 				  && !e->callee->binds_to_current_def_p (w)))))
1918 		    can_throw = true;
1919 		}
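	      /* Illustrative hazard behind the check above: with
		 -fnon-call-exceptions even a body as simple as
		 "int f (int *p) { return *p; }" may throw from the
		 trapping load, so a semantically equivalent replacement
		 body cannot be assumed nothrow unless the declaration
		 binds to the current definition.  */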
1920 	      for (ie = w->indirect_calls; ie && !can_throw;
1921 		   ie = ie->next_callee)
1922 		if (ie->can_throw_external
1923 		    && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
1924 		  can_throw = true;
1925 	    }
1926 	  w_info = (struct ipa_dfs_info *) w->aux;
1927 	  w = w_info->next_cycle;
1928 	}
1929 
1930       /* Copy back the region's nothrow state, which is shared by
1931 	 all nodes in the region.  */
1932       w = node;
1933       while (w)
1934 	{
1935 	  funct_state w_l = funct_state_summaries->get_create (w);
1936 	  if (!can_throw && !TREE_NOTHROW (w->decl))
1937 	    {
1938 	      /* Inline clones share declaration with their offline copies;
1939 		 do not modify their declarations since the offline copy may
1940 		 be different.  */
1941 	      if (!w->inlined_to)
1942 		{
1943 		  w->set_nothrow_flag (true);
1944 		  if (dump_file)
1945 		    fprintf (dump_file, "Function found to be nothrow: %s\n",
1946 			     w->dump_name ());
1947 		}
1948 	    }
1949 	  else if (can_throw && !TREE_NOTHROW (w->decl))
1950 	    w_l->can_throw = true;
1951 	  w_info = (struct ipa_dfs_info *) w->aux;
1952 	  w = w_info->next_cycle;
1953 	}
1954     }
1955 
1956   ipa_free_postorder_info ();
1957   free (order);
1958 }
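/* Illustrative effect of propagate_nothrow (not from the original
   sources):

     int helper (int x) { return x + 1; }           // cannot throw
     int driver (int x) { return helper (x) * 2; }  // only calls helper

   once helper is known nothrow, driver's only call cannot throw either,
   so both declarations get TREE_NOTHROW and callers may drop their EH
   edges.  */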
1959 
1960 /* Debugging function to dump state of malloc lattice.  */
1961 
1962 DEBUG_FUNCTION
1963 static void
1964 dump_malloc_lattice (FILE *dump_file, const char *s)
1965 {
1966   if (!dump_file)
1967     return;
1968 
1969   fprintf (dump_file, "\n\nMALLOC LATTICE %s:\n", s);
1970   cgraph_node *node;
1971   FOR_EACH_FUNCTION (node)
1972     {
1973       funct_state fs = funct_state_summaries->get (node);
1974       if (fs)
1975 	fprintf (dump_file, "%s: %s\n", node->dump_name (),
1976 		 malloc_state_names[fs->malloc_state]);
1977     }
1978 }
1979 
1980 /* Propagate malloc attribute across the callgraph.  */
1981 
1982 static void
1983 propagate_malloc (void)
1984 {
1985   cgraph_node *node;
1986   FOR_EACH_FUNCTION (node)
1987     {
1988       if (DECL_IS_MALLOC (node->decl))
1989 	if (!funct_state_summaries->exists (node))
1990 	  {
1991 	    funct_state fs = funct_state_summaries->get_create (node);
1992 	    fs->malloc_state = STATE_MALLOC;
1993 	  }
1994     }
1995 
1996   dump_malloc_lattice (dump_file, "Initial");
1997   struct cgraph_node **order
1998     = XNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1999   int order_pos = ipa_reverse_postorder (order);
2000   bool changed = true;
2001 
2002   while (changed)
2003     {
2004       changed = false;
2005       /* Walk in postorder.  */
2006       for (int i = order_pos - 1; i >= 0; --i)
2007 	{
2008 	  cgraph_node *node = order[i];
2009 	  if (node->alias
2010 	      || !node->definition
2011 	      || !funct_state_summaries->exists (node))
2012 	    continue;
2013 
2014 	  funct_state l = funct_state_summaries->get (node);
2015 
2016 	  /* FIXME: add support for indirect-calls.  */
2017 	  if (node->indirect_calls)
2018 	    {
2019 	      l->malloc_state = STATE_MALLOC_BOTTOM;
2020 	      continue;
2021 	    }
2022 
2023 	  if (node->get_availability () <= AVAIL_INTERPOSABLE)
2024 	    {
2025 	      l->malloc_state = STATE_MALLOC_BOTTOM;
2026 	      continue;
2027 	    }
2028 
2029 	  if (l->malloc_state == STATE_MALLOC_BOTTOM)
2030 	    continue;
2031 
2032 	  auto_vec<cgraph_node *, 16> callees;
2033 	  for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2034 	    {
2035 	      ipa_call_summary *es = ipa_call_summaries->get_create (cs);
2036 	      if (es && es->is_return_callee_uncaptured)
2037 		callees.safe_push (cs->callee);
2038 	    }
2039 
2040 	  malloc_state_e new_state = l->malloc_state;
2041 	  for (unsigned j = 0; j < callees.length (); j++)
2042 	    {
2043 	      cgraph_node *callee = callees[j];
2044 	      if (!funct_state_summaries->exists (callee))
2045 		{
2046 		  new_state = STATE_MALLOC_BOTTOM;
2047 		  break;
2048 		}
2049 	      malloc_state_e callee_state
2050 		= funct_state_summaries->get_create (callee)->malloc_state;
2051 	      if (new_state < callee_state)
2052 		new_state = callee_state;
2053 	    }
2054 	  if (new_state != l->malloc_state)
2055 	    {
2056 	      changed = true;
2057 	      l->malloc_state = new_state;
2058 	    }
2059 	}
2060     }
2061 
2062   FOR_EACH_DEFINED_FUNCTION (node)
2063     if (funct_state_summaries->exists (node))
2064       {
2065 	funct_state l = funct_state_summaries->get (node);
2066 	if (!node->alias
2067 	    && l->malloc_state == STATE_MALLOC
2068 	    && !node->inlined_to
2069 	    && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (node->decl))))
2070 	  {
2071 	    if (dump_file && (dump_flags & TDF_DETAILS))
2072 	      fprintf (dump_file, "Function %s found to be malloc\n",
2073 		       node->dump_name ());
2074 
2075 	    bool malloc_decl_p = DECL_IS_MALLOC (node->decl);
2076 	    node->set_malloc_flag (true);
2077 	    if (!malloc_decl_p && warn_suggest_attribute_malloc)
2078 	      warn_function_malloc (node->decl);
2079 	  }
2080       }
2081 
2082   dump_malloc_lattice (dump_file, "after propagation");
2083   ipa_free_postorder_info ();
2084   free (order);
2085 }
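/* Illustrative example for propagate_malloc (not from the original
   sources):

     void *xmalloc (size_t n)
     {
       void *p = malloc (n);
       if (!p)
	 abort ();
       return p;
     }

   xmalloc only returns the uncaptured result of a malloc-like callee,
   so the fixpoint walk promotes it to STATE_MALLOC and set_malloc_flag
   marks the declaration, improving alias analysis in its callers.  */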
2086 
2087 /* Produce the global information by performing a transitive closure
2088    on the local information that was produced by generate_summary.  */
2089 
2090 unsigned int
2091 pass_ipa_pure_const::
2092 execute (function *)
2093 {
2094   bool remove_p;
2095 
2096   /* Nothrow discovery makes more calls known not to lead to a return,
2097      which improves the later analyses.  */
2098   propagate_nothrow ();
2099   propagate_malloc ();
2100   remove_p = propagate_pure_const ();
2101 
2102   delete funct_state_summaries;
2103   return remove_p ? TODO_remove_functions : 0;
2104 }
2105 
2106 static bool
2107 gate_pure_const (void)
2108 {
2109   return flag_ipa_pure_const || in_lto_p;
2110 }
2111 
2112 pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
2113     : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
2114 		     pure_const_generate_summary, /* generate_summary */
2115 		     pure_const_write_summary, /* write_summary */
2116 		     pure_const_read_summary, /* read_summary */
2117 		     NULL, /* write_optimization_summary */
2118 		     NULL, /* read_optimization_summary */
2119 		     NULL, /* stmt_fixup */
2120 		     0, /* function_transform_todo_flags_start */
2121 		     NULL, /* function_transform */
2122 		     NULL), /* variable_transform */
2123   init_p (false) {}
2124 
2125 ipa_opt_pass_d *
2126 make_pass_ipa_pure_const (gcc::context *ctxt)
2127 {
2128   return new pass_ipa_pure_const (ctxt);
2129 }
2130 
2131 /* Simple local pass for pure const discovery reusing the analysis from
2132    ipa_pure_const.  This pass is effective when executed together with
2133    other optimization passes in the early optimization pass queue.  */
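/* For instance (illustrative), compiling

     int sq (int x) { return x * x; }

   with -O -Wsuggest-attribute=const makes this pass suggest
   __attribute__ ((const)) for sq; without the warning option it still
   sets TREE_READONLY on the declaration.  */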
2134 
2135 namespace {
2136 
2137 const pass_data pass_data_local_pure_const =
2138 {
2139   GIMPLE_PASS, /* type */
2140   "local-pure-const", /* name */
2141   OPTGROUP_NONE, /* optinfo_flags */
2142   TV_IPA_PURE_CONST, /* tv_id */
2143   0, /* properties_required */
2144   0, /* properties_provided */
2145   0, /* properties_destroyed */
2146   0, /* todo_flags_start */
2147   0, /* todo_flags_finish */
2148 };
2149 
2150 class pass_local_pure_const : public gimple_opt_pass
2151 {
2152 public:
2153   pass_local_pure_const (gcc::context *ctxt)
2154     : gimple_opt_pass (pass_data_local_pure_const, ctxt)
2155   {}
2156 
2157   /* opt_pass methods: */
2158   opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
2159   virtual bool gate (function *) { return gate_pure_const (); }
2160   virtual unsigned int execute (function *);
2161 
2162 }; // class pass_local_pure_const
2163 
2164 unsigned int
2165 pass_local_pure_const::execute (function *fun)
2166 {
2167   bool changed = false;
2168   funct_state l;
2169   bool skip;
2170   struct cgraph_node *node;
2171 
2172   node = cgraph_node::get (current_function_decl);
2173   skip = skip_function_for_local_pure_const (node);
2174 
2175   if (!warn_suggest_attribute_const
2176       && !warn_suggest_attribute_pure
2177       && skip)
2178     return 0;
2179 
2180   l = analyze_function (node, false);
2181 
2182   /* Do NORETURN discovery.  */
2183   if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
2184       && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
2185     {
2186       warn_function_noreturn (fun->decl);
2187       if (dump_file)
2188 	fprintf (dump_file, "Function found to be noreturn: %s\n",
2189 		 current_function_name ());
2190 
2191       /* Update declaration and reduce profile to executed once.  */
2192       if (cgraph_node::get (current_function_decl)->set_noreturn_flag (true))
2193 	changed = true;
2194       if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
2195 	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
2196     }
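  /* Illustrative case: a function such as

       void spin (void) { for (;;); }

     has no edges into its exit block, so it is discovered noreturn here
     even though it was never declared that way.  */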
2197 
2198   switch (l->pure_const_state)
2199     {
2200     case IPA_CONST:
2201       changed |= ipa_make_function_const
2202 		   (cgraph_node::get (current_function_decl), l->looping, true);
2203       break;
2204 
2205     case IPA_PURE:
2206       changed |= ipa_make_function_pure
2207 		   (cgraph_node::get (current_function_decl), l->looping, true);
2208       break;
2209 
2210     default:
2211       break;
2212     }
2213   if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
2214     {
2215       node->set_nothrow_flag (true);
2216       changed = true;
2217       if (dump_file)
2218 	fprintf (dump_file, "Function found to be nothrow: %s\n",
2219 		 current_function_name ());
2220     }
2221 
2222   if (l->malloc_state == STATE_MALLOC
2223       && !DECL_IS_MALLOC (current_function_decl))
2224     {
2225       node->set_malloc_flag (true);
2226       if (warn_suggest_attribute_malloc)
2227 	warn_function_malloc (node->decl);
2228       changed = true;
2229       if (dump_file)
2230 	fprintf (dump_file, "Function found to be malloc: %s\n",
2231 		 node->dump_name ());
2232     }
2233 
2234   free (l);
2235   if (changed)
2236     return execute_fixup_cfg ();
2237   else
2238     return 0;
2239 }
2240 
2241 } // anon namespace
2242 
2243 gimple_opt_pass *
2244 make_pass_local_pure_const (gcc::context *ctxt)
2245 {
2246   return new pass_local_pure_const (ctxt);
2247 }
2248 
2249 /* Emit noreturn warnings.  */
2250 
2251 namespace {
2252 
2253 const pass_data pass_data_warn_function_noreturn =
2254 {
2255   GIMPLE_PASS, /* type */
2256   "*warn_function_noreturn", /* name */
2257   OPTGROUP_NONE, /* optinfo_flags */
2258   TV_NONE, /* tv_id */
2259   PROP_cfg, /* properties_required */
2260   0, /* properties_provided */
2261   0, /* properties_destroyed */
2262   0, /* todo_flags_start */
2263   0, /* todo_flags_finish */
2264 };
2265 
2266 class pass_warn_function_noreturn : public gimple_opt_pass
2267 {
2268 public:
2269   pass_warn_function_noreturn (gcc::context *ctxt)
2270     : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
2271   {}
2272 
2273   /* opt_pass methods: */
2274   virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
2275   virtual unsigned int execute (function *fun)
2276     {
2277       if (!TREE_THIS_VOLATILE (current_function_decl)
2278 	  && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
2279 	warn_function_noreturn (current_function_decl);
2280       return 0;
2281     }
2282 
2283 }; // class pass_warn_function_noreturn
2284 
2285 } // anon namespace
2286 
2287 gimple_opt_pass *
2288 make_pass_warn_function_noreturn (gcc::context *ctxt)
2289 {
2290   return new pass_warn_function_noreturn (ctxt);
2291 }
2292 
2293 /* Simple local pass for nothrow discovery reusing the analysis from
2294    ipa_pure_const.  This pass is effective when executed together with
2295    other optimization passes in the early optimization pass queue.  */
2296 
2297 namespace {
2298 
2299 const pass_data pass_data_nothrow =
2300 {
2301   GIMPLE_PASS, /* type */
2302   "nothrow", /* name */
2303   OPTGROUP_NONE, /* optinfo_flags */
2304   TV_IPA_PURE_CONST, /* tv_id */
2305   0, /* properties_required */
2306   0, /* properties_provided */
2307   0, /* properties_destroyed */
2308   0, /* todo_flags_start */
2309   0, /* todo_flags_finish */
2310 };
2311 
2312 class pass_nothrow : public gimple_opt_pass
2313 {
2314 public:
2315   pass_nothrow (gcc::context *ctxt)
2316     : gimple_opt_pass (pass_data_nothrow, ctxt)
2317   {}
2318 
2319   /* opt_pass methods: */
2320   opt_pass * clone () { return new pass_nothrow (m_ctxt); }
2321   virtual bool gate (function *) { return optimize; }
2322   virtual unsigned int execute (function *);
2323 
2324 }; // class pass_nothrow
2325 
2326 unsigned int
2327 pass_nothrow::execute (function *)
2328 {
2329   struct cgraph_node *node;
2330   basic_block this_block;
2331 
2332   if (TREE_NOTHROW (current_function_decl))
2333     return 0;
2334 
2335   node = cgraph_node::get (current_function_decl);
2336 
2337   /* We run during lowering; we cannot really use availability yet.  */
2338   if (cgraph_node::get (current_function_decl)->get_availability ()
2339       <= AVAIL_INTERPOSABLE)
2340     {
2341       if (dump_file)
2342         fprintf (dump_file, "Function is interposable;"
2343 	         " not analyzing.\n");
2344       return 0;
2345     }
2346 
2347   FOR_EACH_BB_FN (this_block, cfun)
2348     {
2349       for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
2350 	   !gsi_end_p (gsi);
2351 	   gsi_next (&gsi))
2352         if (stmt_can_throw_external (cfun, gsi_stmt (gsi)))
2353 	  {
2354 	    if (is_gimple_call (gsi_stmt (gsi)))
2355 	      {
2356 		tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
2357 		if (callee_t && recursive_call_p (current_function_decl,
2358 						  callee_t))
2359 		  continue;
2360 	      }
2361 
2362 	    if (dump_file)
2363 	      {
2364 		fprintf (dump_file, "Statement can throw: ");
2365 		print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
2366 	      }
2367 	    return 0;
2368 	  }
2369     }
2370 
2371   node->set_nothrow_flag (true);
2372 
2373   bool cfg_changed = false;
2374   if (self_recursive_p (node))
2375     FOR_EACH_BB_FN (this_block, cfun)
2376       if (gimple *g = last_stmt (this_block))
2377 	if (is_gimple_call (g))
2378 	  {
2379 	    tree callee_t = gimple_call_fndecl (g);
2380 	    if (callee_t
2381 		&& recursive_call_p (current_function_decl, callee_t)
2382 		&& maybe_clean_eh_stmt (g)
2383 		&& gimple_purge_dead_eh_edges (this_block))
2384 	      cfg_changed = true;
2385 	  }
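  /* Rationale (illustrative): the function was just marked nothrow, so a
     self-recursive call like "f (n - 1)" can no longer throw; its EH
     landing pads are dead, and purging them may change the CFG, hence
     the TODO_cleanup_cfg below.  */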
2386 
2387   if (dump_file)
2388     fprintf (dump_file, "Function found to be nothrow: %s\n",
2389 	     current_function_name ());
2390   return cfg_changed ? TODO_cleanup_cfg : 0;
2391 }
2392 
2393 } // anon namespace
2394 
2395 gimple_opt_pass *
2396 make_pass_nothrow (gcc::context *ctxt)
2397 {
2398   return new pass_nothrow (ctxt);
2399 }
2400