xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/ipa-pure-const.c (revision 8feb0f0b7eaff0608f8350bbfa3098827b4bb91b)
1 /* Callgraph based analysis of static variables.
2    Copyright (C) 2004-2020 Free Software Foundation, Inc.
3    Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 /* This file marks functions as being either const (TREE_READONLY) or
22    pure (DECL_PURE_P).  It can also set a variant of these that
23    are allowed to loop indefinitely (DECL_LOOPING_CONST_OR_PURE_P).
24 
25    This must be run after inlining decisions have been made, since
26    otherwise the local sets will not contain information that is
27    consistent with the post-inlining state.  The global sets are not prone
28    to this problem since they are by definition transitive.  */
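
/* Editorial illustration, not part of GCC; the example_* declarations
   are made up.  At the source level, the flags this pass computes
   correspond to these attribute declarations (the looping variants
   additionally allow the function to loop forever):  */

extern int example_isqrt (int) __attribute__ ((const));	 /* TREE_READONLY */
extern int example_strlen (const char *) __attribute__ ((pure)); /* DECL_PURE_P */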
29 
30 /* The code in this module is called by the ipa pass manager. It
31    should be one of the later passes since its information is used by
32    the rest of the compilation. */
33 
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "backend.h"
38 #include "target.h"
39 #include "tree.h"
40 #include "gimple.h"
41 #include "tree-pass.h"
42 #include "tree-streamer.h"
43 #include "cgraph.h"
44 #include "diagnostic.h"
45 #include "calls.h"
46 #include "cfganal.h"
47 #include "tree-eh.h"
48 #include "gimple-iterator.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-ssa-loop-niter.h"
52 #include "langhooks.h"
53 #include "ipa-utils.h"
54 #include "gimple-pretty-print.h"
55 #include "cfgloop.h"
56 #include "tree-scalar-evolution.h"
57 #include "intl.h"
58 #include "opts.h"
59 #include "ssa.h"
60 #include "alloc-pool.h"
61 #include "symbol-summary.h"
62 #include "ipa-prop.h"
63 #include "ipa-fnsummary.h"
64 
65 /* Lattice values for const and pure functions.  Everything starts out
66    being const, then may drop to pure and then neither depending on
67    what is found.  */
68 enum pure_const_state_e
69 {
70   IPA_CONST,
71   IPA_PURE,
72   IPA_NEITHER
73 };
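
/* Editorial illustration, not part of GCC; the example_* functions are
   made up.  Assuming no other side effects, local analysis would place
   one function at each point of the lattice:  */

static int example_state;
/* Touches no memory at all: IPA_CONST.  */
static int example_const_fn (int x) { return x * x; }
/* Reads, but never writes, global memory: IPA_PURE.  */
static int example_pure_fn (void) { return example_state; }
/* Writes global memory: IPA_NEITHER.  */
static int example_neither_fn (int x) { return example_state = x; }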
74 
75 static const char *pure_const_names[3] = {"const", "pure", "neither"};
76 
77 enum malloc_state_e
78 {
79   STATE_MALLOC_TOP,
80   STATE_MALLOC,
81   STATE_MALLOC_BOTTOM
82 };
83 
84 static const char *malloc_state_names[] = {"malloc_top", "malloc", "malloc_bottom"};
85 
86 /* Holder for the const_state.  There is one of these per function
87    decl.  */
88 class funct_state_d
89 {
90 public:
91   funct_state_d (): pure_const_state (IPA_NEITHER),
92     state_previously_known (IPA_NEITHER), looping_previously_known (true),
93     looping (true), can_throw (true), can_free (true),
94     malloc_state (STATE_MALLOC_BOTTOM) {}
95 
96   funct_state_d (const funct_state_d &s): pure_const_state (s.pure_const_state),
97     state_previously_known (s.state_previously_known),
98     looping_previously_known (s.looping_previously_known),
99     looping (s.looping), can_throw (s.can_throw), can_free (s.can_free),
100     malloc_state (s.malloc_state) {}
101 
102   /* See above.  */
103   enum pure_const_state_e pure_const_state;
104   /* What the user set here; we can always be sure about this.  */
105   enum pure_const_state_e state_previously_known;
106   bool looping_previously_known;
107 
108   /* True if the function could possibly loop forever.  There are a
109      lot of ways that this could be determined.  We are pretty
110      conservative here.  While it is possible to cse pure and const
111      calls, it is not legal to have dce get rid of the call if there
112      is a possibility that the call could loop forever, since this is
113      a behavioral change.  */
114   bool looping;
115 
116   bool can_throw;
117 
118   /* If the function can call free, munmap, or otherwise make previously
119      non-trapping memory accesses trapping.  */
120   bool can_free;
121 
122   enum malloc_state_e malloc_state;
123 };
124 
125 typedef class funct_state_d * funct_state;
126 
127 /* The storage of the funct_state is abstracted because there is the
128    possibility that it may be desirable to move this to the cgraph
129    local info.  */
130 
131 class funct_state_summary_t:
132   public fast_function_summary <funct_state_d *, va_heap>
133 {
134 public:
135   funct_state_summary_t (symbol_table *symtab):
136     fast_function_summary <funct_state_d *, va_heap> (symtab) {}
137 
138   virtual void insert (cgraph_node *, funct_state_d *state);
139   virtual void duplicate (cgraph_node *src_node, cgraph_node *dst_node,
140 			  funct_state_d *src_data,
141 			  funct_state_d *dst_data);
142 };
143 
144 static funct_state_summary_t *funct_state_summaries = NULL;
145 
146 static bool gate_pure_const (void);
147 
148 namespace {
149 
150 const pass_data pass_data_ipa_pure_const =
151 {
152   IPA_PASS, /* type */
153   "pure-const", /* name */
154   OPTGROUP_NONE, /* optinfo_flags */
155   TV_IPA_PURE_CONST, /* tv_id */
156   0, /* properties_required */
157   0, /* properties_provided */
158   0, /* properties_destroyed */
159   0, /* todo_flags_start */
160   0, /* todo_flags_finish */
161 };
162 
163 class pass_ipa_pure_const : public ipa_opt_pass_d
164 {
165 public:
166   pass_ipa_pure_const(gcc::context *ctxt);
167 
168   /* opt_pass methods: */
169   bool gate (function *) { return gate_pure_const (); }
170   unsigned int execute (function *fun);
171 
172   void register_hooks (void);
173 
174 private:
175   bool init_p;
176 }; // class pass_ipa_pure_const
177 
178 } // anon namespace
179 
180 /* Try to guess whether the function body will always be visible to the
181    compiler when compiling a call to it, and whether the compiler will be
182    able to propagate the information by itself.  */
183 
184 static bool
185 function_always_visible_to_compiler_p (tree decl)
186 {
187   return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl)
188 	  || DECL_COMDAT (decl));
189 }
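
/* Editorial example, not part of GCC; the example_* names are made up.
   A static (or inline/comdat) body is visible at every call compiled,
   so a missing attribute costs little; a plain exported function may be
   called from units that see only its declaration.  */

static int example_local (int x) { return x + 1; }	/* always visible */
extern int example_exported (int x);		/* callers may see only this */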
190 
191 /* Emit suggestion about attribute ATTRIB_NAME for DECL.  KNOWN_FINITE
192    is true if the function is known to be finite.  The diagnostic is
193    controlled by OPTION.  WARNED_ABOUT is a hash_set<tree> unique for
194    OPTION; this function may initialize it, and it is always returned
195    by the function.  */
196 
197 static hash_set<tree> *
198 suggest_attribute (int option, tree decl, bool known_finite,
199 		   hash_set<tree> *warned_about,
200 		   const char * attrib_name)
201 {
202   if (!option_enabled (option, lang_hooks.option_lang_mask (), &global_options))
203     return warned_about;
204   if (TREE_THIS_VOLATILE (decl)
205       || (known_finite && function_always_visible_to_compiler_p (decl)))
206     return warned_about;
207 
208   if (!warned_about)
209     warned_about = new hash_set<tree>;
210   if (warned_about->contains (decl))
211     return warned_about;
212   warned_about->add (decl);
213   warning_at (DECL_SOURCE_LOCATION (decl),
214 	      option,
215 	      known_finite
216 	      ? G_("function might be candidate for attribute %qs")
217 	      : G_("function might be candidate for attribute %qs"
218 		   " if it is known to return normally"), attrib_name);
219   return warned_about;
220 }
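
/* Editorial example, not part of GCC; the example_* names are made up.
   Compiled with optimization and -Wsuggest-attribute=pure, a public
   function like this should be reported as "function might be candidate
   for attribute 'pure'", since it only reads global memory.  */

extern int example_table[256];

int
example_lookup (unsigned char i)
{
  return example_table[i];
}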
221 
222 /* Emit suggestion about __attribute__((pure)) for DECL.  KNOWN_FINITE
223    is true if the function is known to be finite.  */
224 
225 static void
226 warn_function_pure (tree decl, bool known_finite)
227 {
228   /* Declaring a void function pure makes no sense and is diagnosed
229      by -Wattributes because calling it would have no effect.  */
230   if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
231     return;
232 
233   static hash_set<tree> *warned_about;
234   warned_about
235     = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
236 			 known_finite, warned_about, "pure");
237 }
238 
239 /* Emit suggestion about __attribute__((const)) for DECL.  KNOWN_FINITE
240    is true if the function is known to be finite.  */
241 
242 static void
243 warn_function_const (tree decl, bool known_finite)
244 {
245   /* Declaring a void function const makes no sense and is diagnosed
246      by -Wattributes because calling it would have no effect.  */
247   if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
248     return;
249 
250   static hash_set<tree> *warned_about;
251   warned_about
252     = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
253 			 known_finite, warned_about, "const");
254 }
255 
256 /* Emit suggestion about __attribute__((malloc)) for DECL.  */
257 
258 static void
259 warn_function_malloc (tree decl)
260 {
261   static hash_set<tree> *warned_about;
262   warned_about
263     = suggest_attribute (OPT_Wsuggest_attribute_malloc, decl,
264 			 true, warned_about, "malloc");
265 }
266 
267 /* Emit suggestion about __attribute__((noreturn)) for DECL.  */
268 
269 static void
270 warn_function_noreturn (tree decl)
271 {
272   tree original_decl = decl;
273 
274   static hash_set<tree> *warned_about;
275   if (!lang_hooks.missing_noreturn_ok_p (decl)
276       && targetm.warn_func_return (decl))
277     warned_about
278       = suggest_attribute (OPT_Wsuggest_attribute_noreturn, original_decl,
279 			   true, warned_about, "noreturn");
280 }
281 
282 void
283 warn_function_cold (tree decl)
284 {
285   tree original_decl = decl;
286 
287   static hash_set<tree> *warned_about;
288   warned_about
289     = suggest_attribute (OPT_Wsuggest_attribute_cold, original_decl,
290 			 true, warned_about, "cold");
291 }
292 
293 /* Check to see if the use (or definition, when CHECKING_WRITE is true)
294    of variable T is legal in a function that is either pure or const.  */
295 
296 static inline void
297 check_decl (funct_state local,
298 	    tree t, bool checking_write, bool ipa)
299 {
300   /* We do not want to do anything with volatile except mark any
301      function that uses one as neither const nor pure.  */
302   if (TREE_THIS_VOLATILE (t))
303     {
304       local->pure_const_state = IPA_NEITHER;
305       if (dump_file)
306         fprintf (dump_file, "    Volatile operand is not const/pure\n");
307       return;
308     }
309 
310   /* Do not care about a local automatic that is not static.  */
311   if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
312     return;
313 
314   /* If the variable has the "used" attribute, treat it as if it had
315      been touched by the devil.  */
316   if (DECL_PRESERVE_P (t))
317     {
318       local->pure_const_state = IPA_NEITHER;
319       if (dump_file)
320         fprintf (dump_file, "    Used static/global variable is not const/pure\n");
321       return;
322     }
323 
324   /* In IPA mode we are not interested in checking actual loads and stores;
325      they will be processed at propagation time using ipa_ref.  */
326   if (ipa)
327     return;
328 
329   /* Since we have dealt with the locals and params cases above, if we
330      are CHECKING_WRITE, this cannot be a pure or constant
331      function.  */
332   if (checking_write)
333     {
334       local->pure_const_state = IPA_NEITHER;
335       if (dump_file)
336         fprintf (dump_file, "    static/global memory write is not const/pure\n");
337       return;
338     }
339 
340   if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
341     {
342       /* Readonly reads are safe.  */
343       if (TREE_READONLY (t))
344 	return; /* Read of a constant, do not change the function state.  */
345       else
346 	{
347           if (dump_file)
348             fprintf (dump_file, "    global memory read is not const\n");
349 	  /* Just a regular read.  */
350 	  if (local->pure_const_state == IPA_CONST)
351 	    local->pure_const_state = IPA_PURE;
352 	}
353     }
354   else
355     {
356       /* Compilation level statics can be read if they are readonly
357 	 variables.  */
358       if (TREE_READONLY (t))
359 	return;
360 
361       if (dump_file)
362 	fprintf (dump_file, "    static memory read is not const\n");
363       /* Just a regular read.  */
364       if (local->pure_const_state == IPA_CONST)
365 	local->pure_const_state = IPA_PURE;
366     }
367 }
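
/* Editorial example, not part of GCC; the example_* names are made up.
   It shows how check_decl degrades the state: volatile accesses drop to
   IPA_NEITHER immediately, reads of ordinary static/global memory drop
   IPA_CONST to IPA_PURE, and reads of read-only statics cost nothing.  */

static volatile int example_vflag;	/* any access: IPA_NEITHER */
static int example_counter;	/* read: IPA_PURE; write: IPA_NEITHER */
static const int example_limit = 42;	/* read: state unchanged */

int
example_classify (void)
{
  return example_counter + example_limit;	/* at best IPA_PURE */
}

int
example_poll (void)
{
  return example_vflag;		/* volatile read: IPA_NEITHER */
}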
368 
369 
370 /* Check to see if the use (or definition, when CHECKING_WRITE is true)
371    of memory operand T is legal in a function that is either pure or const.  */
372 
373 static inline void
374 check_op (funct_state local, tree t, bool checking_write)
375 {
376   t = get_base_address (t);
377   if (t && TREE_THIS_VOLATILE (t))
378     {
379       local->pure_const_state = IPA_NEITHER;
380       if (dump_file)
381 	fprintf (dump_file, "    Volatile indirect ref is not const/pure\n");
382       return;
383     }
384   else if (t
385   	   && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
386 	   && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
387 	   && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
388     {
389       if (dump_file)
390 	fprintf (dump_file, "    Indirect ref to local memory is OK\n");
391       return;
392     }
393   else if (checking_write)
394     {
395       local->pure_const_state = IPA_NEITHER;
396       if (dump_file)
397 	fprintf (dump_file, "    Indirect ref write is not const/pure\n");
398       return;
399     }
400   else
401     {
402       if (dump_file)
403 	fprintf (dump_file, "    Indirect ref read is not const\n");
404       if (local->pure_const_state == IPA_CONST)
405 	local->pure_const_state = IPA_PURE;
406     }
407 }
408 
409 /* Compute the state based on ECF FLAGS and store it to STATE and LOOPING.  */
410 
411 static void
412 state_from_flags (enum pure_const_state_e *state, bool *looping,
413 	          int flags, bool cannot_lead_to_return)
414 {
415   *looping = false;
416   if (flags & ECF_LOOPING_CONST_OR_PURE)
417     {
418       *looping = true;
419       if (dump_file && (dump_flags & TDF_DETAILS))
420 	fprintf (dump_file, " looping\n");
421     }
422   if (flags & ECF_CONST)
423     {
424       *state = IPA_CONST;
425       if (dump_file && (dump_flags & TDF_DETAILS))
426 	fprintf (dump_file, " const\n");
427     }
428   else if (flags & ECF_PURE)
429     {
430       *state = IPA_PURE;
431       if (dump_file && (dump_flags & TDF_DETAILS))
432 	fprintf (dump_file, " pure\n");
433     }
434   else if (cannot_lead_to_return)
435     {
436       *state = IPA_PURE;
437       *looping = true;
438       if (dump_file && (dump_flags & TDF_DETAILS))
439 	fprintf (dump_file, " ignoring side effects->pure looping\n");
440     }
441   else
442     {
443       if (dump_file && (dump_flags & TDF_DETAILS))
444 	fprintf (dump_file, " neither\n");
445       *state = IPA_NEITHER;
446       *looping = true;
447     }
448 }
449 
450 /* Merge STATE with STATE2 and LOOPING with LOOPING2, storing the
451    better of the two variants into STATE and LOOPING.
452    Be sure to merge looping correctly.  IPA_NEITHER functions
453    have looping 0 even if they don't have to return.  */
454 
455 static inline void
456 better_state (enum pure_const_state_e *state, bool *looping,
457 	      enum pure_const_state_e state2, bool looping2)
458 {
459   if (state2 < *state)
460     {
461       if (*state == IPA_NEITHER)
462 	*looping = looping2;
463       else
464 	*looping = MIN (*looping, looping2);
465       *state = state2;
466     }
467   else if (state2 != IPA_NEITHER)
468     *looping = MIN (*looping, looping2);
469 }
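
/* Editorial sketch, not part of GCC; a minimal standalone model of the
   two merge operations, with made-up M_* names.  Lower enum values are
   stronger guarantees, so better_state takes the minimum of the two
   states and worse_state the maximum.  */

#include <stdio.h>

enum model_state { M_CONST, M_PURE, M_NEITHER };

static const char *model_names[] = { "const", "pure", "neither" };

int
main (void)
{
  enum model_state body = M_PURE;	/* what analyzing the body found */
  enum model_state known = M_CONST;	/* what the user's attributes promise */
  enum model_state callee = M_NEITHER;	/* what a callee provides */

  /* better_state: trust the stronger of body analysis and attributes.  */
  enum model_state merged = body < known ? body : known;
  /* worse_state: a call can only weaken the caller's state.  */
  enum model_state with_call = merged > callee ? merged : callee;

  /* Prints "const, then neither".  */
  printf ("%s, then %s\n", model_names[merged], model_names[with_call]);
  return 0;
}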
470 
471 /* Merge STATE with STATE2 and LOOPING with LOOPING2, storing the
472    worse of the two variants into STATE and LOOPING.
473    FROM and TO are the caller and callee of the call considered.  */
474 
475 static inline void
476 worse_state (enum pure_const_state_e *state, bool *looping,
477 	     enum pure_const_state_e state2, bool looping2,
478 	     struct symtab_node *from,
479 	     struct symtab_node *to)
480 {
481   /* Consider function:
482 
483      bool a(int *p)
484      {
485        return *p==*p;
486      }
487 
488      During early optimization we will turn this into:
489 
490      bool a(int *p)
491      {
492        return true;
493      }
494 
495      Now this function will be detected as CONST; however, when interposed, it
496      may end up being just pure.  We must always assume the worst scenario here.
497    */
498   if (*state == IPA_CONST && state2 == IPA_CONST
499       && to && !TREE_READONLY (to->decl) && !to->binds_to_current_def_p (from))
500     {
501       if (dump_file && (dump_flags & TDF_DETAILS))
502 	fprintf (dump_file, "Dropping state to PURE because call to %s may not "
503 		 "bind to current def.\n", to->dump_name ());
504       state2 = IPA_PURE;
505     }
506   *state = MAX (*state, state2);
507   *looping = MAX (*looping, looping2);
508 }
509 
510 /* Recognize special cases of builtins that are by themselves not pure or const
511    but a function using them is.  */
512 static bool
513 special_builtin_state (enum pure_const_state_e *state, bool *looping,
514 		       tree callee)
515 {
516   if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
517     switch (DECL_FUNCTION_CODE (callee))
518       {
519       case BUILT_IN_RETURN:
520       case BUILT_IN_UNREACHABLE:
521       CASE_BUILT_IN_ALLOCA:
522       case BUILT_IN_STACK_SAVE:
523       case BUILT_IN_STACK_RESTORE:
524       case BUILT_IN_EH_POINTER:
525       case BUILT_IN_EH_FILTER:
526       case BUILT_IN_UNWIND_RESUME:
527       case BUILT_IN_CXA_END_CLEANUP:
528       case BUILT_IN_EH_COPY_VALUES:
529       case BUILT_IN_FRAME_ADDRESS:
530       case BUILT_IN_APPLY_ARGS:
531       case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
532       case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
533 	*looping = false;
534 	*state = IPA_CONST;
535 	return true;
536       case BUILT_IN_PREFETCH:
537 	*looping = true;
538 	*state = IPA_CONST;
539 	return true;
540       default:
541 	break;
542       }
543   return false;
544 }
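
/* Editorial example, not part of GCC; example_uses_alloca is made up.
   alloca touches only the local stack frame, so although the builtin
   itself carries no const/pure flags, a function like this one can
   still end up classified IPA_CONST.  */

int
example_uses_alloca (int n)
{
  char *buf = __builtin_alloca (n);
  return buf != 0;		/* only the address is inspected */
}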
545 
546 /* Check the parameters of the function call CALL to see if
547    there are any references in the parameters that are not allowed for
548    pure or const functions.  Also check to see if this is either an
549    indirect call, a call outside the compilation unit, or has special
550    attributes that may also affect the purity.  LOCAL is the state
551    being accumulated; IPA is true when running in IPA mode.  */
552 
553 static void
554 check_call (funct_state local, gcall *call, bool ipa)
555 {
556   int flags = gimple_call_flags (call);
557   tree callee_t = gimple_call_fndecl (call);
558   bool possibly_throws = stmt_could_throw_p (cfun, call);
559   bool possibly_throws_externally = (possibly_throws
560   				     && stmt_can_throw_external (cfun, call));
561 
562   if (possibly_throws)
563     {
564       unsigned int i;
565       for (i = 0; i < gimple_num_ops (call); i++)
566         if (gimple_op (call, i)
567 	    && tree_could_throw_p (gimple_op (call, i)))
568 	  {
569 	    if (possibly_throws && cfun->can_throw_non_call_exceptions)
570 	      {
571 		if (dump_file)
572 		  fprintf (dump_file, "    operand can throw; looping\n");
573 		local->looping = true;
574 	      }
575 	    if (possibly_throws_externally)
576 	      {
577 		if (dump_file)
578 		  fprintf (dump_file, "    operand can throw externally\n");
579 		local->can_throw = true;
580 	      }
581 	  }
582     }
583 
584   /* The const and pure flags are set by a variety of places in the
585      compiler (including here).  If someone has already set the flags
586      for the callee, (such as for some of the builtins) we will use
587      them, otherwise we will compute our own information.
588 
589      Const and pure functions have less clobber effects than other
590      functions so we process these first.  Otherwise if it is a call
591      outside the compilation unit or an indirect call we punt.  This
592      leaves local calls which will be processed by following the call
593      graph.  */
594   if (callee_t)
595     {
596       enum pure_const_state_e call_state;
597       bool call_looping;
598 
599       if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
600 	  && !nonfreeing_call_p (call))
601 	local->can_free = true;
602 
603       if (special_builtin_state (&call_state, &call_looping, callee_t))
604 	{
605 	  worse_state (&local->pure_const_state, &local->looping,
606 		       call_state, call_looping,
607 		       NULL, NULL);
608 	  return;
609 	}
610       /* When bad things happen to bad functions, they cannot be const
611 	 or pure.  */
612       if (setjmp_call_p (callee_t))
613 	{
614 	  if (dump_file)
615 	    fprintf (dump_file, "    setjmp is not const/pure\n");
616           local->looping = true;
617 	  local->pure_const_state = IPA_NEITHER;
618 	}
619 
620       if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
621 	switch (DECL_FUNCTION_CODE (callee_t))
622 	  {
623 	  case BUILT_IN_LONGJMP:
624 	  case BUILT_IN_NONLOCAL_GOTO:
625 	    if (dump_file)
626 	      fprintf (dump_file,
627 		       "    longjmp and nonlocal goto are not const/pure\n");
628 	    local->pure_const_state = IPA_NEITHER;
629 	    local->looping = true;
630 	    break;
631 	  default:
632 	    break;
633 	  }
634     }
635   else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
636     local->can_free = true;
637 
638   /* When not in IPA mode, we can still handle self recursion.  */
639   if (!ipa && callee_t
640       && recursive_call_p (current_function_decl, callee_t))
641     {
642       if (dump_file)
643         fprintf (dump_file, "    Recursive call can loop.\n");
644       local->looping = true;
645     }
646   /* Either callee is unknown or we are doing local analysis.
647      Look to see if there are any bits available for the callee (such as by
648    declaration or because it is a builtin) and process solely on the basis of
649      those bits.  Handle internal calls always, those calls don't have
650      corresponding cgraph edges and thus aren't processed during
651      the propagation.  */
652   else if (!ipa || gimple_call_internal_p (call))
653     {
654       enum pure_const_state_e call_state;
655       bool call_looping;
656       if (possibly_throws && cfun->can_throw_non_call_exceptions)
657         {
658 	  if (dump_file)
659 	    fprintf (dump_file, "    can throw; looping\n");
660           local->looping = true;
661 	}
662       if (possibly_throws_externally)
663         {
664 	  if (dump_file)
665 	    {
666 	      fprintf (dump_file, "    can throw externally to lp %i\n",
667 	      	       lookup_stmt_eh_lp (call));
668 	      if (callee_t)
669 		fprintf (dump_file, "     callee:%s\n",
670 			 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
671 	    }
672           local->can_throw = true;
673 	}
674       if (dump_file && (dump_flags & TDF_DETAILS))
675 	fprintf (dump_file, "    checking flags for call:");
676       state_from_flags (&call_state, &call_looping, flags,
677 			((flags & (ECF_NORETURN | ECF_NOTHROW))
678 			 == (ECF_NORETURN | ECF_NOTHROW))
679 			|| (!flag_exceptions && (flags & ECF_NORETURN)));
680       worse_state (&local->pure_const_state, &local->looping,
681 		   call_state, call_looping, NULL, NULL);
682     }
683   /* Direct function calls are handled by IPA propagation.  */
684 }
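
/* Editorial example, not part of GCC; example_* names are made up.
   A call to setjmp forces the caller to IPA_NEITHER and marks it
   looping, no matter what the rest of the body does.  */

#include <setjmp.h>

static jmp_buf example_env;

int
example_uses_setjmp (void)
{
  return setjmp (example_env);
}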
685 
686 /* Wrapper around check_decl for loads in local mode.  */
687 
688 static bool
689 check_load (gimple *, tree op, tree, void *data)
690 {
691   if (DECL_P (op))
692     check_decl ((funct_state)data, op, false, false);
693   else
694     check_op ((funct_state)data, op, false);
695   return false;
696 }
697 
698 /* Wrapper around check_decl for stores in local mode.  */
699 
700 static bool
701 check_store (gimple *, tree op, tree, void *data)
702 {
703   if (DECL_P (op))
704     check_decl ((funct_state)data, op, true, false);
705   else
706     check_op ((funct_state)data, op, true);
707   return false;
708 }
709 
710 /* Wrapper around check_decl for loads in ipa mode.  */
711 
712 static bool
713 check_ipa_load (gimple *, tree op, tree, void *data)
714 {
715   if (DECL_P (op))
716     check_decl ((funct_state)data, op, false, true);
717   else
718     check_op ((funct_state)data, op, false);
719   return false;
720 }
721 
722 /* Wrapper around check_decl for stores in ipa mode.  */
723 
724 static bool
725 check_ipa_store (gimple *, tree op, tree, void *data)
726 {
727   if (DECL_P (op))
728     check_decl ((funct_state)data, op, true, true);
729   else
730     check_op ((funct_state)data, op, true);
731   return false;
732 }
733 
734 /* Look at the statement pointed to by GSIP and figure out what interesting
735    side effects it has.  */
736 static void
737 check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
738 {
739   gimple *stmt = gsi_stmt (*gsip);
740 
741   if (is_gimple_debug (stmt))
742     return;
743 
744   /* Do consider clobbers as side effects before IPA, so that we would rather
745      inline C++ destructors and keep clobber semantics than eliminate them.
746 
747      TODO: We may get smarter during early optimizations on these and let
748      functions containing only clobbers be optimized more.  This is a common
749      case of C++ destructors.  */
750 
751   if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
752     return;
753 
754   if (dump_file)
755     {
756       fprintf (dump_file, "  scanning: ");
757       print_gimple_stmt (dump_file, stmt, 0);
758     }
759 
760   if (gimple_has_volatile_ops (stmt)
761       && !gimple_clobber_p (stmt))
762     {
763       local->pure_const_state = IPA_NEITHER;
764       if (dump_file)
765 	fprintf (dump_file, "    Volatile stmt is not const/pure\n");
766     }
767 
768   /* Look for loads and stores.  */
769   walk_stmt_load_store_ops (stmt, local,
770 			    ipa ? check_ipa_load : check_load,
771 			    ipa ? check_ipa_store :  check_store);
772 
773   if (gimple_code (stmt) != GIMPLE_CALL
774       && stmt_could_throw_p (cfun, stmt))
775     {
776       if (cfun->can_throw_non_call_exceptions)
777 	{
778 	  if (dump_file)
779 	    fprintf (dump_file, "    can throw; looping\n");
780 	  local->looping = true;
781 	}
782       if (stmt_can_throw_external (cfun, stmt))
783 	{
784 	  if (dump_file)
785 	    fprintf (dump_file, "    can throw externally\n");
786 	  local->can_throw = true;
787 	}
788       else
789 	if (dump_file)
790 	  fprintf (dump_file, "    can throw\n");
791     }
792   switch (gimple_code (stmt))
793     {
794     case GIMPLE_CALL:
795       check_call (local, as_a <gcall *> (stmt), ipa);
796       break;
797     case GIMPLE_LABEL:
798       if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
799 	/* Target of long jump. */
800 	{
801           if (dump_file)
802             fprintf (dump_file, "    nonlocal label is not const/pure\n");
803 	  local->pure_const_state = IPA_NEITHER;
804 	}
805       break;
806     case GIMPLE_ASM:
807       if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
808 	{
809 	  if (dump_file)
810 	    fprintf (dump_file, "    memory asm clobber is not const/pure\n");
811 	  /* Abandon all hope, ye who enter here. */
812 	  local->pure_const_state = IPA_NEITHER;
813 	  local->can_free = true;
814 	}
815       if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
816 	{
817 	  if (dump_file)
818 	    fprintf (dump_file, "    volatile is not const/pure\n");
819 	  /* Abandon all hope, ye who enter here. */
820 	  local->pure_const_state = IPA_NEITHER;
821 	  local->looping = true;
822 	  local->can_free = true;
823 	}
824       return;
825     default:
826       break;
827     }
828 }
829 
830 /* Check that RETVAL is used only in STMT and in comparisons against 0.
831    RETVAL is the return value of the function and STMT is the return stmt.  */
832 
833 static bool
834 check_retval_uses (tree retval, gimple *stmt)
835 {
836   imm_use_iterator use_iter;
837   gimple *use_stmt;
838 
839   FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, retval)
840     if (gcond *cond = dyn_cast<gcond *> (use_stmt))
841       {
842 	tree op2 = gimple_cond_rhs (cond);
843 	if (!integer_zerop (op2))
844 	  RETURN_FROM_IMM_USE_STMT (use_iter, false);
845       }
846     else if (gassign *ga = dyn_cast<gassign *> (use_stmt))
847       {
848 	enum tree_code code = gimple_assign_rhs_code (ga);
849 	if (TREE_CODE_CLASS (code) != tcc_comparison)
850 	  RETURN_FROM_IMM_USE_STMT (use_iter, false);
851 	if (!integer_zerop (gimple_assign_rhs2 (ga)))
852 	  RETURN_FROM_IMM_USE_STMT (use_iter, false);
853       }
854     else if (is_gimple_debug (use_stmt))
855       ;
856     else if (use_stmt != stmt)
857       RETURN_FROM_IMM_USE_STMT (use_iter, false);
858 
859   return true;
860 }
861 
862 /* malloc_candidate_p() checks if FUN can possibly be annotated with the
863    malloc attribute.  Currently this function does a very conservative analysis.
864    FUN is considered to be a candidate if
865    1) It returns a value of pointer type.
866    2) SSA_NAME_DEF_STMT (return_value) is either a function call or
867       a phi, and each element of the phi is either NULL or
868       SSA_NAME_DEF_STMT (element) is a function call.
869    3) The return value has immediate uses only within comparisons (gcond or
870       gassign) and the return stmt (and likewise a phi arg has immediate uses
871       only within a comparison or the phi stmt).  */
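
/* Editorial example, not part of GCC; example_xmalloc is made up.  It
   has the shape accepted above: the returned pointer comes from a
   DECL_IS_MALLOC callee and is otherwise used only in a comparison
   against zero.  */

#include <stdlib.h>

void *
example_xmalloc (size_t n)
{
  void *p = malloc (n);
  if (p == 0)		/* comparison against 0: allowed use */
    exit (1);
  return p;		/* candidate for attribute malloc */
}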
872 
873 #define DUMP_AND_RETURN(reason)  \
874 {  \
875   if (dump_file && (dump_flags & TDF_DETAILS))  \
876     fprintf (dump_file, "\n%s is not a malloc candidate, reason: %s\n", \
877 	     (node->dump_name ()), (reason));  \
878   return false;  \
879 }
880 
881 static bool
882 malloc_candidate_p_1 (function *fun, tree retval, gimple *ret_stmt, bool ipa,
883 		      bitmap visited)
884 {
885   cgraph_node *node = cgraph_node::get_create (fun->decl);
886   if (!bitmap_set_bit (visited, SSA_NAME_VERSION (retval)))
887     return true;
888 
889   if (!check_retval_uses (retval, ret_stmt))
890     DUMP_AND_RETURN("Return value has uses outside return stmt"
891 		    " and comparisons against 0.")
892 
893   gimple *def = SSA_NAME_DEF_STMT (retval);
894 
895   if (gcall *call_stmt = dyn_cast<gcall *> (def))
896     {
897       tree callee_decl = gimple_call_fndecl (call_stmt);
898       if (!callee_decl)
899 	return false;
900 
901       if (!ipa && !DECL_IS_MALLOC (callee_decl))
902 	DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
903 			" non-ipa mode.")
904 
905       cgraph_edge *cs = node->get_edge (call_stmt);
906       if (cs)
907 	{
908 	  ipa_call_summary *es = ipa_call_summaries->get_create (cs);
909 	  es->is_return_callee_uncaptured = true;
910 	}
911     }
912 
913     else if (gphi *phi = dyn_cast<gphi *> (def))
914       {
915 	bool all_args_zero = true;
916 	for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
917 	  {
918 	    tree arg = gimple_phi_arg_def (phi, i);
919 	    if (integer_zerop (arg))
920 	      continue;
921 
922 	    all_args_zero = false;
923 	    if (TREE_CODE (arg) != SSA_NAME)
924 	      DUMP_AND_RETURN ("phi arg is not SSA_NAME.");
925 	    if (!check_retval_uses (arg, phi))
926 	      DUMP_AND_RETURN ("phi arg has uses outside phi"
927 				 " and comparisons against 0.")
928 
929 	    gimple *arg_def = SSA_NAME_DEF_STMT (arg);
930 	    if (is_a<gphi *> (arg_def))
931 	      {
932 		if (!malloc_candidate_p_1 (fun, arg, phi, ipa, visited))
933 		    DUMP_AND_RETURN ("nested phi fail")
934 		continue;
935 	      }
936 
937 	    gcall *call_stmt = dyn_cast<gcall *> (arg_def);
938 	    if (!call_stmt)
939 	      DUMP_AND_RETURN ("phi arg is not a call_stmt.")
940 
941 	    tree callee_decl = gimple_call_fndecl (call_stmt);
942 	    if (!callee_decl)
943 	      return false;
944 	    if (!ipa && !DECL_IS_MALLOC (callee_decl))
945 	      DUMP_AND_RETURN("callee_decl does not have malloc attribute"
946 			      " for non-ipa mode.")
947 
948 	    cgraph_edge *cs = node->get_edge (call_stmt);
949 	    if (cs)
950 	      {
951 		ipa_call_summary *es = ipa_call_summaries->get_create (cs);
952 		es->is_return_callee_uncaptured = true;
953 	      }
954 	  }
955 
956 	if (all_args_zero)
957 	  DUMP_AND_RETURN ("Return value is a phi with all args equal to 0.")
958       }
959 
960     else
961       DUMP_AND_RETURN("def_stmt of return value is not a call or phi-stmt.")
962 
963   return true;
964 }
965 
966 static bool
967 malloc_candidate_p (function *fun, bool ipa)
968 {
969   basic_block exit_block = EXIT_BLOCK_PTR_FOR_FN (fun);
970   edge e;
971   edge_iterator ei;
972   cgraph_node *node = cgraph_node::get_create (fun->decl);
973 
974   if (EDGE_COUNT (exit_block->preds) == 0
975       || !flag_delete_null_pointer_checks)
976     return false;
977 
978   auto_bitmap visited;
979   FOR_EACH_EDGE (e, ei, exit_block->preds)
980     {
981       gimple_stmt_iterator gsi = gsi_last_bb (e->src);
982       greturn *ret_stmt = dyn_cast<greturn *> (gsi_stmt (gsi));
983 
984       if (!ret_stmt)
985 	return false;
986 
987       tree retval = gimple_return_retval (ret_stmt);
988       if (!retval)
989 	DUMP_AND_RETURN("No return value.")
990 
991       if (TREE_CODE (retval) != SSA_NAME
992 	  || TREE_CODE (TREE_TYPE (retval)) != POINTER_TYPE)
993 	DUMP_AND_RETURN("Return value is not SSA_NAME or not a pointer type.")
994 
995       if (!malloc_candidate_p_1 (fun, retval, ret_stmt, ipa, visited))
996 	return false;
997     }
998 
999   if (dump_file && (dump_flags & TDF_DETAILS))
1000     fprintf (dump_file, "\nFound %s to be candidate for malloc attribute\n",
1001 	     IDENTIFIER_POINTER (DECL_NAME (fun->decl)));
1002   return true;
1003 }
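
/* Editorial counter-example, not part of GCC; example_* names are made
   up.  Stashing the pointer gives the return value a use outside the
   return statement and zero-comparisons, so check_retval_uses rejects
   the function.  */

#include <stdlib.h>

static void *example_cache;

void *
example_not_malloc (size_t n)
{
  void *p = malloc (n);
  example_cache = p;	/* extra use of P: not a candidate */
  return p;
}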
1004 
1005 #undef DUMP_AND_RETURN
1006 
1007 /* This is the main routine for finding the reference patterns for
1008    global variables within a function FN.  */
1009 
1010 static funct_state
1011 analyze_function (struct cgraph_node *fn, bool ipa)
1012 {
1013   tree decl = fn->decl;
1014   funct_state l;
1015   basic_block this_block;
1016 
1017   l = XCNEW (class funct_state_d);
1018   l->pure_const_state = IPA_CONST;
1019   l->state_previously_known = IPA_NEITHER;
1020   l->looping_previously_known = true;
1021   l->looping = false;
1022   l->can_throw = false;
1023   l->can_free = false;
1024   state_from_flags (&l->state_previously_known, &l->looping_previously_known,
1025 		    flags_from_decl_or_type (fn->decl),
1026 		    fn->cannot_return_p ());
1027 
1028   if (fn->thunk.thunk_p || fn->alias)
1029     {
1030       /* Thunk gets propagated through, so nothing interesting happens.  */
1031       gcc_assert (ipa);
1032       if (fn->thunk.thunk_p && fn->thunk.virtual_offset_p)
1033 	l->pure_const_state = IPA_NEITHER;
1034       return l;
1035     }
1036 
1037   if (dump_file)
1038     {
1039       fprintf (dump_file, "\n\n local analysis of %s\n ",
1040 	       fn->dump_name ());
1041     }
1042 
1043   push_cfun (DECL_STRUCT_FUNCTION (decl));
1044 
1045   FOR_EACH_BB_FN (this_block, cfun)
1046     {
1047       gimple_stmt_iterator gsi;
1048       struct walk_stmt_info wi;
1049 
1050       memset (&wi, 0, sizeof (wi));
1051       for (gsi = gsi_start_bb (this_block);
1052 	   !gsi_end_p (gsi);
1053 	   gsi_next (&gsi))
1054 	{
1055 	  check_stmt (&gsi, l, ipa);
1056 	  if (l->pure_const_state == IPA_NEITHER
1057 	      && l->looping
1058 	      && l->can_throw
1059 	      && l->can_free)
1060 	    goto end;
1061 	}
1062     }
1063 
1064 end:
1065   if (l->pure_const_state != IPA_NEITHER)
1066     {
1067       /* Const functions cannot have back edges (an
1068 	 indication of a possible infinite loop side
1069 	 effect).  */
1070       if (mark_dfs_back_edges ())
1071         {
1072 	  /* Preheaders are needed for SCEV to work.
1073 	     Simple latches and recorded exits improve the chances that the loop
1074 	     will be proved finite in testcases such as loop-15.c
1075 	     and loop-24.c.  */
1076 	  loop_optimizer_init (LOOPS_HAVE_PREHEADERS
1077 			       | LOOPS_HAVE_SIMPLE_LATCHES
1078 			       | LOOPS_HAVE_RECORDED_EXITS);
1079 	  if (dump_file && (dump_flags & TDF_DETAILS))
1080 	    flow_loops_dump (dump_file, NULL, 0);
1081 	  if (mark_irreducible_loops ())
1082 	    {
1083 	      if (dump_file)
1084 	        fprintf (dump_file, "    has irreducible loops\n");
1085 	      l->looping = true;
1086 	    }
1087 	  else
1088 	    {
1089 	      class loop *loop;
1090 	      scev_initialize ();
1091 	      FOR_EACH_LOOP (loop, 0)
1092 		if (!finite_loop_p (loop))
1093 		  {
1094 		    if (dump_file)
1095 		      fprintf (dump_file, "    cannot prove finiteness of "
1096 			       "loop %i\n", loop->num);
1097 		    l->looping = true;
1098 		    break;
1099 		  }
1100 	      scev_finalize ();
1101 	    }
1102           loop_optimizer_finalize ();
1103 	}
1104     }
1105 
1106   if (dump_file && (dump_flags & TDF_DETAILS))
1107     fprintf (dump_file, "    checking previously known:");
1108 
1109   better_state (&l->pure_const_state, &l->looping,
1110 		l->state_previously_known,
1111 		l->looping_previously_known);
1112   if (TREE_NOTHROW (decl))
1113     l->can_throw = false;
1114 
1115   l->malloc_state = STATE_MALLOC_BOTTOM;
1116   if (DECL_IS_MALLOC (decl))
1117     l->malloc_state = STATE_MALLOC;
1118   else if (ipa && malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), true))
1119     l->malloc_state = STATE_MALLOC_TOP;
1120   else if (malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), false))
1121     l->malloc_state = STATE_MALLOC;
1122 
1123   pop_cfun ();
1124   if (dump_file)
1125     {
1126       if (l->looping)
1127         fprintf (dump_file, "Function is locally looping.\n");
1128       if (l->can_throw)
1129         fprintf (dump_file, "Function is locally throwing.\n");
1130       if (l->pure_const_state == IPA_CONST)
1131         fprintf (dump_file, "Function is locally const.\n");
1132       if (l->pure_const_state == IPA_PURE)
1133         fprintf (dump_file, "Function is locally pure.\n");
1134       if (l->can_free)
1135 	fprintf (dump_file, "Function can locally free.\n");
1136       if (l->malloc_state == STATE_MALLOC)
1137 	fprintf (dump_file, "Function is locally malloc.\n");
1138     }
1139   return l;
1140 }
1141 
1142 void
1143 funct_state_summary_t::insert (cgraph_node *node, funct_state_d *state)
1144 {
1145   /* There are some shared nodes, in particular the initializers on
1146      static declarations.  We do not need to scan them more than once
1147      since all we would be interested in are the addressof
1148      operations.  */
1149   if (opt_for_fn (node->decl, flag_ipa_pure_const))
1150     {
1151       funct_state_d *a = analyze_function (node, true);
1152       new (state) funct_state_d (*a);
1153       free (a);
1154     }
1155 }
1156 
1157 /* Called when a new clone is inserted into the callgraph late.  */
1158 
1159 void
1160 funct_state_summary_t::duplicate (cgraph_node *, cgraph_node *dst,
1161 				  funct_state_d *src_data,
1162 				  funct_state_d *dst_data)
1163 {
1164   new (dst_data) funct_state_d (*src_data);
1165   if (dst_data->malloc_state == STATE_MALLOC
1166       && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (dst->decl))))
1167     dst_data->malloc_state = STATE_MALLOC_BOTTOM;
1168 }
1169 
1170 
1171 void
1172 pass_ipa_pure_const::
1173 register_hooks (void)
1174 {
1175   if (init_p)
1176     return;
1177 
1178   init_p = true;
1179 
1180   funct_state_summaries = new funct_state_summary_t (symtab);
1181 }
1182 
1183 
1184 /* Analyze each function in the cgraph to see if it is locally PURE or
1185    CONST.  */
1186 
1187 static void
1188 pure_const_generate_summary (void)
1189 {
1190   struct cgraph_node *node;
1191 
1192   pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
1193   pass->register_hooks ();
1194 
1195   /* Process all of the functions.
1196 
1197      We process AVAIL_INTERPOSABLE functions.  We cannot use the results
1198      by default, but the info can be used at LTO with -fwhole-program or
1199      when a function got cloned and the clone is AVAILABLE.  */
1200 
1201   FOR_EACH_DEFINED_FUNCTION (node)
1202     if (opt_for_fn (node->decl, flag_ipa_pure_const))
1203       {
1204 	funct_state_d *a = analyze_function (node, true);
1205 	new (funct_state_summaries->get_create (node)) funct_state_d (*a);
1206 	free (a);
1207       }
1208 }
1209 
1210 
1211 /* Serialize the ipa info for lto.  */
1212 
1213 static void
1214 pure_const_write_summary (void)
1215 {
1216   struct cgraph_node *node;
1217   struct lto_simple_output_block *ob
1218     = lto_create_simple_output_block (LTO_section_ipa_pure_const);
1219   unsigned int count = 0;
1220   lto_symtab_encoder_iterator lsei;
1221   lto_symtab_encoder_t encoder;
1222 
1223   encoder = lto_get_out_decl_state ()->symtab_node_encoder;
1224 
1225   for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1226        lsei_next_function_in_partition (&lsei))
1227     {
1228       node = lsei_cgraph_node (lsei);
1229       if (node->definition && funct_state_summaries->exists (node))
1230 	count++;
1231     }
1232 
1233   streamer_write_uhwi_stream (ob->main_stream, count);
1234 
1235   /* Process all of the functions.  */
1236   for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1237        lsei_next_function_in_partition (&lsei))
1238     {
1239       node = lsei_cgraph_node (lsei);
1240       funct_state_d *fs = funct_state_summaries->get (node);
1241       if (node->definition && fs != NULL)
1242 	{
1243 	  struct bitpack_d bp;
1244 	  int node_ref;
1245 	  lto_symtab_encoder_t encoder;
1246 
1247 	  encoder = ob->decl_state->symtab_node_encoder;
1248 	  node_ref = lto_symtab_encoder_encode (encoder, node);
1249 	  streamer_write_uhwi_stream (ob->main_stream, node_ref);
1250 
1251 	  /* Note that the flags must be read back in the same
1252 	     order in which they are packed below.  */
1253 	  bp = bitpack_create (ob->main_stream);
1254 	  bp_pack_value (&bp, fs->pure_const_state, 2);
1255 	  bp_pack_value (&bp, fs->state_previously_known, 2);
1256 	  bp_pack_value (&bp, fs->looping_previously_known, 1);
1257 	  bp_pack_value (&bp, fs->looping, 1);
1258 	  bp_pack_value (&bp, fs->can_throw, 1);
1259 	  bp_pack_value (&bp, fs->can_free, 1);
1260 	  bp_pack_value (&bp, fs->malloc_state, 2);
1261 	  streamer_write_bitpack (&bp);
1262 	}
1263     }
1264 
1265   lto_destroy_simple_output_block (ob);
1266 }
1267 
1268 
1269 /* Deserialize the ipa info for lto.  */
1270 
1271 static void
1272 pure_const_read_summary (void)
1273 {
1274   struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1275   struct lto_file_decl_data *file_data;
1276   unsigned int j = 0;
1277 
1278   pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
1279   pass->register_hooks ();
1280 
1281   while ((file_data = file_data_vec[j++]))
1282     {
1283       const char *data;
1284       size_t len;
1285       class lto_input_block *ib
1286 	= lto_create_simple_input_block (file_data,
1287 					 LTO_section_ipa_pure_const,
1288 					 &data, &len);
1289       if (ib)
1290 	{
1291 	  unsigned int i;
1292 	  unsigned int count = streamer_read_uhwi (ib);
1293 
1294 	  for (i = 0; i < count; i++)
1295 	    {
1296 	      unsigned int index;
1297 	      struct cgraph_node *node;
1298 	      struct bitpack_d bp;
1299 	      funct_state fs;
1300 	      lto_symtab_encoder_t encoder;
1301 
1302 	      index = streamer_read_uhwi (ib);
1303 	      encoder = file_data->symtab_node_encoder;
1304 	      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
1305 									index));
1306 
1307 	      fs = funct_state_summaries->get_create (node);
1308 	      /* Note that the flags must be read in the same
1309 		 order in which they were written by
1310 		 pure_const_write_summary.  */
1311 	      bp = streamer_read_bitpack (ib);
1312 	      fs->pure_const_state
1313 			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1314 	      fs->state_previously_known
1315 			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1316 	      fs->looping_previously_known = bp_unpack_value (&bp, 1);
1317 	      fs->looping = bp_unpack_value (&bp, 1);
1318 	      fs->can_throw = bp_unpack_value (&bp, 1);
1319 	      fs->can_free = bp_unpack_value (&bp, 1);
1320 	      fs->malloc_state
1321 			= (enum malloc_state_e) bp_unpack_value (&bp, 2);
1322 
1323 	      if (dump_file)
1324 		{
1325 		  int flags = flags_from_decl_or_type (node->decl);
1326 		  fprintf (dump_file, "Read info for %s ", node->dump_name ());
1327 		  if (flags & ECF_CONST)
1328 		    fprintf (dump_file, " const");
1329 		  if (flags & ECF_PURE)
1330 		    fprintf (dump_file, " pure");
1331 		  if (flags & ECF_NOTHROW)
1332 		    fprintf (dump_file, " nothrow");
1333 		  fprintf (dump_file, "\n  pure const state: %s\n",
1334 			   pure_const_names[fs->pure_const_state]);
1335 		  fprintf (dump_file, "  previously known state: %s\n",
1336 			   pure_const_names[fs->state_previously_known]);
1337 		  if (fs->looping)
1338 		    fprintf (dump_file,"  function is locally looping\n");
1339 		  if (fs->looping_previously_known)
1340 		    fprintf (dump_file,"  function is previously known looping\n");
1341 		  if (fs->can_throw)
1342 		    fprintf (dump_file,"  function is locally throwing\n");
1343 		  if (fs->can_free)
1344 		    fprintf (dump_file,"  function can locally free\n");
1345 		  fprintf (dump_file, "\n malloc state: %s\n",
1346 			   malloc_state_names[fs->malloc_state]);
1347 		}
1348 	    }
1349 
1350 	  lto_destroy_simple_input_block (file_data,
1351 					  LTO_section_ipa_pure_const,
1352 					  ib, data, len);
1353 	}
1354     }
1355 }
1356 
1357 /* We only propagate across edges that can throw externally and whose
1358    callee is not interposable.  */
1359 
1360 static bool
1361 ignore_edge_for_nothrow (struct cgraph_edge *e)
1362 {
1363   if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1364     return true;
1365 
1366   enum availability avail;
1367   cgraph_node *ultimate_target
1368     = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
1369   if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (ultimate_target->decl))
1370     return true;
1371   return ((opt_for_fn (e->callee->decl, flag_non_call_exceptions)
1372 	   && !e->callee->binds_to_current_def_p (e->caller))
1373 	  || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
1374 	  || !opt_for_fn (ultimate_target->decl, flag_ipa_pure_const));
1375 }
1376 
1377 /* Return true if NODE is a self-recursive function.
1378    Indirectly recursive functions appear as non-trivial strongly
1379    connected components, so we need to care about self-recursion
1380    only.  */
1381 
1382 static bool
1383 self_recursive_p (struct cgraph_node *node)
1384 {
1385   struct cgraph_edge *e;
1386   for (e = node->callees; e; e = e->next_callee)
1387     if (e->callee->function_symbol () == node)
1388       return true;
1389   return false;
1390 }
1391 
1392 /* Return true if N is a cdtor that is not const or pure.  In this case we may
1393    need to remove the unreachable function if it is marked const/pure.  */
1394 
1395 static bool
1396 cdtor_p (cgraph_node *n, void *)
1397 {
1398   if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
1399     return ((!TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl))
1400 	    || DECL_LOOPING_CONST_OR_PURE_P (n->decl));
1401   return false;
1402 }
1403 
1404 /* Skip edges from and to nodes without ipa_pure_const enabled.
1405    Ignore unavailable symbols.  */
1406 
1407 static bool
1408 ignore_edge_for_pure_const (struct cgraph_edge *e)
1409 {
1410   enum availability avail;
1411   cgraph_node *ultimate_target
1412     = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
1413 
1414   return (avail <= AVAIL_INTERPOSABLE
1415 	  || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
1416 	  || !opt_for_fn (ultimate_target->decl,
1417 			  flag_ipa_pure_const));
1418 }
1419 
1420 /* Produce transitive closure over the callgraph and compute pure/const
1421    attributes.  */
1422 
1423 static bool
1424 propagate_pure_const (void)
1425 {
1426   struct cgraph_node *node;
1427   struct cgraph_node *w;
1428   struct cgraph_node **order =
1429     XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1430   int order_pos;
1431   int i;
1432   struct ipa_dfs_info * w_info;
1433   bool remove_p = false;
1434   bool has_cdtor;
1435 
1436   order_pos = ipa_reduced_postorder (order, true,
1437 				     ignore_edge_for_pure_const);
1438   if (dump_file)
1439     {
1440       cgraph_node::dump_cgraph (dump_file);
1441       ipa_print_order (dump_file, "reduced", order, order_pos);
1442     }
1443 
1444   /* Propagate the local information through the call graph to produce
1445      the global information.  All the nodes within a cycle will have
1446      the same info so we collapse cycles first.  Then we can do the
1447      propagation in one pass from the leaves to the roots.  */
1448   for (i = 0; i < order_pos; i++ )
1449     {
1450       enum pure_const_state_e pure_const_state = IPA_CONST;
1451       bool looping = false;
1452       int count = 0;
1453       node = order[i];
1454 
1455       if (node->alias)
1456 	continue;
1457 
1458       if (dump_file && (dump_flags & TDF_DETAILS))
1459 	fprintf (dump_file, "Starting cycle\n");
1460 
1461       /* Find the worst state for any node in the cycle.  */
1462       w = node;
1463       while (w && pure_const_state != IPA_NEITHER)
1464 	{
1465 	  struct cgraph_edge *e;
1466 	  struct cgraph_edge *ie;
1467 	  int i;
1468 	  struct ipa_ref *ref = NULL;
1469 
1470 	  funct_state w_l = funct_state_summaries->get_create (w);
1471 	  if (dump_file && (dump_flags & TDF_DETAILS))
1472 	    fprintf (dump_file, "  Visiting %s state:%s looping %i\n",
1473 		     w->dump_name (),
1474 		     pure_const_names[w_l->pure_const_state],
1475 		     w_l->looping);
1476 
1477 	  /* First merge in function body properties.
1478 	     We are safe to pass NULL as FROM and TO because we will take care
1479 	     of possible interposition when walking callees.  */
1480 	  worse_state (&pure_const_state, &looping,
1481 		       w_l->pure_const_state, w_l->looping,
1482 		       NULL, NULL);
1483 	  if (pure_const_state == IPA_NEITHER)
1484 	    break;
1485 
1486 	  count++;
1487 
1488 	  /* We consider recursive cycles as possibly infinite.
1489 	     This might be relaxed since infinite recursion leads to stack
1490 	     overflow.  */
1491 	  if (count > 1)
1492 	    looping = true;
1493 
1494 	  /* Now walk the edges and merge in callee properties.  */
1495 	  for (e = w->callees; e && pure_const_state != IPA_NEITHER;
1496 	       e = e->next_callee)
1497 	    {
1498 	      enum availability avail;
1499 	      struct cgraph_node *y = e->callee->
1500 				function_or_virtual_thunk_symbol (&avail,
1501 								  e->caller);
1502 	      enum pure_const_state_e edge_state = IPA_CONST;
1503 	      bool edge_looping = false;
1504 
1505 	      if (dump_file && (dump_flags & TDF_DETAILS))
1506 		{
1507 		  fprintf (dump_file, "    Call to %s",
1508 			   e->callee->dump_name ());
1509 		}
1510 	      if (avail > AVAIL_INTERPOSABLE)
1511 		{
1512 		  funct_state y_l = funct_state_summaries->get_create (y);
1513 
1514 		  if (dump_file && (dump_flags & TDF_DETAILS))
1515 		    {
1516 		      fprintf (dump_file,
1517 			       " state:%s looping:%i\n",
1518 			       pure_const_names[y_l->pure_const_state],
1519 			       y_l->looping);
1520 		    }
1521 		  if (y_l->pure_const_state > IPA_PURE
1522 		      && e->cannot_lead_to_return_p ())
1523 		    {
1524 		      if (dump_file && (dump_flags & TDF_DETAILS))
1525 			fprintf (dump_file,
1526 				 "        Ignoring side effects"
1527 				 " -> pure, looping\n");
1528 		      edge_state = IPA_PURE;
1529 		      edge_looping = true;
1530 		    }
1531 		  else
1532 		    {
1533 		      edge_state = y_l->pure_const_state;
1534 		      edge_looping = y_l->looping;
1535 		    }
1536 		}
1537 	      else if (special_builtin_state (&edge_state, &edge_looping,
1538 					      y->decl))
1539 		;
1540 	      else
1541 		state_from_flags (&edge_state, &edge_looping,
1542 				  flags_from_decl_or_type (y->decl),
1543 				  e->cannot_lead_to_return_p ());
1544 
1545 	      /* Merge the results with what we already know.  */
1546 	      better_state (&edge_state, &edge_looping,
1547 			    w_l->state_previously_known,
1548 			    w_l->looping_previously_known);
1549 	      worse_state (&pure_const_state, &looping,
1550 			   edge_state, edge_looping, e->caller, e->callee);
1551 	      if (pure_const_state == IPA_NEITHER)
1552 	        break;
1553 	    }
1554 
1555 	  /* Now process the indirect calls.  */
1556           for (ie = w->indirect_calls;
1557 	       ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
1558 	    {
1559 	      enum pure_const_state_e edge_state = IPA_CONST;
1560 	      bool edge_looping = false;
1561 
1562 	      if (dump_file && (dump_flags & TDF_DETAILS))
1563 		fprintf (dump_file, "    Indirect call");
1564 	      state_from_flags (&edge_state, &edge_looping,
1565 			        ie->indirect_info->ecf_flags,
1566 				ie->cannot_lead_to_return_p ());
1567 	      /* Merge the results with what we already know.  */
1568 	      better_state (&edge_state, &edge_looping,
1569 			    w_l->state_previously_known,
1570 			    w_l->looping_previously_known);
1571 	      worse_state (&pure_const_state, &looping,
1572 			   edge_state, edge_looping, NULL, NULL);
1573 	      if (pure_const_state == IPA_NEITHER)
1574 	        break;
1575 	    }
1576 
1577 	  /* And finally all loads and stores.  */
1578 	  for (i = 0; w->iterate_reference (i, ref)
1579 	       && pure_const_state != IPA_NEITHER; i++)
1580 	    {
1581 	      enum pure_const_state_e ref_state = IPA_CONST;
1582 	      bool ref_looping = false;
1583 	      switch (ref->use)
1584 		{
1585 		case IPA_REF_LOAD:
1586 		  /* readonly reads are safe.  */
1587 		  if (TREE_READONLY (ref->referred->decl))
1588 		    break;
1589 		  if (dump_file && (dump_flags & TDF_DETAILS))
1590 		    fprintf (dump_file, "    nonreadonly global var read\n");
1591 		  ref_state = IPA_PURE;
1592 		  break;
1593 		case IPA_REF_STORE:
1594 		  if (ref->cannot_lead_to_return ())
1595 		    break;
1596 		  ref_state = IPA_NEITHER;
1597 		  if (dump_file && (dump_flags & TDF_DETAILS))
1598 		    fprintf (dump_file, "    global var write\n");
1599 		  break;
1600 		case IPA_REF_ADDR:
1601 		  break;
1602 		default:
1603 		  gcc_unreachable ();
1604 		}
1605 	      better_state (&ref_state, &ref_looping,
1606 			    w_l->state_previously_known,
1607 			    w_l->looping_previously_known);
1608 	      worse_state (&pure_const_state, &looping,
1609 			   ref_state, ref_looping, NULL, NULL);
1610 	      if (pure_const_state == IPA_NEITHER)
1611 		break;
1612 	    }
1613 	  w_info = (struct ipa_dfs_info *) w->aux;
1614 	  w = w_info->next_cycle;
1615 	}
1616       if (dump_file && (dump_flags & TDF_DETAILS))
1617 	fprintf (dump_file, "Result %s looping %i\n",
1618 		 pure_const_names [pure_const_state],
1619 		 looping);
1620 
1621       /* Find the worst state of can_free for any node in the cycle.  */
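      /* A node may call free when its local summary says so, when it can
	 be interposed at link time, or when it makes an indirect call
	 whose target is unknown.  */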
1622       bool can_free = false;
1623       w = node;
1624       while (w && !can_free)
1625 	{
1626 	  struct cgraph_edge *e;
1627 	  funct_state w_l = funct_state_summaries->get (w);
1628 
1629 	  if (w_l->can_free
1630 	      || w->get_availability () == AVAIL_INTERPOSABLE
1631 	      || w->indirect_calls)
1632 	    can_free = true;
1633 
1634 	  for (e = w->callees; e && !can_free; e = e->next_callee)
1635 	    {
1636 	      enum availability avail;
1637 	      struct cgraph_node *y = e->callee->
1638 				function_or_virtual_thunk_symbol (&avail,
1639 								  e->caller);
1640 
1641 	      if (avail > AVAIL_INTERPOSABLE)
1642 		can_free = funct_state_summaries->get (y)->can_free;
1643 	      else
1644 		can_free = true;
1645 	    }
1646 	  w_info = (struct ipa_dfs_info *) w->aux;
1647 	  w = w_info->next_cycle;
1648 	}
1649 
1650       /* Copy back the region's pure_const_state, which is shared by
1651 	 all nodes in the region.  */
1652       w = node;
1653       while (w)
1654 	{
1655 	  funct_state w_l = funct_state_summaries->get (w);
1656 	  enum pure_const_state_e this_state = pure_const_state;
1657 	  bool this_looping = looping;
1658 
1659 	  w_l->can_free = can_free;
1660 	  w->nonfreeing_fn = !can_free;
1661 	  if (!can_free && dump_file)
1662 	    fprintf (dump_file, "Function found not to call free: %s\n",
1663 		     w->dump_name ());
1664 
1665 	  if (w_l->state_previously_known != IPA_NEITHER
1666 	      && this_state > w_l->state_previously_known)
1667 	    {
1668 	      if (this_state == IPA_NEITHER)
1669 		this_looping = w_l->looping_previously_known;
1670 	      this_state = w_l->state_previously_known;
1671 	    }
1672 	  if (!this_looping && self_recursive_p (w))
1673 	    this_looping = true;
1674 	  if (!w_l->looping_previously_known)
1675 	    this_looping = false;
1676 
1677 	  /* All nodes within a cycle share the same info.  */
1678 	  w_l->pure_const_state = this_state;
1679 	  w_l->looping = this_looping;
1680 
1681 	  /* Inline clones share declaration with their offline copies;
1682 	     do not modify their declarations since the offline copy may
1683 	     be different.  */
1684 	  if (!w->inlined_to)
1685 	    switch (this_state)
1686 	      {
1687 	      case IPA_CONST:
1688 		if (!TREE_READONLY (w->decl))
1689 		  {
1690 		    warn_function_const (w->decl, !this_looping);
1691 		    if (dump_file)
1692 		      fprintf (dump_file, "Function found to be %sconst: %s\n",
1693 			       this_looping ? "looping " : "",
1694 			       w->dump_name ());
1695 		  }
1696 		/* Turning constructor or destructor to non-looping const/pure
1697 		   enables us to possibly remove the function completely.  */
1698 		if (this_looping)
1699 		  has_cdtor = false;
1700 		else
1701 		  has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
1702 							      NULL, true);
1703 		if (w->set_const_flag (true, this_looping))
1704 		  {
1705 		    if (dump_file)
1706 		      fprintf (dump_file,
1707 			       "Declaration updated to be %sconst: %s\n",
1708 			       this_looping ? "looping " : "",
1709 			       w->dump_name ());
1710 		    remove_p |= has_cdtor;
1711 		  }
1712 		break;
1713 
1714 	      case IPA_PURE:
1715 		if (!DECL_PURE_P (w->decl))
1716 		  {
1717 		    warn_function_pure (w->decl, !this_looping);
1718 		    if (dump_file)
1719 		      fprintf (dump_file, "Function found to be %spure: %s\n",
1720 			       this_looping ? "looping " : "",
1721 			       w->dump_name ());
1722 		  }
1723 		if (this_looping)
1724 		  has_cdtor = false;
1725 		else
1726 		  has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
1727 							      NULL, true);
1728 		if (w->set_pure_flag (true, this_looping))
1729 		  {
1730 		    if (dump_file)
1731 		      fprintf (dump_file,
1732 			       "Declaration updated to be %spure: %s\n",
1733 			       this_looping ? "looping " : "",
1734 			       w->dump_name ());
1735 		    remove_p |= has_cdtor;
1736 		  }
1737 		break;
1738 
1739 	      default:
1740 		break;
1741 	      }
1742 	  w_info = (struct ipa_dfs_info *) w->aux;
1743 	  w = w_info->next_cycle;
1744 	}
1745     }
1746 
1747   ipa_free_postorder_info ();
1748   free (order);
1749   return remove_p;
1750 }
1751 
1752 /* Produce transitive closure over the callgraph and compute nothrow
1753    attributes.  */
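/* For instance (illustrative only):

     void leaf (void) { }
     void use (void) { leaf (); }

   once leaf is found nothrow, use has only nothrow callees and is marked
   TREE_NOTHROW as well, allowing EH cleanups around such calls to be
   removed later.  */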
1754 
1755 static void
1756 propagate_nothrow (void)
1757 {
1758   struct cgraph_node *node;
1759   struct cgraph_node *w;
1760   struct cgraph_node **order =
1761     XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1762   int order_pos;
1763   int i;
1764   struct ipa_dfs_info * w_info;
1765 
1766   order_pos = ipa_reduced_postorder (order, true,
1767 				     ignore_edge_for_nothrow);
1768   if (dump_file)
1769     {
1770       cgraph_node::dump_cgraph (dump_file);
1771       ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
1772     }
1773 
1774   /* Propagate the local information through the call graph to produce
1775      the global information.  All the nodes within a cycle will have
1776      the same info so we collapse cycles first.  Then we can do the
1777      propagation in one pass from the leaves to the roots.  */
1778   for (i = 0; i < order_pos; i++ )
1779     {
1780       bool can_throw = false;
1781       node = order[i];
1782 
1783       if (node->alias)
1784 	continue;
1785 
1786       /* Find the worst state for any node in the cycle.  */
1787       w = node;
1788       while (w && !can_throw)
1789 	{
1790 	  struct cgraph_edge *e, *ie;
1791 
1792 	  if (!TREE_NOTHROW (w->decl))
1793 	    {
1794 	      funct_state w_l = funct_state_summaries->get_create (w);
1795 
1796 	      if (w_l->can_throw
1797 		  || w->get_availability () == AVAIL_INTERPOSABLE)
1798 		can_throw = true;
1799 
1800 	      for (e = w->callees; e && !can_throw; e = e->next_callee)
1801 		{
1802 		  enum availability avail;
1803 
1804 		  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1805 		    continue;
1806 
1807 		  struct cgraph_node *y = e->callee->
1808 				   function_or_virtual_thunk_symbol (&avail,
1809 								     e->caller);
1810 
1811 		  /* We can use info about the callee only if we know it
1812 		     cannot be interposed.
1813 		     When callee is compiled with non-call exceptions we also
1814 		     must check that the declaration is bound to current
1815 		     body as other semantically equivalent body may still
1816 		     throw.  */
1817 		  if (avail <= AVAIL_INTERPOSABLE
1818 		      || (!TREE_NOTHROW (y->decl)
1819 			  && (funct_state_summaries->get_create (y)->can_throw
1820 			      || (opt_for_fn (y->decl, flag_non_call_exceptions)
1821 				  && !e->callee->binds_to_current_def_p (w)))))
1822 		    can_throw = true;
1823 		}
1824 	      for (ie = w->indirect_calls; ie && !can_throw;
1825 		   ie = ie->next_callee)
1826 		if (ie->can_throw_external
1827 		    && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
1828 		  can_throw = true;
1829 	    }
1830 	  w_info = (struct ipa_dfs_info *) w->aux;
1831 	  w = w_info->next_cycle;
1832 	}
1833 
1834       /* Copy back the region's nothrow state, which is shared by
1835 	 all nodes in the region.  */
1836       w = node;
1837       while (w)
1838 	{
1839 	  funct_state w_l = funct_state_summaries->get_create (w);
1840 	  if (!can_throw && !TREE_NOTHROW (w->decl))
1841 	    {
1842 	      /* Inline clones share declaration with their offline copies;
1843 		 do not modify their declarations since the offline copy may
1844 		 be different.  */
1845 	      if (!w->inlined_to)
1846 		{
1847 		  w->set_nothrow_flag (true);
1848 		  if (dump_file)
1849 		    fprintf (dump_file, "Function found to be nothrow: %s\n",
1850 			     w->dump_name ());
1851 		}
1852 	    }
1853 	  else if (can_throw && !TREE_NOTHROW (w->decl))
1854 	    w_l->can_throw = true;
1855 	  w_info = (struct ipa_dfs_info *) w->aux;
1856 	  w = w_info->next_cycle;
1857 	}
1858     }
1859 
1860   ipa_free_postorder_info ();
1861   free (order);
1862 }
1863 
1864 /* Debugging function to dump state of malloc lattice.  */
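/* The dump consists of one "name: state" line per function, e.g.
   (hypothetical names):

     MALLOC LATTICE after propagation:
     my_alloc/12: malloc
     main/14: malloc_bottom  */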
1865 
1866 DEBUG_FUNCTION
1867 static void
1868 dump_malloc_lattice (FILE *dump_file, const char *s)
1869 {
1870   if (!dump_file)
1871     return;
1872 
1873   fprintf (dump_file, "\n\nMALLOC LATTICE %s:\n", s);
1874   cgraph_node *node;
1875   FOR_EACH_FUNCTION (node)
1876     {
1877       funct_state fs = funct_state_summaries->get (node);
1878       if (fs)
1879 	fprintf (dump_file, "%s: %s\n", node->dump_name (),
1880 		 malloc_state_names[fs->malloc_state]);
1881     }
1882 }
1883 
1884 /* Propagate malloc attribute across the callgraph.  */
1885 
1886 static void
1887 propagate_malloc (void)
1888 {
1889   cgraph_node *node;
1890   FOR_EACH_FUNCTION (node)
1891     {
1892       if (DECL_IS_MALLOC (node->decl))
1893 	if (!funct_state_summaries->exists (node))
1894 	  {
1895 	    funct_state fs = funct_state_summaries->get_create (node);
1896 	    fs->malloc_state = STATE_MALLOC;
1897 	  }
1898     }
1899 
1900   dump_malloc_lattice (dump_file, "Initial");
1901   struct cgraph_node **order
1902     = XNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1903   int order_pos = ipa_reverse_postorder (order);
1904   bool changed = true;
1905 
1906   while (changed)
1907     {
1908       changed = false;
1909       /* Walk in postorder.  */
1910       for (int i = order_pos - 1; i >= 0; --i)
1911 	{
1912 	  cgraph_node *node = order[i];
1913 	  if (node->alias
1914 	      || !node->definition
1915 	      || !funct_state_summaries->exists (node))
1916 	    continue;
1917 
1918 	  funct_state l = funct_state_summaries->get (node);
1919 
1920 	  /* FIXME: add support for indirect-calls.  */
1921 	  if (node->indirect_calls)
1922 	    {
1923 	      l->malloc_state = STATE_MALLOC_BOTTOM;
1924 	      continue;
1925 	    }
1926 
1927 	  if (node->get_availability () <= AVAIL_INTERPOSABLE)
1928 	    {
1929 	      l->malloc_state = STATE_MALLOC_BOTTOM;
1930 	      continue;
1931 	    }
1932 
1933 	  if (l->malloc_state == STATE_MALLOC_BOTTOM)
1934 	    continue;
1935 
1936 	  vec<cgraph_node *> callees = vNULL;
1937 	  for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
1938 	    {
1939 	      ipa_call_summary *es = ipa_call_summaries->get_create (cs);
1940 	      if (es && es->is_return_callee_uncaptured)
1941 		callees.safe_push (cs->callee);
1942 	    }
1943 
1944 	  malloc_state_e new_state = l->malloc_state;
1945 	  for (unsigned j = 0; j < callees.length (); j++)
1946 	    {
1947 	      cgraph_node *callee = callees[j];
1948 	      if (!funct_state_summaries->exists (callee))
1949 		{
1950 		  new_state = STATE_MALLOC_BOTTOM;
1951 		  break;
1952 		}
1953 	      malloc_state_e callee_state
1954 		= funct_state_summaries->get_create (callee)->malloc_state;
1955 	      if (new_state < callee_state)
1956 		new_state = callee_state;
1957 	    }
1958 	  if (new_state != l->malloc_state)
1959 	    {
1960 	      changed = true;
1961 	      l->malloc_state = new_state;
1962 	    }
1963 	}
1964     }
1965 
1966   FOR_EACH_DEFINED_FUNCTION (node)
1967     if (funct_state_summaries->exists (node))
1968       {
1969 	funct_state l = funct_state_summaries->get (node);
1970 	if (!node->alias
1971 	    && l->malloc_state == STATE_MALLOC
1972 	    && !node->inlined_to)
1973 	  {
1974 	    if (dump_file && (dump_flags & TDF_DETAILS))
1975 	      fprintf (dump_file, "Function %s found to be malloc\n",
1976 		       node->dump_name ());
1977 
1978 	    bool malloc_decl_p = DECL_IS_MALLOC (node->decl);
1979 	    node->set_malloc_flag (true);
1980 	    if (!malloc_decl_p && warn_suggest_attribute_malloc)
1981 		warn_function_malloc (node->decl);
1982 	  }
1983       }
1984 
1985   dump_malloc_lattice (dump_file, "after propagation");
1986   ipa_free_postorder_info ();
1987   free (order);
1988 }
1989 
1990 /* Produce the global information by performing a transitive closure
1991    on the local information that was produced by generate_summary.  */
1992 
1993 unsigned int
1994 pass_ipa_pure_const::
1995 execute (function *)
1996 {
1997   bool remove_p;
1998 
1999   /* Nothrow discovery makes more functions known not to lead to a
2000      return, which improves the later analysis.  */
2001   propagate_nothrow ();
2002   propagate_malloc ();
2003   remove_p = propagate_pure_const ();
2004 
2005   delete funct_state_summaries;
2006   return remove_p ? TODO_remove_functions : 0;
2007 }
2008 
2009 static bool
2010 gate_pure_const (void)
2011 {
2012   return flag_ipa_pure_const || in_lto_p;
2013 }
2014 
2015 pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
2016     : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
2017 		     pure_const_generate_summary, /* generate_summary */
2018 		     pure_const_write_summary, /* write_summary */
2019 		     pure_const_read_summary, /* read_summary */
2020 		     NULL, /* write_optimization_summary */
2021 		     NULL, /* read_optimization_summary */
2022 		     NULL, /* stmt_fixup */
2023 		     0, /* function_transform_todo_flags_start */
2024 		     NULL, /* function_transform */
2025 		     NULL), /* variable_transform */
2026   init_p (false) {}
2027 
2028 ipa_opt_pass_d *
2029 make_pass_ipa_pure_const (gcc::context *ctxt)
2030 {
2031   return new pass_ipa_pure_const (ctxt);
2032 }
2033 
2034 /* Return true if function should be skipped for local pure const analysis.  */
2035 
2036 static bool
2037 skip_function_for_local_pure_const (struct cgraph_node *node)
2038 {
2039   /* Because we do not schedule pass_fixup_cfg over the whole program after
2040      early optimizations, we must not promote functions that are called by
2041      already processed functions.  */
2042 
2043   if (function_called_by_processed_nodes_p ())
2044     {
2045       if (dump_file)
2046         fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
2047       return true;
2048     }
2049   /* Save some work and do not analyze functions which are interposable and
2050      do not have any non-interposable aliases.  */
2051   if (node->get_availability () <= AVAIL_INTERPOSABLE
2052       && !node->has_aliases_p ())
2053     {
2054       if (dump_file)
2055         fprintf (dump_file,
2056 		 "Function is interposable; not analyzing.\n");
2057       return true;
2058     }
2059   return false;
2060 }
2061 
2062 /* Simple local pass for pure const discovery reusing the analysis from
2063    ipa_pure_const.  This pass is effective when executed together with
2064    other optimization passes in the early optimization pass queue.  */
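/* E.g. (illustrative only):

     static int counter;
     int sq (int x) { return x * x; }      // discovered const
     int get (void) { return counter; }    // discovered pure

   sq depends only on its arguments; get also reads global memory, so it
   can be pure but not const.  */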
2065 
2066 namespace {
2067 
2068 const pass_data pass_data_local_pure_const =
2069 {
2070   GIMPLE_PASS, /* type */
2071   "local-pure-const", /* name */
2072   OPTGROUP_NONE, /* optinfo_flags */
2073   TV_IPA_PURE_CONST, /* tv_id */
2074   0, /* properties_required */
2075   0, /* properties_provided */
2076   0, /* properties_destroyed */
2077   0, /* todo_flags_start */
2078   0, /* todo_flags_finish */
2079 };
2080 
2081 class pass_local_pure_const : public gimple_opt_pass
2082 {
2083 public:
2084   pass_local_pure_const (gcc::context *ctxt)
2085     : gimple_opt_pass (pass_data_local_pure_const, ctxt)
2086   {}
2087 
2088   /* opt_pass methods: */
2089   opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
2090   virtual bool gate (function *) { return gate_pure_const (); }
2091   virtual unsigned int execute (function *);
2092 
2093 }; // class pass_local_pure_const
2094 
2095 unsigned int
2096 pass_local_pure_const::execute (function *fun)
2097 {
2098   bool changed = false;
2099   funct_state l;
2100   bool skip;
2101   struct cgraph_node *node;
2102 
2103   node = cgraph_node::get (current_function_decl);
2104   skip = skip_function_for_local_pure_const (node);
2105 
2106   if (!warn_suggest_attribute_const
2107       && !warn_suggest_attribute_pure
2108       && skip)
2109     return 0;
2110 
2111   l = analyze_function (node, false);
2112 
2113   /* Do NORETURN discovery.  */
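  /* E.g. (illustrative only) a function such as
	void fail (void) { exit (1); }
     has no edges into its exit block and is discovered noreturn here.  */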
2114   if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
2115       && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
2116     {
2117       warn_function_noreturn (fun->decl);
2118       if (dump_file)
2119 	fprintf (dump_file, "Function found to be noreturn: %s\n",
2120 		 current_function_name ());
2121 
2122       /* Update declaration and reduce profile to executed once.  */
2123       TREE_THIS_VOLATILE (current_function_decl) = 1;
2124       if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
2125 	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
2126 
2127       changed = true;
2128     }
2129 
2130   switch (l->pure_const_state)
2131     {
2132     case IPA_CONST:
2133       if (!TREE_READONLY (current_function_decl))
2134 	{
2135 	  warn_function_const (current_function_decl, !l->looping);
2136 	  if (dump_file)
2137 	    fprintf (dump_file, "Function found to be %sconst: %s\n",
2138 		     l->looping ? "looping " : "",
2139 		     current_function_name ());
2140 	}
2141       else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
2142 	       && !l->looping)
2143 	{
2144 	  if (dump_file)
2145 	    fprintf (dump_file, "Function found to be non-looping: %s\n",
2146 		     current_function_name ());
2147 	}
2148       if (!skip && node->set_const_flag (true, l->looping))
2149 	{
2150 	  if (dump_file)
2151 	    fprintf (dump_file, "Declaration updated to be %sconst: %s\n",
2152 		     l->looping ? "looping " : "",
2153 		     current_function_name ());
2154 	  changed = true;
2155 	}
2156       break;
2157 
2158     case IPA_PURE:
2159       if (!DECL_PURE_P (current_function_decl))
2160 	{
2161 	  warn_function_pure (current_function_decl, !l->looping);
2162 	  if (dump_file)
2163 	    fprintf (dump_file, "Function found to be %spure: %s\n",
2164 		     l->looping ? "looping " : "",
2165 		     current_function_name ());
2166 	}
2167       else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
2168 	       && !l->looping)
2169 	{
2170 	  if (dump_file)
2171 	    fprintf (dump_file, "Function found to be non-looping: %s\n",
2172 		     current_function_name ());
2173 	}
2174       if (!skip && node->set_pure_flag (true, l->looping))
2175 	{
2176 	  if (dump_file)
2177 	    fprintf (dump_file, "Declaration updated to be %spure: %s\n",
2178 		     l->looping ? "looping " : "",
2179 		     current_function_name ());
2180 	  changed = true;
2181 	}
2182       break;
2183 
2184     default:
2185       break;
2186     }
2187   if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
2188     {
2189       node->set_nothrow_flag (true);
2190       changed = true;
2191       if (dump_file)
2192 	fprintf (dump_file, "Function found to be nothrow: %s\n",
2193 		 current_function_name ());
2194     }
2195 
2196   if (l->malloc_state == STATE_MALLOC
2197       && !DECL_IS_MALLOC (current_function_decl))
2198     {
2199       node->set_malloc_flag (true);
2200       if (warn_suggest_attribute_malloc)
2201 	warn_function_malloc (node->decl);
2202       changed = true;
2203       if (dump_file)
2204 	fprintf (dump_file, "Function found to be malloc: %s\n",
2205 		 node->dump_name ());
2206     }
2207 
2208   free (l);
2209   if (changed)
2210     return execute_fixup_cfg ();
2211   else
2212     return 0;
2213 }
2214 
2215 } // anon namespace
2216 
2217 gimple_opt_pass *
2218 make_pass_local_pure_const (gcc::context *ctxt)
2219 {
2220   return new pass_local_pure_const (ctxt);
2221 }
2222 
2223 /* Emit noreturn warnings.  */
2224 
2225 namespace {
2226 
2227 const pass_data pass_data_warn_function_noreturn =
2228 {
2229   GIMPLE_PASS, /* type */
2230   "*warn_function_noreturn", /* name */
2231   OPTGROUP_NONE, /* optinfo_flags */
2232   TV_NONE, /* tv_id */
2233   PROP_cfg, /* properties_required */
2234   0, /* properties_provided */
2235   0, /* properties_destroyed */
2236   0, /* todo_flags_start */
2237   0, /* todo_flags_finish */
2238 };
2239 
2240 class pass_warn_function_noreturn : public gimple_opt_pass
2241 {
2242 public:
2243   pass_warn_function_noreturn (gcc::context *ctxt)
2244     : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
2245   {}
2246 
2247   /* opt_pass methods: */
2248   virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
2249   virtual unsigned int execute (function *fun)
2250     {
2251       if (!TREE_THIS_VOLATILE (current_function_decl)
2252 	  && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
2253 	warn_function_noreturn (current_function_decl);
2254       return 0;
2255     }
2256 
2257 }; // class pass_warn_function_noreturn
2258 
2259 } // anon namespace
2260 
2261 gimple_opt_pass *
make_pass_warn_function_noreturn(gcc::context * ctxt)2262 make_pass_warn_function_noreturn (gcc::context *ctxt)
2263 {
2264   return new pass_warn_function_noreturn (ctxt);
2265 }
2266 
2267 /* Simple local pass for nothrow discovery reusing the analysis from
2268    ipa_pure_const.  This pass is effective when executed together with
2269    other optimization passes in the early optimization pass queue.  */
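/* E.g. (illustrative only):

     int add (int a, int b) { return a + b; }

   contains no statement that can throw externally, so it is marked
   nothrow here without waiting for the IPA propagation.  */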
2270 
2271 namespace {
2272 
2273 const pass_data pass_data_nothrow =
2274 {
2275   GIMPLE_PASS, /* type */
2276   "nothrow", /* name */
2277   OPTGROUP_NONE, /* optinfo_flags */
2278   TV_IPA_PURE_CONST, /* tv_id */
2279   0, /* properties_required */
2280   0, /* properties_provided */
2281   0, /* properties_destroyed */
2282   0, /* todo_flags_start */
2283   0, /* todo_flags_finish */
2284 };
2285 
2286 class pass_nothrow : public gimple_opt_pass
2287 {
2288 public:
2289   pass_nothrow (gcc::context *ctxt)
2290     : gimple_opt_pass (pass_data_nothrow, ctxt)
2291   {}
2292 
2293   /* opt_pass methods: */
2294   opt_pass * clone () { return new pass_nothrow (m_ctxt); }
2295   virtual bool gate (function *) { return optimize; }
2296   virtual unsigned int execute (function *);
2297 
2298 }; // class pass_nothrow
2299 
2300 unsigned int
2301 pass_nothrow::execute (function *)
2302 {
2303   struct cgraph_node *node;
2304   basic_block this_block;
2305 
2306   if (TREE_NOTHROW (current_function_decl))
2307     return 0;
2308 
2309   node = cgraph_node::get (current_function_decl);
2310 
2311   /* We run during lowering; we cannot really use availability yet.  */
2312   if (cgraph_node::get (current_function_decl)->get_availability ()
2313       <= AVAIL_INTERPOSABLE)
2314     {
2315       if (dump_file)
2316         fprintf (dump_file, "Function is interposable;"
2317 	         " not analyzing.\n");
2318       return 0;
2319     }
2320 
2321   FOR_EACH_BB_FN (this_block, cfun)
2322     {
2323       for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
2324 	   !gsi_end_p (gsi);
2325 	   gsi_next (&gsi))
2326         if (stmt_can_throw_external (cfun, gsi_stmt (gsi)))
2327 	  {
2328 	    if (is_gimple_call (gsi_stmt (gsi)))
2329 	      {
2330 		tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
2331 		if (callee_t && recursive_call_p (current_function_decl,
2332 						  callee_t))
2333 		  continue;
2334 	      }
2335 
2336 	    if (dump_file)
2337 	      {
2338 		fprintf (dump_file, "Statement can throw: ");
2339 		print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
2340 	      }
2341 	    return 0;
2342 	  }
2343     }
2344 
2345   node->set_nothrow_flag (true);
2346 
2347   bool cfg_changed = false;
2348   if (self_recursive_p (node))
2349     FOR_EACH_BB_FN (this_block, cfun)
2350       if (gimple *g = last_stmt (this_block))
2351 	if (is_gimple_call (g))
2352 	  {
2353 	    tree callee_t = gimple_call_fndecl (g);
2354 	    if (callee_t
2355 		&& recursive_call_p (current_function_decl, callee_t)
2356 		&& maybe_clean_eh_stmt (g)
2357 		&& gimple_purge_dead_eh_edges (this_block))
2358 	      cfg_changed = true;
2359 	  }
2360 
2361   if (dump_file)
2362     fprintf (dump_file, "Function found to be nothrow: %s\n",
2363 	     current_function_name ());
2364   return cfg_changed ? TODO_cleanup_cfg : 0;
2365 }
2366 
2367 } // anon namespace
2368 
2369 gimple_opt_pass *
2370 make_pass_nothrow (gcc::context *ctxt)
2371 {
2372   return new pass_nothrow (ctxt);
2373 }
2374