xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/ipa-pure-const.c (revision b7b7574d3bf8eeb51a1fa3977b59142ec6434a55)
1 /* Callgraph based analysis of static variables.
2    Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010
3    Free Software Foundation, Inc.
4    Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12 
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16 for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3.  If not see
20 <http://www.gnu.org/licenses/>.  */
21 
22 /* This file marks functions as being either const (TREE_READONLY) or
23    pure (DECL_PURE_P).  It can also set a variant of these that
24    are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
25 
26    This must be run after inlining decisions have been made, since
27    otherwise the local sets will not contain information that is
28    consistent with the post-inlining state.  The global sets are not prone
29    to this problem since they are by definition transitive.  */
30 
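/* For example, a function such as

       int sq (int i) { return i * i; }

   computes its result from its arguments alone and touches no global
   memory, so it can be marked const, while a hypothetical

       int get_counter (void) { return counter; }

   that only reads a global variable can at best be marked pure; a
   function that writes global memory or has other side effects is
   neither.  */
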
31 /* The code in this module is called by the ipa pass manager.  It
32    should be one of the later passes since its information is used by
33    the rest of the compilation.  */
34 
35 #include "config.h"
36 #include "system.h"
37 #include "coretypes.h"
38 #include "tm.h"
39 #include "tree.h"
40 #include "tree-flow.h"
41 #include "tree-inline.h"
42 #include "tree-pass.h"
43 #include "langhooks.h"
44 #include "pointer-set.h"
45 #include "ggc.h"
46 #include "ipa-utils.h"
47 #include "gimple.h"
48 #include "cgraph.h"
49 #include "output.h"
50 #include "flags.h"
51 #include "timevar.h"
52 #include "diagnostic.h"
53 #include "langhooks.h"
54 #include "target.h"
55 #include "lto-streamer.h"
56 #include "cfgloop.h"
57 #include "tree-scalar-evolution.h"
58 
59 static struct pointer_set_t *visited_nodes;
60 
61 /* Lattice values for const and pure functions.  Everything starts out
62    being const, then may drop to pure and finally to neither, depending
63    on what is found.  */
64 enum pure_const_state_e
65 {
66   IPA_CONST,
67   IPA_PURE,
68   IPA_NEITHER
69 };
70 
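/* The propagation code below depends on this ordering: larger values
   are strictly worse, so combining two states amounts to taking the
   numeric maximum.  */
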
71 /* Holder for the const_state.  There is one of these per function
72    decl.  */
73 struct funct_state_d
74 {
75   /* See above.  */
76   enum pure_const_state_e pure_const_state;
77   /* What the user set here; we can always be sure about this.  */
78   enum pure_const_state_e state_previously_known;
79   bool looping_previously_known;
80 
81   /* True if the function could possibly loop forever.  There are a
82      lot of ways that this could be determined.  We are pretty
83      conservative here.  While it is possible to CSE pure and const
84      calls, it is not legal to have DCE get rid of the call if there
85      is a possibility that the call could loop forever, since that is
86      a behavioral change.  */
87   bool looping;
88 
89   bool can_throw;
90 };
91 
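/* PURE_CONST_STATE only moves down the lattice as the analysis finds
   reasons to demote a function; STATE_PREVIOUSLY_KNOWN records what the
   existing declaration already promises and is used by the propagation
   below as a conservative bound for bodies that may be overwritten.  */
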
92 typedef struct funct_state_d * funct_state;
93 
94 /* The storage of the funct_state is abstracted because there is the
95    possibility that it may be desirable to move this to the cgraph
96    local info.  */
97 
98 /* Array, indexed by cgraph node uid, of function states.  */
99 
100 DEF_VEC_P (funct_state);
101 DEF_VEC_ALLOC_P (funct_state, heap);
102 static VEC (funct_state, heap) *funct_state_vec;
103 
104 /* Holders of ipa cgraph hooks: */
105 static struct cgraph_node_hook_list *function_insertion_hook_holder;
106 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
107 static struct cgraph_node_hook_list *node_removal_hook_holder;
108 
109 /* Free the function state vector.  */
110 
111 static void
112 finish_state (void)
113 {
114   free (funct_state_vec);
115 }
116 
117 
118 /* Return the function state from NODE.  */
119 
120 static inline funct_state
121 get_function_state (struct cgraph_node *node)
122 {
123   if (!funct_state_vec
124       || VEC_length (funct_state, funct_state_vec) <= (unsigned int)node->uid)
125     return NULL;
126   return VEC_index (funct_state, funct_state_vec, node->uid);
127 }
128 
129 /* Set the function state S for NODE.  */
130 
131 static inline void
132 set_function_state (struct cgraph_node *node, funct_state s)
133 {
134   if (!funct_state_vec
135       || VEC_length (funct_state, funct_state_vec) <= (unsigned int)node->uid)
136      VEC_safe_grow_cleared (funct_state, heap, funct_state_vec, node->uid + 1);
137   VEC_replace (funct_state, funct_state_vec, node->uid, s);
138 }
139 
140 /* Check to see if the use (or definition, when CHECKING_WRITE is true)
141    of variable T is legal in a function that is either pure or const.  */
142 
143 static inline void
144 check_decl (funct_state local,
145 	    tree t, bool checking_write)
146 {
147   /* Do not want to do anything with volatile except mark any
148      function that uses one to be not const or pure.  */
149   if (TREE_THIS_VOLATILE (t))
150     {
151       local->pure_const_state = IPA_NEITHER;
152       if (dump_file)
153         fprintf (dump_file, "    Volatile operand is not const/pure\n");
154       return;
155     }
156 
157   /* Do not care about a local automatic that is not static.  */
158   if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
159     return;
160 
161   /* If the variable has the "used" attribute, treat it as if it had
162      been touched by the devil.  */
163   if (DECL_PRESERVE_P (t))
164     {
165       local->pure_const_state = IPA_NEITHER;
166       if (dump_file)
167         fprintf (dump_file, "    Used static/global variable is not const/pure\n");
168       return;
169     }
170 
171   /* Since we have dealt with the locals and params cases above, if we
172      are CHECKING_WRITE, this cannot be a pure or constant
173      function.  */
174   if (checking_write)
175     {
176       local->pure_const_state = IPA_NEITHER;
177       if (dump_file)
178         fprintf (dump_file, "    static/global memory write is not const/pure\n");
179       return;
180     }
181 
182   if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
183     {
184       /* Readonly reads are safe.  */
185       if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
186 	return; /* Read of a constant, do not change the function state.  */
187       else
188 	{
189           if (dump_file)
190             fprintf (dump_file, "    global memory read is not const\n");
191 	  /* Just a regular read.  */
192 	  if (local->pure_const_state == IPA_CONST)
193 	    local->pure_const_state = IPA_PURE;
194 	}
195     }
196   else
197     {
198       /* Compilation level statics can be read if they are readonly
199 	 variables.  */
200       if (TREE_READONLY (t))
201 	return;
202 
203       if (dump_file)
204 	fprintf (dump_file, "    static memory read is not const\n");
205       /* Just a regular read.  */
206       if (local->pure_const_state == IPA_CONST)
207 	local->pure_const_state = IPA_PURE;
208     }
209 }
210 
211 
212 /* Check to see if the use (or definition, when CHECKING_WRITE is true)
213    of operand T is legal in a function that is either pure or const.  */
214 
215 static inline void
216 check_op (funct_state local, tree t, bool checking_write)
217 {
218   t = get_base_address (t);
219   if (t && TREE_THIS_VOLATILE (t))
220     {
221       local->pure_const_state = IPA_NEITHER;
222       if (dump_file)
223 	fprintf (dump_file, "    Volatile indirect ref is not const/pure\n");
224       return;
225     }
226   else if (t
227   	   && INDIRECT_REF_P (t)
228 	   && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
229 	   && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
230     {
231       if (dump_file)
232 	fprintf (dump_file, "    Indirect ref to local memory is OK\n");
233       return;
234     }
235   else if (checking_write)
236     {
237       local->pure_const_state = IPA_NEITHER;
238       if (dump_file)
239 	fprintf (dump_file, "    Indirect ref write is not const/pure\n");
240       return;
241     }
242   else
243     {
244       if (dump_file)
245 	fprintf (dump_file, "    Indirect ref read is not const\n");
246       if (local->pure_const_state == IPA_CONST)
247 	local->pure_const_state = IPA_PURE;
248     }
249 }
250 
251 /* Check the parameters of the function call CALL to see if there are
252    any references in the parameters that are not allowed for pure or
253    const functions.  Also check to see whether this is an indirect
254    call, a call outside the compilation unit, or a call with special
255    attributes that may also affect the purity.  CALL is the call
256    statement being analyzed.  */
257 
258 static void
259 check_call (funct_state local, gimple call, bool ipa)
260 {
261   int flags = gimple_call_flags (call);
262   tree callee_t = gimple_call_fndecl (call);
263   bool possibly_throws = stmt_could_throw_p (call);
264   bool possibly_throws_externally = (possibly_throws
265   				     && stmt_can_throw_external (call));
266 
267   if (possibly_throws)
268     {
269       unsigned int i;
270       for (i = 0; i < gimple_num_ops (call); i++)
271         if (gimple_op (call, i)
272 	    && tree_could_throw_p (gimple_op (call, i)))
273 	  {
274 	    if (possibly_throws && flag_non_call_exceptions)
275 	      {
276 		if (dump_file)
277 		  fprintf (dump_file, "    operand can throw; looping\n");
278 		local->looping = true;
279 	      }
280 	    if (possibly_throws_externally)
281 	      {
282 		if (dump_file)
283 		  fprintf (dump_file, "    operand can throw externally\n");
284 		local->can_throw = true;
285 	      }
286 	  }
287     }
288 
289   /* The const and pure flags are set by a variety of places in the
290      compiler (including here).  If someone has already set the flags
291      for the callee, (such as for some of the builtins) we will use
292      them, otherwise we will compute our own information.
293 
294      Const and pure functions have less clobber effects than other
295      functions so we process these first.  Otherwise if it is a call
296      outside the compilation unit or an indirect call we punt.  This
297      leaves local calls which will be processed by following the call
298      graph.  */
299   if (callee_t)
300     {
301       /* When bad things happen to bad functions, they cannot be const
302 	 or pure.  */
303       if (setjmp_call_p (callee_t))
304 	{
305 	  if (dump_file)
306 	    fprintf (dump_file, "    setjmp is not const/pure\n");
307           local->looping = true;
308 	  local->pure_const_state = IPA_NEITHER;
309 	}
310 
311       if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
312 	switch (DECL_FUNCTION_CODE (callee_t))
313 	  {
314 	  case BUILT_IN_LONGJMP:
315 	  case BUILT_IN_NONLOCAL_GOTO:
316 	    if (dump_file)
317 	      fprintf (dump_file, "    longjmp and nonlocal goto are not const/pure\n");
318 	    local->pure_const_state = IPA_NEITHER;
319             local->looping = true;
320 	    break;
321 	  default:
322 	    break;
323 	  }
324     }
325 
326   /* When not in IPA mode, we can still handle self recursion.  */
327   if (!ipa && callee_t == current_function_decl)
328     local->looping = true;
329   /* Either the callee is unknown or we are doing local analysis.
330      Look to see if there are any bits available for the callee (such as by
331      declaration or because it is a builtin) and process solely on the basis
332      of those bits.  */
333   else if (!ipa || !callee_t)
334     {
335       if (possibly_throws && flag_non_call_exceptions)
336         {
337 	  if (dump_file)
338 	    fprintf (dump_file, "    can throw; looping\n");
339           local->looping = true;
340 	}
341       if (possibly_throws_externally)
342         {
343 	  if (dump_file)
344 	    {
345 	      fprintf (dump_file, "    can throw externally to lp %i\n",
346 	      	       lookup_stmt_eh_lp (call));
347 	      if (callee_t)
348 		fprintf (dump_file, "     callee:%s\n",
349 			 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
350 	    }
351           local->can_throw = true;
352 	}
353       if (flags & ECF_CONST)
354 	{
355           if (callee_t && DECL_LOOPING_CONST_OR_PURE_P (callee_t))
356             local->looping = true;
357 	 }
358       else if (flags & ECF_PURE)
359 	{
360           if (callee_t && DECL_LOOPING_CONST_OR_PURE_P (callee_t))
361             local->looping = true;
362 	  if (dump_file)
363 	    fprintf (dump_file, "    pure function call is not const\n");
364 	  if (local->pure_const_state == IPA_CONST)
365 	    local->pure_const_state = IPA_PURE;
366 	}
367       else
368 	{
369 	  if (dump_file)
370 	    fprintf (dump_file, "    unknown function call is not const/pure\n");
371 	  local->pure_const_state = IPA_NEITHER;
372           local->looping = true;
373 	}
374     }
375   /* Direct function calls are handled by IPA propagation.  */
376 }
377 
378 /* Wrapper around check_decl/check_op for loads.  */
379 
380 static bool
381 check_load (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
382 {
383   if (DECL_P (op))
384     check_decl ((funct_state)data, op, false);
385   else
386     check_op ((funct_state)data, op, false);
387   return false;
388 }
389 
390 /* Wrapper around check_decl/check_op for stores.  */
391 
392 static bool
393 check_store (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
394 {
395   if (DECL_P (op))
396     check_decl ((funct_state)data, op, true);
397   else
398     check_op ((funct_state)data, op, true);
399   return false;
400 }
401 
402 /* Look at the statement pointed to by GSIP and figure out what interesting
403    side effects it has.  */
404 static void
405 check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
406 {
407   gimple stmt = gsi_stmt (*gsip);
408   unsigned int i = 0;
409 
410   if (is_gimple_debug (stmt))
411     return;
412 
413   if (dump_file)
414     {
415       fprintf (dump_file, "  scanning: ");
416       print_gimple_stmt (dump_file, stmt, 0, 0);
417     }
418 
419   if (gimple_has_volatile_ops (stmt))
420     {
421       local->pure_const_state = IPA_NEITHER;
422       if (dump_file)
423 	fprintf (dump_file, "    Volatile stmt is not const/pure\n");
424     }
425 
426   /* Look for loads and stores.  */
427   walk_stmt_load_store_ops (stmt, local, check_load, check_store);
428 
429   if (gimple_code (stmt) != GIMPLE_CALL
430       && stmt_could_throw_p (stmt))
431     {
432       if (flag_non_call_exceptions)
433 	{
434 	  if (dump_file)
435 	    fprintf (dump_file, "    can throw; looping\n");
436 	  local->looping = true;
437 	}
438       if (stmt_can_throw_external (stmt))
439 	{
440 	  if (dump_file)
441 	    fprintf (dump_file, "    can throw externally\n");
442 	  local->can_throw = true;
443 	}
444     }
445   switch (gimple_code (stmt))
446     {
447     case GIMPLE_CALL:
448       check_call (local, stmt, ipa);
449       break;
450     case GIMPLE_LABEL:
451       if (DECL_NONLOCAL (gimple_label_label (stmt)))
452 	/* Target of long jump. */
453 	{
454           if (dump_file)
455             fprintf (dump_file, "    nonlocal label is not const/pure\n");
456 	  local->pure_const_state = IPA_NEITHER;
457 	}
458       break;
459     case GIMPLE_ASM:
460       for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
461 	{
462 	  tree op = gimple_asm_clobber_op (stmt, i);
463 	  if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
464 	    {
465               if (dump_file)
466                 fprintf (dump_file, "    memory asm clobber is not const/pure\n");
467 	      /* Abandon all hope, ye who enter here. */
468 	      local->pure_const_state = IPA_NEITHER;
469 	    }
470 	}
471       if (gimple_asm_volatile_p (stmt))
472 	{
473 	  if (dump_file)
474 	    fprintf (dump_file, "    volatile is not const/pure\n");
475 	  /* Abandon all hope, ye who enter here. */
476 	  local->pure_const_state = IPA_NEITHER;
477           local->looping = true;
478 	}
479       return;
480     default:
481       break;
482     }
483 }
484 
485 
486 /* This is the main routine for finding the reference patterns for
487    global variables within a function FN.  */
488 
489 static funct_state
490 analyze_function (struct cgraph_node *fn, bool ipa)
491 {
492   tree decl = fn->decl;
493   tree old_decl = current_function_decl;
494   funct_state l;
495   basic_block this_block;
496 
497   l = XCNEW (struct funct_state_d);
498   l->pure_const_state = IPA_CONST;
499   l->state_previously_known = IPA_NEITHER;
500   l->looping_previously_known = true;
501   l->looping = false;
502   l->can_throw = false;
503 
504   if (dump_file)
505     {
506       fprintf (dump_file, "\n\n local analysis of %s\n ",
507 	       cgraph_node_name (fn));
508     }
509 
510   push_cfun (DECL_STRUCT_FUNCTION (decl));
511   current_function_decl = decl;
512 
513   FOR_EACH_BB (this_block)
514     {
515       gimple_stmt_iterator gsi;
516       struct walk_stmt_info wi;
517 
518       memset (&wi, 0, sizeof(wi));
519       for (gsi = gsi_start_bb (this_block);
520 	   !gsi_end_p (gsi);
521 	   gsi_next (&gsi))
522 	{
523 	  check_stmt (&gsi, l, ipa);
524 	  if (l->pure_const_state == IPA_NEITHER && l->looping && l->can_throw)
525 	    goto end;
526 	}
527     }
528 
529 end:
530   if (l->pure_const_state != IPA_NEITHER)
531     {
532       /* Const functions cannot have back edges (an
533 	 indication of a possible infinite loop side
534 	 effect).  */
535       if (mark_dfs_back_edges ())
536         {
537 	  /* Preheaders are needed for SCEV to work.
538 	     Simple latches and recorded exits improve chances that the loop
539 	     will be proved finite in testcases such as loop-15.c and loop-24.c.  */
540 	  loop_optimizer_init (LOOPS_NORMAL
541 			       | LOOPS_HAVE_RECORDED_EXITS);
542 	  if (dump_file && (dump_flags & TDF_DETAILS))
543 	    flow_loops_dump (dump_file, NULL, 0);
544 	  if (mark_irreducible_loops ())
545 	    {
546 	      if (dump_file)
547 	        fprintf (dump_file, "    has irreducible loops\n");
548 	      l->looping = true;
549 	    }
550 	  else
551 	    {
552 	      loop_iterator li;
553 	      struct loop *loop;
554 	      scev_initialize ();
555 	      FOR_EACH_LOOP (li, loop, 0)
556 		if (!finite_loop_p (loop))
557 		  {
558 		    if (dump_file)
559 		      fprintf (dump_file, "    cannot prove finiteness of loop %i\n", loop->num);
560 		    l->looping = true;
561 		    break;
562 		  }
563 	      scev_finalize ();
564 	    }
565           loop_optimizer_finalize ();
566 	}
567     }
568 
569   if (TREE_READONLY (decl))
570     {
571       l->pure_const_state = IPA_CONST;
572       l->state_previously_known = IPA_CONST;
573       if (!DECL_LOOPING_CONST_OR_PURE_P (decl))
574         l->looping = false, l->looping_previously_known = false;
575     }
576   if (DECL_PURE_P (decl))
577     {
578       if (l->pure_const_state != IPA_CONST)
579         l->pure_const_state = IPA_PURE;
580       l->state_previously_known = IPA_PURE;
581       if (!DECL_LOOPING_CONST_OR_PURE_P (decl))
582         l->looping = false, l->looping_previously_known = false;
583     }
584   if (TREE_NOTHROW (decl))
585     l->can_throw = false;
586 
587   pop_cfun ();
588   current_function_decl = old_decl;
589   if (dump_file)
590     {
591       if (l->looping)
592         fprintf (dump_file, "Function is locally looping.\n");
593       if (l->can_throw)
594         fprintf (dump_file, "Function is locally throwing.\n");
595       if (l->pure_const_state == IPA_CONST)
596         fprintf (dump_file, "Function is locally const.\n");
597       if (l->pure_const_state == IPA_PURE)
598         fprintf (dump_file, "Function is locally pure.\n");
599     }
600   return l;
601 }
602 
603 /* Called when a new function is inserted into the callgraph late.  */
604 static void
605 add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
606 {
607   if (cgraph_function_body_availability (node) < AVAIL_OVERWRITABLE)
608     return;
609   /* There are some shared nodes, in particular the initializers on
610      static declarations.  We do not need to scan them more than once
611      since all we would be interested in are the addressof
612      operations.  */
613   visited_nodes = pointer_set_create ();
614   set_function_state (node, analyze_function (node, true));
615   pointer_set_destroy (visited_nodes);
616   visited_nodes = NULL;
617 }
618 
619 /* Called when a new clone is inserted into the callgraph late.  */
620 
621 static void
622 duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
623 	 	     void *data ATTRIBUTE_UNUSED)
624 {
625   if (get_function_state (src))
626     {
627       funct_state l = XNEW (struct funct_state_d);
628       gcc_assert (!get_function_state (dst));
629       memcpy (l, get_function_state (src), sizeof (*l));
630       set_function_state (dst, l);
631     }
632 }
633 
634 /* Called when a cgraph node is removed from the callgraph.  */
635 
636 static void
637 remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
638 {
639   if (get_function_state (node))
640     {
641       free (get_function_state (node));
642       set_function_state (node, NULL);
643     }
644 }
645 
646 
647 static void
648 register_hooks (void)
649 {
650   static bool init_p = false;
651 
652   if (init_p)
653     return;
654 
655   init_p = true;
656 
657   node_removal_hook_holder =
658       cgraph_add_node_removal_hook (&remove_node_data, NULL);
659   node_duplication_hook_holder =
660       cgraph_add_node_duplication_hook (&duplicate_node_data, NULL);
661   function_insertion_hook_holder =
662       cgraph_add_function_insertion_hook (&add_new_function, NULL);
663 }
664 
665 
666 /* Analyze each function in the cgraph to see if it is locally PURE or
667    CONST.  */
668 
669 static void
670 generate_summary (void)
671 {
672   struct cgraph_node *node;
673 
674   register_hooks ();
675 
676   /* There are some shared nodes, in particular the initializers on
677      static declarations.  We do not need to scan them more than once
678      since all we would be interested in are the addressof
679      operations.  */
680   visited_nodes = pointer_set_create ();
681 
682   /* Process all of the functions.
683 
684      We process AVAIL_OVERWRITABLE functions.  We cannot use the results
685      by default, but the info can be used at LTO time with -fwhole-program
686      or when the function gets cloned and the clone is AVAILABLE.  */
687 
688   for (node = cgraph_nodes; node; node = node->next)
689     if (cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
690       set_function_state (node, analyze_function (node, true));
691 
692   pointer_set_destroy (visited_nodes);
693   visited_nodes = NULL;
694 }
695 
696 
697 /* Serialize the ipa info for lto.  */
698 
699 static void
700 pure_const_write_summary (cgraph_node_set set)
701 {
702   struct cgraph_node *node;
703   struct lto_simple_output_block *ob
704     = lto_create_simple_output_block (LTO_section_ipa_pure_const);
705   unsigned int count = 0;
706   cgraph_node_set_iterator csi;
707 
708   for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
709     {
710       node = csi_node (csi);
711       if (node->analyzed && get_function_state (node) != NULL)
712 	count++;
713     }
714 
715   lto_output_uleb128_stream (ob->main_stream, count);
716 
717   /* Process all of the functions.  */
718   for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
719     {
720       node = csi_node (csi);
721       if (node->analyzed && get_function_state (node) != NULL)
722 	{
723 	  struct bitpack_d *bp;
724 	  funct_state fs;
725 	  int node_ref;
726 	  lto_cgraph_encoder_t encoder;
727 
728 	  fs = get_function_state (node);
729 
730 	  encoder = ob->decl_state->cgraph_node_encoder;
731 	  node_ref = lto_cgraph_encoder_encode (encoder, node);
732 	  lto_output_uleb128_stream (ob->main_stream, node_ref);
733 
734 	  /* Note that the flags will need to be read back in the opposite
735 	     order from that in which they are pushed into the bitpack.  */
736 	  bp = bitpack_create ();
737 	  bp_pack_value (bp, fs->pure_const_state, 2);
738 	  bp_pack_value (bp, fs->state_previously_known, 2);
739 	  bp_pack_value (bp, fs->looping_previously_known, 1);
740 	  bp_pack_value (bp, fs->looping, 1);
741 	  bp_pack_value (bp, fs->can_throw, 1);
742 	  lto_output_bitpack (ob->main_stream, bp);
743 	  bitpack_delete (bp);
744 	}
745     }
746 
747   lto_destroy_simple_output_block (ob);
748 }
749 
750 
751 /* Deserialize the ipa info for lto.  */
752 
753 static void
754 pure_const_read_summary (void)
755 {
756   struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
757   struct lto_file_decl_data *file_data;
758   unsigned int j = 0;
759 
760   register_hooks ();
761   while ((file_data = file_data_vec[j++]))
762     {
763       const char *data;
764       size_t len;
765       struct lto_input_block *ib
766 	= lto_create_simple_input_block (file_data,
767 					 LTO_section_ipa_pure_const,
768 					 &data, &len);
769       if (ib)
770 	{
771 	  unsigned int i;
772 	  unsigned int count = lto_input_uleb128 (ib);
773 
774 	  for (i = 0; i < count; i++)
775 	    {
776 	      unsigned int index;
777 	      struct cgraph_node *node;
778 	      struct bitpack_d *bp;
779 	      funct_state fs;
780 	      lto_cgraph_encoder_t encoder;
781 
782 	      fs = XCNEW (struct funct_state_d);
783 	      index = lto_input_uleb128 (ib);
784 	      encoder = file_data->cgraph_node_encoder;
785 	      node = lto_cgraph_encoder_deref (encoder, index);
786 	      set_function_state (node, fs);
787 
788 	      /* Note that the flags must be read in the opposite order
789 		 from that in which they were written (the bit flags were
790 		 pushed into the bitpack).  */
791 	      bp = lto_input_bitpack (ib);
792 	      fs->pure_const_state
793 			= (enum pure_const_state_e) bp_unpack_value (bp, 2);
794 	      fs->state_previously_known
795 			= (enum pure_const_state_e) bp_unpack_value (bp, 2);
796 	      fs->looping_previously_known = bp_unpack_value (bp, 1);
797 	      fs->looping = bp_unpack_value (bp, 1);
798 	      fs->can_throw = bp_unpack_value (bp, 1);
799 	      bitpack_delete (bp);
800 	    }
801 
802 	  lto_destroy_simple_input_block (file_data,
803 					  LTO_section_ipa_pure_const,
804 					  ib, data, len);
805 	}
806     }
807 }
808 
809 
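/* Edge predicate used when computing the nothrow propagation order
   below: edges that cannot throw externally do not propagate the
   can_throw property and are therefore ignored.  */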
810 static bool
811 ignore_edge (struct cgraph_edge *e)
812 {
813   return (!e->can_throw_external);
814 }
815 
816 /* Return true if NODE is a self-recursive function.  */
817 
818 static bool
819 self_recursive_p (struct cgraph_node *node)
820 {
821   struct cgraph_edge *e;
822   for (e = node->callees; e; e = e->next_callee)
823     if (e->callee == node)
824       return true;
825   return false;
826 }
827 
828 /* Produce the global information by performing a transitive closure
829    on the local information that was produced by generate_summary.
830    Note that there is no function_transform pass since this only
831    updates the function_decl.  */
832 
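/* The propagation works on the strongly connected components of the
   call graph: every function in a cycle receives the worst state seen
   anywhere in the cycle, and a cycle containing more than one node is
   conservatively treated as looping, since mutual recursion need not
   terminate.  A second, independent walk then propagates the nothrow
   property, ignoring edges that cannot throw externally.  */
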
833 static unsigned int
834 propagate (void)
835 {
836   struct cgraph_node *node;
837   struct cgraph_node *w;
838   struct cgraph_node **order =
839     XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
840   int order_pos;
841   int i;
842   struct ipa_dfs_info * w_info;
843 
844   cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
845   cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
846   cgraph_remove_node_removal_hook (node_removal_hook_holder);
847   order_pos = ipa_utils_reduced_inorder (order, true, false, NULL);
848   if (dump_file)
849     {
850       dump_cgraph (dump_file);
851       ipa_utils_print_order(dump_file, "reduced", order, order_pos);
852     }
853 
854   /* Propagate the local information through the call graph to produce
855      the global information.  All the nodes within a cycle will have
856      the same info so we collapse cycles first.  Then we can do the
857      propagation in one pass from the leaves to the roots.  */
858   for (i = 0; i < order_pos; i++ )
859     {
860       enum pure_const_state_e pure_const_state = IPA_CONST;
861       bool looping = false;
862       int count = 0;
863       node = order[i];
864 
865       /* Find the worst state for any node in the cycle.  */
866       w = node;
867       while (w)
868 	{
869 	  struct cgraph_edge *e;
870 	  funct_state w_l = get_function_state (w);
871 	  if (pure_const_state < w_l->pure_const_state)
872 	    pure_const_state = w_l->pure_const_state;
873 
874 	  if (w_l->looping)
875 	    looping = true;
876 	  if (cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
877 	    {
878 	      looping |= w_l->looping_previously_known;
879 	      if (pure_const_state < w_l->state_previously_known)
880 	        pure_const_state = w_l->state_previously_known;
881 	    }
882 
883 	  if (pure_const_state == IPA_NEITHER)
884 	    break;
885 
886 	  count++;
887 
888 	  if (count > 1)
889 	    looping = true;
890 
891 	  for (e = w->callees; e; e = e->next_callee)
892 	    {
893 	      struct cgraph_node *y = e->callee;
894 
895 	      if (cgraph_function_body_availability (y) > AVAIL_OVERWRITABLE)
896 		{
897 		  funct_state y_l = get_function_state (y);
898 		  if (pure_const_state < y_l->pure_const_state)
899 		    pure_const_state = y_l->pure_const_state;
900 		  if (pure_const_state == IPA_NEITHER)
901 		    break;
902 		  if (y_l->looping)
903 		    looping = true;
904 		}
905 	      else
906 	        {
907 		  int flags = flags_from_decl_or_type (y->decl);
908 
909 		  if (flags & ECF_LOOPING_CONST_OR_PURE)
910 		    looping = true;
911 		  if (flags & ECF_CONST)
912 		    ;
913 		  else if ((flags & ECF_PURE) && pure_const_state == IPA_CONST)
914 		    pure_const_state = IPA_PURE;
915 		  else
916 		    pure_const_state = IPA_NEITHER, looping = true;
917 
918 		}
919 	    }
920 	  w_info = (struct ipa_dfs_info *) w->aux;
921 	  w = w_info->next_cycle;
922 	}
923 
924       /* Copy back the region's pure_const_state which is shared by
925 	 all nodes in the region.  */
926       w = node;
927       while (w)
928 	{
929 	  funct_state w_l = get_function_state (w);
930 	  enum pure_const_state_e this_state = pure_const_state;
931 	  bool this_looping = looping;
932 
933 	  if (w_l->state_previously_known != IPA_NEITHER
934 	      && this_state > w_l->state_previously_known)
935             this_state = w_l->state_previously_known;
936 	  if (!this_looping && self_recursive_p (w))
937 	    this_looping = true;
938 	  if (!w_l->looping_previously_known)
939 	    this_looping = false;
940 
941 	  /* All nodes within a cycle share the same info.  */
942 	  w_l->pure_const_state = this_state;
943 	  w_l->looping = this_looping;
944 
945 	  switch (this_state)
946 	    {
947 	    case IPA_CONST:
948 	      if (!TREE_READONLY (w->decl) && dump_file)
949 		fprintf (dump_file, "Function found to be %sconst: %s\n",
950 			 this_looping ? "looping " : "",
951 			 cgraph_node_name (w));
952 	      cgraph_set_readonly_flag (w, true);
953 	      cgraph_set_looping_const_or_pure_flag (w, this_looping);
954 	      break;
955 
956 	    case IPA_PURE:
957 	      if (!DECL_PURE_P (w->decl) && dump_file)
958 		fprintf (dump_file, "Function found to be %spure: %s\n",
959 			 this_looping ? "looping " : "",
960 			 cgraph_node_name (w));
961 	      cgraph_set_pure_flag (w, true);
962 	      cgraph_set_looping_const_or_pure_flag (w, this_looping);
963 	      break;
964 
965 	    default:
966 	      break;
967 	    }
968 	  w_info = (struct ipa_dfs_info *) w->aux;
969 	  w = w_info->next_cycle;
970 	}
971     }
972 
973   /* Cleanup. */
974   for (node = cgraph_nodes; node; node = node->next)
975     {
976       /* Get rid of the aux information.  */
977       if (node->aux)
978 	{
979 	  w_info = (struct ipa_dfs_info *) node->aux;
980 	  free (node->aux);
981 	  node->aux = NULL;
982 	}
983     }
984   order_pos = ipa_utils_reduced_inorder (order, true, false, ignore_edge);
985   if (dump_file)
986     {
987       dump_cgraph (dump_file);
988       ipa_utils_print_order(dump_file, "reduced for nothrow", order, order_pos);
989     }
990   /* Propagate the local information through the call graph to produce
991      the global information.  All the nodes within a cycle will have
992      the same info so we collapse cycles first.  Then we can do the
993      propagation in one pass from the leaves to the roots.  */
994   for (i = 0; i < order_pos; i++ )
995     {
996       bool can_throw = false;
997       node = order[i];
998 
999       /* Find the worst state for any node in the cycle.  */
1000       w = node;
1001       while (w)
1002 	{
1003 	  struct cgraph_edge *e;
1004 	  funct_state w_l = get_function_state (w);
1005 
1006 	  if (w_l->can_throw
1007 	      || cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
1008 	    can_throw = true;
1009 
1010 	  if (can_throw)
1011 	    break;
1012 
1013 	  for (e = w->callees; e; e = e->next_callee)
1014 	    {
1015 	      struct cgraph_node *y = e->callee;
1016 
1017 	      if (cgraph_function_body_availability (y) > AVAIL_OVERWRITABLE)
1018 		{
1019 		  funct_state y_l = get_function_state (y);
1020 
1021 		  if (can_throw)
1022 		    break;
1023 		  if (y_l->can_throw && !TREE_NOTHROW (w->decl)
1024 		      && e->can_throw_external)
1025 		    can_throw = true;
1026 		}
1027 	      else if (e->can_throw_external && !TREE_NOTHROW (y->decl))
1028 	        can_throw = true;
1029 	    }
1030 	  w_info = (struct ipa_dfs_info *) w->aux;
1031 	  w = w_info->next_cycle;
1032 	}
1033 
1034       /* Copy back the region's nothrow state, which is shared by
1035 	 all nodes in the region.  */
1036       w = node;
1037       while (w)
1038 	{
1039 	  funct_state w_l = get_function_state (w);
1040 	  if (!can_throw && !TREE_NOTHROW (w->decl))
1041 	    {
1042 	      struct cgraph_edge *e;
1043 	      cgraph_set_nothrow_flag (w, true);
1044 	      for (e = w->callers; e; e = e->next_caller)
1045 	        e->can_throw_external = false;
1046 	      if (dump_file)
1047 		fprintf (dump_file, "Function found to be nothrow: %s\n",
1048 			 cgraph_node_name (w));
1049 	    }
1050 	  else if (can_throw && !TREE_NOTHROW (w->decl))
1051 	    w_l->can_throw = true;
1052 	  w_info = (struct ipa_dfs_info *) w->aux;
1053 	  w = w_info->next_cycle;
1054 	}
1055     }
1056 
1057   /* Cleanup. */
1058   for (node = cgraph_nodes; node; node = node->next)
1059     {
1060       /* Get rid of the aux information.  */
1061       if (node->aux)
1062 	{
1063 	  w_info = (struct ipa_dfs_info *) node->aux;
1064 	  free (node->aux);
1065 	  node->aux = NULL;
1066 	}
1067       if (cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
1068 	free (get_function_state (node));
1069     }
1070 
1071   free (order);
1072   VEC_free (funct_state, heap, funct_state_vec);
1073   finish_state ();
1074   return 0;
1075 }
1076 
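/* The gate is shared by the IPA pass and the late local pass below.  */
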
1077 static bool
1078 gate_pure_const (void)
1079 {
1080   return (flag_ipa_pure_const
1081 	  /* Don't bother doing anything if the program has errors.  */
1082 	  && !(errorcount || sorrycount));
1083 }
1084 
1085 struct ipa_opt_pass_d pass_ipa_pure_const =
1086 {
1087  {
1088   IPA_PASS,
1089   "pure-const",		                /* name */
1090   gate_pure_const,			/* gate */
1091   propagate,			        /* execute */
1092   NULL,					/* sub */
1093   NULL,					/* next */
1094   0,					/* static_pass_number */
1095   TV_IPA_PURE_CONST,		        /* tv_id */
1096   0,	                                /* properties_required */
1097   0,					/* properties_provided */
1098   0,					/* properties_destroyed */
1099   0,					/* todo_flags_start */
1100   0                                     /* todo_flags_finish */
1101  },
1102  generate_summary,		        /* generate_summary */
1103  pure_const_write_summary,		/* write_summary */
1104  pure_const_read_summary,		/* read_summary */
1105  NULL,					/* function_read_summary */
1106  NULL,					/* stmt_fixup */
1107  0,					/* TODOs */
1108  NULL,			                /* function_transform */
1109  NULL					/* variable_transform */
1110 };
1111 
1112 /* Simple local pass for pure/const discovery reusing the analysis from
1113    ipa_pure_const.  This pass is effective when executed together with
1114    other optimization passes in the early optimization pass queue.  */
1115 
1116 static unsigned int
1117 local_pure_const (void)
1118 {
1119   bool changed = false;
1120   funct_state l;
1121   struct cgraph_node *node;
1122 
1123   /* Because we do not schedule pass_fixup_cfg over the whole program after early
1124      optimizations, we must not promote functions that are called by already-processed functions.  */
1125 
1126   if (function_called_by_processed_nodes_p ())
1127     {
1128       if (dump_file)
1129         fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
1130       return 0;
1131     }
1132   node = cgraph_node (current_function_decl);
1133   if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
1134     {
1135       if (dump_file)
1136         fprintf (dump_file, "Function has wrong visibility; ignoring\n");
1137       return 0;
1138     }
1139 
1140   l = analyze_function (node, false);
1141 
1142   switch (l->pure_const_state)
1143     {
1144     case IPA_CONST:
1145       if (!TREE_READONLY (current_function_decl))
1146 	{
1147 	  cgraph_set_readonly_flag (node, true);
1148 	  cgraph_set_looping_const_or_pure_flag (node, l->looping);
1149 	  changed = true;
1150 	  if (dump_file)
1151 	    fprintf (dump_file, "Function found to be %sconst: %s\n",
1152 		     l->looping ? "looping " : "",
1153 		     lang_hooks.decl_printable_name (current_function_decl,
1154 						     2));
1155 	}
1156       else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
1157 	       && !l->looping)
1158 	{
1159 	  cgraph_set_looping_const_or_pure_flag (node, false);
1160 	  changed = true;
1161 	  if (dump_file)
1162 	    fprintf (dump_file, "Function found to be non-looping: %s\n",
1163 		     lang_hooks.decl_printable_name (current_function_decl,
1164 						     2));
1165 	}
1166       break;
1167 
1168     case IPA_PURE:
1169       if (!TREE_READONLY (current_function_decl))
1170 	{
1171 	  cgraph_set_pure_flag (node, true);
1172 	  cgraph_set_looping_const_or_pure_flag (node, l->looping);
1173 	  changed = true;
1174 	  if (dump_file)
1175 	    fprintf (dump_file, "Function found to be %spure: %s\n",
1176 		     l->looping ? "looping " : "",
1177 		     lang_hooks.decl_printable_name (current_function_decl,
1178 						     2));
1179 	}
1180       else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
1181 	       && !l->looping)
1182 	{
1183 	  cgraph_set_looping_const_or_pure_flag (node, false);
1184 	  changed = true;
1185 	  if (dump_file)
1186 	    fprintf (dump_file, "Function found to be non-looping: %s\n",
1187 		     lang_hooks.decl_printable_name (current_function_decl,
1188 						     2));
1189 	}
1190       break;
1191 
1192     default:
1193       break;
1194     }
1195   if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
1196     {
1197       struct cgraph_edge *e;
1198 
1199       cgraph_set_nothrow_flag (node, true);
1200       for (e = node->callers; e; e = e->next_caller)
1201 	e->can_throw_external = false;
1202       changed = true;
1203       if (dump_file)
1204 	fprintf (dump_file, "Function found to be nothrow: %s\n",
1205 		 lang_hooks.decl_printable_name (current_function_decl,
1206 						 2));
1207     }
1208   if (l)
1209     free (l);
1210   if (changed)
1211     return execute_fixup_cfg ();
1212   else
1213     return 0;
1214 }
1215 
1216 struct gimple_opt_pass pass_local_pure_const =
1217 {
1218  {
1219   GIMPLE_PASS,
1220   "local-pure-const",	                /* name */
1221   gate_pure_const,			/* gate */
1222   local_pure_const,		        /* execute */
1223   NULL,					/* sub */
1224   NULL,					/* next */
1225   0,					/* static_pass_number */
1226   TV_IPA_PURE_CONST,		        /* tv_id */
1227   0,	                                /* properties_required */
1228   0,					/* properties_provided */
1229   0,					/* properties_destroyed */
1230   0,					/* todo_flags_start */
1231   0                                     /* todo_flags_finish */
1232  }
1233 };
1234