/* Store motion via Lazy Code Motion on the reverse CFG.
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
   2006, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "toplev.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "except.h"
#include "ggc.h"
#include "intl.h"
#include "timevar.h"
#include "tree-pass.h"
#include "hashtab.h"
#include "df.h"
#include "dbgcnt.h"

/* This pass implements downward store motion.
   As of May 1, 2009, the pass is not enabled by default on any target,
   but bootstrap completes on ia64 and x86_64 with the pass enabled.  */

/* TODO:
   - remove_reachable_equiv_notes is an incomprehensible pile of goo and
     a compile time hog that needs a rewrite (maybe cache st_exprs to
     invalidate REG_EQUAL/REG_EQUIV notes for?).
   - pattern_regs in st_expr should be a regset (on its own obstack).
   - antic_stores and avail_stores should be VECs instead of lists.
   - store_motion_mems should be a VEC instead of a list.
   - there should be an alloc pool for struct st_expr objects.
   - investigate whether it is helpful to make the address of an st_expr
     a cselib VALUE.
   - when GIMPLE alias information is exported, the effectiveness of this
     pass should be re-evaluated.
*/

/* This is a list of store expressions (MEMs).  The structure is used
   as an expression table to track stores which look interesting, and
   might be moveable towards the exit block.  */

struct st_expr
{
  /* Pattern of this mem.  */
  rtx pattern;
  /* List of registers mentioned by the mem.  */
  rtx pattern_regs;
  /* INSN list of stores that are locally anticipatable.  */
  rtx antic_stores;
  /* INSN list of stores that are locally available.  */
  rtx avail_stores;
  /* Next in the list.  */
  struct st_expr * next;
  /* Store ID in the dataflow bitmaps.  */
  int index;
  /* Hash value for the hash table.  */
  unsigned int hash_index;
  /* Register holding the stored expression when a store is moved.
     This field is also used as a cache in find_moveable_store, see
     LAST_AVAIL_CHECK_FAILURE below.  */
  rtx reaching_reg;
};
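
/* Illustrative sketch, not part of the pass: ANTIC_STORES and AVAIL_STORES
   are INSN lists, so an st_expr PTR filled in by compute_store_table below
   is walked as

     rtx st, insn;
     for (st = ptr->avail_stores; st != NULL; st = XEXP (st, 1))
       insn = XEXP (st, 0);

   where each INSN is one locally available store; build_store_vectors
   below uses exactly this idiom.  */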

/* Head of the list of load/store memory refs.  */
static struct st_expr * store_motion_mems = NULL;

/* Hashtable for the load/store memory refs.  */
static htab_t store_motion_mems_table = NULL;

/* These bitmaps will hold the local dataflow properties per basic block.  */
static sbitmap *st_kill, *st_avloc, *st_antloc, *st_transp;

/* Nonzero for expressions which should be inserted on a specific edge.  */
static sbitmap *st_insert_map;

/* Nonzero for expressions which should be deleted in a specific block.  */
static sbitmap *st_delete_map;

/* Global holding the number of store expressions we are dealing with.  */
static int num_stores;

/* Contains the edge_list returned by pre_edge_lcm.  */
static struct edge_list *edge_list;

static hashval_t
pre_st_expr_hash (const void *p)
{
  int do_not_record_p = 0;
  const struct st_expr *const x = (const struct st_expr *) p;
  return hash_rtx (x->pattern, GET_MODE (x->pattern), &do_not_record_p, NULL, false);
}

static int
pre_st_expr_eq (const void *p1, const void *p2)
{
  const struct st_expr *const ptr1 = (const struct st_expr *) p1,
    *const ptr2 = (const struct st_expr *) p2;
  return exp_equiv_p (ptr1->pattern, ptr2->pattern, 0, true);
}
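
/* Sketch of how the two callbacks above are used; this mirrors the call
   in compute_store_table below:

     store_motion_mems_table = htab_create (13, pre_st_expr_hash,
					     pre_st_expr_eq, NULL);
*/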

/* This will search the st_expr list for a matching expression. If it
   doesn't find one, we create one and initialize it.  */

static struct st_expr *
st_expr_entry (rtx x)
{
  int do_not_record_p = 0;
  struct st_expr * ptr;
  unsigned int hash;
  void **slot;
  struct st_expr e;

  hash = hash_rtx (x, GET_MODE (x), &do_not_record_p,
		   NULL,  /*have_reg_qty=*/false);

  e.pattern = x;
  slot = htab_find_slot_with_hash (store_motion_mems_table, &e, hash, INSERT);
  if (*slot)
    return (struct st_expr *)*slot;

  ptr = XNEW (struct st_expr);

  ptr->next         = store_motion_mems;
  ptr->pattern      = x;
  ptr->pattern_regs = NULL_RTX;
  ptr->antic_stores = NULL_RTX;
  ptr->avail_stores = NULL_RTX;
  ptr->reaching_reg = NULL_RTX;
  ptr->index        = 0;
  ptr->hash_index   = hash;
  store_motion_mems = ptr;
  *slot = ptr;

  return ptr;
}
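
/* Sketch of typical use, not a new entry point: find_moveable_store below
   looks up or creates the entry for a store destination DEST with

     ptr = st_expr_entry (dest);
     if (!ptr->pattern_regs)
       ptr->pattern_regs = extract_mentioned_regs (dest);

   so repeated stores to an equivalent MEM share a single st_expr.  */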

/* Free up an individual st_expr entry.  */

static void
free_st_expr_entry (struct st_expr * ptr)
{
  free_INSN_LIST_list (& ptr->antic_stores);
  free_INSN_LIST_list (& ptr->avail_stores);

  free (ptr);
}

/* Free up all memory associated with the st_expr list.  */

static void
free_store_motion_mems (void)
{
  if (store_motion_mems_table)
    htab_delete (store_motion_mems_table);
  store_motion_mems_table = NULL;

  while (store_motion_mems)
    {
      struct st_expr * tmp = store_motion_mems;
      store_motion_mems = store_motion_mems->next;
      free_st_expr_entry (tmp);
    }
  store_motion_mems = NULL;
}

/* Assign each element of the list of mems a monotonically increasing value.  */

static int
enumerate_store_motion_mems (void)
{
  struct st_expr * ptr;
  int n = 0;

  for (ptr = store_motion_mems; ptr != NULL; ptr = ptr->next)
    ptr->index = n++;

  return n;
}

/* Return first item in the list.  */

static inline struct st_expr *
first_st_expr (void)
{
  return store_motion_mems;
}

/* Return the next item in the list after the specified one.  */

static inline struct st_expr *
next_st_expr (struct st_expr * ptr)
{
  return ptr->next;
}

/* Dump debugging info about the store_motion_mems list.  */

static void
print_store_motion_mems (FILE * file)
{
  struct st_expr * ptr;

  fprintf (dump_file, "STORE_MOTION list of MEM exprs considered:\n");

  for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
    {
      fprintf (file, "  Pattern (%3d): ", ptr->index);

      print_rtl (file, ptr->pattern);

      fprintf (file, "\n	 ANTIC stores : ");

      if (ptr->antic_stores)
	print_rtl (file, ptr->antic_stores);
      else
	fprintf (file, "(nil)");

      fprintf (file, "\n	 AVAIL stores : ");

      if (ptr->avail_stores)
	print_rtl (file, ptr->avail_stores);
      else
	fprintf (file, "(nil)");

      fprintf (file, "\n\n");
    }

  fprintf (file, "\n");
}

/* Return false if any of the registers in list X are killed
   by a register set recorded in REGS_SET; return true otherwise.  */

static bool
store_ops_ok (const_rtx x, int *regs_set)
{
  const_rtx reg;

  for (; x; x = XEXP (x, 1))
    {
      reg = XEXP (x, 0);
      if (regs_set[REGNO(reg)])
	return false;
    }

  return true;
}

/* Helper for extract_mentioned_regs.  */

static int
extract_mentioned_regs_1 (rtx *loc, void *data)
{
  rtx *mentioned_regs_p = (rtx *) data;

  if (REG_P (*loc))
    *mentioned_regs_p = alloc_EXPR_LIST (0, *loc, *mentioned_regs_p);

  return 0;
}

/* Returns a list of registers mentioned in X.
   FIXME: A regset would be prettier and less expensive.  */

static rtx
extract_mentioned_regs (rtx x)
{
  rtx mentioned_regs = NULL;
  for_each_rtx (&x, extract_mentioned_regs_1, &mentioned_regs);
  return mentioned_regs;
}

/* Check to see if the load X is aliased with STORE_PATTERN.
   AFTER is true if we are checking the case when STORE_PATTERN occurs
   after the X.  */

static bool
load_kills_store (const_rtx x, const_rtx store_pattern, int after)
{
  if (after)
    return anti_dependence (x, store_pattern);
  else
    return true_dependence (store_pattern, GET_MODE (store_pattern), x);
}
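
/* Worked example restating the two branches above: for a store *P = A and
   a load B = *Q to possibly overlapping memory, if the store comes after
   the load (AFTER set), the load kills it when there is an anti dependence
   (write-after-read); if the store comes first, the load kills it when
   there is a true dependence (read-after-write).  P, Q, A and B are just
   placeholders for this illustration.  */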

/* Go through the entire rtx X, looking for any loads which might alias
   STORE_PATTERN.  Return true if found.
   AFTER is true if we are checking the case when STORE_PATTERN occurs
   after the insn X.  */

static bool
find_loads (const_rtx x, const_rtx store_pattern, int after)
{
  const char * fmt;
  int i, j;
  int ret = false;

  if (!x)
    return false;

  if (GET_CODE (x) == SET)
    x = SET_SRC (x);

  if (MEM_P (x))
    {
      if (load_kills_store (x, store_pattern, after))
	return true;
    }

  /* Recursively process the insn.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));

  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
    {
      if (fmt[i] == 'e')
	ret |= find_loads (XEXP (x, i), store_pattern, after);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  ret |= find_loads (XVECEXP (x, i, j), store_pattern, after);
    }
  return ret;
}

/* Go through pattern PAT looking for any loads which might kill the
   store in X.  Return true if found.
   AFTER is true if we are checking the case when the store X occurs
   after the insn containing PAT.  */

static inline bool
store_killed_in_pat (const_rtx x, const_rtx pat, int after)
{
  if (GET_CODE (pat) == SET)
    {
      rtx dest = SET_DEST (pat);

      if (GET_CODE (dest) == ZERO_EXTRACT)
	dest = XEXP (dest, 0);

      /* Check for memory stores to aliased objects.  */
      if (MEM_P (dest)
	  && !exp_equiv_p (dest, x, 0, true))
	{
	  if (after)
	    {
	      if (output_dependence (dest, x))
		return true;
	    }
	  else
	    {
	      if (output_dependence (x, dest))
		return true;
	    }
	}
    }

  if (find_loads (pat, x, after))
    return true;

  return false;
}

/* Check if INSN kills the store pattern X (is aliased with it).
   AFTER is true if we are checking the case when store X occurs
   after the insn.  Return true if it does.  */

static bool
store_killed_in_insn (const_rtx x, const_rtx x_regs, const_rtx insn, int after)
{
  const_rtx reg, base, note, pat;

  if (! NONDEBUG_INSN_P (insn))
    return false;

  if (CALL_P (insn))
    {
      /* A normal or pure call might read from pattern,
	 but a const call will not.  */
      if (!RTL_CONST_CALL_P (insn))
	return true;

      /* But even a const call reads its parameters.  Check whether the
	 base of any register used in the mem is the stack pointer.  */
      for (reg = x_regs; reg; reg = XEXP (reg, 1))
	{
	  base = find_base_term (XEXP (reg, 0));
	  if (!base
	      || (GET_CODE (base) == ADDRESS
		  && GET_MODE (base) == Pmode
		  && XEXP (base, 0) == stack_pointer_rtx))
	    return true;
	}

      return false;
    }

  pat = PATTERN (insn);
  if (GET_CODE (pat) == SET)
    {
      if (store_killed_in_pat (x, pat, after))
	return true;
    }
  else if (GET_CODE (pat) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (pat, 0); i++)
	if (store_killed_in_pat (x, XVECEXP (pat, 0, i), after))
	  return true;
    }
  else if (find_loads (PATTERN (insn), x, after))
    return true;

  /* If this insn has a REG_EQUAL or REG_EQUIV note referencing a memory
     location aliased with X, then this insn kills X.  */
  note = find_reg_equal_equiv_note (insn);
  if (! note)
    return false;
  note = XEXP (note, 0);

  /* However, if the note represents a must alias rather than a may
     alias relationship, then it does not kill X.  */
  if (exp_equiv_p (note, x, 0, true))
    return false;

  /* See if there are any aliased loads in the note.  */
  return find_loads (note, x, after);
}

/* Returns true if the expression X is loaded or clobbered on or after INSN
   within basic block BB.  REGS_SET_AFTER is a bitmap of registers set in
   or after the insn.  X_REGS is the list of registers mentioned in X.  If
   the store is killed, return in FAIL_INSN the last insn in which that
   happens.  */

static bool
store_killed_after (const_rtx x, const_rtx x_regs, const_rtx insn, const_basic_block bb,
		    int *regs_set_after, rtx *fail_insn)
{
  rtx last = BB_END (bb), act;

  if (!store_ops_ok (x_regs, regs_set_after))
    {
      /* We do not know where it will happen.  */
      if (fail_insn)
	*fail_insn = NULL_RTX;
      return true;
    }

  /* Scan from the end, so that fail_insn is determined correctly.  */
  for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
    if (store_killed_in_insn (x, x_regs, act, false))
      {
	if (fail_insn)
	  *fail_insn = act;
	return true;
      }

  return false;
}

/* Returns true if the expression X is loaded or clobbered on or before INSN
   within basic block BB.  X_REGS is the list of registers mentioned in X.
   REGS_SET_BEFORE is a bitmap of registers set before or in this insn.  */
static bool
store_killed_before (const_rtx x, const_rtx x_regs, const_rtx insn, const_basic_block bb,
		     int *regs_set_before)
{
  rtx first = BB_HEAD (bb);

  if (!store_ops_ok (x_regs, regs_set_before))
    return true;

  for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
    if (store_killed_in_insn (x, x_regs, insn, true))
      return true;

  return false;
}

/* The last insn in the basic block that compute_store_table is processing,
   where store_killed_after is true for X.
   Since we go through the basic block from BB_END to BB_HEAD, this is
   also the available store at the end of the basic block.  Therefore
   this is in effect a cache, to avoid calling store_killed_after for
   equivalent aliasing store expressions.
   This value is only meaningful during the computation of the store
   table.  We hi-jack the REACHING_REG field of struct st_expr to save
   a bit of memory.  */
#define LAST_AVAIL_CHECK_FAILURE(x)	((x)->reaching_reg)

/* Determine whether INSN is a MEM store pattern that we will consider moving.
   REGS_SET_BEFORE is a bitmap of registers set before (and including) the
   current insn, REGS_SET_AFTER is a bitmap of registers set after (and
   including) the insn in this basic block.  We must be passing through BB
   from head to end, as we are using this fact to speed things up.

   The results are stored this way:

   -- the first anticipatable expression is added into ANTIC_STORES
   -- if the processed expression is not anticipatable, NULL_RTX is added
      there instead, so that we can use it as an indicator that no further
      expression of this type may be anticipatable
   -- if the expression is available, it is added as the head of AVAIL_STORES;
      consequently, all of them but this head are dead and may be deleted.
   -- if the expression is not available, the insn that causes it to fail to
      be available is stored in REACHING_REG (via LAST_AVAIL_CHECK_FAILURE).

   Things are complicated a bit by the fact that there may already be stores
   to the same MEM from other blocks; also, the caller must take care of the
   necessary cleanup of the temporary markers after the end of the basic
   block.  An illustrative example follows the function body.  */

static void
find_moveable_store (rtx insn, int *regs_set_before, int *regs_set_after)
{
  struct st_expr * ptr;
  rtx dest, set, tmp;
  int check_anticipatable, check_available;
  basic_block bb = BLOCK_FOR_INSN (insn);

  set = single_set (insn);
  if (!set)
    return;

  dest = SET_DEST (set);

  if (! MEM_P (dest) || MEM_VOLATILE_P (dest)
      || GET_MODE (dest) == BLKmode)
    return;

  if (side_effects_p (dest))
    return;

  /* If we are handling exceptions, we must be careful with memory references
     that may trap.  If we are not, the behavior is undefined, so we may just
     continue.  */
  if (cfun->can_throw_non_call_exceptions && may_trap_p (dest))
    return;

  /* Even if the destination cannot trap, the source may.  In this case we'd
     need to handle updating the REG_EH_REGION note.  */
  if (find_reg_note (insn, REG_EH_REGION, NULL_RTX))
    return;

  /* Make sure that the SET_SRC of this store insn can be assigned to
     a register, or we will fail later on in replace_store_insn, which
     assumes that we can do this.  But sometimes the target machine has
     oddities like MEM read-modify-write instructions.  See for example
     PR24257.  */
  if (!can_assign_to_reg_without_clobbers_p (SET_SRC (set)))
    return;

  ptr = st_expr_entry (dest);
  if (!ptr->pattern_regs)
    ptr->pattern_regs = extract_mentioned_regs (dest);

  /* Do not check for anticipatability if we either found one anticipatable
     store already, or tested for one and found out that it was killed.  */
  check_anticipatable = 0;
  if (!ptr->antic_stores)
    check_anticipatable = 1;
  else
    {
      tmp = XEXP (ptr->antic_stores, 0);
      if (tmp != NULL_RTX
	  && BLOCK_FOR_INSN (tmp) != bb)
	check_anticipatable = 1;
    }
  if (check_anticipatable)
    {
      if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before))
	tmp = NULL_RTX;
      else
	tmp = insn;
      ptr->antic_stores = alloc_INSN_LIST (tmp, ptr->antic_stores);
    }

  /* It is not necessary to check whether the store is available if we
     did so successfully before; if we failed before, do not bother to
     check until we reach the insn that caused us to fail.  */
  check_available = 0;
  if (!ptr->avail_stores)
    check_available = 1;
  else
    {
      tmp = XEXP (ptr->avail_stores, 0);
      if (BLOCK_FOR_INSN (tmp) != bb)
	check_available = 1;
    }
  if (check_available)
    {
      /* Check whether we have already reached the insn at which the
	 check failed last time.  */
      if (LAST_AVAIL_CHECK_FAILURE (ptr))
	{
	  for (tmp = BB_END (bb);
	       tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
	       tmp = PREV_INSN (tmp))
	    continue;
	  if (tmp == insn)
	    check_available = 0;
	}
      else
	check_available = store_killed_after (dest, ptr->pattern_regs, insn,
					      bb, regs_set_after,
					      &LAST_AVAIL_CHECK_FAILURE (ptr));
    }
  if (!check_available)
    ptr->avail_stores = alloc_INSN_LIST (insn, ptr->avail_stores);
}
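
/* Illustrative example, not part of the pass: for a basic block containing
   only two stores to the same MEM,

     insn1:  [mem] = r1
     insn2:  [mem] = r2

   processing insn1 and then insn2 leaves ANTIC_STORES = (insn1) and
   AVAIL_STORES = (insn2, insn1): only the first store is anticipatable,
   and the head of AVAIL_STORES is the store that reaches the end of the
   block, so the earlier entries are redundant and are cleaned up later in
   build_store_vectors.  */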

/* Find available and anticipatable stores.  */

static int
compute_store_table (void)
{
  int ret;
  basic_block bb;
#ifdef ENABLE_CHECKING
  unsigned regno;
#endif
  rtx insn, tmp;
  df_ref *def_rec;
  int *last_set_in, *already_set;
  struct st_expr * ptr, **prev_next_ptr_ptr;
  unsigned int max_gcse_regno = max_reg_num ();

  store_motion_mems = NULL;
  store_motion_mems_table = htab_create (13, pre_st_expr_hash,
					 pre_st_expr_eq, NULL);
  last_set_in = XCNEWVEC (int, max_gcse_regno);
  already_set = XNEWVEC (int, max_gcse_regno);

  /* Find all the stores we care about.  */
  FOR_EACH_BB (bb)
    {
      /* First compute the registers set in this block.  */
      FOR_BB_INSNS (bb, insn)
	{

	  if (! NONDEBUG_INSN_P (insn))
	    continue;

	  for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
	    last_set_in[DF_REF_REGNO (*def_rec)] = INSN_UID (insn);
	}

      /* Now find the stores.  */
      memset (already_set, 0, sizeof (int) * max_gcse_regno);
      FOR_BB_INSNS (bb, insn)
	{
	  if (! NONDEBUG_INSN_P (insn))
	    continue;

	  for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
	    already_set[DF_REF_REGNO (*def_rec)] = INSN_UID (insn);

	  /* Now that we've marked regs, look for stores.  */
	  find_moveable_store (insn, already_set, last_set_in);

	  /* Unmark regs that are no longer set.  */
	  for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
	    if (last_set_in[DF_REF_REGNO (*def_rec)] == INSN_UID (insn))
	      last_set_in[DF_REF_REGNO (*def_rec)] = 0;
	}

#ifdef ENABLE_CHECKING
      /* last_set_in should now be all-zero.  */
      for (regno = 0; regno < max_gcse_regno; regno++)
	gcc_assert (!last_set_in[regno]);
#endif

      /* Clear temporary marks.  */
      for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
	{
	  LAST_AVAIL_CHECK_FAILURE (ptr) = NULL_RTX;
	  if (ptr->antic_stores
	      && (tmp = XEXP (ptr->antic_stores, 0)) == NULL_RTX)
	    ptr->antic_stores = XEXP (ptr->antic_stores, 1);
	}
    }

  /* Remove the stores that are not available anywhere, as there will
     be no opportunity to optimize them.  */
  for (ptr = store_motion_mems, prev_next_ptr_ptr = &store_motion_mems;
       ptr != NULL;
       ptr = *prev_next_ptr_ptr)
    {
      if (! ptr->avail_stores)
	{
	  *prev_next_ptr_ptr = ptr->next;
	  htab_remove_elt_with_hash (store_motion_mems_table,
				     ptr, ptr->hash_index);
	  free_st_expr_entry (ptr);
	}
      else
	prev_next_ptr_ptr = &ptr->next;
    }

  ret = enumerate_store_motion_mems ();

  if (dump_file)
    print_store_motion_mems (dump_file);

  free (last_set_in);
  free (already_set);
  return ret;
}

/* In all code following after this, REACHING_REG has its original
   meaning again.  Avoid confusion, and undef the accessor macro for
   the temporary marks usage in compute_store_table.  */
#undef LAST_AVAIL_CHECK_FAILURE

/* Insert an instruction at the beginning of a basic block, and update
   the BB_HEAD if needed.  */

static void
insert_insn_start_basic_block (rtx insn, basic_block bb)
{
  /* Insert at start of successor block.  */
  rtx prev = PREV_INSN (BB_HEAD (bb));
  rtx before = BB_HEAD (bb);
  while (before != 0)
    {
      if (! LABEL_P (before)
	  && !NOTE_INSN_BASIC_BLOCK_P (before))
	break;
      prev = before;
      if (prev == BB_END (bb))
	break;
      before = NEXT_INSN (before);
    }

  insn = emit_insn_after_noloc (insn, prev, bb);

  if (dump_file)
    {
      fprintf (dump_file, "STORE_MOTION  insert store at start of BB %d:\n",
	       bb->index);
      print_inline_rtx (dump_file, insn, 6);
      fprintf (dump_file, "\n");
    }
}

/* This routine will insert a store on an edge. EXPR is the st_expr entry for
   the memory reference, and E is the edge to insert it on.  Returns nonzero
   if an edge insertion was performed.  */

static int
insert_store (struct st_expr * expr, edge e)
{
  rtx reg, insn;
  basic_block bb;
  edge tmp;
  edge_iterator ei;

  /* We did all the deletions before this insert, so if we didn't delete a
     store, then we haven't set the reaching reg yet either.  */
  if (expr->reaching_reg == NULL_RTX)
    return 0;

  if (e->flags & EDGE_FAKE)
    return 0;

  reg = expr->reaching_reg;
  insn = gen_move_insn (copy_rtx (expr->pattern), reg);

  /* If we are inserting this expression on ALL predecessor edges of a BB,
     insert it at the start of the BB, and reset the insert bits on the other
     edges so we don't try to insert it on the other edges.  */
  bb = e->dest;
  FOR_EACH_EDGE (tmp, ei, e->dest->preds)
    if (!(tmp->flags & EDGE_FAKE))
      {
	int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);

	gcc_assert (index != EDGE_INDEX_NO_EDGE);
	if (! TEST_BIT (st_insert_map[index], expr->index))
	  break;
      }

  /* If tmp is NULL, we found an insertion on every edge, blank the
     insertion vector for these edges, and insert at the start of the BB.  */
  if (!tmp && bb != EXIT_BLOCK_PTR)
    {
      FOR_EACH_EDGE (tmp, ei, e->dest->preds)
	{
	  int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
	  RESET_BIT (st_insert_map[index], expr->index);
	}
      insert_insn_start_basic_block (insn, bb);
      return 0;
    }

  /* We can't put stores at the front of blocks pointed to by abnormal
     edges, since that may put a store where one didn't use to be.  */
  gcc_assert (!(e->flags & EDGE_ABNORMAL));

  insert_insn_on_edge (insn, e);

  if (dump_file)
    {
      fprintf (dump_file, "STORE_MOTION  insert insn on edge (%d, %d):\n",
	       e->src->index, e->dest->index);
      print_inline_rtx (dump_file, insn, 6);
      fprintf (dump_file, "\n");
    }

  return 1;
}

/* Remove any REG_EQUAL or REG_EQUIV notes containing a reference to the
   memory location in SMEXPR set in basic block BB.

   This could be rather expensive.  */

static void
remove_reachable_equiv_notes (basic_block bb, struct st_expr *smexpr)
{
  edge_iterator *stack, ei;
  int sp;
  edge act;
  sbitmap visited = sbitmap_alloc (last_basic_block);
  rtx last, insn, note;
  rtx mem = smexpr->pattern;

  stack = XNEWVEC (edge_iterator, n_basic_blocks);
  sp = 0;
  ei = ei_start (bb->succs);

  sbitmap_zero (visited);

  act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL);
  while (1)
    {
      if (!act)
	{
	  if (!sp)
	    {
	      free (stack);
	      sbitmap_free (visited);
	      return;
	    }
	  act = ei_edge (stack[--sp]);
	}
      bb = act->dest;

      if (bb == EXIT_BLOCK_PTR
	  || TEST_BIT (visited, bb->index))
	{
	  if (!ei_end_p (ei))
	      ei_next (&ei);
	  act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;
	  continue;
	}
      SET_BIT (visited, bb->index);

      if (TEST_BIT (st_antloc[bb->index], smexpr->index))
	{
	  for (last = smexpr->antic_stores;
	       BLOCK_FOR_INSN (XEXP (last, 0)) != bb;
	       last = XEXP (last, 1))
	    continue;
	  last = XEXP (last, 0);
	}
      else
	last = NEXT_INSN (BB_END (bb));

      for (insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
	if (NONDEBUG_INSN_P (insn))
	  {
	    note = find_reg_equal_equiv_note (insn);
	    if (!note || !exp_equiv_p (XEXP (note, 0), mem, 0, true))
	      continue;

	    if (dump_file)
	      fprintf (dump_file, "STORE_MOTION  drop REG_EQUAL note at insn %d:\n",
		       INSN_UID (insn));
	    remove_note (insn, note);
	  }

      if (!ei_end_p (ei))
	ei_next (&ei);
      act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;

      if (EDGE_COUNT (bb->succs) > 0)
	{
	  if (act)
	    stack[sp++] = ei;
	  ei = ei_start (bb->succs);
	  act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL);
	}
    }
}

/* This routine will replace a store with a SET to a specified register.  */

static void
replace_store_insn (rtx reg, rtx del, basic_block bb, struct st_expr *smexpr)
{
  rtx insn, mem, note, set, ptr;

  mem = smexpr->pattern;
  insn = gen_move_insn (reg, SET_SRC (single_set (del)));

  for (ptr = smexpr->antic_stores; ptr; ptr = XEXP (ptr, 1))
    if (XEXP (ptr, 0) == del)
      {
	XEXP (ptr, 0) = insn;
	break;
      }

  /* Move the notes from the deleted insn to its replacement.  */
  REG_NOTES (insn) = REG_NOTES (del);

  /* Emit the insn AFTER all the notes are transferred.
     This is cheaper since we avoid df rescanning for the note change.  */
  insn = emit_insn_after (insn, del);

  if (dump_file)
    {
      fprintf (dump_file,
	       "STORE_MOTION  delete insn in BB %d:\n      ", bb->index);
      print_inline_rtx (dump_file, del, 6);
      fprintf (dump_file, "\nSTORE_MOTION  replaced with insn:\n      ");
      print_inline_rtx (dump_file, insn, 6);
      fprintf (dump_file, "\n");
    }

  delete_insn (del);

  /* Now we must handle REG_EQUAL notes whose contents are equal to the mem;
     they are no longer accurate if they are reached by this definition,
     so drop them.  */
  for (; insn != NEXT_INSN (BB_END (bb)); insn = NEXT_INSN (insn))
    if (NONDEBUG_INSN_P (insn))
      {
	set = single_set (insn);
	if (!set)
	  continue;
	if (exp_equiv_p (SET_DEST (set), mem, 0, true))
	  return;
	note = find_reg_equal_equiv_note (insn);
	if (!note || !exp_equiv_p (XEXP (note, 0), mem, 0, true))
	  continue;

	if (dump_file)
	  fprintf (dump_file, "STORE_MOTION  drop REG_EQUAL note at insn %d:\n",
		   INSN_UID (insn));
	remove_note (insn, note);
      }
  remove_reachable_equiv_notes (bb, smexpr);
}


/* Delete a store, but copy the value that would have been stored into
   the reaching_reg for later storing.  */

static void
delete_store (struct st_expr * expr, basic_block bb)
{
  rtx reg, i, del;

  if (expr->reaching_reg == NULL_RTX)
    expr->reaching_reg = gen_reg_rtx_and_attrs (expr->pattern);

  reg = expr->reaching_reg;

  for (i = expr->avail_stores; i; i = XEXP (i, 1))
    {
      del = XEXP (i, 0);
      if (BLOCK_FOR_INSN (del) == bb)
	{
	  /* We know there is only one since we deleted redundant
	     ones during the available computation.  */
	  replace_store_insn (reg, del, bb, expr);
	  break;
	}
    }
}

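/* Illustrative summary, not a new entry point: together, delete_store and
   insert_store rewrite

     [mem] = src            (original store, deleted by delete_store)

   into

     reaching_reg = src     (replace_store_insn, in the original block)
     [mem] = reaching_reg   (insert_store, on edges/blocks chosen by LCM)

   so the memory write itself sinks towards the exit while the stored
   value is kept live in REACHING_REG.  */
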
/* Fill in the available, anticipatable, transparent and kill vectors,
   based on the lists of available and anticipatable stores.  */
1013*e4b17023SJohn Marino static void
build_store_vectors(void)1014*e4b17023SJohn Marino build_store_vectors (void)
1015*e4b17023SJohn Marino {
1016*e4b17023SJohn Marino   basic_block bb;
1017*e4b17023SJohn Marino   int *regs_set_in_block;
1018*e4b17023SJohn Marino   rtx insn, st;
1019*e4b17023SJohn Marino   struct st_expr * ptr;
1020*e4b17023SJohn Marino   unsigned int max_gcse_regno = max_reg_num ();
1021*e4b17023SJohn Marino 
1022*e4b17023SJohn Marino   /* Build the gen_vector. This is any store in the table which is not killed
1023*e4b17023SJohn Marino      by aliasing later in its block.  */
1024*e4b17023SJohn Marino   st_avloc = sbitmap_vector_alloc (last_basic_block, num_stores);
1025*e4b17023SJohn Marino   sbitmap_vector_zero (st_avloc, last_basic_block);
1026*e4b17023SJohn Marino 
1027*e4b17023SJohn Marino   st_antloc = sbitmap_vector_alloc (last_basic_block, num_stores);
1028*e4b17023SJohn Marino   sbitmap_vector_zero (st_antloc, last_basic_block);
1029*e4b17023SJohn Marino 
1030*e4b17023SJohn Marino   for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
1031*e4b17023SJohn Marino     {
1032*e4b17023SJohn Marino       for (st = ptr->avail_stores; st != NULL; st = XEXP (st, 1))
1033*e4b17023SJohn Marino 	{
1034*e4b17023SJohn Marino 	  insn = XEXP (st, 0);
1035*e4b17023SJohn Marino 	  bb = BLOCK_FOR_INSN (insn);
1036*e4b17023SJohn Marino 
1037*e4b17023SJohn Marino 	  /* If we've already seen an available expression in this block,
1038*e4b17023SJohn Marino 	     we can delete this one (It occurs earlier in the block). We'll
1039*e4b17023SJohn Marino 	     copy the SRC expression to an unused register in case there
1040*e4b17023SJohn Marino 	     are any side effects.  */
1041*e4b17023SJohn Marino 	  if (TEST_BIT (st_avloc[bb->index], ptr->index))
1042*e4b17023SJohn Marino 	    {
1043*e4b17023SJohn Marino 	      rtx r = gen_reg_rtx_and_attrs (ptr->pattern);
1044*e4b17023SJohn Marino 	      if (dump_file)
1045*e4b17023SJohn Marino 		fprintf (dump_file, "Removing redundant store:\n");
1046*e4b17023SJohn Marino 	      replace_store_insn (r, XEXP (st, 0), bb, ptr);
1047*e4b17023SJohn Marino 	      continue;
1048*e4b17023SJohn Marino 	    }
1049*e4b17023SJohn Marino 	  SET_BIT (st_avloc[bb->index], ptr->index);
1050*e4b17023SJohn Marino 	}
1051*e4b17023SJohn Marino 
1052*e4b17023SJohn Marino       for (st = ptr->antic_stores; st != NULL; st = XEXP (st, 1))
1053*e4b17023SJohn Marino 	{
1054*e4b17023SJohn Marino 	  insn = XEXP (st, 0);
1055*e4b17023SJohn Marino 	  bb = BLOCK_FOR_INSN (insn);
1056*e4b17023SJohn Marino 	  SET_BIT (st_antloc[bb->index], ptr->index);
1057*e4b17023SJohn Marino 	}
1058*e4b17023SJohn Marino     }
1059*e4b17023SJohn Marino 
1060*e4b17023SJohn Marino   st_kill = sbitmap_vector_alloc (last_basic_block, num_stores);
1061*e4b17023SJohn Marino   sbitmap_vector_zero (st_kill, last_basic_block);
1062*e4b17023SJohn Marino 
1063*e4b17023SJohn Marino   st_transp = sbitmap_vector_alloc (last_basic_block, num_stores);
1064*e4b17023SJohn Marino   sbitmap_vector_zero (st_transp, last_basic_block);
1065*e4b17023SJohn Marino   regs_set_in_block = XNEWVEC (int, max_gcse_regno);
1066*e4b17023SJohn Marino 
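  /* Record which registers are set in each block; a store is killed in a
     block if one of its address registers is set there or if some insn
     there may read or write the memory it stores to, and the block is
     transparent for the store otherwise.  */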
1067*e4b17023SJohn Marino   FOR_EACH_BB (bb)
1068*e4b17023SJohn Marino     {
1069*e4b17023SJohn Marino       memset (regs_set_in_block, 0, sizeof (int) * max_gcse_regno);
1070*e4b17023SJohn Marino 
1071*e4b17023SJohn Marino       FOR_BB_INSNS (bb, insn)
1072*e4b17023SJohn Marino 	if (NONDEBUG_INSN_P (insn))
1073*e4b17023SJohn Marino 	  {
1074*e4b17023SJohn Marino 	    df_ref *def_rec;
1075*e4b17023SJohn Marino 	    for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
1076*e4b17023SJohn Marino 	      {
1077*e4b17023SJohn Marino 		unsigned int ref_regno = DF_REF_REGNO (*def_rec);
1078*e4b17023SJohn Marino 		if (ref_regno < max_gcse_regno)
1079*e4b17023SJohn Marino 		  regs_set_in_block[ref_regno] = 1;
1080*e4b17023SJohn Marino 	      }
1081*e4b17023SJohn Marino 	  }
1082*e4b17023SJohn Marino 
1083*e4b17023SJohn Marino       for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
1084*e4b17023SJohn Marino 	{
1085*e4b17023SJohn Marino 	  if (store_killed_after (ptr->pattern, ptr->pattern_regs, BB_HEAD (bb),
1086*e4b17023SJohn Marino 				  bb, regs_set_in_block, NULL))
1087*e4b17023SJohn Marino 	    {
1088*e4b17023SJohn Marino 	      /* It should not be necessary to consider the expression
1089*e4b17023SJohn Marino 		 killed if it is both anticipatable and available.  */
1090*e4b17023SJohn Marino 	      if (!TEST_BIT (st_antloc[bb->index], ptr->index)
1091*e4b17023SJohn Marino 		  || !TEST_BIT (st_avloc[bb->index], ptr->index))
1092*e4b17023SJohn Marino 		SET_BIT (st_kill[bb->index], ptr->index);
1093*e4b17023SJohn Marino 	    }
1094*e4b17023SJohn Marino 	  else
1095*e4b17023SJohn Marino 	    SET_BIT (st_transp[bb->index], ptr->index);
1096*e4b17023SJohn Marino 	}
1097*e4b17023SJohn Marino     }
1098*e4b17023SJohn Marino 
1099*e4b17023SJohn Marino   free (regs_set_in_block);
1100*e4b17023SJohn Marino 
1101*e4b17023SJohn Marino   if (dump_file)
1102*e4b17023SJohn Marino     {
1103*e4b17023SJohn Marino       dump_sbitmap_vector (dump_file, "st_antloc", "", st_antloc, last_basic_block);
1104*e4b17023SJohn Marino       dump_sbitmap_vector (dump_file, "st_kill", "", st_kill, last_basic_block);
1105*e4b17023SJohn Marino       dump_sbitmap_vector (dump_file, "st_transp", "", st_transp, last_basic_block);
1106*e4b17023SJohn Marino       dump_sbitmap_vector (dump_file, "st_avloc", "", st_avloc, last_basic_block);
1107*e4b17023SJohn Marino     }
1108*e4b17023SJohn Marino }
1109*e4b17023SJohn Marino 
1110*e4b17023SJohn Marino /* Free memory used by store motion.  */
1111*e4b17023SJohn Marino 
1112*e4b17023SJohn Marino static void
1113*e4b17023SJohn Marino free_store_memory (void)
1114*e4b17023SJohn Marino {
1115*e4b17023SJohn Marino   free_store_motion_mems ();
1116*e4b17023SJohn Marino 
1117*e4b17023SJohn Marino   if (st_avloc)
1118*e4b17023SJohn Marino     sbitmap_vector_free (st_avloc);
1119*e4b17023SJohn Marino   if (st_kill)
1120*e4b17023SJohn Marino     sbitmap_vector_free (st_kill);
1121*e4b17023SJohn Marino   if (st_transp)
1122*e4b17023SJohn Marino     sbitmap_vector_free (st_transp);
1123*e4b17023SJohn Marino   if (st_antloc)
1124*e4b17023SJohn Marino     sbitmap_vector_free (st_antloc);
1125*e4b17023SJohn Marino   if (st_insert_map)
1126*e4b17023SJohn Marino     sbitmap_vector_free (st_insert_map);
1127*e4b17023SJohn Marino   if (st_delete_map)
1128*e4b17023SJohn Marino     sbitmap_vector_free (st_delete_map);
1129*e4b17023SJohn Marino 
1130*e4b17023SJohn Marino   st_avloc = st_kill = st_transp = st_antloc = NULL;
1131*e4b17023SJohn Marino   st_insert_map = st_delete_map = NULL;
1132*e4b17023SJohn Marino }
1133*e4b17023SJohn Marino 
1134*e4b17023SJohn Marino /* Perform store motion.  Much like partial redundancy elimination in
1135*e4b17023SJohn Marino    GCSE, except we move stores the other way, by running LCM on the
1136*e4b17023SJohn Marino    reverse flowgraph.  Return nonzero if any transformation was made.  */
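/* For illustration, in a loop like

	for (i = 0; i < n; i++)
	  *p = a[i];

   the store to *p can be sunk out of the loop:  inside the loop the value
   is only copied into the reaching register, and a single store to *p is
   inserted on the loop exit edge, provided nothing else in the loop reads
   or may alias *p.  */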
1137*e4b17023SJohn Marino 
1138*e4b17023SJohn Marino static int
1139*e4b17023SJohn Marino one_store_motion_pass (void)
1140*e4b17023SJohn Marino {
1141*e4b17023SJohn Marino   basic_block bb;
1142*e4b17023SJohn Marino   int x;
1143*e4b17023SJohn Marino   struct st_expr * ptr;
1144*e4b17023SJohn Marino   int did_edge_inserts = 0;
1145*e4b17023SJohn Marino   int n_stores_deleted = 0;
1146*e4b17023SJohn Marino   int n_stores_created = 0;
1147*e4b17023SJohn Marino 
1148*e4b17023SJohn Marino   init_alias_analysis ();
1149*e4b17023SJohn Marino 
1150*e4b17023SJohn Marino   /* Find all the available and anticipatable stores.  */
1151*e4b17023SJohn Marino   num_stores = compute_store_table ();
1152*e4b17023SJohn Marino   if (num_stores == 0)
1153*e4b17023SJohn Marino     {
1154*e4b17023SJohn Marino       htab_delete (store_motion_mems_table);
1155*e4b17023SJohn Marino       store_motion_mems_table = NULL;
1156*e4b17023SJohn Marino       end_alias_analysis ();
1157*e4b17023SJohn Marino       return 0;
1158*e4b17023SJohn Marino     }
1159*e4b17023SJohn Marino 
1160*e4b17023SJohn Marino   /* Now compute kill & transp vectors.  */
1161*e4b17023SJohn Marino   build_store_vectors ();
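  /* The LCM problem is solved on the reverse CFG, so every block must be
     able to reach the exit block; fake edges handle noreturn calls and
     infinite loops.  */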
1162*e4b17023SJohn Marino   add_noreturn_fake_exit_edges ();
1163*e4b17023SJohn Marino   connect_infinite_loops_to_exit ();
1164*e4b17023SJohn Marino 
1165*e4b17023SJohn Marino   edge_list = pre_edge_rev_lcm (num_stores, st_transp, st_avloc,
1166*e4b17023SJohn Marino 				st_antloc, st_kill, &st_insert_map,
1167*e4b17023SJohn Marino 				&st_delete_map);
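  /* ST_DELETE_MAP[BB] now says which stores to remove from block BB, and
     ST_INSERT_MAP[X] which stores to insert on the edge with index X in
     EDGE_LIST to compensate.  */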
1168*e4b17023SJohn Marino 
1169*e4b17023SJohn Marino   /* Now we want to insert the new stores which are going to be needed.  */
1170*e4b17023SJohn Marino   for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
1171*e4b17023SJohn Marino     {
1172*e4b17023SJohn Marino       /* If any edge on which we would have to insert this store is
1173*e4b17023SJohn Marino 	 abnormal, we cannot move the store at all.  */
1174*e4b17023SJohn Marino       for (x = NUM_EDGES (edge_list) - 1; x >= 0; x--)
1175*e4b17023SJohn Marino 	if (TEST_BIT (st_insert_map[x], ptr->index)
1176*e4b17023SJohn Marino 	    && (INDEX_EDGE (edge_list, x)->flags & EDGE_ABNORMAL))
1177*e4b17023SJohn Marino 	  break;
1178*e4b17023SJohn Marino 
1179*e4b17023SJohn Marino       if (x >= 0)
1180*e4b17023SJohn Marino 	{
1181*e4b17023SJohn Marino 	  if (dump_file != NULL)
1182*e4b17023SJohn Marino 	    fprintf (dump_file,
1183*e4b17023SJohn Marino 		     "Can't replace store %d: abnormal edge from %d to %d\n",
1184*e4b17023SJohn Marino 		     ptr->index, INDEX_EDGE (edge_list, x)->src->index,
1185*e4b17023SJohn Marino 		     INDEX_EDGE (edge_list, x)->dest->index);
1186*e4b17023SJohn Marino 	  continue;
1187*e4b17023SJohn Marino 	}
1188*e4b17023SJohn Marino 
1189*e4b17023SJohn Marino       /* Remove from each block the stores that LCM wants deleted there.  */
1190*e4b17023SJohn Marino 
1191*e4b17023SJohn Marino       FOR_EACH_BB (bb)
1192*e4b17023SJohn Marino 	if (TEST_BIT (st_delete_map[bb->index], ptr->index))
1193*e4b17023SJohn Marino 	  {
1194*e4b17023SJohn Marino 	    delete_store (ptr, bb);
1195*e4b17023SJohn Marino 	    n_stores_deleted++;
1196*e4b17023SJohn Marino 	  }
1197*e4b17023SJohn Marino 
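      /* Insert the compensating stores on the edges LCM selected.  */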
1198*e4b17023SJohn Marino       for (x = 0; x < NUM_EDGES (edge_list); x++)
1199*e4b17023SJohn Marino 	if (TEST_BIT (st_insert_map[x], ptr->index))
1200*e4b17023SJohn Marino 	  {
1201*e4b17023SJohn Marino 	    did_edge_inserts |= insert_store (ptr, INDEX_EDGE (edge_list, x));
1202*e4b17023SJohn Marino 	    n_stores_created++;
1203*e4b17023SJohn Marino 	  }
1204*e4b17023SJohn Marino     }
1205*e4b17023SJohn Marino 
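  /* insert_store only queues the new store insns on their edges; emit
     them now, splitting edges where necessary.  */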
1206*e4b17023SJohn Marino   if (did_edge_inserts)
1207*e4b17023SJohn Marino     commit_edge_insertions ();
1208*e4b17023SJohn Marino 
1209*e4b17023SJohn Marino   free_store_memory ();
1210*e4b17023SJohn Marino   free_edge_list (edge_list);
1211*e4b17023SJohn Marino   remove_fake_exit_edges ();
1212*e4b17023SJohn Marino   end_alias_analysis ();
1213*e4b17023SJohn Marino 
1214*e4b17023SJohn Marino   if (dump_file)
1215*e4b17023SJohn Marino     {
1216*e4b17023SJohn Marino       fprintf (dump_file, "STORE_MOTION of %s, %d basic blocks, ",
1217*e4b17023SJohn Marino 	       current_function_name (), n_basic_blocks);
1218*e4b17023SJohn Marino       fprintf (dump_file, "%d insns deleted, %d insns created\n",
1219*e4b17023SJohn Marino 	       n_stores_deleted, n_stores_created);
1220*e4b17023SJohn Marino     }
1221*e4b17023SJohn Marino 
1222*e4b17023SJohn Marino   return (n_stores_deleted > 0 || n_stores_created > 0);
1223*e4b17023SJohn Marino }
1224*e4b17023SJohn Marino 
1225*e4b17023SJohn Marino 
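/* Return true if the store motion pass should run:  it was requested with
   -fgcse-sm, optimization is enabled and this function is optimized for
   speed, the function does not call setjmp, and the store_motion debug
   counter has not run out.  */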
1226*e4b17023SJohn Marino static bool
1227*e4b17023SJohn Marino gate_rtl_store_motion (void)
1228*e4b17023SJohn Marino {
1229*e4b17023SJohn Marino   return optimize > 0 && flag_gcse_sm
1230*e4b17023SJohn Marino     && !cfun->calls_setjmp
1231*e4b17023SJohn Marino     && optimize_function_for_speed_p (cfun)
1232*e4b17023SJohn Marino     && dbg_cnt (store_motion);
1233*e4b17023SJohn Marino }
1234*e4b17023SJohn Marino 
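/* Main entry point of the pass:  remove unreachable blocks, recompute the
   df information, and run one pass of store motion.  A rerun of CSE after
   the global optimizations is requested if any stores were moved.  */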
1235*e4b17023SJohn Marino static unsigned int
1236*e4b17023SJohn Marino execute_rtl_store_motion (void)
1237*e4b17023SJohn Marino {
1238*e4b17023SJohn Marino   delete_unreachable_blocks ();
1239*e4b17023SJohn Marino   df_analyze ();
1240*e4b17023SJohn Marino   flag_rerun_cse_after_global_opts |= one_store_motion_pass ();
1241*e4b17023SJohn Marino   return 0;
1242*e4b17023SJohn Marino }
1243*e4b17023SJohn Marino 
1244*e4b17023SJohn Marino struct rtl_opt_pass pass_rtl_store_motion =
1245*e4b17023SJohn Marino {
1246*e4b17023SJohn Marino  {
1247*e4b17023SJohn Marino   RTL_PASS,
1248*e4b17023SJohn Marino   "store_motion",                       /* name */
1249*e4b17023SJohn Marino   gate_rtl_store_motion,                /* gate */
1250*e4b17023SJohn Marino   execute_rtl_store_motion,		/* execute */
1251*e4b17023SJohn Marino   NULL,                                 /* sub */
1252*e4b17023SJohn Marino   NULL,                                 /* next */
1253*e4b17023SJohn Marino   0,                                    /* static_pass_number */
1254*e4b17023SJohn Marino   TV_LSM,                               /* tv_id */
1255*e4b17023SJohn Marino   PROP_cfglayout,                       /* properties_required */
1256*e4b17023SJohn Marino   0,                                    /* properties_provided */
1257*e4b17023SJohn Marino   0,                                    /* properties_destroyed */
1258*e4b17023SJohn Marino   0,                                    /* todo_flags_start */
1259*e4b17023SJohn Marino   TODO_df_finish | TODO_verify_rtl_sharing |
1260*e4b17023SJohn Marino   TODO_verify_flow | TODO_ggc_collect   /* todo_flags_finish */
1261*e4b17023SJohn Marino  }
1262*e4b17023SJohn Marino };