1 /* Post reload partially redundant load elimination
2    Copyright (C) 2004, 2005, 2006, 2007, 2008, 2010
3    Free Software Foundation, Inc.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "toplev.h"
26 
27 #include "rtl.h"
28 #include "tree.h"
29 #include "tm_p.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "flags.h"
33 #include "real.h"
34 #include "insn-config.h"
35 #include "recog.h"
36 #include "basic-block.h"
37 #include "output.h"
38 #include "function.h"
39 #include "expr.h"
40 #include "except.h"
41 #include "intl.h"
42 #include "obstack.h"
43 #include "hashtab.h"
44 #include "params.h"
45 #include "target.h"
46 #include "timevar.h"
47 #include "tree-pass.h"
48 #include "dbgcnt.h"
49 
50 /* The following code implements gcse after reload.  The purpose of this
51    pass is to clean up redundant loads generated by reload and other
52    optimizations that come after gcse.  It searches for simple inter-block
53    redundancies and tries to eliminate them by adding moves and loads
54    in cold places.
55 
56    We perform partially redundant load elimination, trying to eliminate
57    redundant loads created by the reload pass.  We look for fully or
58    partially redundant loads fed by one or more loads/stores in predecessor
59    BBs, and try adding loads to make them fully redundant.  We also check
60    if it's worth adding loads to be able to delete the redundant load.
61 
62    Algorithm:
63    1. Build the available expressions hash table:
64        For each load/store instruction, if the loaded/stored memory didn't
65        change before the end of the basic block, add this memory expression
66        to the hash table.
67    2. Perform redundancy elimination:
68       For each load instruction do the following:
69 	 Perform partial redundancy elimination: check if it's worth adding
70 	 loads to make the load fully redundant.  If so, add the loads and
71 	 register copies and delete the original load.
72    3. Delete instructions made redundant in step 2.
73 
74    Future enhancement:
75      If the loaded register is used/defined between the load and some store,
76      look for some other register that is free between all the feeding stores
77      and the load, and replace the load with a copy from this register to
78      the loaded register.
79 */
80 
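/* As a hedged illustration (a made-up example, not taken from any real
   target or test case) of the transformation this pass performs:

	 bb1:                          bb2:
	   ... [sp+8] <- r5 ...          ... (no access to [sp+8]) ...
	          \                        /
	           \                      /
	            bb3:  r3 <- [sp+8]        <- partially redundant load

   The load in bb3 is redundant on the path through bb1, where r5 still
   holds the value at the end of the block, but not on the path through
   bb2.  Provided the profitability checks pass, the move "r3 <- r5" is
   inserted on the edge bb1->bb3, a copy of the load is inserted on the
   edge bb2->bb3, and the load in bb3 is deleted (step 3 above).  */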
81 
82 /* Keep statistics of this pass.  */
83 static struct
84 {
85   int moves_inserted;
86   int copies_inserted;
87   int insns_deleted;
88 } stats;
89 
90 /* We need to keep a hash table of expressions.  The table entries are of
91    type 'struct expr', and for each expression there is a singly linked
92    list of occurrences.  */
93 
94 /* The table itself.  */
95 static htab_t expr_table;
96 
97 /* Expression elements in the hash table.  */
98 struct expr
99 {
100   /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
101   rtx expr;
102 
103   /* The cached hash value for this entry.  */
104   hashval_t hash;
105 
106   /* List of available occurrences in basic blocks in the function.  */
107   struct occr *avail_occr;
108 };
109 
110 static struct obstack expr_obstack;
111 
112 /* Occurrence of an expression.
113    There is at most one occurrence per basic block.  If a pattern appears
114    more than once, the last appearance is used.  */
115 
116 struct occr
117 {
118   /* Next occurrence of this expression.  */
119   struct occr *next;
120   /* The insn that computes the expression.  */
121   rtx insn;
122   /* Nonzero if this [anticipatable] occurrence has been deleted.  */
123   char deleted_p;
124 };
125 
126 static struct obstack occr_obstack;
127 
128 /* The following structure holds the information about the occurrences of
129    the redundant instructions.  */
130 struct unoccr
131 {
132   struct unoccr *next;
133   edge pred;
134   rtx insn;
135 };
136 
137 static struct obstack unoccr_obstack;
138 
139 /* Array where each element is the CUID of the insn that last set the hard
140    register with the number of the element, since the start of the current
141    basic block.
142 
143    This array is used during the building of the hash table (step 1) to
144    determine if a reg is killed before the end of a basic block.
145 
146    It is also used when eliminating partial redundancies (step 2) to see
147    if a reg was modified since the start of a basic block.  */
148 static int *reg_avail_info;
149 
150 /* A list of insns that may modify memory within the current basic block.  */
151 struct modifies_mem
152 {
153   rtx insn;
154   struct modifies_mem *next;
155 };
156 static struct modifies_mem *modifies_mem_list;
157 
158 /* The modifies_mem structs also go on an obstack, but this obstack is
159    freed each time after completing the analysis or transformations on
160    a basic block.  So we allocate a dummy modifies_mem_obstack_bottom
161    object on the obstack to keep track of the bottom of the obstack.  */
162 static struct obstack modifies_mem_obstack;
163 static struct modifies_mem  *modifies_mem_obstack_bottom;
164 
165 /* Mapping of insn UIDs to CUIDs.
166    CUIDs are like UIDs except they increase monotonically in each basic
167    block, have no gaps, and only apply to real insns.  */
168 static int *uid_cuid;
169 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
170 
171 
172 /* Helpers for memory allocation/freeing.  */
173 static void alloc_mem (void);
174 static void free_mem (void);
175 
176 /* Support for hash table construction and transformations.  */
177 static bool oprs_unchanged_p (rtx, rtx, bool);
178 static void record_last_reg_set_info (rtx, rtx);
179 static void record_last_reg_set_info_regno (rtx, int);
180 static void record_last_mem_set_info (rtx);
181 static void record_last_set_info (rtx, const_rtx, void *);
182 static void record_opr_changes (rtx);
183 
184 static void find_mem_conflicts (rtx, const_rtx, void *);
185 static int load_killed_in_block_p (int, rtx, bool);
186 static void reset_opr_set_tables (void);
187 
188 /* Hash table support.  */
189 static hashval_t hash_expr (rtx, int *);
190 static hashval_t hash_expr_for_htab (const void *);
191 static int expr_equiv_p (const void *, const void *);
192 static void insert_expr_in_table (rtx, rtx);
193 static struct expr *lookup_expr_in_table (rtx);
194 static int dump_hash_table_entry (void **, void *);
195 static void dump_hash_table (FILE *);
196 
197 /* Helpers for eliminate_partially_redundant_load.  */
198 static bool reg_killed_on_edge (rtx, edge);
199 static bool reg_used_on_edge (rtx, edge);
200 
201 static rtx get_avail_load_store_reg (rtx);
202 
203 static bool bb_has_well_behaved_predecessors (basic_block);
204 static struct occr* get_bb_avail_insn (basic_block, struct occr *);
205 static void hash_scan_set (rtx);
206 static void compute_hash_table (void);
207 
208 /* The work horses of this pass.  */
209 static void eliminate_partially_redundant_load (basic_block,
210 						rtx,
211 						struct expr *);
212 static void eliminate_partially_redundant_loads (void);
213 
214 
215 /* Allocate memory for the CUID mapping array and register/memory
216    tracking tables.  */
217 
218 static void
219 alloc_mem (void)
220 {
221   int i;
222   basic_block bb;
223   rtx insn;
224 
225   /* Find the largest UID and create a mapping from UIDs to CUIDs.  */
226   uid_cuid = XCNEWVEC (int, get_max_uid () + 1);
227   i = 1;
228   FOR_EACH_BB (bb)
229     FOR_BB_INSNS (bb, insn)
230       {
231         if (INSN_P (insn))
232 	  uid_cuid[INSN_UID (insn)] = i++;
233 	else
234 	  uid_cuid[INSN_UID (insn)] = i;
235       }
236 
237   /* Allocate the available expressions hash table.  We don't want to
238      make the hash table too small, but unnecessarily making it too large
239      also doesn't help.  The i/4 is a gcse.c relic, and seems like a
240      reasonable choice.  */
241   expr_table = htab_create (MAX (i / 4, 13),
242 			    hash_expr_for_htab, expr_equiv_p, NULL);
243 
244   /* We allocate everything on obstacks because we often can roll back
245      the whole obstack to some point.  Freeing obstacks is very fast.  */
246   gcc_obstack_init (&expr_obstack);
247   gcc_obstack_init (&occr_obstack);
248   gcc_obstack_init (&unoccr_obstack);
249   gcc_obstack_init (&modifies_mem_obstack);
250 
251   /* Working array used to track the last set for each register
252      in the current block.  */
253   reg_avail_info = (int *) xmalloc (FIRST_PSEUDO_REGISTER * sizeof (int));
254 
255   /* Put a dummy modifies_mem object on the modifies_mem_obstack, so we
256      can roll it back in reset_opr_set_tables.  */
257   modifies_mem_obstack_bottom =
258     (struct modifies_mem *) obstack_alloc (&modifies_mem_obstack,
259 					   sizeof (struct modifies_mem));
260 }
261 
262 /* Free memory allocated by alloc_mem.  */
263 
264 static void
265 free_mem (void)
266 {
267   free (uid_cuid);
268 
269   htab_delete (expr_table);
270 
271   obstack_free (&expr_obstack, NULL);
272   obstack_free (&occr_obstack, NULL);
273   obstack_free (&unoccr_obstack, NULL);
274   obstack_free (&modifies_mem_obstack, NULL);
275 
276   free (reg_avail_info);
277 }
278 
279 
280 /* Hash expression X.
281    DO_NOT_RECORD_P is a boolean indicating if a volatile operand is found
282    or if the expression contains something we don't want to insert in the
283    table.  */
284 
285 static hashval_t
286 hash_expr (rtx x, int *do_not_record_p)
287 {
288   *do_not_record_p = 0;
289   return hash_rtx (x, GET_MODE (x), do_not_record_p,
290 		   NULL,  /*have_reg_qty=*/false);
291 }
292 
293 /* Callback for hashtab.
294    Return the hash value for expression EXP.  We don't actually hash
295    here; we just return the cached hash value.  */
296 
297 static hashval_t
298 hash_expr_for_htab (const void *expp)
299 {
300   const struct expr *const exp = (const struct expr *) expp;
301   return exp->hash;
302 }
303 
304 /* Callback for hashtab.
305    Return nonzero if EXP1 is equivalent to EXP2.  */
306 
307 static int
308 expr_equiv_p (const void *exp1p, const void *exp2p)
309 {
310   const struct expr *const exp1 = (const struct expr *) exp1p;
311   const struct expr *const exp2 = (const struct expr *) exp2p;
312   int equiv_p = exp_equiv_p (exp1->expr, exp2->expr, 0, true);
313 
314   gcc_assert (!equiv_p || exp1->hash == exp2->hash);
315   return equiv_p;
316 }
317 
318 
319 /* Insert expression X, which occurs in INSN, in the expression hash
320    table.  If it is already present, record it as the last occurrence
321    in INSN's basic block.  */
322 
323 static void
324 insert_expr_in_table (rtx x, rtx insn)
325 {
326   int do_not_record_p;
327   hashval_t hash;
328   struct expr *cur_expr, **slot;
329   struct occr *avail_occr, *last_occr = NULL;
330 
331   hash = hash_expr (x, &do_not_record_p);
332 
333   /* Do not insert expression in the table if it contains volatile operands,
334      or if hash_expr determines the expression is something we don't want
335      to or can't handle.  */
336   if (do_not_record_p)
337     return;
338 
339   /* We anticipate that redundant expressions are rare, so for convenience
340      we allocate a new hash table element right away and set its fields.
341      If we didn't do this, we would need a hack with a static struct expr.
342      Anyway, obstack_free is really fast and one more obstack_alloc doesn't
343      hurt if we're going to see more expressions later on.  */
344   cur_expr = (struct expr *) obstack_alloc (&expr_obstack,
345 					    sizeof (struct expr));
346   cur_expr->expr = x;
347   cur_expr->hash = hash;
348   cur_expr->avail_occr = NULL;
349 
350   slot = (struct expr **) htab_find_slot_with_hash (expr_table, cur_expr,
351 						    hash, INSERT);
352 
353   if (! (*slot))
354     /* The expression isn't found, so insert it.  */
355     *slot = cur_expr;
356   else
357     {
358       /* The expression is already in the table, so roll back the
359 	 obstack and use the existing table entry.  */
360       obstack_free (&expr_obstack, cur_expr);
361       cur_expr = *slot;
362     }
363 
364   /* Search for another occurrence in the same basic block.  */
365   avail_occr = cur_expr->avail_occr;
366   while (avail_occr
367 	 && BLOCK_FOR_INSN (avail_occr->insn) != BLOCK_FOR_INSN (insn))
368     {
369       /* Remember the end of the list, in case we have to append a new
370 	 occurrence for this block below.  */
371       last_occr = avail_occr;
372       avail_occr = avail_occr->next;
373     }
374 
375   if (avail_occr)
376     /* Found another instance of the expression in the same basic block.
377        Prefer this occurrence to the currently recorded one.  We want
378        the last one in the block and the block is scanned from start
379        to end.  */
380     avail_occr->insn = insn;
381   else
382     {
383       /* First occurrence of this expression in this basic block.  */
384       avail_occr = (struct occr *) obstack_alloc (&occr_obstack,
385 						  sizeof (struct occr));
386 
387       /* First occurrence of this expression in any block?  */
388       if (cur_expr->avail_occr == NULL)
389         cur_expr->avail_occr = avail_occr;
390       else
391         last_occr->next = avail_occr;
392 
393       avail_occr->insn = insn;
394       avail_occr->next = NULL;
395       avail_occr->deleted_p = 0;
396     }
397 }
398 
399 
400 /* Look up pattern PAT in the expression hash table.
401    The result is a pointer to the table entry, or NULL if not found.  */
402 
403 static struct expr *
404 lookup_expr_in_table (rtx pat)
405 {
406   int do_not_record_p;
407   struct expr **slot, *tmp_expr;
408   hashval_t hash = hash_expr (pat, &do_not_record_p);
409 
410   if (do_not_record_p)
411     return NULL;
412 
413   tmp_expr = (struct expr *) obstack_alloc (&expr_obstack,
414 					    sizeof (struct expr));
415   tmp_expr->expr = pat;
416   tmp_expr->hash = hash;
417   tmp_expr->avail_occr = NULL;
418 
419   slot = (struct expr **) htab_find_slot_with_hash (expr_table, tmp_expr,
420                                                     hash, INSERT);
421   obstack_free (&expr_obstack, tmp_expr);
422 
423   if (!slot)
424     return NULL;
425   else
426     return (*slot);
427 }
428 
429 
430 /* Dump all expressions and occurrences that are currently in the
431    expression hash table to FILE.  */
432 
433 /* This helper is called via htab_traverse.  */
434 static int
435 dump_hash_table_entry (void **slot, void *filep)
436 {
437   struct expr *expr = (struct expr *) *slot;
438   FILE *file = (FILE *) filep;
439   struct occr *occr;
440 
441   fprintf (file, "expr: ");
442   print_rtl (file, expr->expr);
443   fprintf (file, "\nhashcode: %u\n", expr->hash);
444   fprintf (file, "list of occurrences:\n");
445   occr = expr->avail_occr;
446   while (occr)
447     {
448       rtx insn = occr->insn;
449       print_rtl_single (file, insn);
450       fprintf (file, "\n");
451       occr = occr->next;
452     }
453   fprintf (file, "\n");
454   return 1;
455 }
456 
457 static void
458 dump_hash_table (FILE *file)
459 {
460   fprintf (file, "\n\nexpression hash table\n");
461   fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
462            (long) htab_size (expr_table),
463            (long) htab_elements (expr_table),
464            htab_collisions (expr_table));
465   if (htab_elements (expr_table) > 0)
466     {
467       fprintf (file, "\n\ntable entries:\n");
468       htab_traverse (expr_table, dump_hash_table_entry, file);
469     }
470   fprintf (file, "\n");
471 }
472 
473 /* Return true if register X is recorded as being set by an instruction
474    whose CUID is greater than the one given.  */
475 
476 static bool
477 reg_changed_after_insn_p (rtx x, int cuid)
478 {
479   unsigned int regno, end_regno;
480 
481   regno = REGNO (x);
482   end_regno = END_HARD_REGNO (x);
483   do
484     if (reg_avail_info[regno] > cuid)
485       return true;
486   while (++regno < end_regno);
487   return false;
488 }
489 
490 /* Return nonzero if the operands of expression X are unchanged
491    1) from the start of INSN's basic block up to but not including INSN
492       if AFTER_INSN is false, or
493    2) from INSN to the end of INSN's basic block if AFTER_INSN is true.  */
494 
495 static bool
496 oprs_unchanged_p (rtx x, rtx insn, bool after_insn)
497 {
498   int i, j;
499   enum rtx_code code;
500   const char *fmt;
501 
502   if (x == 0)
503     return 1;
504 
505   code = GET_CODE (x);
506   switch (code)
507     {
508     case REG:
509       /* We are called after register allocation.  */
510       gcc_assert (REGNO (x) < FIRST_PSEUDO_REGISTER);
511       if (after_insn)
512 	return !reg_changed_after_insn_p (x, INSN_CUID (insn) - 1);
513       else
514 	return !reg_changed_after_insn_p (x, 0);
515 
516     case MEM:
517       if (load_killed_in_block_p (INSN_CUID (insn), x, after_insn))
518 	return 0;
519       else
520 	return oprs_unchanged_p (XEXP (x, 0), insn, after_insn);
521 
522     case PC:
523     case CC0: /*FIXME*/
524     case CONST:
525     case CONST_INT:
526     case CONST_DOUBLE:
527     case CONST_FIXED:
528     case CONST_VECTOR:
529     case SYMBOL_REF:
530     case LABEL_REF:
531     case ADDR_VEC:
532     case ADDR_DIFF_VEC:
533       return 1;
534 
535     case PRE_DEC:
536     case PRE_INC:
537     case POST_DEC:
538     case POST_INC:
539     case PRE_MODIFY:
540     case POST_MODIFY:
541       if (after_insn)
542 	return 0;
543       break;
544 
545     default:
546       break;
547     }
548 
549   for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
550     {
551       if (fmt[i] == 'e')
552 	{
553 	  if (! oprs_unchanged_p (XEXP (x, i), insn, after_insn))
554 	    return 0;
555 	}
556       else if (fmt[i] == 'E')
557 	for (j = 0; j < XVECLEN (x, i); j++)
558 	  if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, after_insn))
559 	    return 0;
560     }
561 
562   return 1;
563 }
564 
565 
566 /* Used for communication between find_mem_conflicts and
567    load_killed_in_block_p.  Nonzero if find_mem_conflicts finds a
568    conflict between two memory references.
569    This is a bit of a hack to work around the limitations of note_stores.  */
570 static int mems_conflict_p;
571 
572 /* DEST is the output of an instruction.  If it is a memory reference, and
573    possibly conflicts with the load found in DATA, then set mems_conflict_p
574    to a nonzero value.  */
575 
576 static void
577 find_mem_conflicts (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
578 		    void *data)
579 {
580   rtx mem_op = (rtx) data;
581 
582   while (GET_CODE (dest) == SUBREG
583 	 || GET_CODE (dest) == ZERO_EXTRACT
584 	 || GET_CODE (dest) == STRICT_LOW_PART)
585     dest = XEXP (dest, 0);
586 
587   /* If DEST is not a MEM, then it will not conflict with the load.  Note
588      that function calls are assumed to clobber memory, but are handled
589      elsewhere.  */
590   if (! MEM_P (dest))
591     return;
592 
593   if (true_dependence (dest, GET_MODE (dest), mem_op,
594 		       rtx_addr_varies_p))
595     mems_conflict_p = 1;
596 }
597 
598 
599 /* Return nonzero if the expression in X (a memory reference) is killed
600    in the current basic block before (if AFTER_INSN is false) or after
601    (if AFTER_INSN is true) the insn with the CUID in UID_LIMIT.
602 
603    This function assumes that the modifies_mem table is flushed when
604    the hash table construction or redundancy elimination phases start
605    processing a new basic block.  */
606 
607 static int
608 load_killed_in_block_p (int uid_limit, rtx x, bool after_insn)
609 {
610   struct modifies_mem *list_entry = modifies_mem_list;
611 
612   while (list_entry)
613     {
614       rtx setter = list_entry->insn;
615 
616       /* Ignore entries in the list that do not apply.  */
617       if ((after_insn
618 	   && INSN_CUID (setter) < uid_limit)
619 	  || (! after_insn
620 	      && INSN_CUID (setter) > uid_limit))
621 	{
622 	  list_entry = list_entry->next;
623 	  continue;
624 	}
625 
626       /* If SETTER is a call, everything is clobbered.  Note that calls
627 	 to pure functions are never put on the list, so we need not
628 	 worry about them.  */
629       if (CALL_P (setter))
630 	return 1;
631 
632       /* SETTER must be an insn of some kind that sets memory.  Call
633 	 note_stores to examine each hunk of memory that is modified.
634 	 It will set mems_conflict_p to nonzero if there may be a
635 	 conflict between X and SETTER.  */
636       mems_conflict_p = 0;
637       note_stores (PATTERN (setter), find_mem_conflicts, x);
638       if (mems_conflict_p)
639 	return 1;
640 
641       list_entry = list_entry->next;
642     }
643   return 0;
644 }
645 
646 
647 /* Record that the hard registers covered by REG are last set by INSN.  */
648 
649 static inline void
650 record_last_reg_set_info (rtx insn, rtx reg)
651 {
652   unsigned int regno, end_regno;
653 
654   regno = REGNO (reg);
655   end_regno = END_HARD_REGNO (reg);
656   do
657     reg_avail_info[regno] = INSN_CUID (insn);
658   while (++regno < end_regno);
659 }
660 
661 static inline void
662 record_last_reg_set_info_regno (rtx insn, int regno)
663 {
664   reg_avail_info[regno] = INSN_CUID (insn);
665 }
666 
667 
668 /* Record memory modification information for INSN.  We do not actually care
669    about the memory location(s) that are set, or even how they are set (consider
670    a CALL_INSN).  We merely need to record which insns modify memory.  */
671 
672 static void
673 record_last_mem_set_info (rtx insn)
674 {
675   struct modifies_mem *list_entry;
676 
677   list_entry = (struct modifies_mem *) obstack_alloc (&modifies_mem_obstack,
678 						      sizeof (struct modifies_mem));
679   list_entry->insn = insn;
680   list_entry->next = modifies_mem_list;
681   modifies_mem_list = list_entry;
682 }
683 
684 /* Called from compute_hash_table via note_stores to handle one
685    SET or CLOBBER in an insn.  DATA is really the instruction in which
686    the SET is taking place.  */
687 
688 static void
689 record_last_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, void *data)
690 {
691   rtx last_set_insn = (rtx) data;
692 
693   if (GET_CODE (dest) == SUBREG)
694     dest = SUBREG_REG (dest);
695 
696   if (REG_P (dest))
697     record_last_reg_set_info (last_set_insn, dest);
698   else if (MEM_P (dest))
699     {
700       /* Ignore pushes; they don't clobber memory.  They may still
701 	 clobber the stack pointer though.  Some targets do argument
702 	 pushes without adding REG_INC notes.  See e.g. PR25196,
703 	 where a pushsi2 on i386 doesn't have REG_INC notes.  Note
704 	 such changes here too.  */
705       if (! push_operand (dest, GET_MODE (dest)))
706 	record_last_mem_set_info (last_set_insn);
707       else
708 	record_last_reg_set_info_regno (last_set_insn, STACK_POINTER_REGNUM);
709     }
710 }
711 
712 
713 /* Reset tables used to keep track of what's still available since the
714    start of the block.  */
715 
716 static void
717 reset_opr_set_tables (void)
718 {
719   memset (reg_avail_info, 0, FIRST_PSEUDO_REGISTER * sizeof (int));
720   obstack_free (&modifies_mem_obstack, modifies_mem_obstack_bottom);
721   modifies_mem_list = NULL;
722 }
723 
724 
725 /* Record things set by INSN.
726    This data is used by oprs_unchanged_p.  */
727 
728 static void
729 record_opr_changes (rtx insn)
730 {
731   rtx note;
732 
733   /* Find all stores and record them.  */
734   note_stores (PATTERN (insn), record_last_set_info, insn);
735 
736   /* Also record autoincremented REGs for this insn as changed.  */
737   for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
738     if (REG_NOTE_KIND (note) == REG_INC)
739       record_last_reg_set_info (insn, XEXP (note, 0));
740 
741   /* Finally, if this is a call, record all call clobbers.  */
742   if (CALL_P (insn))
743     {
744       unsigned int regno;
745       rtx link, x;
746 
747       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
748 	if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
749 	  record_last_reg_set_info_regno (insn, regno);
750 
751       for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
752 	if (GET_CODE (XEXP (link, 0)) == CLOBBER)
753 	  {
754 	    x = XEXP (XEXP (link, 0), 0);
755 	    if (REG_P (x))
756 	      {
757 		gcc_assert (HARD_REGISTER_P (x));
758 		record_last_reg_set_info (insn, x);
759 	      }
760 	  }
761 
762       if (! RTL_CONST_OR_PURE_CALL_P (insn))
763 	record_last_mem_set_info (insn);
764     }
765 }
766 
767 
768 /* Scan the pattern of INSN and add an entry to the expression hash table.
769    After reload we are interested in loads and stores only.  */
770 
771 static void
772 hash_scan_set (rtx insn)
773 {
774   rtx pat = PATTERN (insn);
775   rtx src = SET_SRC (pat);
776   rtx dest = SET_DEST (pat);
777 
778   /* We are only interested in loads and stores.  */
779   if (! MEM_P (src) && ! MEM_P (dest))
780     return;
781 
782   /* Don't mess with jumps and nops.  */
783   if (JUMP_P (insn) || set_noop_p (pat))
784     return;
785 
786   if (REG_P (dest))
787     {
788       if (/* Don't CSE something if we can't do a reg/reg copy.  */
789 	  can_copy_p (GET_MODE (dest))
790 	  /* Is SET_SRC something we want to gcse?  */
791 	  && general_operand (src, GET_MODE (src))
792 #ifdef STACK_REGS
793 	  /* Never consider insns touching the register stack.  It may
794 	     create situations that reg-stack cannot handle (e.g. a stack
795 	     register live across an abnormal edge).  */
796 	  && (REGNO (dest) < FIRST_STACK_REG || REGNO (dest) > LAST_STACK_REG)
797 #endif
798 	  /* An expression is not available if its operands are
799 	     subsequently modified, including this insn.  */
800 	  && oprs_unchanged_p (src, insn, true))
801 	{
802 	  insert_expr_in_table (src, insn);
803 	}
804     }
805   else if (REG_P (src))
806     {
807       /* This is a store to memory; record the stored memory expression.  */
808       if (/* Don't CSE something if we can't do a reg/reg copy.  */
809 	  can_copy_p (GET_MODE (src))
810 	  /* Is SET_DEST something we want to gcse?  */
811 	  && general_operand (dest, GET_MODE (dest))
812 #ifdef STACK_REGS
813 	  /* As above for STACK_REGS.  */
814 	  && (REGNO (src) < FIRST_STACK_REG || REGNO (src) > LAST_STACK_REG)
815 #endif
816 	  && ! (flag_float_store && FLOAT_MODE_P (GET_MODE (dest)))
817 	  /* Check if the memory expression is killed after insn.  */
818 	  && ! load_killed_in_block_p (INSN_CUID (insn) + 1, dest, true)
819 	  && oprs_unchanged_p (XEXP (dest, 0), insn, true))
820 	{
821 	  insert_expr_in_table (dest, insn);
822 	}
823     }
824 }
825 
826 
827 /* Create hash table of memory expressions available at end of basic
828    blocks.  Basically you should think of this hash table as the
829    representation of AVAIL_OUT.  This is the set of expressions that
830    is generated in a basic block and not killed before the end of the
831    same basic block.  Notice that this is really a local computation.  */
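
/* As an illustrative sketch (hypothetical insns, not from any particular
   target): for a basic block containing

	r1 <- [a]       load
	[b] <- r2       store
	call foo        clobbers memory
	r3 <- [c]       load

   only the expression [c] is entered in the table for this block.  The
   call clobbers memory before the end of the block, so [a] and [b] are
   killed and therefore not in AVAIL_OUT, while nothing after the final
   load can kill [c].  */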
832 
833 static void
834 compute_hash_table (void)
835 {
836   basic_block bb;
837 
838   FOR_EACH_BB (bb)
839     {
840       rtx insn;
841 
842       /* First pass over the instructions records information used to
843 	 determine when registers and memory are last set.
844 	 Since we compute a "local" AVAIL_OUT, reset the tables that
845 	 help us keep track of what has been modified since the start
846 	 of the block.  */
847       reset_opr_set_tables ();
848       FOR_BB_INSNS (bb, insn)
849 	{
850 	  if (INSN_P (insn))
851             record_opr_changes (insn);
852 	}
853 
854       /* The next pass actually builds the hash table.  */
855       FOR_BB_INSNS (bb, insn)
856 	if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == SET)
857 	  hash_scan_set (insn);
858     }
859 }
860 
861 
862 /* Check if register REG is killed in any insn waiting to be inserted on
863    edge E.  This function is required to check that our data flow analysis
864    is still valid prior to commit_edge_insertions.  */
865 
866 static bool
867 reg_killed_on_edge (rtx reg, edge e)
868 {
869   rtx insn;
870 
871   for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
872     if (INSN_P (insn) && reg_set_p (reg, insn))
873       return true;
874 
875   return false;
876 }
877 
878 /* Similar to above - check if register REG is used in any insn waiting
879    to be inserted on edge E.  This assumes no such insn can be a CALL_INSN;
880    if that ever changes, call reg_used_between_p with PREV(insn),NEXT(insn)
881    instead of reg_overlap_mentioned_p.  */
882 
883 static bool
884 reg_used_on_edge (rtx reg, edge e)
885 {
886   rtx insn;
887 
888   for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
889     if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
890       return true;
891 
892   return false;
893 }
894 
895 /* Return the loaded/stored register of a load/store instruction.  */
896 
897 static rtx
898 get_avail_load_store_reg (rtx insn)
899 {
900   if (REG_P (SET_DEST (PATTERN (insn))))
901     /* A load.  */
902     return SET_DEST (PATTERN (insn));
903   else
904     {
905       /* A store.  */
906       gcc_assert (REG_P (SET_SRC (PATTERN (insn))));
907       return SET_SRC (PATTERN (insn));
908     }
909 }
910 
911 /* Return true if the predecessors of BB are "well behaved".  */
912 
913 static bool
914 bb_has_well_behaved_predecessors (basic_block bb)
915 {
916   edge pred;
917   edge_iterator ei;
918 
919   if (EDGE_COUNT (bb->preds) == 0)
920     return false;
921 
922   FOR_EACH_EDGE (pred, ei, bb->preds)
923     {
924       if ((pred->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (pred))
925 	return false;
926 
927       if (JUMP_TABLE_DATA_P (BB_END (pred->src)))
928 	return false;
929     }
930   return true;
931 }
932 
933 
934 /* Search the list of occurrences OCCR for one that occurs in BB.  */
935 
936 static struct occr*
937 get_bb_avail_insn (basic_block bb, struct occr *occr)
938 {
939   for (; occr != NULL; occr = occr->next)
940     if (BLOCK_FOR_INSN (occr->insn) == bb)
941       return occr;
942   return NULL;
943 }
944 
945 
946 /* This handles the case where several stores feed a partially redundant
947    load. It checks if the redundancy elimination is possible and if it's
948    worth it.
949 
950    Redundancy elimination is possible if:
951    1) None of the operands of an insn have been modified since the start
952       of the current basic block.
953    2) In any predecessor of the current basic block, the same expression
954       is generated.
955 
956    See the function body for the heuristics that determine if eliminating
957    a redundancy is also worth doing, assuming it is possible.  */
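
/* As a hedged illustration of those heuristics (the numbers below are made
   up; the actual thresholds come from the --param gcse-after-reload-*
   parameters): suppose the profile says the predecessors where a simple
   register copy suffices account for ok_count = 900 executions of the load,
   the predecessors where a new load must be inserted account for
   not_ok_count = 100, and critical predecessor edges account for
   critical_count = 50.  With a partial fraction of 3 and a critical
   fraction of 10, the transformation is performed, since 900 >= 3 * 100
   and 900 >= 10 * 50.  */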
958 
959 static void
960 eliminate_partially_redundant_load (basic_block bb, rtx insn,
961 				    struct expr *expr)
962 {
963   edge pred;
964   rtx avail_insn = NULL_RTX;
965   rtx avail_reg;
966   rtx dest, pat;
967   struct occr *a_occr;
968   struct unoccr *occr, *avail_occrs = NULL;
969   struct unoccr *unoccr, *unavail_occrs = NULL, *rollback_unoccr = NULL;
970   int npred_ok = 0;
971   gcov_type ok_count = 0; /* Redundant load execution count.  */
972   gcov_type critical_count = 0; /* Execution count of critical edges.  */
973   edge_iterator ei;
974   bool critical_edge_split = false;
975 
976   /* The execution count of the loads to be added to make the
977      load fully redundant.  */
978   gcov_type not_ok_count = 0;
979   basic_block pred_bb;
980 
981   pat = PATTERN (insn);
982   dest = SET_DEST (pat);
983 
984   /* Check that the loaded register is not used, set, or killed between
985      the start of the block and this load.  */
986   if (reg_changed_after_insn_p (dest, 0)
987       || reg_used_between_p (dest, PREV_INSN (BB_HEAD (bb)), insn))
988     return;
989 
990   /* Check potential for replacing load with copy for predecessors.  */
991   FOR_EACH_EDGE (pred, ei, bb->preds)
992     {
993       rtx next_pred_bb_end;
994 
995       avail_insn = NULL_RTX;
996       avail_reg = NULL_RTX;
997       pred_bb = pred->src;
998       next_pred_bb_end = NEXT_INSN (BB_END (pred_bb));
999       for (a_occr = get_bb_avail_insn (pred_bb, expr->avail_occr); a_occr;
1000 	   a_occr = get_bb_avail_insn (pred_bb, a_occr->next))
1001 	{
1002 	  /* Check whether the value is still available in a register.  */
1003 	  avail_insn = a_occr->insn;
1004 	  avail_reg = get_avail_load_store_reg (avail_insn);
1005 	  gcc_assert (avail_reg);
1006 
1007 	  /* Make sure we can generate a move from register avail_reg to
1008 	     dest.  */
1009 	  extract_insn (gen_move_insn (copy_rtx (dest),
1010 				       copy_rtx (avail_reg)));
1011 	  if (! constrain_operands (1)
1012 	      || reg_killed_on_edge (avail_reg, pred)
1013 	      || reg_used_on_edge (dest, pred))
1014 	    {
1015 	      avail_insn = NULL;
1016 	      continue;
1017 	    }
1018 	  if (!reg_set_between_p (avail_reg, avail_insn, next_pred_bb_end))
1019 	    /* AVAIL_INSN remains non-null.  */
1020 	    break;
1021 	  else
1022 	    avail_insn = NULL;
1023 	}
1024 
1025       if (EDGE_CRITICAL_P (pred))
1026 	critical_count += pred->count;
1027 
1028       if (avail_insn != NULL_RTX)
1029 	{
1030 	  npred_ok++;
1031 	  ok_count += pred->count;
1032 	  if (! set_noop_p (PATTERN (gen_move_insn (copy_rtx (dest),
1033 						    copy_rtx (avail_reg)))))
1034 	    {
1035 	      /* Check if there is going to be a split.  */
1036 	      if (EDGE_CRITICAL_P (pred))
1037 		critical_edge_split = true;
1038 	    }
1039 	  else /* It's a dead move; no need to generate it.  */
1040 	    continue;
1041 	  occr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
1042 						  sizeof (struct unoccr));
1043 	  occr->insn = avail_insn;
1044 	  occr->pred = pred;
1045 	  occr->next = avail_occrs;
1046 	  avail_occrs = occr;
1047 	  if (! rollback_unoccr)
1048 	    rollback_unoccr = occr;
1049 	}
1050       else
1051 	{
1052 	  /* Adding a load on a critical edge will cause a split.  */
1053 	  if (EDGE_CRITICAL_P (pred))
1054 	    critical_edge_split = true;
1055 	  not_ok_count += pred->count;
1056 	  unoccr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
1057 						    sizeof (struct unoccr));
1058 	  unoccr->insn = NULL_RTX;
1059 	  unoccr->pred = pred;
1060 	  unoccr->next = unavail_occrs;
1061 	  unavail_occrs = unoccr;
1062 	  if (! rollback_unoccr)
1063 	    rollback_unoccr = unoccr;
1064 	}
1065     }
1066 
1067   if (/* No load can be replaced by copy.  */
1068       npred_ok == 0
1069       /* Prevent exploding the code.  */
1070       || (optimize_bb_for_size_p (bb) && npred_ok > 1)
1071      /* If we don't have profile information we cannot tell whether
1072          splitting a critical edge is profitable, so don't do it.  */
1073       || ((! profile_info || ! flag_branch_probabilities
1074 	   || targetm.cannot_modify_jumps_p ())
1075 	  && critical_edge_split))
1076     goto cleanup;
1077 
1078   /* Check if it's worth applying the partial redundancy elimination.  */
1079   if (ok_count < GCSE_AFTER_RELOAD_PARTIAL_FRACTION * not_ok_count)
1080     goto cleanup;
1081   if (ok_count < GCSE_AFTER_RELOAD_CRITICAL_FRACTION * critical_count)
1082     goto cleanup;
1083 
1084   /* Generate moves to the loaded register from where
1085      the memory is available.  */
1086   for (occr = avail_occrs; occr; occr = occr->next)
1087     {
1088       avail_insn = occr->insn;
1089       pred = occr->pred;
1090       /* Set avail_reg to be the register having the value of the
1091 	 memory.  */
1092       avail_reg = get_avail_load_store_reg (avail_insn);
1093       gcc_assert (avail_reg);
1094 
1095       insert_insn_on_edge (gen_move_insn (copy_rtx (dest),
1096 					  copy_rtx (avail_reg)),
1097 			   pred);
1098       stats.moves_inserted++;
1099 
1100       if (dump_file)
1101 	fprintf (dump_file,
1102 		 "generating move from %d to %d on edge from %d to %d\n",
1103 		 REGNO (avail_reg),
1104 		 REGNO (dest),
1105 		 pred->src->index,
1106 		 pred->dest->index);
1107     }
1108 
1109   /* Regenerate loads where the memory is unavailable.  */
1110   for (unoccr = unavail_occrs; unoccr; unoccr = unoccr->next)
1111     {
1112       pred = unoccr->pred;
1113       insert_insn_on_edge (copy_insn (PATTERN (insn)), pred);
1114       stats.copies_inserted++;
1115 
1116       if (dump_file)
1117 	{
1118 	  fprintf (dump_file,
1119 		   "generating on edge from %d to %d a copy of load: ",
1120 		   pred->src->index,
1121 		   pred->dest->index);
1122 	  print_rtl (dump_file, PATTERN (insn));
1123 	  fprintf (dump_file, "\n");
1124 	}
1125     }
1126 
1127   /* If the insn is not recorded as an available occurrence in this
1128      block, delete it now; otherwise only mark it for later deletion,
1129      since keeping it around may help discover additional redundancies.  */
1130   for (a_occr = get_bb_avail_insn (bb, expr->avail_occr);
1131        a_occr && (a_occr->insn != insn);
1132        a_occr = get_bb_avail_insn (bb, a_occr->next));
1133 
1134   if (!a_occr)
1135     {
1136       stats.insns_deleted++;
1137 
1138       if (dump_file)
1139 	{
1140 	  fprintf (dump_file, "deleting insn:\n");
1141           print_rtl_single (dump_file, insn);
1142           fprintf (dump_file, "\n");
1143 	}
1144       delete_insn (insn);
1145     }
1146   else
1147     a_occr->deleted_p = 1;
1148 
1149 cleanup:
1150   if (rollback_unoccr)
1151     obstack_free (&unoccr_obstack, rollback_unoccr);
1152 }
1153 
1154 /* Perform the redundancy elimination as described before.  */
1155 
1156 static void
1157 eliminate_partially_redundant_loads (void)
1158 {
1159   rtx insn;
1160   basic_block bb;
1161 
1162   /* Note we start at block 1.  */
1163 
1164   if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
1165     return;
1166 
1167   FOR_BB_BETWEEN (bb,
1168 		  ENTRY_BLOCK_PTR->next_bb->next_bb,
1169 		  EXIT_BLOCK_PTR,
1170 		  next_bb)
1171     {
1172       /* Don't try anything on basic blocks with strange predecessors.  */
1173       if (! bb_has_well_behaved_predecessors (bb))
1174 	continue;
1175 
1176       /* Do not try anything on cold basic blocks.  */
1177       if (optimize_bb_for_size_p (bb))
1178 	continue;
1179 
1180       /* Reset the table of things changed since the start of the current
1181 	 basic block.  */
1182       reset_opr_set_tables ();
1183 
1184       /* Look at all insns in the current basic block and see if there are
1185 	 any loads in it that we can try to eliminate.  */
1186       FOR_BB_INSNS (bb, insn)
1187 	{
1188 	  /* Is it a load - of the form (set (reg) (mem))?  */
1189 	  if (NONJUMP_INSN_P (insn)
1190               && GET_CODE (PATTERN (insn)) == SET
1191 	      && REG_P (SET_DEST (PATTERN (insn)))
1192 	      && MEM_P (SET_SRC (PATTERN (insn))))
1193 	    {
1194 	      rtx pat = PATTERN (insn);
1195 	      rtx src = SET_SRC (pat);
1196 	      struct expr *expr;
1197 
1198 	      if (!MEM_VOLATILE_P (src)
1199 		  && GET_MODE (src) != BLKmode
1200 		  && general_operand (src, GET_MODE (src))
1201 		  /* Are the operands unchanged since the start of the
1202 		     block?  */
1203 		  && oprs_unchanged_p (src, insn, false)
1204 		  && !(flag_non_call_exceptions && may_trap_p (src))
1205 		  && !side_effects_p (src)
1206 		  /* Is the expression recorded?  */
1207 		  && (expr = lookup_expr_in_table (src)) != NULL)
1208 		{
1209 		  /* We now have a load (INSN) and a matching table entry
1210 		     (EXPR) for the loaded memory.  Try to remove the load
1211 		     if it is redundant.  */
1212 		  eliminate_partially_redundant_load (bb, insn, expr);
1213 		}
1214 	    }
1215 
1216 	  /* Keep track of everything modified by this insn, so that we
1217 	     know what has been modified since the start of the current
1218 	     basic block.  */
1219 	  if (INSN_P (insn))
1220 	    record_opr_changes (insn);
1221 	}
1222     }
1223 
1224   commit_edge_insertions ();
1225 }
1226 
1227 /* Go over the expression hash table and delete insns that were
1228    marked for later deletion.  */
1229 
1230 /* This helper is called via htab_traverse.  */
1231 static int
1232 delete_redundant_insns_1 (void **slot, void *data ATTRIBUTE_UNUSED)
1233 {
1234   struct expr *expr = (struct expr *) *slot;
1235   struct occr *occr;
1236 
1237   for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
1238     {
1239       if (occr->deleted_p && dbg_cnt (gcse2_delete))
1240 	{
1241 	  delete_insn (occr->insn);
1242 	  stats.insns_deleted++;
1243 
1244 	  if (dump_file)
1245 	    {
1246 	      fprintf (dump_file, "deleting insn:\n");
1247 	      print_rtl_single (dump_file, occr->insn);
1248 	      fprintf (dump_file, "\n");
1249 	    }
1250 	}
1251     }
1252 
1253   return 1;
1254 }
1255 
1256 static void
1257 delete_redundant_insns (void)
1258 {
1259   htab_traverse (expr_table, delete_redundant_insns_1, NULL);
1260   if (dump_file)
1261     fprintf (dump_file, "\n");
1262 }
1263 
1264 /* Main entry point of GCSE after reload - clean up redundant loads
1265    created by spilling.  */
1266 
1267 static void
1268 gcse_after_reload_main (rtx f ATTRIBUTE_UNUSED)
1269 {
1270 
1271   memset (&stats, 0, sizeof (stats));
1272 
1273   /* Allocate memory for this pass.
1274      Also computes and initializes the insns' CUIDs.  */
1275   alloc_mem ();
1276 
1277   /* We need alias analysis.  */
1278   init_alias_analysis ();
1279 
1280   compute_hash_table ();
1281 
1282   if (dump_file)
1283     dump_hash_table (dump_file);
1284 
1285   if (htab_elements (expr_table) > 0)
1286     {
1287       eliminate_partially_redundant_loads ();
1288       delete_redundant_insns ();
1289 
1290       if (dump_file)
1291 	{
1292 	  fprintf (dump_file, "GCSE AFTER RELOAD stats:\n");
1293 	  fprintf (dump_file, "copies inserted: %d\n", stats.copies_inserted);
1294 	  fprintf (dump_file, "moves inserted:  %d\n", stats.moves_inserted);
1295 	  fprintf (dump_file, "insns deleted:   %d\n", stats.insns_deleted);
1296 	  fprintf (dump_file, "\n\n");
1297 	}
1298     }
1299 
1300   /* We are finished with alias analysis.  */
1301   end_alias_analysis ();
1302 
1303   free_mem ();
1304 }
1305 
1306 
1307 static bool
1308 gate_handle_gcse2 (void)
1309 {
1310   return (optimize > 0 && flag_gcse_after_reload
1311 	  && optimize_function_for_speed_p (cfun));
1312 }
1313 
1314 
1315 static unsigned int
1316 rest_of_handle_gcse2 (void)
1317 {
1318   gcse_after_reload_main (get_insns ());
1319   rebuild_jump_labels (get_insns ());
1320   return 0;
1321 }
1322 
1323 struct rtl_opt_pass pass_gcse2 =
1324 {
1325  {
1326   RTL_PASS,
1327   "gcse2",                              /* name */
1328   gate_handle_gcse2,                    /* gate */
1329   rest_of_handle_gcse2,                 /* execute */
1330   NULL,                                 /* sub */
1331   NULL,                                 /* next */
1332   0,                                    /* static_pass_number */
1333   TV_GCSE_AFTER_RELOAD,                 /* tv_id */
1334   0,                                    /* properties_required */
1335   0,                                    /* properties_provided */
1336   0,                                    /* properties_destroyed */
1337   0,                                    /* todo_flags_start */
1338   TODO_dump_func | TODO_verify_rtl_sharing
1339   | TODO_verify_flow | TODO_ggc_collect /* todo_flags_finish */
1340  }
1341 };
1342 
1343