xref: /openbsd-src/gnu/gcc/gcc/jump.c (revision 404b540a9034ac75a6199ad1a32d1bbc7a0d4210)
1 /* Optimize jump instructions, for GNU compiler.
2    Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3    1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
4    Free Software Foundation, Inc.
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
12 
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16 for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING.  If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA.  */
22 
23 /* This is the pathetic reminder of old fame of the jump-optimization pass
24    of the compiler.  Now it contains basically a set of utility functions to
25    operate with jumps.
26 
27    Each CODE_LABEL has a count of the times it is used
28    stored in the LABEL_NUSES internal field, and each JUMP_INSN
29    has one label that it refers to stored in the
30    JUMP_LABEL internal field.  With this we can detect labels that
31    become unused because of the deletion of all the jumps that
32    formerly used them.  The JUMP_LABEL info is sometimes looked
33    at by later passes.
34 
35    The subroutines redirect_jump and invert_jump are used
36    from other passes as well.  */
37 
38 #include "config.h"
39 #include "system.h"
40 #include "coretypes.h"
41 #include "tm.h"
42 #include "rtl.h"
43 #include "tm_p.h"
44 #include "flags.h"
45 #include "hard-reg-set.h"
46 #include "regs.h"
47 #include "insn-config.h"
48 #include "insn-attr.h"
49 #include "recog.h"
50 #include "function.h"
51 #include "expr.h"
52 #include "real.h"
53 #include "except.h"
54 #include "diagnostic.h"
55 #include "toplev.h"
56 #include "reload.h"
57 #include "predict.h"
58 #include "timevar.h"
59 #include "tree-pass.h"
60 #include "target.h"
61 
62 /* Optimize jump y; x: ... y: jumpif... x?
63    Don't know if it is worth bothering with.  */
64 /* Optimize two cases of conditional jump to conditional jump?
65    This can never delete any instruction or make anything dead,
66    or even change what is live at any point.
67    So perhaps let combiner do it.  */
68 
69 static void init_label_info (rtx);
70 static void mark_all_labels (rtx);
71 static void delete_computation (rtx);
72 static void redirect_exp_1 (rtx *, rtx, rtx, rtx);
73 static int invert_exp_1 (rtx, rtx);
74 static int returnjump_p_1 (rtx *, void *);
75 static void delete_prior_computation (rtx, rtx);
76 
77 /* Alternate entry into the jump optimizer.  This entry point only rebuilds
78    the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
79    instructions.  */
80 void
rebuild_jump_labels(rtx f)81 rebuild_jump_labels (rtx f)
82 {
83   rtx insn;
84 
85   timevar_push (TV_REBUILD_JUMP);
86   init_label_info (f);
87   mark_all_labels (f);
88 
89   /* Keep track of labels used from static data; we don't track them
90      closely enough to delete them here, so make sure their reference
91      count doesn't drop to zero.  */
92 
93   for (insn = forced_labels; insn; insn = XEXP (insn, 1))
94     if (LABEL_P (XEXP (insn, 0)))
95       LABEL_NUSES (XEXP (insn, 0))++;
96   timevar_pop (TV_REBUILD_JUMP);
97 }
98 
99 /* Some old code expects exactly one BARRIER as the NEXT_INSN of a
100    non-fallthru insn.  This is not generally true, as multiple barriers
101    may have crept in, or the BARRIER may be separated from the last
102    real insn by one or more NOTEs.
103 
104    This simple pass moves barriers and removes duplicates so that the
105    old code is happy.
106  */
107 unsigned int
cleanup_barriers(void)108 cleanup_barriers (void)
109 {
110   rtx insn, next, prev;
111   for (insn = get_insns (); insn; insn = next)
112     {
113       next = NEXT_INSN (insn);
114       if (BARRIER_P (insn))
115 	{
116 	  prev = prev_nonnote_insn (insn);
117 	  if (BARRIER_P (prev))
118 	    delete_insn (insn);
119 	  else if (prev != PREV_INSN (insn))
120 	    reorder_insns (insn, insn, prev);
121 	}
122     }
123   return 0;
124 }
125 
/* RTL pass descriptor for cleanup_barriers above: no gate (always runs),
   no dedicated timevar, and requests a function dump when finished.  */
struct tree_opt_pass pass_cleanup_barriers =
{
  "barriers",                           /* name */
  NULL,                                 /* gate */
  cleanup_barriers,                     /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};
142 
/* Pass body for "elnotes": delete line-number notes that repeat the
   exact location of the previous live line-number note.  Always
   returns 0 (no extra TODO flags).  */
unsigned int
purge_line_number_notes (void)
{
  rtx last_note = 0;	/* Most recent surviving line-number note.  */
  rtx insn;
  /* Delete extraneous line number notes.
     Note that two consecutive notes for different lines are not really
     extraneous.  There should be some indication where that line belonged,
     even if it became empty.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (NOTE_P (insn))
      {
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
	  /* Any previous line note was for the prologue; gdb wants a new
	     note after the prologue even if it is for the same line.  */
	  last_note = NULL_RTX;
	else if (NOTE_LINE_NUMBER (insn) >= 0)
	  {
	    /* Delete this note if it is identical to previous note.
	       "Identical" depends on the location representation the
	       build uses: a single mapped location, or a file/line
	       pair.  */
	    if (last_note
#ifdef USE_MAPPED_LOCATION
		&& NOTE_SOURCE_LOCATION (insn) == NOTE_SOURCE_LOCATION (last_note)
#else
		&& NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
		&& NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note)
#endif
)
	      {
		delete_related_insns (insn);
		continue;
	      }

	    last_note = insn;
	  }
      }
  return 0;
}
181 
/* RTL pass descriptor for purge_line_number_notes above: ungated, no
   dedicated timevar, dumps the function when finished.  */
struct tree_opt_pass pass_purge_lineno_notes =
{
  "elnotes",                            /* name */
  NULL,                                 /* gate */
  purge_line_number_notes,              /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};
198 
199 
200 /* Initialize LABEL_NUSES and JUMP_LABEL fields.  Delete any REG_LABEL
201    notes whose labels don't occur in the insn any more.  Returns the
202    largest INSN_UID found.  */
203 static void
init_label_info(rtx f)204 init_label_info (rtx f)
205 {
206   rtx insn;
207 
208   for (insn = f; insn; insn = NEXT_INSN (insn))
209     if (LABEL_P (insn))
210       LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
211     else if (JUMP_P (insn))
212       JUMP_LABEL (insn) = 0;
213     else if (NONJUMP_INSN_P (insn) || CALL_P (insn))
214       {
215 	rtx note, next;
216 
217 	for (note = REG_NOTES (insn); note; note = next)
218 	  {
219 	    next = XEXP (note, 1);
220 	    if (REG_NOTE_KIND (note) == REG_LABEL
221 		&& ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
222 	      remove_note (insn, note);
223 	  }
224       }
225 }
226 
227 /* Mark the label each jump jumps to.
228    Combine consecutive labels, and count uses of labels.  */
229 
230 static void
mark_all_labels(rtx f)231 mark_all_labels (rtx f)
232 {
233   rtx insn;
234 
235   for (insn = f; insn; insn = NEXT_INSN (insn))
236     if (INSN_P (insn))
237       {
238 	mark_jump_label (PATTERN (insn), insn, 0);
239 	if (! INSN_DELETED_P (insn) && JUMP_P (insn))
240 	  {
241 	    /* When we know the LABEL_REF contained in a REG used in
242 	       an indirect jump, we'll have a REG_LABEL note so that
243 	       flow can tell where it's going.  */
244 	    if (JUMP_LABEL (insn) == 0)
245 	      {
246 		rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
247 		if (label_note)
248 		  {
249 		    /* But a LABEL_REF around the REG_LABEL note, so
250 		       that we can canonicalize it.  */
251 		    rtx label_ref = gen_rtx_LABEL_REF (Pmode,
252 						       XEXP (label_note, 0));
253 
254 		    mark_jump_label (label_ref, insn, 0);
255 		    XEXP (label_note, 0) = XEXP (label_ref, 0);
256 		    JUMP_LABEL (insn) = XEXP (label_note, 0);
257 		  }
258 	      }
259 	  }
260       }
261 }
262 
263 /* Move all block-beg, block-end and loop-beg notes between START and END out
264    before START.  START and END may be such notes.  Returns the values of the
265    new starting and ending insns, which may be different if the original ones
266    were such notes.  Return true if there were only such notes and no real
267    instructions.  */
268 
bool
squeeze_notes (rtx* startp, rtx* endp)
{
  rtx start = *startp;
  rtx end = *endp;

  rtx insn;
  rtx next;
  rtx last = NULL;		/* Last non-note insn seen; becomes new END.  */
  rtx past_end = NEXT_INSN (end);

  for (insn = start; insn != past_end; insn = next)
    {
      next = NEXT_INSN (insn);
      if (NOTE_P (insn)
	  && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG))
	{
	  /* BLOCK_BEG or BLOCK_END notes only exist in the `final' pass.
	     NOTE(review): this assert fails for exactly the notes the
	     enclosing `if' matched, so in checking builds the splice
	     code below is effectively unreachable — it is kept for
	     release builds where gcc_assert compiles away.  */
	  gcc_assert (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_BEG
		      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END);

	  if (insn == start)
	    start = next;
	  else
	    {
	      /* Unlink INSN from its current position and splice it in
		 immediately before START, keeping both doubly-linked
		 chains consistent.  */
	      rtx prev = PREV_INSN (insn);
	      PREV_INSN (insn) = PREV_INSN (start);
	      NEXT_INSN (insn) = start;
	      NEXT_INSN (PREV_INSN (insn)) = insn;
	      PREV_INSN (NEXT_INSN (insn)) = insn;
	      NEXT_INSN (prev) = next;
	      PREV_INSN (next) = prev;
	    }
	}
      else
	last = insn;
    }

  /* There were no real instructions.  */
  if (start == past_end)
    return true;

  end = last;

  *startp = start;
  *endp = end;
  return false;
}
318 
319 /* Return the label before INSN, or put a new label there.  */
320 
321 rtx
get_label_before(rtx insn)322 get_label_before (rtx insn)
323 {
324   rtx label;
325 
326   /* Find an existing label at this point
327      or make a new one if there is none.  */
328   label = prev_nonnote_insn (insn);
329 
330   if (label == 0 || !LABEL_P (label))
331     {
332       rtx prev = PREV_INSN (insn);
333 
334       label = gen_label_rtx ();
335       emit_label_after (label, prev);
336       LABEL_NUSES (label) = 0;
337     }
338   return label;
339 }
340 
341 /* Return the label after INSN, or put a new label there.  */
342 
343 rtx
get_label_after(rtx insn)344 get_label_after (rtx insn)
345 {
346   rtx label;
347 
348   /* Find an existing label at this point
349      or make a new one if there is none.  */
350   label = next_nonnote_insn (insn);
351 
352   if (label == 0 || !LABEL_P (label))
353     {
354       label = gen_label_rtx ();
355       emit_label_after (label, insn);
356       LABEL_NUSES (label) = 0;
357     }
358   return label;
359 }
360 
361 /* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
362    of reversed comparison if it is possible to do so.  Otherwise return UNKNOWN.
363    UNKNOWN may be returned in case we are having CC_MODE compare and we don't
364    know whether it's source is floating point or integer comparison.  Machine
365    description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
366    to help this function avoid overhead in these cases.  */
enum rtx_code
reversed_comparison_code_parts (enum rtx_code code, rtx arg0, rtx arg1, rtx insn)
{
  enum machine_mode mode;

  /* If this is not actually a comparison, we can't reverse it.  */
  if (GET_RTX_CLASS (code) != RTX_COMPARE
      && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
    return UNKNOWN;

  mode = GET_MODE (arg0);
  if (mode == VOIDmode)
    mode = GET_MODE (arg1);

  /* First see if machine description supplies us way to reverse the
     comparison.  Give it priority over everything else to allow
     machine description to do tricks.  */
  if (GET_MODE_CLASS (mode) == MODE_CC
      && REVERSIBLE_CC_MODE (mode))
    {
#ifdef REVERSE_CONDITION
      return REVERSE_CONDITION (code, mode);
#endif
      /* Only reached when the target defines no REVERSE_CONDITION;
	 otherwise the line above has already returned.  */
      return reverse_condition (code);
    }

  /* Try a few special cases based on the comparison code.  */
  switch (code)
    {
    case GEU:
    case GTU:
    case LEU:
    case LTU:
    case NE:
    case EQ:
      /* It is always safe to reverse EQ and NE, even for the floating
	 point.  Similarly the unsigned comparisons are never used for
	 floating point so we can reverse them in the default way.  */
      return reverse_condition (code);
    case ORDERED:
    case UNORDERED:
    case LTGT:
    case UNEQ:
      /* In case we already see unordered comparison, we can be sure to
	 be dealing with floating point so we don't need any more tests.  */
      return reverse_condition_maybe_unordered (code);
    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
      /* We don't have safe way to reverse these yet.  */
      return UNKNOWN;
    default:
      break;
    }

  if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
    {
      rtx prev;
      /* Try to search for the comparison to determine the real mode.
         This code is expensive, but with sane machine description it
         will be never used, since REVERSIBLE_CC_MODE will return true
         in all cases.  */
      if (! insn)
	return UNKNOWN;

      /* Scan backwards (stopping at a label) for the most recent set
	 of ARG0, to discover the mode the flags were produced in.  */
      for (prev = prev_nonnote_insn (insn);
	   prev != 0 && !LABEL_P (prev);
	   prev = prev_nonnote_insn (prev))
	{
	  rtx set = set_of (arg0, prev);
	  if (set && GET_CODE (set) == SET
	      && rtx_equal_p (SET_DEST (set), arg0))
	    {
	      rtx src = SET_SRC (set);

	      if (GET_CODE (src) == COMPARE)
		{
		  rtx comparison = src;
		  arg0 = XEXP (src, 0);
		  mode = GET_MODE (arg0);
		  if (mode == VOIDmode)
		    mode = GET_MODE (XEXP (comparison, 1));
		  break;
		}
	      /* We can get past reg-reg moves.  This may be useful for model
	         of i387 comparisons that first move flag registers around.  */
	      if (REG_P (src))
		{
		  arg0 = src;
		  continue;
		}
	    }
	  /* If register is clobbered in some ununderstandable way,
	     give up.  */
	  if (set)
	    return UNKNOWN;
	}
    }

  /* Test for an integer condition, or a floating-point comparison
     in which NaNs can be ignored.  */
  if (GET_CODE (arg0) == CONST_INT
      || (GET_MODE (arg0) != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_CC
	  && !HONOR_NANS (mode)))
    return reverse_condition (code);

  return UNKNOWN;
}
477 
478 /* A wrapper around the previous function to take COMPARISON as rtx
479    expression.  This simplifies many callers.  */
480 enum rtx_code
reversed_comparison_code(rtx comparison,rtx insn)481 reversed_comparison_code (rtx comparison, rtx insn)
482 {
483   if (!COMPARISON_P (comparison))
484     return UNKNOWN;
485   return reversed_comparison_code_parts (GET_CODE (comparison),
486 					 XEXP (comparison, 0),
487 					 XEXP (comparison, 1), insn);
488 }
489 
490 /* Return comparison with reversed code of EXP.
491    Return NULL_RTX in case we fail to do the reversal.  */
492 rtx
reversed_comparison(rtx exp,enum machine_mode mode)493 reversed_comparison (rtx exp, enum machine_mode mode)
494 {
495   enum rtx_code reversed_code = reversed_comparison_code (exp, NULL_RTX);
496   if (reversed_code == UNKNOWN)
497     return NULL_RTX;
498   else
499     return simplify_gen_relational (reversed_code, mode, VOIDmode,
500                                     XEXP (exp, 0), XEXP (exp, 1));
501 }
502 
503 
504 /* Given an rtx-code for a comparison, return the code for the negated
505    comparison.  If no such code exists, return UNKNOWN.
506 
507    WATCH OUT!  reverse_condition is not safe to use on a jump that might
508    be acting on the results of an IEEE floating point comparison, because
509    of the special treatment of non-signaling nans in comparisons.
510    Use reversed_comparison_code instead.  */
511 
512 enum rtx_code
reverse_condition(enum rtx_code code)513 reverse_condition (enum rtx_code code)
514 {
515   switch (code)
516     {
517     case EQ:
518       return NE;
519     case NE:
520       return EQ;
521     case GT:
522       return LE;
523     case GE:
524       return LT;
525     case LT:
526       return GE;
527     case LE:
528       return GT;
529     case GTU:
530       return LEU;
531     case GEU:
532       return LTU;
533     case LTU:
534       return GEU;
535     case LEU:
536       return GTU;
537     case UNORDERED:
538       return ORDERED;
539     case ORDERED:
540       return UNORDERED;
541 
542     case UNLT:
543     case UNLE:
544     case UNGT:
545     case UNGE:
546     case UNEQ:
547     case LTGT:
548       return UNKNOWN;
549 
550     default:
551       gcc_unreachable ();
552     }
553 }
554 
555 /* Similar, but we're allowed to generate unordered comparisons, which
556    makes it safe for IEEE floating-point.  Of course, we have to recognize
557    that the target will support them too...  */
558 
559 enum rtx_code
reverse_condition_maybe_unordered(enum rtx_code code)560 reverse_condition_maybe_unordered (enum rtx_code code)
561 {
562   switch (code)
563     {
564     case EQ:
565       return NE;
566     case NE:
567       return EQ;
568     case GT:
569       return UNLE;
570     case GE:
571       return UNLT;
572     case LT:
573       return UNGE;
574     case LE:
575       return UNGT;
576     case LTGT:
577       return UNEQ;
578     case UNORDERED:
579       return ORDERED;
580     case ORDERED:
581       return UNORDERED;
582     case UNLT:
583       return GE;
584     case UNLE:
585       return GT;
586     case UNGT:
587       return LE;
588     case UNGE:
589       return LT;
590     case UNEQ:
591       return LTGT;
592 
593     default:
594       gcc_unreachable ();
595     }
596 }
597 
598 /* Similar, but return the code when two operands of a comparison are swapped.
599    This IS safe for IEEE floating-point.  */
600 
601 enum rtx_code
swap_condition(enum rtx_code code)602 swap_condition (enum rtx_code code)
603 {
604   switch (code)
605     {
606     case EQ:
607     case NE:
608     case UNORDERED:
609     case ORDERED:
610     case UNEQ:
611     case LTGT:
612       return code;
613 
614     case GT:
615       return LT;
616     case GE:
617       return LE;
618     case LT:
619       return GT;
620     case LE:
621       return GE;
622     case GTU:
623       return LTU;
624     case GEU:
625       return LEU;
626     case LTU:
627       return GTU;
628     case LEU:
629       return GEU;
630     case UNLT:
631       return UNGT;
632     case UNLE:
633       return UNGE;
634     case UNGT:
635       return UNLT;
636     case UNGE:
637       return UNLE;
638 
639     default:
640       gcc_unreachable ();
641     }
642 }
643 
644 /* Given a comparison CODE, return the corresponding unsigned comparison.
645    If CODE is an equality comparison or already an unsigned comparison,
646    CODE is returned.  */
647 
648 enum rtx_code
unsigned_condition(enum rtx_code code)649 unsigned_condition (enum rtx_code code)
650 {
651   switch (code)
652     {
653     case EQ:
654     case NE:
655     case GTU:
656     case GEU:
657     case LTU:
658     case LEU:
659       return code;
660 
661     case GT:
662       return GTU;
663     case GE:
664       return GEU;
665     case LT:
666       return LTU;
667     case LE:
668       return LEU;
669 
670     default:
671       gcc_unreachable ();
672     }
673 }
674 
675 /* Similarly, return the signed version of a comparison.  */
676 
677 enum rtx_code
signed_condition(enum rtx_code code)678 signed_condition (enum rtx_code code)
679 {
680   switch (code)
681     {
682     case EQ:
683     case NE:
684     case GT:
685     case GE:
686     case LT:
687     case LE:
688       return code;
689 
690     case GTU:
691       return GT;
692     case GEU:
693       return GE;
694     case LTU:
695       return LT;
696     case LEU:
697       return LE;
698 
699     default:
700       gcc_unreachable ();
701     }
702 }
703 
704 /* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
705    truth of CODE1 implies the truth of CODE2.  */
706 
707 int
comparison_dominates_p(enum rtx_code code1,enum rtx_code code2)708 comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
709 {
710   /* UNKNOWN comparison codes can happen as a result of trying to revert
711      comparison codes.
712      They can't match anything, so we have to reject them here.  */
713   if (code1 == UNKNOWN || code2 == UNKNOWN)
714     return 0;
715 
716   if (code1 == code2)
717     return 1;
718 
719   switch (code1)
720     {
721     case UNEQ:
722       if (code2 == UNLE || code2 == UNGE)
723 	return 1;
724       break;
725 
726     case EQ:
727       if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
728 	  || code2 == ORDERED)
729 	return 1;
730       break;
731 
732     case UNLT:
733       if (code2 == UNLE || code2 == NE)
734 	return 1;
735       break;
736 
737     case LT:
738       if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
739 	return 1;
740       break;
741 
742     case UNGT:
743       if (code2 == UNGE || code2 == NE)
744 	return 1;
745       break;
746 
747     case GT:
748       if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
749 	return 1;
750       break;
751 
752     case GE:
753     case LE:
754       if (code2 == ORDERED)
755 	return 1;
756       break;
757 
758     case LTGT:
759       if (code2 == NE || code2 == ORDERED)
760 	return 1;
761       break;
762 
763     case LTU:
764       if (code2 == LEU || code2 == NE)
765 	return 1;
766       break;
767 
768     case GTU:
769       if (code2 == GEU || code2 == NE)
770 	return 1;
771       break;
772 
773     case UNORDERED:
774       if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
775 	  || code2 == UNGE || code2 == UNGT)
776 	return 1;
777       break;
778 
779     default:
780       break;
781     }
782 
783   return 0;
784 }
785 
786 /* Return 1 if INSN is an unconditional jump and nothing else.  */
787 
788 int
simplejump_p(rtx insn)789 simplejump_p (rtx insn)
790 {
791   return (JUMP_P (insn)
792 	  && GET_CODE (PATTERN (insn)) == SET
793 	  && GET_CODE (SET_DEST (PATTERN (insn))) == PC
794 	  && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
795 }
796 
797 /* Return nonzero if INSN is a (possibly) conditional jump
798    and nothing more.
799 
800    Use of this function is deprecated, since we need to support combined
801    branch and compare insns.  Use any_condjump_p instead whenever possible.  */
802 
803 int
condjump_p(rtx insn)804 condjump_p (rtx insn)
805 {
806   rtx x = PATTERN (insn);
807 
808   if (GET_CODE (x) != SET
809       || GET_CODE (SET_DEST (x)) != PC)
810     return 0;
811 
812   x = SET_SRC (x);
813   if (GET_CODE (x) == LABEL_REF)
814     return 1;
815   else
816     return (GET_CODE (x) == IF_THEN_ELSE
817 	    && ((GET_CODE (XEXP (x, 2)) == PC
818 		 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
819 		     || GET_CODE (XEXP (x, 1)) == RETURN))
820 		|| (GET_CODE (XEXP (x, 1)) == PC
821 		    && (GET_CODE (XEXP (x, 2)) == LABEL_REF
822 			|| GET_CODE (XEXP (x, 2)) == RETURN))));
823 }
824 
825 /* Return nonzero if INSN is a (possibly) conditional jump inside a
826    PARALLEL.
827 
828    Use this function is deprecated, since we need to support combined
829    branch and compare insns.  Use any_condjump_p instead whenever possible.  */
830 
831 int
condjump_in_parallel_p(rtx insn)832 condjump_in_parallel_p (rtx insn)
833 {
834   rtx x = PATTERN (insn);
835 
836   if (GET_CODE (x) != PARALLEL)
837     return 0;
838   else
839     x = XVECEXP (x, 0, 0);
840 
841   if (GET_CODE (x) != SET)
842     return 0;
843   if (GET_CODE (SET_DEST (x)) != PC)
844     return 0;
845   if (GET_CODE (SET_SRC (x)) == LABEL_REF)
846     return 1;
847   if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
848     return 0;
849   if (XEXP (SET_SRC (x), 2) == pc_rtx
850       && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
851 	  || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
852     return 1;
853   if (XEXP (SET_SRC (x), 1) == pc_rtx
854       && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
855 	  || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
856     return 1;
857   return 0;
858 }
859 
860 /* Return set of PC, otherwise NULL.  */
861 
862 rtx
pc_set(rtx insn)863 pc_set (rtx insn)
864 {
865   rtx pat;
866   if (!JUMP_P (insn))
867     return NULL_RTX;
868   pat = PATTERN (insn);
869 
870   /* The set is allowed to appear either as the insn pattern or
871      the first set in a PARALLEL.  */
872   if (GET_CODE (pat) == PARALLEL)
873     pat = XVECEXP (pat, 0, 0);
874   if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
875     return pat;
876 
877   return NULL_RTX;
878 }
879 
880 /* Return true when insn is an unconditional direct jump,
881    possibly bundled inside a PARALLEL.  */
882 
883 int
any_uncondjump_p(rtx insn)884 any_uncondjump_p (rtx insn)
885 {
886   rtx x = pc_set (insn);
887   if (!x)
888     return 0;
889   if (GET_CODE (SET_SRC (x)) != LABEL_REF)
890     return 0;
891   if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
892     return 0;
893   return 1;
894 }
895 
896 /* Return true when insn is a conditional jump.  This function works for
897    instructions containing PC sets in PARALLELs.  The instruction may have
898    various other effects so before removing the jump you must verify
899    onlyjump_p.
900 
901    Note that unlike condjump_p it returns false for unconditional jumps.  */
902 
903 int
any_condjump_p(rtx insn)904 any_condjump_p (rtx insn)
905 {
906   rtx x = pc_set (insn);
907   enum rtx_code a, b;
908 
909   if (!x)
910     return 0;
911   if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
912     return 0;
913 
914   a = GET_CODE (XEXP (SET_SRC (x), 1));
915   b = GET_CODE (XEXP (SET_SRC (x), 2));
916 
917   return ((b == PC && (a == LABEL_REF || a == RETURN))
918 	  || (a == PC && (b == LABEL_REF || b == RETURN)));
919 }
920 
921 /* Return the label of a conditional jump.  */
922 
923 rtx
condjump_label(rtx insn)924 condjump_label (rtx insn)
925 {
926   rtx x = pc_set (insn);
927 
928   if (!x)
929     return NULL_RTX;
930   x = SET_SRC (x);
931   if (GET_CODE (x) == LABEL_REF)
932     return x;
933   if (GET_CODE (x) != IF_THEN_ELSE)
934     return NULL_RTX;
935   if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
936     return XEXP (x, 1);
937   if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
938     return XEXP (x, 2);
939   return NULL_RTX;
940 }
941 
942 /* Return true if INSN is a (possibly conditional) return insn.  */
943 
944 static int
returnjump_p_1(rtx * loc,void * data ATTRIBUTE_UNUSED)945 returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
946 {
947   rtx x = *loc;
948 
949   return x && (GET_CODE (x) == RETURN
950 	       || (GET_CODE (x) == SET && SET_IS_RETURN_P (x)));
951 }
952 
953 int
returnjump_p(rtx insn)954 returnjump_p (rtx insn)
955 {
956   if (!JUMP_P (insn))
957     return 0;
958   return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
959 }
960 
961 /* Return true if INSN is a jump that only transfers control and
962    nothing more.  */
963 
964 int
onlyjump_p(rtx insn)965 onlyjump_p (rtx insn)
966 {
967   rtx set;
968 
969   if (!JUMP_P (insn))
970     return 0;
971 
972   set = single_set (insn);
973   if (set == NULL)
974     return 0;
975   if (GET_CODE (SET_DEST (set)) != PC)
976     return 0;
977   if (side_effects_p (SET_SRC (set)))
978     return 0;
979 
980   return 1;
981 }
982 
983 #ifdef HAVE_cc0
984 
985 /* Return nonzero if X is an RTX that only sets the condition codes
986    and has no side effects.  */
987 
988 int
only_sets_cc0_p(rtx x)989 only_sets_cc0_p (rtx x)
990 {
991   if (! x)
992     return 0;
993 
994   if (INSN_P (x))
995     x = PATTERN (x);
996 
997   return sets_cc0_p (x) == 1 && ! side_effects_p (x);
998 }
999 
1000 /* Return 1 if X is an RTX that does nothing but set the condition codes
1001    and CLOBBER or USE registers.
1002    Return -1 if X does explicitly set the condition codes,
1003    but also does other things.  */
1004 
1005 int
sets_cc0_p(rtx x)1006 sets_cc0_p (rtx x)
1007 {
1008   if (! x)
1009     return 0;
1010 
1011   if (INSN_P (x))
1012     x = PATTERN (x);
1013 
1014   if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
1015     return 1;
1016   if (GET_CODE (x) == PARALLEL)
1017     {
1018       int i;
1019       int sets_cc0 = 0;
1020       int other_things = 0;
1021       for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1022 	{
1023 	  if (GET_CODE (XVECEXP (x, 0, i)) == SET
1024 	      && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
1025 	    sets_cc0 = 1;
1026 	  else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
1027 	    other_things = 1;
1028 	}
1029       return ! sets_cc0 ? 0 : other_things ? -1 : 1;
1030     }
1031   return 0;
1032 }
1033 #endif
1034 
1035 /* Follow any unconditional jump at LABEL;
1036    return the ultimate label reached by any such chain of jumps.
1037    Return null if the chain ultimately leads to a return instruction.
1038    If LABEL is not followed by a jump, return LABEL.
1039    If the chain loops or we can't find end, return LABEL,
1040    since that tells caller to avoid changing the insn.
1041 
1042    If RELOAD_COMPLETED is 0, we do not chain across a USE or CLOBBER.  */
1043 
rtx
follow_jumps (rtx label)
{
  rtx insn;
  rtx next;
  rtx value = label;
  int depth;

  /* Chase through at most 10 unconditional jumps (or RETURNs followed
     by a barrier); each iteration advances VALUE one hop along the
     chain.  The loop condition both tests and assigns INSN/NEXT, so
     the short-circuit order of the clauses matters.  */
  for (depth = 0;
       (depth < 10
	&& (insn = next_active_insn (value)) != 0
	&& JUMP_P (insn)
	&& ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
	     && onlyjump_p (insn))
	    || GET_CODE (PATTERN (insn)) == RETURN)
	&& (next = NEXT_INSN (insn))
	&& BARRIER_P (next));
       depth++)
    {
      rtx tem;
      if (!reload_completed && flag_test_coverage)
	{
	  /* ??? Optional.  Disables some optimizations, but makes
	     gcov output more accurate with -O.  */
	  for (tem = value; tem != insn; tem = NEXT_INSN (tem))
	    if (NOTE_P (tem) && NOTE_LINE_NUMBER (tem) > 0)
	      return value;
	}

      /* If we have found a cycle, make the insn jump to itself.  */
      if (JUMP_LABEL (insn) == label)
	return label;

      /* Don't chain through the target if it leads straight into a
	 dispatch table; stop with the label reached so far.  */
      tem = next_active_insn (JUMP_LABEL (insn));
      if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
		  || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
	break;

      value = JUMP_LABEL (insn);
    }
  /* Depth exhausted: the chain may loop or be too long; tell the
     caller to leave the insn alone by returning the original label.  */
  if (depth == 10)
    return label;
  return value;
}
1088 
1089 
1090 /* Find all CODE_LABELs referred to in X, and increment their use counts.
1091    If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
1092    in INSN, then store one of them in JUMP_LABEL (INSN).
1093    If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
1094    referenced in INSN, add a REG_LABEL note containing that label to INSN.
1095    Also, when there are consecutive labels, canonicalize on the last of them.
1096 
1097    Note that two labels separated by a loop-beginning note
1098    must be kept distinct if we have not yet done loop-optimization,
1099    because the gap between them is where loop-optimize
1100    will want to move invariant code to.  CROSS_JUMP tells us
1101    that loop-optimization is done with.  */
1102 
void
mark_jump_label (rtx x, rtx insn, int in_mem)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    /* These codes never contain a label reference; nothing to do.  */
    case PC:
    case CC0:
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CLOBBER:
    case CALL:
      return;

    case MEM:
      /* Remember we are inside a MEM so SYMBOL_REFs below may be
	 constant-pool references.  */
      in_mem = 1;
      break;

    case SYMBOL_REF:
      if (!in_mem)
	return;

      /* If this is a constant-pool reference, see if it is a label.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
	mark_jump_label (get_pool_constant (x), insn, in_mem);
      break;

    case LABEL_REF:
      {
	rtx label = XEXP (x, 0);

	/* Ignore remaining references to unreachable labels that
	   have been deleted.  */
	if (NOTE_P (label)
	    && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
	  break;

	gcc_assert (LABEL_P (label));

	/* Ignore references to labels of containing functions.  */
	if (LABEL_REF_NONLOCAL_P (x))
	  break;

	/* NOTE(review): this store looks like a leftover of label
	   canonicalization; LABEL is unchanged here.  */
	XEXP (x, 0) = label;
	if (! insn || ! INSN_DELETED_P (insn))
	  ++LABEL_NUSES (label);

	if (insn)
	  {
	    if (JUMP_P (insn))
	      JUMP_LABEL (insn) = label;
	    else
	      {
		/* Add a REG_LABEL note for LABEL unless there already
		   is one.  All uses of a label, except for labels
		   that are the targets of jumps, must have a
		   REG_LABEL note.  */
		if (! find_reg_note (insn, REG_LABEL, label))
		  REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
							REG_NOTES (insn));
	      }
	  }
	return;
      }

  /* Do walk the labels in a vector, but not the first operand of an
     ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      if (! INSN_DELETED_P (insn))
	{
	  int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

	  for (i = 0; i < XVECLEN (x, eltnum); i++)
	    mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
	}
      return;

    default:
      break;
    }

  /* Generic recursion over all rtx operands of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_jump_label (XEXP (x, i), insn, in_mem);
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
	}
    }
}
1202 
1203 /* If all INSN does is set the pc, delete it,
1204    and delete the insn that set the condition codes for it
1205    if that's what the previous thing was.  */
1206 
1207 void
delete_jump(rtx insn)1208 delete_jump (rtx insn)
1209 {
1210   rtx set = single_set (insn);
1211 
1212   if (set && GET_CODE (SET_DEST (set)) == PC)
1213     delete_computation (insn);
1214 }
1215 
1216 /* Recursively delete prior insns that compute the value (used only by INSN
1217    which the caller is deleting) stored in the register mentioned by NOTE
1218    which is a REG_DEAD note associated with INSN.  */
1219 
static void
delete_prior_computation (rtx note, rtx insn)
{
  rtx our_prev;
  rtx reg = XEXP (note, 0);

  /* Walk backward over ordinary insns and calls looking for the insn
     that computes REG; stop at anything we cannot reason about.  */
  for (our_prev = prev_nonnote_insn (insn);
       our_prev && (NONJUMP_INSN_P (our_prev)
		    || CALL_P (our_prev));
       our_prev = prev_nonnote_insn (our_prev))
    {
      rtx pat = PATTERN (our_prev);

      /* If we reach a CALL which is not calling a const function
	 or the callee pops the arguments, then give up.  */
      if (CALL_P (our_prev)
	  && (! CONST_OR_PURE_CALL_P (our_prev)
	      || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
	break;

      /* If we reach a SEQUENCE, it is too complex to try to
	 do anything with it, so give up.  We can be run during
	 and after reorg, so SEQUENCE rtl can legitimately show
	 up here.  */
      if (GET_CODE (pat) == SEQUENCE)
	break;

      if (GET_CODE (pat) == USE
	  && NONJUMP_INSN_P (XEXP (pat, 0)))
	/* reorg creates USEs that look like this.  We leave them
	   alone because reorg needs them for its own purposes.  */
	break;

      if (reg_set_p (reg, pat))
	{
	  /* A setter with other side effects can only be removed if it
	     is a call (whose side effects were already accepted above).  */
	  if (side_effects_p (pat) && !CALL_P (our_prev))
	    break;

	  if (GET_CODE (pat) == PARALLEL)
	    {
	      /* If we find a SET of something else, we can't
		 delete the insn.  */

	      int i;

	      for (i = 0; i < XVECLEN (pat, 0); i++)
		{
		  rtx part = XVECEXP (pat, 0, i);

		  if (GET_CODE (part) == SET
		      && SET_DEST (part) != reg)
		    break;
		}

	      /* Reached the end without finding a foreign SET: every
		 SET targets REG, so the whole insn is dead.  */
	      if (i == XVECLEN (pat, 0))
		delete_computation (our_prev);
	    }
	  else if (GET_CODE (pat) == SET
		   && REG_P (SET_DEST (pat)))
	    {
	      /* Compare the hard-register ranges covered by the
		 destination and by REG (pseudos count as one reg).  */
	      int dest_regno = REGNO (SET_DEST (pat));
	      int dest_endregno
		= (dest_regno
		   + (dest_regno < FIRST_PSEUDO_REGISTER
		      ? hard_regno_nregs[dest_regno]
					[GET_MODE (SET_DEST (pat))] : 1));
	      int regno = REGNO (reg);
	      int endregno
		= (regno
		   + (regno < FIRST_PSEUDO_REGISTER
		      ? hard_regno_nregs[regno][GET_MODE (reg)] : 1));

	      /* Destination entirely within the dying range: dead.  */
	      if (dest_regno >= regno
		  && dest_endregno <= endregno)
		delete_computation (our_prev);

	      /* We may have a multi-word hard register and some, but not
		 all, of the words of the register are needed in subsequent
		 insns.  Write REG_UNUSED notes for those parts that were not
		 needed.  */
	      else if (dest_regno <= regno
		       && dest_endregno >= endregno)
		{
		  int i;

		  REG_NOTES (our_prev)
		    = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
					 REG_NOTES (our_prev));

		  for (i = dest_regno; i < dest_endregno; i++)
		    if (! find_regno_note (our_prev, REG_UNUSED, i))
		      break;

		  /* Every word is now marked unused: delete the insn.  */
		  if (i == dest_endregno)
		    delete_computation (our_prev);
		}
	    }

	  break;
	}

      /* If PAT references the register that dies here, it is an
	 additional use.  Hence any prior SET isn't dead.  However, this
	 insn becomes the new place for the REG_DEAD note.  */
      if (reg_overlap_mentioned_p (reg, pat))
	{
	  XEXP (note, 1) = REG_NOTES (our_prev);
	  REG_NOTES (our_prev) = note;
	  break;
	}
    }
}
1332 
1333 /* Delete INSN and recursively delete insns that compute values used only
1334    by INSN.  This uses the REG_DEAD notes computed during flow analysis.
1335    If we are running before flow.c, we need do nothing since flow.c will
1336    delete dead code.  We also can't know if the registers being used are
1337    dead or not at this point.
1338 
1339    Otherwise, look at all our REG_DEAD notes.  If a previous insn does
1340    nothing other than set a register that dies in this insn, we can delete
1341    that insn as well.
1342 
1343    On machines with CC0, if CC0 is used in this insn, we may be able to
1344    delete the insn that set it.  */
1345 
static void
delete_computation (rtx insn)
{
  rtx note, next;

#ifdef HAVE_cc0
  if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
    {
      rtx prev = prev_nonnote_insn (insn);
      /* We assume that at this stage
	 CC's are always set explicitly
	 and always immediately before the jump that
	 will use them.  So if the previous insn
	 exists to set the CC's, delete it
	 (unless it performs auto-increments, etc.).  */
      if (prev && NONJUMP_INSN_P (prev)
	  && sets_cc0_p (PATTERN (prev)))
	{
	  /* sets_cc0_p > 0 means the insn does nothing but set cc0.  */
	  if (sets_cc0_p (PATTERN (prev)) > 0
	      && ! side_effects_p (PATTERN (prev)))
	    delete_computation (prev);
	  else
	    /* Otherwise, show that cc0 won't be used.  */
	    REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
						  cc0_rtx, REG_NOTES (prev));
	}
    }
#endif

  /* Recurse into the setters of every register that dies in INSN.
     NEXT is saved up front because delete_prior_computation may splice
     NOTE into another insn's note list.  */
  for (note = REG_NOTES (insn); note; note = next)
    {
      next = XEXP (note, 1);

      if (REG_NOTE_KIND (note) != REG_DEAD
	  /* Verify that the REG_NOTE is legitimate.  */
	  || !REG_P (XEXP (note, 0)))
	continue;

      delete_prior_computation (note, insn);
    }

  /* Finally remove INSN itself (and anything reachable only via it).  */
  delete_related_insns (insn);
}
1389 
1390 /* Delete insn INSN from the chain of insns and update label ref counts
1391    and delete insns now unreachable.
1392 
1393    Returns the first insn after INSN that was not deleted.
1394 
1395    Usage of this instruction is deprecated.  Use delete_insn instead and
1396    subsequent cfg_cleanup pass to delete unreachable code if needed.  */
1397 
rtx
delete_related_insns (rtx insn)
{
  int was_code_label = (LABEL_P (insn));
  rtx note;
  rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);

  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (INSN_DELETED_P (insn))
    return next;

  delete_insn (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && BARRIER_P (next))
    delete_insn (next);

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (JUMP_P (insn) && JUMP_LABEL (insn))
    {
      rtx lab = JUMP_LABEL (insn), lab_next;

      /* NOTE: delete_insn above already decremented LABEL_NUSES.  */
      if (LABEL_NUSES (lab) == 0)
	{
	  /* This can delete NEXT or PREV,
	     either directly if NEXT is JUMP_LABEL (INSN),
	     or indirectly through more levels of jumps.  */
	  delete_related_insns (lab);

	  /* I feel a little doubtful about this loop,
	     but I see no clean and sure alternative way
	     to find the first insn after INSN that is not now deleted.
	     I hope this works.  */
	  while (next && INSN_DELETED_P (next))
	    next = NEXT_INSN (next);
	  return next;
	}
      else if (tablejump_p (insn, NULL, &lab_next))
	{
	  /* If we're deleting the tablejump, delete the dispatch table.
	     We may not be able to kill the label immediately preceding
	     just yet, as it might be referenced in code leading up to
	     the tablejump.  */
	  delete_related_insns (lab_next);
	}
    }

  /* Likewise if we're deleting a dispatch table.  */

  if (JUMP_P (insn)
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    {
      rtx pat = PATTERN (insn);
      int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);

      /* Delete every target label of the table that is now unused.  */
      for (i = 0; i < len; i++)
	if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
	  delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
      while (next && INSN_DELETED_P (next))
	next = NEXT_INSN (next);
      return next;
    }

  /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note.  */
  if (NONJUMP_INSN_P (insn) || CALL_P (insn))
    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
      if (REG_NOTE_KIND (note) == REG_LABEL
	  /* This could also be a NOTE_INSN_DELETED_LABEL note.  */
	  && LABEL_P (XEXP (note, 0)))
	if (LABEL_NUSES (XEXP (note, 0)) == 0)
	  delete_related_insns (XEXP (note, 0));

  /* Back up PREV to the nearest surviving non-note insn, so we can
     tell below whether INSN was preceded by a barrier.  */
  while (prev && (INSN_DELETED_P (prev) || NOTE_P (prev)))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && JUMP_P (NEXT_INSN (insn))
      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
	  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
    next = delete_related_insns (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && BARRIER_P (prev))
    {
      enum rtx_code code;
      while (next)
	{
	  code = GET_CODE (next);
	  if (code == NOTE
	      && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
	    next = NEXT_INSN (next);
	  /* Keep going past other deleted labels to delete what follows.  */
	  else if (code == CODE_LABEL && INSN_DELETED_P (next))
	    next = NEXT_INSN (next);
	  else if (code == BARRIER || INSN_P (next))
	    /* Note: if this deletes a jump, it can cause more
	       deletion of unreachable code, after a different label.
	       As long as the value from this recursive call is correct,
	       this invocation functions correctly.  */
	    next = delete_related_insns (next);
	  else
	    break;
	}
    }

  return next;
}
1520 
1521 /* Delete a range of insns from FROM to TO, inclusive.
1522    This is for the sake of peephole optimization, so assume
1523    that whatever these insns do will still be done by a new
1524    peephole insn that will replace them.  */
1525 
1526 void
delete_for_peephole(rtx from,rtx to)1527 delete_for_peephole (rtx from, rtx to)
1528 {
1529   rtx insn = from;
1530 
1531   while (1)
1532     {
1533       rtx next = NEXT_INSN (insn);
1534       rtx prev = PREV_INSN (insn);
1535 
1536       if (!NOTE_P (insn))
1537 	{
1538 	  INSN_DELETED_P (insn) = 1;
1539 
1540 	  /* Patch this insn out of the chain.  */
1541 	  /* We don't do this all at once, because we
1542 	     must preserve all NOTEs.  */
1543 	  if (prev)
1544 	    NEXT_INSN (prev) = next;
1545 
1546 	  if (next)
1547 	    PREV_INSN (next) = prev;
1548 	}
1549 
1550       if (insn == to)
1551 	break;
1552       insn = next;
1553     }
1554 
1555   /* Note that if TO is an unconditional jump
1556      we *do not* delete the BARRIER that follows,
1557      since the peephole that replaces this sequence
1558      is also an unconditional jump in that case.  */
1559 }
1560 
1561 /* Throughout LOC, redirect OLABEL to NLABEL.  Treat null OLABEL or
1562    NLABEL as a return.  Accrue modifications into the change group.  */
1563 
static void
redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx insn)
{
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  /* A reference to OLABEL becomes a reference to NLABEL, or a RETURN
     when NLABEL is null.  The change is queued, not applied.  */
  if (code == LABEL_REF)
    {
      if (XEXP (x, 0) == olabel)
	{
	  rtx n;
	  if (nlabel)
	    n = gen_rtx_LABEL_REF (Pmode, nlabel);
	  else
	    n = gen_rtx_RETURN (VOIDmode);

	  validate_change (insn, loc, n, 1);
	  return;
	}
    }
  /* Conversely, a RETURN matched by null OLABEL is retargeted.  */
  else if (code == RETURN && olabel == 0)
    {
      if (nlabel)
	x = gen_rtx_LABEL_REF (Pmode, nlabel);
      else
	x = gen_rtx_RETURN (VOIDmode);
      /* At the top of the pattern a bare target must be wrapped in a
	 SET of the pc.  */
      if (loc == &PATTERN (insn))
	x = gen_rtx_SET (VOIDmode, pc_rtx, x);
      validate_change (insn, loc, x, 1);
      return;
    }

  /* Turn (set (pc) (label_ref OLABEL)) directly into a RETURN when
     OLABEL is being replaced by "return".  */
  if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && XEXP (SET_SRC (x), 0) == olabel)
    {
      validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
      return;
    }

  /* Recurse over all operands of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
	}
    }
}
1619 
1620 /* Make JUMP go to NLABEL instead of where it jumps now.  Accrue
1621    the modifications into the change group.  Return false if we did
1622    not see how to do that.  */
1623 
1624 int
redirect_jump_1(rtx jump,rtx nlabel)1625 redirect_jump_1 (rtx jump, rtx nlabel)
1626 {
1627   int ochanges = num_validated_changes ();
1628   rtx *loc;
1629 
1630   if (GET_CODE (PATTERN (jump)) == PARALLEL)
1631     loc = &XVECEXP (PATTERN (jump), 0, 0);
1632   else
1633     loc = &PATTERN (jump);
1634 
1635   redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
1636   return num_validated_changes () > ochanges;
1637 }
1638 
1639 /* Make JUMP go to NLABEL instead of where it jumps now.  If the old
1640    jump target label is unused as a result, it and the code following
1641    it may be deleted.
1642 
1643    If NLABEL is zero, we are to turn the jump into a (possibly conditional)
1644    RETURN insn.
1645 
1646    The return value will be 1 if the change was made, 0 if it wasn't
1647    (this can only occur for NLABEL == 0).  */
1648 
1649 int
redirect_jump(rtx jump,rtx nlabel,int delete_unused)1650 redirect_jump (rtx jump, rtx nlabel, int delete_unused)
1651 {
1652   rtx olabel = JUMP_LABEL (jump);
1653 
1654   if (nlabel == olabel)
1655     return 1;
1656 
1657   if (! redirect_jump_1 (jump, nlabel) || ! apply_change_group ())
1658     return 0;
1659 
1660   redirect_jump_2 (jump, olabel, nlabel, delete_unused, 0);
1661   return 1;
1662 }
1663 
1664 /* Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced with
1665    NLABEL in JUMP.  If DELETE_UNUSED is non-negative, copy a
1666    NOTE_INSN_FUNCTION_END found after OLABEL to the place after NLABEL.
1667    If DELETE_UNUSED is positive, delete related insn to OLABEL if its ref
1668    count has dropped to zero.  */
void
redirect_jump_2 (rtx jump, rtx olabel, rtx nlabel, int delete_unused,
		 int invert)
{
  rtx note;

  /* Point JUMP at its new target and count the new use.  */
  JUMP_LABEL (jump) = nlabel;
  if (nlabel)
    ++LABEL_NUSES (nlabel);

  /* Update labels in any REG_EQUAL note.  */
  if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
    {
      /* Drop the note when it can't be kept accurate: either there is
	 no new label, or the condition inversion can't be mirrored.  */
      if (!nlabel || (invert && !invert_exp_1 (XEXP (note, 0), jump)))
	remove_note (jump, note);
      else
	{
	  redirect_exp_1 (&XEXP (note, 0), olabel, nlabel, jump);
	  confirm_change_group ();
	}
    }

  /* If we're eliding the jump over exception cleanups at the end of a
     function, move the function end note so that -Wreturn-type works.  */
  if (olabel && nlabel
      && NEXT_INSN (olabel)
      && NOTE_P (NEXT_INSN (olabel))
      && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END
      && delete_unused >= 0)
    emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);

  /* Drop the old use; if requested and the count hits zero, delete the
     old label and any code reachable only through it.  */
  if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused > 0
      /* Undefined labels will remain outside the insn stream.  */
      && INSN_UID (olabel))
    delete_related_insns (olabel);
  if (invert)
    invert_br_probabilities (jump);
}
1707 
1708 /* Invert the jump condition X contained in jump insn INSN.  Accrue the
1709    modifications into the change group.  Return nonzero for success.  */
1710 static int
invert_exp_1(rtx x,rtx insn)1711 invert_exp_1 (rtx x, rtx insn)
1712 {
1713   RTX_CODE code = GET_CODE (x);
1714 
1715   if (code == IF_THEN_ELSE)
1716     {
1717       rtx comp = XEXP (x, 0);
1718       rtx tem;
1719       enum rtx_code reversed_code;
1720 
1721       /* We can do this in two ways:  The preferable way, which can only
1722 	 be done if this is not an integer comparison, is to reverse
1723 	 the comparison code.  Otherwise, swap the THEN-part and ELSE-part
1724 	 of the IF_THEN_ELSE.  If we can't do either, fail.  */
1725 
1726       reversed_code = reversed_comparison_code (comp, insn);
1727 
1728       if (reversed_code != UNKNOWN)
1729 	{
1730 	  validate_change (insn, &XEXP (x, 0),
1731 			   gen_rtx_fmt_ee (reversed_code,
1732 					   GET_MODE (comp), XEXP (comp, 0),
1733 					   XEXP (comp, 1)),
1734 			   1);
1735 	  return 1;
1736 	}
1737 
1738       tem = XEXP (x, 1);
1739       validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
1740       validate_change (insn, &XEXP (x, 2), tem, 1);
1741       return 1;
1742     }
1743   else
1744     return 0;
1745 }
1746 
1747 /* Invert the condition of the jump JUMP, and make it jump to label
1748    NLABEL instead of where it jumps now.  Accrue changes into the
1749    change group.  Return false if we didn't see how to perform the
1750    inversion and redirection.  */
1751 
int
invert_jump_1 (rtx jump, rtx nlabel)
{
  rtx x = pc_set (jump);
  int ochanges;
  int ok;

  ochanges = num_validated_changes ();
  /* JUMP must have a (set (pc) ...); inversion is meaningless otherwise.  */
  gcc_assert (x);
  ok = invert_exp_1 (SET_SRC (x), jump);
  gcc_assert (ok);

  /* No change was queued: the inversion did not take effect.  */
  if (num_validated_changes () == ochanges)
    return 0;

  /* redirect_jump_1 will fail if nlabel == olabel, and the current use is
     in Pmode, so checking this is not merely an optimization.  */
  return nlabel == JUMP_LABEL (jump) || redirect_jump_1 (jump, nlabel);
}
1771 
1772 /* Invert the condition of the jump JUMP, and make it jump to label
1773    NLABEL instead of where it jumps now.  Return true if successful.  */
1774 
1775 int
invert_jump(rtx jump,rtx nlabel,int delete_unused)1776 invert_jump (rtx jump, rtx nlabel, int delete_unused)
1777 {
1778   rtx olabel = JUMP_LABEL (jump);
1779 
1780   if (invert_jump_1 (jump, nlabel) && apply_change_group ())
1781     {
1782       redirect_jump_2 (jump, olabel, nlabel, delete_unused, 1);
1783       return 1;
1784     }
1785   cancel_changes (0);
1786   return 0;
1787 }
1788 
1789 
1790 /* Like rtx_equal_p except that it considers two REGs as equal
1791    if they renumber to the same value and considers two commutative
1792    operations to be the same if the order of the operands has been
1793    reversed.  */
1794 
int
rtx_renumbered_equal_p (rtx x, rtx y)
{
  int i;
  enum rtx_code code = GET_CODE (x);
  const char *fmt;

  if (x == y)
    return 1;

  /* Registers (possibly wrapped in SUBREGs) compare equal when they
     renumber to the same hard register and byte offset.  */
  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG
				  && REG_P (SUBREG_REG (y)))))
    {
      int reg_x = -1, reg_y = -1;
      int byte_x = 0, byte_y = 0;

      if (GET_MODE (x) != GET_MODE (y))
	return 0;

      /* If we haven't done any renumbering, don't
	 make any assumptions.  */
      if (reg_renumber == 0)
	return rtx_equal_p (x, y);

      if (code == SUBREG)
	{
	  reg_x = REGNO (SUBREG_REG (x));
	  byte_x = SUBREG_BYTE (x);

	  /* Fold the SUBREG offset into the renumbered hard regno.  */
	  if (reg_renumber[reg_x] >= 0)
	    {
	      reg_x = subreg_regno_offset (reg_renumber[reg_x],
					   GET_MODE (SUBREG_REG (x)),
					   byte_x,
					   GET_MODE (x));
	      byte_x = 0;
	    }
	}
      else
	{
	  reg_x = REGNO (x);
	  if (reg_renumber[reg_x] >= 0)
	    reg_x = reg_renumber[reg_x];
	}

      if (GET_CODE (y) == SUBREG)
	{
	  reg_y = REGNO (SUBREG_REG (y));
	  byte_y = SUBREG_BYTE (y);

	  if (reg_renumber[reg_y] >= 0)
	    {
	      reg_y = subreg_regno_offset (reg_renumber[reg_y],
					   GET_MODE (SUBREG_REG (y)),
					   byte_y,
					   GET_MODE (y));
	      byte_y = 0;
	    }
	}
      else
	{
	  reg_y = REGNO (y);
	  if (reg_renumber[reg_y] >= 0)
	    reg_y = reg_renumber[reg_y];
	}

      return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
    }

  /* Now we have disposed of all the cases
     in which different rtx codes can match.  */
  if (code != GET_CODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case CONST_INT:
    case CONST_DOUBLE:
      /* Pointer equality (tested above) is the only equality for these.  */
      return 0;

    case LABEL_REF:
      /* We can't assume nonlocal labels have their following insns yet.  */
      if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
	return XEXP (x, 0) == XEXP (y, 0);

      /* Two label-refs are equivalent if they point at labels
	 in the same position in the instruction stream.  */
      return (next_real_insn (XEXP (x, 0))
	      == next_real_insn (XEXP (y, 0)));

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case CODE_LABEL:
      /* If we didn't match EQ equality above, they aren't the same.  */
      return 0;

    default:
      break;
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* For commutative operations, the RTX match if the operand match in any
     order.  Also handle the simple binary and unary cases without a loop.  */
  if (targetm.commutative_p (x, UNKNOWN))
    return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	     && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
	    || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
		&& rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
  else if (NON_COMMUTATIVE_P (x))
    return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	    && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
  else if (UNARY_P (x))
    return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole things.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 't':
	  if (XTREE (x, i) != XTREE (y, i))
	    return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'e':
	  if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
	    return 0;
	  break;

	case 'u':
	  if (XEXP (x, i) != XEXP (y, i))
	    return 0;
	  /* Fall through.  */
	case '0':
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
	      return 0;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  return 1;
}
1974 
1975 /* If X is a hard register or equivalent to one or a subregister of one,
1976    return the hard register number.  If X is a pseudo register that was not
1977    assigned a hard register, return the pseudo register number.  Otherwise,
1978    return -1.  Any rtx is valid for X.  */
1979 
1980 int
true_regnum(rtx x)1981 true_regnum (rtx x)
1982 {
1983   if (REG_P (x))
1984     {
1985       if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
1986 	return reg_renumber[REGNO (x)];
1987       return REGNO (x);
1988     }
1989   if (GET_CODE (x) == SUBREG)
1990     {
1991       int base = true_regnum (SUBREG_REG (x));
1992       if (base >= 0
1993 	  && base < FIRST_PSEUDO_REGISTER
1994 	  && subreg_offset_representable_p (REGNO (SUBREG_REG (x)),
1995 					    GET_MODE (SUBREG_REG (x)),
1996 					    SUBREG_BYTE (x), GET_MODE (x)))
1997 	return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
1998 					   GET_MODE (SUBREG_REG (x)),
1999 					   SUBREG_BYTE (x), GET_MODE (x));
2000     }
2001   return -1;
2002 }
2003 
2004 /* Return regno of the register REG and handle subregs too.  */
2005 unsigned int
reg_or_subregno(rtx reg)2006 reg_or_subregno (rtx reg)
2007 {
2008   if (GET_CODE (reg) == SUBREG)
2009     reg = SUBREG_REG (reg);
2010   gcc_assert (REG_P (reg));
2011   return REGNO (reg);
2012 }
2013