1 /* Optimize jump instructions, for GNU compiler.
2    Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3    1998, 1999, 2000, 2001, 2002, 2003  Free Software Foundation, Inc.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING.  If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA.  */
21 
22 /* This is the pathetic remnant of the once-proud jump-optimization pass
23    of the compiler.  Now it basically contains a set of utility functions
24    for operating on jumps.
25 
26    Each CODE_LABEL has a count of the times it is used
27    stored in the LABEL_NUSES internal field, and each JUMP_INSN
28    has one label that it refers to stored in the
29    JUMP_LABEL internal field.  With this we can detect labels that
30    become unused because of the deletion of all the jumps that
31    formerly used them.  The JUMP_LABEL info is sometimes looked
32    at by later passes.
33 
34    The subroutines delete_insn, redirect_jump, and invert_jump are used
35    from other passes as well.  */
36 
37 #include "config.h"
38 #include "system.h"
39 #include "rtl.h"
40 #include "tm_p.h"
41 #include "flags.h"
42 #include "hard-reg-set.h"
43 #include "regs.h"
44 #include "insn-config.h"
45 #include "insn-attr.h"
46 #include "recog.h"
47 #include "function.h"
48 #include "expr.h"
49 #include "real.h"
50 #include "except.h"
51 #include "toplev.h"
52 #include "reload.h"
53 #include "predict.h"
54 
55 /* Optimize jump y; x: ... y: jumpif... x?
56    Don't know if it is worth bothering with.  */
57 /* Optimize two cases of conditional jump to conditional jump?
58    This can never delete any instruction or make anything dead,
59    or even change what is live at any point.
60    So perhaps let combiner do it.  */
61 
62 static rtx next_nonnote_insn_in_loop	PARAMS ((rtx));
63 static int init_label_info		PARAMS ((rtx));
64 static void mark_all_labels		PARAMS ((rtx));
65 static int duplicate_loop_exit_test	PARAMS ((rtx));
66 static void delete_computation		PARAMS ((rtx));
67 static void redirect_exp_1		PARAMS ((rtx *, rtx, rtx, rtx));
68 static int redirect_exp			PARAMS ((rtx, rtx, rtx));
69 static void invert_exp_1		PARAMS ((rtx));
70 static int invert_exp			PARAMS ((rtx));
71 static int returnjump_p_1	        PARAMS ((rtx *, void *));
72 static void delete_prior_computation    PARAMS ((rtx, rtx));
73 
74 /* Alternate entry into the jump optimizer.  This entry point only rebuilds
75    the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
76    instructions.  */
77 void
78 rebuild_jump_labels (f)
79      rtx f;
80 {
81   rtx insn;
82   int max_uid = 0;
83 
84   max_uid = init_label_info (f) + 1;
85 
86   mark_all_labels (f);
87 
88   /* Keep track of labels used from static data; we don't track them
89      closely enough to delete them here, so make sure their reference
90      count doesn't drop to zero.  */
91 
92   for (insn = forced_labels; insn; insn = XEXP (insn, 1))
93     if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
94       LABEL_NUSES (XEXP (insn, 0))++;
95 }
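/* Illustrative usage sketch (hypothetical caller): a pass that has just
   created or moved jump insns would typically refresh the label
   bookkeeping over the whole current function, e.g.

	rebuild_jump_labels (get_insns ());

   after which JUMP_LABEL is valid on every JUMP_INSN and LABEL_NUSES is
   accurate for every CODE_LABEL.  */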
96 
97 /* Some old code expects exactly one BARRIER as the NEXT_INSN of a
98    non-fallthru insn.  This is not generally true, as multiple barriers
99    may have crept in, or the BARRIER may be separated from the last
100    real insn by one or more NOTEs.
101 
102    This simple pass moves barriers and removes duplicates so that the
103    old code is happy.
104  */
105 void
106 cleanup_barriers ()
107 {
108   rtx insn, next, prev;
109   for (insn = get_insns (); insn; insn = next)
110     {
111       next = NEXT_INSN (insn);
112       if (GET_CODE (insn) == BARRIER)
113 	{
114 	  prev = prev_nonnote_insn (insn);
115 	  if (GET_CODE (prev) == BARRIER)
116 	    delete_barrier (insn);
117 	  else if (prev != PREV_INSN (insn))
118 	    reorder_insns (insn, insn, prev);
119 	}
120     }
121 }
122 
123 /* Return the next insn after INSN that is not a NOTE and is in the loop,
124    i.e. return NULL_RTX when there is no such insn before NOTE_INSN_LOOP_END.
125    This routine does not look inside SEQUENCEs.  */
126 
127 static rtx
128 next_nonnote_insn_in_loop (insn)
129      rtx insn;
130 {
131   while (insn)
132     {
133       insn = NEXT_INSN (insn);
134       if (insn == 0 || GET_CODE (insn) != NOTE)
135 	break;
136       if (GET_CODE (insn) == NOTE
137 	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
138 	return NULL_RTX;
139     }
140 
141   return insn;
142 }
143 
144 void
145 copy_loop_headers (f)
146      rtx f;
147 {
148   rtx insn, next;
149   /* Now iterate optimizing jumps until nothing changes over one pass.  */
150   for (insn = f; insn; insn = next)
151     {
152       rtx temp, temp1;
153 
154       next = NEXT_INSN (insn);
155 
156       /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
157 	 jump.  Try to optimize by duplicating the loop exit test if so.
158 	 This is only safe immediately after regscan, because it uses
159 	 the values of regno_first_uid and regno_last_uid.  */
160       if (GET_CODE (insn) == NOTE
161 	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
162 	  && (temp1 = next_nonnote_insn_in_loop (insn)) != 0
163 	  && any_uncondjump_p (temp1) && onlyjump_p (temp1))
164 	{
165 	  temp = PREV_INSN (insn);
166 	  if (duplicate_loop_exit_test (insn))
167 	    {
168 	      next = NEXT_INSN (temp);
169 	    }
170 	}
171     }
172 }
173 
174 void
175 purge_line_number_notes (f)
176      rtx f;
177 {
178   rtx last_note = 0;
179   rtx insn;
180   /* Delete extraneous line number notes.
181      Note that two consecutive notes for different lines are not really
182      extraneous.  There should be some indication where that line belonged,
183      even if it became empty.  */
184 
185   for (insn = f; insn; insn = NEXT_INSN (insn))
186     if (GET_CODE (insn) == NOTE)
187       {
188 	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
189 	  /* Any previous line note was for the prologue; gdb wants a new
190 	     note after the prologue even if it is for the same line.  */
191 	  last_note = NULL_RTX;
192 	else if (NOTE_LINE_NUMBER (insn) >= 0)
193 	  {
194 	    /* Delete this note if it is identical to previous note.  */
195 	    if (last_note
196 		&& NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
197 		&& NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
198 	      {
199 		delete_related_insns (insn);
200 		continue;
201 	      }
202 
203 	    last_note = insn;
204 	  }
205       }
206 }
207 
208 /* Initialize LABEL_NUSES and JUMP_LABEL fields.  Delete any REG_LABEL
209    notes whose labels don't occur in the insn any more.  Returns the
210    largest INSN_UID found.  */
211 static int
212 init_label_info (f)
213      rtx f;
214 {
215   int largest_uid = 0;
216   rtx insn;
217 
218   for (insn = f; insn; insn = NEXT_INSN (insn))
219     {
220       if (GET_CODE (insn) == CODE_LABEL)
221 	LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
222       else if (GET_CODE (insn) == JUMP_INSN)
223 	JUMP_LABEL (insn) = 0;
224       else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
225 	{
226 	  rtx note, next;
227 
228 	  for (note = REG_NOTES (insn); note; note = next)
229 	    {
230 	      next = XEXP (note, 1);
231 	      if (REG_NOTE_KIND (note) == REG_LABEL
232 		  && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
233 		remove_note (insn, note);
234 	    }
235 	}
236       if (INSN_UID (insn) > largest_uid)
237 	largest_uid = INSN_UID (insn);
238     }
239 
240   return largest_uid;
241 }
242 
243 /* Mark the label each jump jumps to.
244    Combine consecutive labels, and count uses of labels.  */
245 
246 static void
247 mark_all_labels (f)
248      rtx f;
249 {
250   rtx insn;
251 
252   for (insn = f; insn; insn = NEXT_INSN (insn))
253     if (INSN_P (insn))
254       {
255 	if (GET_CODE (insn) == CALL_INSN
256 	    && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
257 	  {
258 	    mark_all_labels (XEXP (PATTERN (insn), 0));
259 	    mark_all_labels (XEXP (PATTERN (insn), 1));
260 	    mark_all_labels (XEXP (PATTERN (insn), 2));
261 
262 	    /* Canonicalize the tail recursion label attached to the
263 	       CALL_PLACEHOLDER insn.  */
264 	    if (XEXP (PATTERN (insn), 3))
265 	      {
266 		rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
267 						   XEXP (PATTERN (insn), 3));
268 		mark_jump_label (label_ref, insn, 0);
269 		XEXP (PATTERN (insn), 3) = XEXP (label_ref, 0);
270 	      }
271 
272 	    continue;
273 	  }
274 
275 	mark_jump_label (PATTERN (insn), insn, 0);
276 	if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
277 	  {
278 	    /* When we know the LABEL_REF contained in a REG used in
279 	       an indirect jump, we'll have a REG_LABEL note so that
280 	       flow can tell where it's going.  */
281 	    if (JUMP_LABEL (insn) == 0)
282 	      {
283 		rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
284 		if (label_note)
285 		  {
286 		    /* Put a LABEL_REF around the label from the REG_LABEL
287 		       note, so that we can canonicalize it.  */
288 		    rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
289 						       XEXP (label_note, 0));
290 
291 		    mark_jump_label (label_ref, insn, 0);
292 		    XEXP (label_note, 0) = XEXP (label_ref, 0);
293 		    JUMP_LABEL (insn) = XEXP (label_note, 0);
294 		  }
295 	      }
296 	  }
297       }
298 }
299 
300 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
301    jump.  Assume that this unconditional jump is to the exit test code.  If
302    the code is sufficiently simple, make a copy of it before INSN,
303    followed by a jump to the exit of the loop.  Then delete the unconditional
304    jump after INSN.
305 
306    Return 1 if we made the change, else 0.
307 
308    This is only safe immediately after a regscan pass because it uses the
309    values of regno_first_uid and regno_last_uid.  */
310 
311 static int
312 duplicate_loop_exit_test (loop_start)
313      rtx loop_start;
314 {
315   rtx insn, set, reg, p, link;
316   rtx copy = 0, first_copy = 0;
317   int num_insns = 0;
318   rtx exitcode
319     = NEXT_INSN (JUMP_LABEL (next_nonnote_insn_in_loop (loop_start)));
320   rtx lastexit;
321   int max_reg = max_reg_num ();
322   rtx *reg_map = 0;
323   rtx loop_pre_header_label;
324 
325   /* Scan the exit code.  We do not perform this optimization if any insn:
326 
327          is a CALL_INSN
328 	 is a CODE_LABEL
329 	 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
330 	 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
331 
332      We also do not do this if we find an insn with ASM_OPERANDS.  While
333      this restriction should not be necessary, copying an insn with
334      ASM_OPERANDS can confuse asm_noperands in some cases.
335 
336      Also, don't do this if the exit code is more than 20 insns.  */
337 
338   for (insn = exitcode;
339        insn
340        && ! (GET_CODE (insn) == NOTE
341 	     && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
342        insn = NEXT_INSN (insn))
343     {
344       switch (GET_CODE (insn))
345 	{
346 	case CODE_LABEL:
347 	case CALL_INSN:
348 	  return 0;
349 	case NOTE:
350 
351 	  if (optimize < 2
352 	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
353 		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
354 	    /* If we were to duplicate this code, we would not move
355 	       the BLOCK notes, and so debugging the moved code would
356 	       be difficult.  Thus, we only move the code with -O2 or
357 	       higher.  */
358 	    return 0;
359 
360 	  break;
361 	case JUMP_INSN:
362 	case INSN:
363 	  /* The code below would grossly mishandle REG_WAS_0 notes,
364 	     so get rid of them here.  */
365 	  while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
366 	    remove_note (insn, p);
367 	  if (++num_insns > 20
368 	      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
369 	      || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
370 	    return 0;
371 	  break;
372 	default:
373 	  break;
374 	}
375     }
376 
377   /* Unless INSN is zero, we can do the optimization.  */
378   if (insn == 0)
379     return 0;
380 
381   lastexit = insn;
382 
383   /* See if any insn sets a register only used in the loop exit code and
384      not a user variable.  If so, replace it with a new register.  */
385   for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
386     if (GET_CODE (insn) == INSN
387 	&& (set = single_set (insn)) != 0
388 	&& ((reg = SET_DEST (set), GET_CODE (reg) == REG)
389 	    || (GET_CODE (reg) == SUBREG
390 		&& (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
391 	&& REGNO (reg) >= FIRST_PSEUDO_REGISTER
392 	&& REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
393       {
394 	for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
395 	  if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
396 	    break;
397 
398 	if (p != lastexit)
399 	  {
400 	    /* We can do the replacement.  Allocate reg_map if this is the
401 	       first replacement we found.  */
402 	    if (reg_map == 0)
403 	      reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));
404 
405 	    REG_LOOP_TEST_P (reg) = 1;
406 
407 	    reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
408 	  }
409       }
410   loop_pre_header_label = gen_label_rtx ();
411 
412   /* Now copy each insn.  */
413   for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
414     {
415       switch (GET_CODE (insn))
416 	{
417 	case BARRIER:
418 	  copy = emit_barrier_before (loop_start);
419 	  break;
420 	case NOTE:
421 	  /* Only copy line-number notes.  */
422 	  if (NOTE_LINE_NUMBER (insn) >= 0)
423 	    {
424 	      copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
425 	      NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
426 	    }
427 	  break;
428 
429 	case INSN:
430 	  copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
431 	  if (reg_map)
432 	    replace_regs (PATTERN (copy), reg_map, max_reg, 1);
433 
434 	  mark_jump_label (PATTERN (copy), copy, 0);
435 	  INSN_SCOPE (copy) = INSN_SCOPE (insn);
436 
437 	  /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
438 	     make them.  */
439 	  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
440 	    if (REG_NOTE_KIND (link) != REG_LABEL)
441 	      {
442 		if (GET_CODE (link) == EXPR_LIST)
443 		  REG_NOTES (copy)
444 		    = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
445 						      XEXP (link, 0),
446 						      REG_NOTES (copy)));
447 		else
448 		  REG_NOTES (copy)
449 		    = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
450 						      XEXP (link, 0),
451 						      REG_NOTES (copy)));
452 	      }
453 
454 	  if (reg_map && REG_NOTES (copy))
455 	    replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
456 	  break;
457 
458 	case JUMP_INSN:
459 	  copy = emit_jump_insn_before (copy_insn (PATTERN (insn)),
460 					loop_start);
461 	  INSN_SCOPE (copy) = INSN_SCOPE (insn);
462 	  if (reg_map)
463 	    replace_regs (PATTERN (copy), reg_map, max_reg, 1);
464 	  mark_jump_label (PATTERN (copy), copy, 0);
465 	  if (REG_NOTES (insn))
466 	    {
467 	      REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
468 	      if (reg_map)
469 		replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
470 	    }
471 
472 	  /* Predict a conditional jump that makes the loop keep looping as taken.
473 	     Other jumps are probably exit conditions, so predict
474 	     them as untaken.  */
475 	  if (any_condjump_p (copy))
476 	    {
477 	      rtx label = JUMP_LABEL (copy);
478 	      if (label)
479 		{
480 		  /* The jump_insn after loop_start should be followed
481 		     by a barrier and the loopback label.  */
482 		  if (prev_nonnote_insn (label)
483 		      && (prev_nonnote_insn (prev_nonnote_insn (label))
484 			  == next_nonnote_insn (loop_start)))
485 		    {
486 		      predict_insn_def (copy, PRED_LOOP_HEADER, TAKEN);
487 		      /* To keep pre-header, we need to redirect all loop
488 		         entrances before the LOOP_BEG note.  */
489 		      redirect_jump (copy, loop_pre_header_label, 0);
490 		    }
491 		  else
492 		    predict_insn_def (copy, PRED_LOOP_HEADER, NOT_TAKEN);
493 		}
494 	    }
495 	  break;
496 
497 	default:
498 	  abort ();
499 	}
500 
501       /* Record the first insn we copied.  We need it so that we can
502 	 scan the copied insns for new pseudo registers.  */
503       if (! first_copy)
504 	first_copy = copy;
505     }
506 
507   /* Now clean up by emitting a jump to the end label and deleting the jump
508      at the start of the loop.  */
509   if (! copy || GET_CODE (copy) != BARRIER)
510     {
511       copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
512 				    loop_start);
513 
514       /* Record the first insn we copied.  We need it so that we can
515 	 scan the copied insns for new pseudo registers.   This may not
516 	 be strictly necessary since we should have copied at least one
517 	 insn above.  But I am going to be safe.  */
518       if (! first_copy)
519 	first_copy = copy;
520 
521       mark_jump_label (PATTERN (copy), copy, 0);
522       emit_barrier_before (loop_start);
523     }
524 
525   emit_label_before (loop_pre_header_label, loop_start);
526 
527   /* Now scan from the first insn we copied to the last insn we copied
528      (copy) for new pseudo registers.  Do this after the code to jump to
529      the end label since that might create a new pseudo too.  */
530   reg_scan_update (first_copy, copy, max_reg);
531 
532   /* Mark the exit code as the virtual top of the converted loop.  */
533   emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
534 
535   delete_related_insns (next_nonnote_insn (loop_start));
536 
537   /* Clean up.  */
538   if (reg_map)
539     free (reg_map);
540 
541   return 1;
542 }
543 
544 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, and loop-end
545    notes between START and END out before START.  START and END may be such
546    notes.  Returns the values of the new starting and ending insns, which
547    may be different if the original ones were such notes.
548    Return true if there were only such notes and no real instructions.  */
549 
550 bool
551 squeeze_notes (startp, endp)
552      rtx* startp;
553      rtx* endp;
554 {
555   rtx start = *startp;
556   rtx end = *endp;
557 
558   rtx insn;
559   rtx next;
560   rtx last = NULL;
561   rtx past_end = NEXT_INSN (end);
562 
563   for (insn = start; insn != past_end; insn = next)
564     {
565       next = NEXT_INSN (insn);
566       if (GET_CODE (insn) == NOTE
567 	  && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
568 	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
569 	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
570 	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
571 	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
572 	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
573 	{
574 	  if (insn == start)
575 	    start = next;
576 	  else
577 	    {
578 	      rtx prev = PREV_INSN (insn);
579 	      PREV_INSN (insn) = PREV_INSN (start);
580 	      NEXT_INSN (insn) = start;
581 	      NEXT_INSN (PREV_INSN (insn)) = insn;
582 	      PREV_INSN (NEXT_INSN (insn)) = insn;
583 	      NEXT_INSN (prev) = next;
584 	      PREV_INSN (next) = prev;
585 	    }
586 	}
587       else
588 	last = insn;
589     }
590 
591   /* There were no real instructions.  */
592   if (start == past_end)
593     return true;
594 
595   end = last;
596 
597   *startp = start;
598   *endp = end;
599   return false;
600 }
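/* For illustration, given START and END delimiting a range such as

	insn_0
	insn_1			<- START
	NOTE_INSN_LOOP_END
	insn_2			<- END

   squeeze_notes relinks the note in front of START:

	insn_0
	NOTE_INSN_LOOP_END
	insn_1			<- *STARTP (unchanged here)
	insn_2			<- *ENDP

   and returns false because real insns remain; had the range contained
   only such notes it would return true instead.  */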
601 
602 /* Return the label before INSN, or put a new label there.  */
603 
604 rtx
605 get_label_before (insn)
606      rtx insn;
607 {
608   rtx label;
609 
610   /* Find an existing label at this point
611      or make a new one if there is none.  */
612   label = prev_nonnote_insn (insn);
613 
614   if (label == 0 || GET_CODE (label) != CODE_LABEL)
615     {
616       rtx prev = PREV_INSN (insn);
617 
618       label = gen_label_rtx ();
619       emit_label_after (label, prev);
620       LABEL_NUSES (label) = 0;
621     }
622   return label;
623 }
624 
625 /* Return the label after INSN, or put a new label there.  */
626 
627 rtx
628 get_label_after (insn)
629      rtx insn;
630 {
631   rtx label;
632 
633   /* Find an existing label at this point
634      or make a new one if there is none.  */
635   label = next_nonnote_insn (insn);
636 
637   if (label == 0 || GET_CODE (label) != CODE_LABEL)
638     {
639       label = gen_label_rtx ();
640       emit_label_after (label, insn);
641       LABEL_NUSES (label) = 0;
642     }
643   return label;
644 }
645 
646 /* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
647    for the reversed comparison if it is possible to do so.  Otherwise return
648    UNKNOWN.  UNKNOWN may also be returned when we have a CC_MODE compare and
649    don't know whether its source is a floating-point or an integer comparison.
650    The machine description should define the REVERSIBLE_CC_MODE and
651    REVERSE_CONDITION macros to help this function avoid overhead in these cases.  */
652 enum rtx_code
653 reversed_comparison_code_parts (code, arg0, arg1, insn)
654      rtx insn, arg0, arg1;
655      enum rtx_code code;
656 {
657   enum machine_mode mode;
658 
659   /* If this is not actually a comparison, we can't reverse it.  */
660   if (GET_RTX_CLASS (code) != '<')
661     return UNKNOWN;
662 
663   mode = GET_MODE (arg0);
664   if (mode == VOIDmode)
665     mode = GET_MODE (arg1);
666 
667   /* First see if the machine description supplies us a way to reverse the
668      comparison.  Give it priority over everything else to allow the machine
669      description to do tricks.  */
670 #ifdef REVERSIBLE_CC_MODE
671   if (GET_MODE_CLASS (mode) == MODE_CC
672       && REVERSIBLE_CC_MODE (mode))
673     {
674 #ifdef REVERSE_CONDITION
675       return REVERSE_CONDITION (code, mode);
676 #endif
677       return reverse_condition (code);
678     }
679 #endif
680 
681   /* Try a few special cases based on the comparison code.  */
682   switch (code)
683     {
684     case GEU:
685     case GTU:
686     case LEU:
687     case LTU:
688     case NE:
689     case EQ:
690       /* It is always safe to reverse EQ and NE, even for floating
691 	 point.  Similarly, the unsigned comparisons are never used for
692 	 floating point, so we can reverse them in the default way.  */
693       return reverse_condition (code);
694     case ORDERED:
695     case UNORDERED:
696     case LTGT:
697     case UNEQ:
698       /* If we already see an unordered comparison, we can be sure we are
699 	 dealing with floating point, so we don't need any more tests.  */
700       return reverse_condition_maybe_unordered (code);
701     case UNLT:
702     case UNLE:
703     case UNGT:
704     case UNGE:
705       /* We don't have a safe way to reverse these yet.  */
706       return UNKNOWN;
707     default:
708       break;
709     }
710 
711   if (GET_MODE_CLASS (mode) == MODE_CC
712 #ifdef HAVE_cc0
713       || arg0 == cc0_rtx
714 #endif
715       )
716     {
717       rtx prev;
718       /* Try to search for the comparison to determine the real mode.
719          This code is expensive, but with a sane machine description it
720          will never be used, since REVERSIBLE_CC_MODE will return true
721          in all cases.  */
722       if (! insn)
723 	return UNKNOWN;
724 
725       for (prev = prev_nonnote_insn (insn);
726 	   prev != 0 && GET_CODE (prev) != CODE_LABEL;
727 	   prev = prev_nonnote_insn (prev))
728 	{
729 	  rtx set = set_of (arg0, prev);
730 	  if (set && GET_CODE (set) == SET
731 	      && rtx_equal_p (SET_DEST (set), arg0))
732 	    {
733 	      rtx src = SET_SRC (set);
734 
735 	      if (GET_CODE (src) == COMPARE)
736 		{
737 		  rtx comparison = src;
738 		  arg0 = XEXP (src, 0);
739 		  mode = GET_MODE (arg0);
740 		  if (mode == VOIDmode)
741 		    mode = GET_MODE (XEXP (comparison, 1));
742 		  break;
743 		}
744 	      /* We can get past reg-reg moves.  This may be useful for models
745 	         of i387 comparisons that first move flag registers around.  */
746 	      if (REG_P (src))
747 		{
748 		  arg0 = src;
749 		  continue;
750 		}
751 	    }
752 	  /* If the register is clobbered in some way we do not understand,
753 	     give up.  */
754 	  if (set)
755 	    return UNKNOWN;
756 	}
757     }
758 
759   /* Test for an integer condition, or a floating-point comparison
760      in which NaNs can be ignored.  */
761   if (GET_CODE (arg0) == CONST_INT
762       || (GET_MODE (arg0) != VOIDmode
763 	  && GET_MODE_CLASS (mode) != MODE_CC
764 	  && !HONOR_NANS (mode)))
765     return reverse_condition (code);
766 
767   return UNKNOWN;
768 }
769 
770 /* A wrapper around the previous function that takes COMPARISON as an rtx
771    expression.  This simplifies many callers.  */
772 enum rtx_code
773 reversed_comparison_code (comparison, insn)
774      rtx comparison, insn;
775 {
776   if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
777     return UNKNOWN;
778   return reversed_comparison_code_parts (GET_CODE (comparison),
779 					 XEXP (comparison, 0),
780 					 XEXP (comparison, 1), insn);
781 }
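/* A minimal usage sketch (hypothetical caller): before inverting the sense
   of a conditional jump, a pass would first check that the condition can be
   reversed at all:

	rtx cond = XEXP (SET_SRC (pc_set (jump)), 0);
	enum rtx_code rev = reversed_comparison_code (cond, jump);

	if (rev != UNKNOWN)
	  ... build the reversed comparison ...

   Passing the jump as INSN lets the function search backwards for the
   COMPARE feeding a CC-mode register when the mode alone is ambiguous.  */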
782 
783 /* Given an rtx-code for a comparison, return the code for the negated
784    comparison.  If no such code exists, return UNKNOWN.
785 
786    WATCH OUT!  reverse_condition is not safe to use on a jump that might
787    be acting on the results of an IEEE floating point comparison, because
788    of the special treatment of non-signaling nans in comparisons.
789    Use reversed_comparison_code instead.  */
790 
791 enum rtx_code
792 reverse_condition (code)
793      enum rtx_code code;
794 {
795   switch (code)
796     {
797     case EQ:
798       return NE;
799     case NE:
800       return EQ;
801     case GT:
802       return LE;
803     case GE:
804       return LT;
805     case LT:
806       return GE;
807     case LE:
808       return GT;
809     case GTU:
810       return LEU;
811     case GEU:
812       return LTU;
813     case LTU:
814       return GEU;
815     case LEU:
816       return GTU;
817     case UNORDERED:
818       return ORDERED;
819     case ORDERED:
820       return UNORDERED;
821 
822     case UNLT:
823     case UNLE:
824     case UNGT:
825     case UNGE:
826     case UNEQ:
827     case LTGT:
828       return UNKNOWN;
829 
830     default:
831       abort ();
832     }
833 }
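/* For example, reverse_condition (GT) == LE and reverse_condition (LTU)
   == GEU, while the UNLT/UNLE/UNGT/UNGE/UNEQ/LTGT codes all come back as
   UNKNOWN; reverse_condition_maybe_unordered below handles those.  */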
834 
835 /* Similar, but we're allowed to generate unordered comparisons, which
836    makes it safe for IEEE floating-point.  Of course, we have to recognize
837    that the target will support them too...  */
838 
839 enum rtx_code
840 reverse_condition_maybe_unordered (code)
841      enum rtx_code code;
842 {
843   switch (code)
844     {
845     case EQ:
846       return NE;
847     case NE:
848       return EQ;
849     case GT:
850       return UNLE;
851     case GE:
852       return UNLT;
853     case LT:
854       return UNGE;
855     case LE:
856       return UNGT;
857     case LTGT:
858       return UNEQ;
859     case UNORDERED:
860       return ORDERED;
861     case ORDERED:
862       return UNORDERED;
863     case UNLT:
864       return GE;
865     case UNLE:
866       return GT;
867     case UNGT:
868       return LE;
869     case UNGE:
870       return LT;
871     case UNEQ:
872       return LTGT;
873 
874     default:
875       abort ();
876     }
877 }
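/* For example, reverse_condition_maybe_unordered (GT) == UNLE: if either
   operand is a NaN, (gt x y) is false, so its negation must be true, and
   "unordered or less-or-equal" is exactly that.  */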
878 
879 /* Similar, but return the code when two operands of a comparison are swapped.
880    This IS safe for IEEE floating-point.  */
881 
882 enum rtx_code
883 swap_condition (code)
884      enum rtx_code code;
885 {
886   switch (code)
887     {
888     case EQ:
889     case NE:
890     case UNORDERED:
891     case ORDERED:
892     case UNEQ:
893     case LTGT:
894       return code;
895 
896     case GT:
897       return LT;
898     case GE:
899       return LE;
900     case LT:
901       return GT;
902     case LE:
903       return GE;
904     case GTU:
905       return LTU;
906     case GEU:
907       return LEU;
908     case LTU:
909       return GTU;
910     case LEU:
911       return GEU;
912     case UNLT:
913       return UNGT;
914     case UNLE:
915       return UNGE;
916     case UNGT:
917       return UNLT;
918     case UNGE:
919       return UNLE;
920 
921     default:
922       abort ();
923     }
924 }
925 
926 /* Given a comparison CODE, return the corresponding unsigned comparison.
927    If CODE is an equality comparison or already an unsigned comparison,
928    CODE is returned.  */
929 
930 enum rtx_code
931 unsigned_condition (code)
932      enum rtx_code code;
933 {
934   switch (code)
935     {
936     case EQ:
937     case NE:
938     case GTU:
939     case GEU:
940     case LTU:
941     case LEU:
942       return code;
943 
944     case GT:
945       return GTU;
946     case GE:
947       return GEU;
948     case LT:
949       return LTU;
950     case LE:
951       return LEU;
952 
953     default:
954       abort ();
955     }
956 }
957 
958 /* Similarly, return the signed version of a comparison.  */
959 
960 enum rtx_code
961 signed_condition (code)
962      enum rtx_code code;
963 {
964   switch (code)
965     {
966     case EQ:
967     case NE:
968     case GT:
969     case GE:
970     case LT:
971     case LE:
972       return code;
973 
974     case GTU:
975       return GT;
976     case GEU:
977       return GE;
978     case LTU:
979       return LT;
980     case LEU:
981       return LE;
982 
983     default:
984       abort ();
985     }
986 }
987 
988 /* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
989    truth of CODE1 implies the truth of CODE2.  */
990 
991 int
992 comparison_dominates_p (code1, code2)
993      enum rtx_code code1, code2;
994 {
995   /* UNKNOWN comparison codes can happen as a result of trying to reverse
996      comparison codes.
997      They can't match anything, so we have to reject them here.  */
998   if (code1 == UNKNOWN || code2 == UNKNOWN)
999     return 0;
1000 
1001   if (code1 == code2)
1002     return 1;
1003 
1004   switch (code1)
1005     {
1006     case UNEQ:
1007       if (code2 == UNLE || code2 == UNGE)
1008 	return 1;
1009       break;
1010 
1011     case EQ:
1012       if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
1013 	  || code2 == ORDERED)
1014 	return 1;
1015       break;
1016 
1017     case UNLT:
1018       if (code2 == UNLE || code2 == NE)
1019 	return 1;
1020       break;
1021 
1022     case LT:
1023       if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
1024 	return 1;
1025       break;
1026 
1027     case UNGT:
1028       if (code2 == UNGE || code2 == NE)
1029 	return 1;
1030       break;
1031 
1032     case GT:
1033       if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
1034 	return 1;
1035       break;
1036 
1037     case GE:
1038     case LE:
1039       if (code2 == ORDERED)
1040 	return 1;
1041       break;
1042 
1043     case LTGT:
1044       if (code2 == NE || code2 == ORDERED)
1045 	return 1;
1046       break;
1047 
1048     case LTU:
1049       if (code2 == LEU || code2 == NE)
1050 	return 1;
1051       break;
1052 
1053     case GTU:
1054       if (code2 == GEU || code2 == NE)
1055 	return 1;
1056       break;
1057 
1058     case UNORDERED:
1059       if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
1060 	  || code2 == UNGE || code2 == UNGT)
1061 	return 1;
1062       break;
1063 
1064     default:
1065       break;
1066     }
1067 
1068   return 0;
1069 }
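/* For example, comparison_dominates_p (EQ, GE) is 1 because x == y implies
   x >= y, while comparison_dominates_p (GE, EQ) is 0: knowing x >= y tells
   us nothing about x == y.  */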
1070 
1071 /* Return 1 if INSN is an unconditional jump and nothing else.  */
1072 
1073 int
1074 simplejump_p (insn)
1075      rtx insn;
1076 {
1077   return (GET_CODE (insn) == JUMP_INSN
1078 	  && GET_CODE (PATTERN (insn)) == SET
1079 	  && GET_CODE (SET_DEST (PATTERN (insn))) == PC
1080 	  && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
1081 }
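/* For illustration, the only pattern accepted here is

	(set (pc) (label_ref L))

   as the entire pattern of a JUMP_INSN.  Anything with extra effects
   (a PARALLEL with clobbers, USEs, etc.) is rejected even if it still
   transfers control unconditionally.  */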
1082 
1083 /* Return 1 if INSN is a tablejump.  */
1084 
1085 int
1086 tablejump_p (insn)
1087      rtx insn;
1088 {
1089   rtx table;
1090   return (GET_CODE (insn) == JUMP_INSN
1091           && JUMP_LABEL (insn)
1092           && NEXT_INSN (JUMP_LABEL (insn))
1093           && (table = next_active_insn (JUMP_LABEL (insn)))
1094           && GET_CODE (table) == JUMP_INSN
1095           && (GET_CODE (PATTERN (table)) == ADDR_VEC
1096               || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC));
1097 }
1098 
1099 /* Return nonzero if INSN is a (possibly) conditional jump
1100    and nothing more.
1101 
1102    Use of this function is deprecated, since we need to support combined
1103    branch and compare insns.  Use any_condjump_p instead whenever possible.  */
1104 
1105 int
1106 condjump_p (insn)
1107      rtx insn;
1108 {
1109   rtx x = PATTERN (insn);
1110 
1111   if (GET_CODE (x) != SET
1112       || GET_CODE (SET_DEST (x)) != PC)
1113     return 0;
1114 
1115   x = SET_SRC (x);
1116   if (GET_CODE (x) == LABEL_REF)
1117     return 1;
1118   else
1119     return (GET_CODE (x) == IF_THEN_ELSE
1120 	    && ((GET_CODE (XEXP (x, 2)) == PC
1121 		 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
1122 		     || GET_CODE (XEXP (x, 1)) == RETURN))
1123 		|| (GET_CODE (XEXP (x, 1)) == PC
1124 		    && (GET_CODE (XEXP (x, 2)) == LABEL_REF
1125 			|| GET_CODE (XEXP (x, 2)) == RETURN))));
1126 
1127   return 0;
1128 }
1129 
1130 /* Return nonzero if INSN is a (possibly) conditional jump inside a
1131    PARALLEL.
1132 
1133    Use of this function is deprecated, since we need to support combined
1134    branch and compare insns.  Use any_condjump_p instead whenever possible.  */
1135 
1136 int
1137 condjump_in_parallel_p (insn)
1138      rtx insn;
1139 {
1140   rtx x = PATTERN (insn);
1141 
1142   if (GET_CODE (x) != PARALLEL)
1143     return 0;
1144   else
1145     x = XVECEXP (x, 0, 0);
1146 
1147   if (GET_CODE (x) != SET)
1148     return 0;
1149   if (GET_CODE (SET_DEST (x)) != PC)
1150     return 0;
1151   if (GET_CODE (SET_SRC (x)) == LABEL_REF)
1152     return 1;
1153   if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
1154     return 0;
1155   if (XEXP (SET_SRC (x), 2) == pc_rtx
1156       && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
1157 	  || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
1158     return 1;
1159   if (XEXP (SET_SRC (x), 1) == pc_rtx
1160       && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
1161 	  || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
1162     return 1;
1163   return 0;
1164 }
1165 
1166 /* Return the SET of the PC in INSN, if any, otherwise NULL.  */
1167 
1168 rtx
1169 pc_set (insn)
1170      rtx insn;
1171 {
1172   rtx pat;
1173   if (GET_CODE (insn) != JUMP_INSN)
1174     return NULL_RTX;
1175   pat = PATTERN (insn);
1176 
1177   /* The set is allowed to appear either as the insn pattern or
1178      the first set in a PARALLEL.  */
1179   if (GET_CODE (pat) == PARALLEL)
1180     pat = XVECEXP (pat, 0, 0);
1181   if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
1182     return pat;
1183 
1184   return NULL_RTX;
1185 }
1186 
1187 /* Return true when insn is an unconditional direct jump,
1188    possibly bundled inside a PARALLEL.  */
1189 
1190 int
1191 any_uncondjump_p (insn)
1192      rtx insn;
1193 {
1194   rtx x = pc_set (insn);
1195   if (!x)
1196     return 0;
1197   if (GET_CODE (SET_SRC (x)) != LABEL_REF)
1198     return 0;
1199   return 1;
1200 }
1201 
1202 /* Return true when insn is a conditional jump.  This function works for
1203    instructions containing PC sets in PARALLELs.  The instruction may have
1204    various other effects so before removing the jump you must verify
1205    onlyjump_p.
1206 
1207    Note that unlike condjump_p it returns false for unconditional jumps.  */
1208 
1209 int
1210 any_condjump_p (insn)
1211      rtx insn;
1212 {
1213   rtx x = pc_set (insn);
1214   enum rtx_code a, b;
1215 
1216   if (!x)
1217     return 0;
1218   if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
1219     return 0;
1220 
1221   a = GET_CODE (XEXP (SET_SRC (x), 1));
1222   b = GET_CODE (XEXP (SET_SRC (x), 2));
1223 
1224   return ((b == PC && (a == LABEL_REF || a == RETURN))
1225 	  || (a == PC && (b == LABEL_REF || b == RETURN)));
1226 }
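/* For illustration, the form recognized here is a PC set whose source is an
   IF_THEN_ELSE with the pc on exactly one arm, e.g.

	(set (pc) (if_then_else (ne (reg 58) (const_int 0))
				(label_ref L)
				(pc)))

   possibly appearing as the first element of a PARALLEL (see pc_set).  An
   unconditional (set (pc) (label_ref L)) is deliberately not accepted.  */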
1227 
1228 /* Return the label of a conditional jump.  */
1229 
1230 rtx
1231 condjump_label (insn)
1232      rtx insn;
1233 {
1234   rtx x = pc_set (insn);
1235 
1236   if (!x)
1237     return NULL_RTX;
1238   x = SET_SRC (x);
1239   if (GET_CODE (x) == LABEL_REF)
1240     return x;
1241   if (GET_CODE (x) != IF_THEN_ELSE)
1242     return NULL_RTX;
1243   if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
1244     return XEXP (x, 1);
1245   if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
1246     return XEXP (x, 2);
1247   return NULL_RTX;
1248 }
1249 
1250 /* Return true if INSN is a (possibly conditional) return insn.  */
1251 
1252 static int
1253 returnjump_p_1 (loc, data)
1254      rtx *loc;
1255      void *data ATTRIBUTE_UNUSED;
1256 {
1257   rtx x = *loc;
1258 
1259   return x && (GET_CODE (x) == RETURN
1260 	       || (GET_CODE (x) == SET && SET_IS_RETURN_P (x)));
1261 }
1262 
1263 int
1264 returnjump_p (insn)
1265      rtx insn;
1266 {
1267   if (GET_CODE (insn) != JUMP_INSN)
1268     return 0;
1269   return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
1270 }
1271 
1272 /* Return true if INSN is a jump that only transfers control and
1273    nothing more.  */
1274 
1275 int
1276 onlyjump_p (insn)
1277      rtx insn;
1278 {
1279   rtx set;
1280 
1281   if (GET_CODE (insn) != JUMP_INSN)
1282     return 0;
1283 
1284   set = single_set (insn);
1285   if (set == NULL)
1286     return 0;
1287   if (GET_CODE (SET_DEST (set)) != PC)
1288     return 0;
1289   if (side_effects_p (SET_SRC (set)))
1290     return 0;
1291 
1292   return 1;
1293 }
1294 
1295 #ifdef HAVE_cc0
1296 
1297 /* Return nonzero if X is an RTX that only sets the condition codes
1298    and has no side effects.  */
1299 
1300 int
1301 only_sets_cc0_p (x)
1302      rtx x;
1303 {
1304 
1305   if (! x)
1306     return 0;
1307 
1308   if (INSN_P (x))
1309     x = PATTERN (x);
1310 
1311   return sets_cc0_p (x) == 1 && ! side_effects_p (x);
1312 }
1313 
1314 /* Return 1 if X is an RTX that does nothing but set the condition codes
1315    and CLOBBER or USE registers.
1316    Return -1 if X does explicitly set the condition codes,
1317    but also does other things.  */
1318 
1319 int
1320 sets_cc0_p (x)
1321      rtx x;
1322 {
1323 
1324   if (! x)
1325     return 0;
1326 
1327   if (INSN_P (x))
1328     x = PATTERN (x);
1329 
1330   if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
1331     return 1;
1332   if (GET_CODE (x) == PARALLEL)
1333     {
1334       int i;
1335       int sets_cc0 = 0;
1336       int other_things = 0;
1337       for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1338 	{
1339 	  if (GET_CODE (XVECEXP (x, 0, i)) == SET
1340 	      && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
1341 	    sets_cc0 = 1;
1342 	  else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
1343 	    other_things = 1;
1344 	}
1345       return ! sets_cc0 ? 0 : other_things ? -1 : 1;
1346     }
1347   return 0;
1348 }
1349 #endif
1350 
1351 /* Follow any unconditional jump at LABEL;
1352    return the ultimate label reached by any such chain of jumps.
1353    If LABEL is not followed by a jump, return LABEL.
1354    If the chain loops or we can't find the end, return LABEL,
1355    since that tells caller to avoid changing the insn.
1356 
1357    If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
1358    a USE or CLOBBER.  */
1359 
1360 rtx
1361 follow_jumps (label)
1362      rtx label;
1363 {
1364   rtx insn;
1365   rtx next;
1366   rtx value = label;
1367   int depth;
1368 
1369   for (depth = 0;
1370        (depth < 10
1371 	&& (insn = next_active_insn (value)) != 0
1372 	&& GET_CODE (insn) == JUMP_INSN
1373 	&& ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
1374 	     && onlyjump_p (insn))
1375 	    || GET_CODE (PATTERN (insn)) == RETURN)
1376 	&& (next = NEXT_INSN (insn))
1377 	&& GET_CODE (next) == BARRIER);
1378        depth++)
1379     {
1380       /* Don't chain through the insn that jumps into a loop
1381 	 from outside the loop,
1382 	 since that would create multiple loop entry jumps
1383 	 and prevent loop optimization.  */
1384       rtx tem;
1385       if (!reload_completed)
1386 	for (tem = value; tem != insn; tem = NEXT_INSN (tem))
1387 	  if (GET_CODE (tem) == NOTE
1388 	      && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
1389 		  /* ??? Optional.  Disables some optimizations, but makes
1390 		     gcov output more accurate with -O.  */
1391 		  || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
1392 	    return value;
1393 
1394       /* If we have found a cycle, make the insn jump to itself.  */
1395       if (JUMP_LABEL (insn) == label)
1396 	return label;
1397 
1398       tem = next_active_insn (JUMP_LABEL (insn));
1399       if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
1400 		  || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
1401 	break;
1402 
1403       value = JUMP_LABEL (insn);
1404     }
1405   if (depth == 10)
1406     return label;
1407   return value;
1408 }
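/* For illustration, given a chain of trivial unconditional jumps

	L1:	jump L2
	L2:	jump L3
	L3:	... real code ...

   follow_jumps (L1) returns L3, so a caller may redirect a branch aimed at
   L1 straight at L3.  If the chain loops back on itself or is too deep
   (the search stops after ten hops), the original label is returned
   unchanged.  */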
1409 
1410 
1411 /* Find all CODE_LABELs referred to in X, and increment their use counts.
1412    If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
1413    in INSN, then store one of them in JUMP_LABEL (INSN).
1414    If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
1415    referenced in INSN, add a REG_LABEL note containing that label to INSN.
1416    Also, when there are consecutive labels, canonicalize on the last of them.
1417 
1418    Note that two labels separated by a loop-beginning note
1419    must be kept distinct if we have not yet done loop-optimization,
1420    because the gap between them is where loop-optimize
1421    will want to move invariant code to.  CROSS_JUMP tells us
1422    that loop-optimization is done with.  */
1423 
1424 void
1425 mark_jump_label (x, insn, in_mem)
1426      rtx x;
1427      rtx insn;
1428      int in_mem;
1429 {
1430   RTX_CODE code = GET_CODE (x);
1431   int i;
1432   const char *fmt;
1433 
1434   switch (code)
1435     {
1436     case PC:
1437     case CC0:
1438     case REG:
1439     case CONST_INT:
1440     case CONST_DOUBLE:
1441     case CLOBBER:
1442     case CALL:
1443       return;
1444 
1445     case MEM:
1446       in_mem = 1;
1447       break;
1448 
1449     case SYMBOL_REF:
1450       if (!in_mem)
1451 	return;
1452 
1453       /* If this is a constant-pool reference, see if it is a label.  */
1454       if (CONSTANT_POOL_ADDRESS_P (x))
1455 	mark_jump_label (get_pool_constant (x), insn, in_mem);
1456       break;
1457 
1458     case LABEL_REF:
1459       {
1460 	rtx label = XEXP (x, 0);
1461 
1462 	/* Ignore remaining references to unreachable labels that
1463 	   have been deleted.  */
1464 	if (GET_CODE (label) == NOTE
1465 	    && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
1466 	  break;
1467 
1468 	if (GET_CODE (label) != CODE_LABEL)
1469 	  abort ();
1470 
1471 	/* Ignore references to labels of containing functions.  */
1472 	if (LABEL_REF_NONLOCAL_P (x))
1473 	  break;
1474 
1475 	XEXP (x, 0) = label;
1476 	if (! insn || ! INSN_DELETED_P (insn))
1477 	  ++LABEL_NUSES (label);
1478 
1479 	if (insn)
1480 	  {
1481 	    if (GET_CODE (insn) == JUMP_INSN)
1482 	      JUMP_LABEL (insn) = label;
1483 	    else
1484 	      {
1485 		/* Add a REG_LABEL note for LABEL unless there already
1486 		   is one.  All uses of a label, except for labels
1487 		   that are the targets of jumps, must have a
1488 		   REG_LABEL note.  */
1489 		if (! find_reg_note (insn, REG_LABEL, label))
1490 		  REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
1491 							REG_NOTES (insn));
1492 	      }
1493 	  }
1494 	return;
1495       }
1496 
1497   /* Do walk the labels in a vector, but not the first operand of an
1498      ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
1499     case ADDR_VEC:
1500     case ADDR_DIFF_VEC:
1501       if (! INSN_DELETED_P (insn))
1502 	{
1503 	  int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
1504 
1505 	  for (i = 0; i < XVECLEN (x, eltnum); i++)
1506 	    mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
1507 	}
1508       return;
1509 
1510     default:
1511       break;
1512     }
1513 
1514   fmt = GET_RTX_FORMAT (code);
1515   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1516     {
1517       if (fmt[i] == 'e')
1518 	mark_jump_label (XEXP (x, i), insn, in_mem);
1519       else if (fmt[i] == 'E')
1520 	{
1521 	  int j;
1522 	  for (j = 0; j < XVECLEN (x, i); j++)
1523 	    mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
1524 	}
1525     }
1526 }
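/* For illustration, given a non-jump insn such as

	(insn (set (reg 100) (label_ref L)))

   mark_jump_label increments LABEL_NUSES (L) and attaches a (REG_LABEL L)
   note to the insn, whereas for

	(jump_insn (set (pc) (label_ref L)))

   it records L in the insn's JUMP_LABEL field instead.  */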
1527 
1528 /* If all INSN does is set the pc, delete it,
1529    and delete the insn that set the condition codes for it
1530    if that's what the previous thing was.  */
1531 
1532 void
1533 delete_jump (insn)
1534      rtx insn;
1535 {
1536   rtx set = single_set (insn);
1537 
1538   if (set && GET_CODE (SET_DEST (set)) == PC)
1539     delete_computation (insn);
1540 }
1541 
1542 /* Verify INSN is a BARRIER and delete it.  */
1543 
1544 void
1545 delete_barrier (insn)
1546      rtx insn;
1547 {
1548   if (GET_CODE (insn) != BARRIER)
1549     abort ();
1550 
1551   delete_insn (insn);
1552 }
1553 
1554 /* Recursively delete prior insns that compute the value (used only by INSN
1555    which the caller is deleting) stored in the register mentioned by NOTE
1556    which is a REG_DEAD note associated with INSN.  */
1557 
1558 static void
1559 delete_prior_computation (note, insn)
1560      rtx note;
1561      rtx insn;
1562 {
1563   rtx our_prev;
1564   rtx reg = XEXP (note, 0);
1565 
1566   for (our_prev = prev_nonnote_insn (insn);
1567        our_prev && (GET_CODE (our_prev) == INSN
1568 		    || GET_CODE (our_prev) == CALL_INSN);
1569        our_prev = prev_nonnote_insn (our_prev))
1570     {
1571       rtx pat = PATTERN (our_prev);
1572 
1573       /* If we reach a CALL which is not calling a const function
1574 	 or the callee pops the arguments, then give up.  */
1575       if (GET_CODE (our_prev) == CALL_INSN
1576 	  && (! CONST_OR_PURE_CALL_P (our_prev)
1577 	      || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
1578 	break;
1579 
1580       /* If we reach a SEQUENCE, it is too complex to try to
1581 	 do anything with it, so give up.  We can be run during
1582 	 and after reorg, so SEQUENCE rtl can legitimately show
1583 	 up here.  */
1584       if (GET_CODE (pat) == SEQUENCE)
1585 	break;
1586 
1587       if (GET_CODE (pat) == USE
1588 	  && GET_CODE (XEXP (pat, 0)) == INSN)
1589 	/* reorg creates USEs that look like this.  We leave them
1590 	   alone because reorg needs them for its own purposes.  */
1591 	break;
1592 
1593       if (reg_set_p (reg, pat))
1594 	{
1595 	  if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
1596 	    break;
1597 
1598 	  if (GET_CODE (pat) == PARALLEL)
1599 	    {
1600 	      /* If we find a SET of something else, we can't
1601 		 delete the insn.  */
1602 
1603 	      int i;
1604 
1605 	      for (i = 0; i < XVECLEN (pat, 0); i++)
1606 		{
1607 		  rtx part = XVECEXP (pat, 0, i);
1608 
1609 		  if (GET_CODE (part) == SET
1610 		      && SET_DEST (part) != reg)
1611 		    break;
1612 		}
1613 
1614 	      if (i == XVECLEN (pat, 0))
1615 		delete_computation (our_prev);
1616 	    }
1617 	  else if (GET_CODE (pat) == SET
1618 		   && GET_CODE (SET_DEST (pat)) == REG)
1619 	    {
1620 	      int dest_regno = REGNO (SET_DEST (pat));
1621 	      int dest_endregno
1622 		= (dest_regno
1623 		   + (dest_regno < FIRST_PSEUDO_REGISTER
1624 		      ? HARD_REGNO_NREGS (dest_regno,
1625 					  GET_MODE (SET_DEST (pat))) : 1));
1626 	      int regno = REGNO (reg);
1627 	      int endregno
1628 		= (regno
1629 		   + (regno < FIRST_PSEUDO_REGISTER
1630 		      ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1));
1631 
1632 	      if (dest_regno >= regno
1633 		  && dest_endregno <= endregno)
1634 		delete_computation (our_prev);
1635 
1636 	      /* We may have a multi-word hard register and some, but not
1637 		 all, of the words of the register are needed in subsequent
1638 		 insns.  Write REG_UNUSED notes for those parts that were not
1639 		 needed.  */
1640 	      else if (dest_regno <= regno
1641 		       && dest_endregno >= endregno)
1642 		{
1643 		  int i;
1644 
1645 		  REG_NOTES (our_prev)
1646 		    = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
1647 					 REG_NOTES (our_prev));
1648 
1649 		  for (i = dest_regno; i < dest_endregno; i++)
1650 		    if (! find_regno_note (our_prev, REG_UNUSED, i))
1651 		      break;
1652 
1653 		  if (i == dest_endregno)
1654 		    delete_computation (our_prev);
1655 		}
1656 	    }
1657 
1658 	  break;
1659 	}
1660 
1661       /* If PAT references the register that dies here, it is an
1662 	 additional use.  Hence any prior SET isn't dead.  However, this
1663 	 insn becomes the new place for the REG_DEAD note.  */
1664       if (reg_overlap_mentioned_p (reg, pat))
1665 	{
1666 	  XEXP (note, 1) = REG_NOTES (our_prev);
1667 	  REG_NOTES (our_prev) = note;
1668 	  break;
1669 	}
1670     }
1671 }
1672 
1673 /* Delete INSN and recursively delete insns that compute values used only
1674    by INSN.  This uses the REG_DEAD notes computed during flow analysis.
1675    If we are running before flow.c, we need do nothing since flow.c will
1676    delete dead code.  We also can't know if the registers being used are
1677    dead or not at this point.
1678 
1679    Otherwise, look at all our REG_DEAD notes.  If a previous insn does
1680    nothing other than set a register that dies in this insn, we can delete
1681    that insn as well.
1682 
1683    On machines with CC0, if CC0 is used in this insn, we may be able to
1684    delete the insn that set it.  */
1685 
1686 static void
1687 delete_computation (insn)
1688      rtx insn;
1689 {
1690   rtx note, next;
1691 
1692 #ifdef HAVE_cc0
1693   if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
1694     {
1695       rtx prev = prev_nonnote_insn (insn);
1696       /* We assume that at this stage
1697 	 CC's are always set explicitly
1698 	 and always immediately before the jump that
1699 	 will use them.  So if the previous insn
1700 	 exists to set the CC's, delete it
1701 	 (unless it performs auto-increments, etc.).  */
1702       if (prev && GET_CODE (prev) == INSN
1703 	  && sets_cc0_p (PATTERN (prev)))
1704 	{
1705 	  if (sets_cc0_p (PATTERN (prev)) > 0
1706 	      && ! side_effects_p (PATTERN (prev)))
1707 	    delete_computation (prev);
1708 	  else
1709 	    /* Otherwise, show that cc0 won't be used.  */
1710 	    REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
1711 						  cc0_rtx, REG_NOTES (prev));
1712 	}
1713     }
1714 #endif
1715 
1716   for (note = REG_NOTES (insn); note; note = next)
1717     {
1718       next = XEXP (note, 1);
1719 
1720       if (REG_NOTE_KIND (note) != REG_DEAD
1721 	  /* Verify that the REG_NOTE is legitimate.  */
1722 	  || GET_CODE (XEXP (note, 0)) != REG)
1723 	continue;
1724 
1725       delete_prior_computation (note, insn);
1726     }
1727 
1728   delete_related_insns (insn);
1729 }
1730 
1731 /* Delete insn INSN from the chain of insns and update label ref counts
1732    and delete insns now unreachable.
1733 
1734    Returns the first insn after INSN that was not deleted.
1735 
1736    Use of this function is deprecated.  Use delete_insn instead, with a
1737    subsequent cfg_cleanup pass to delete unreachable code if needed.  */
1738 
1739 rtx
1740 delete_related_insns (insn)
1741      rtx insn;
1742 {
1743   int was_code_label = (GET_CODE (insn) == CODE_LABEL);
1744   rtx note;
1745   rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);
1746 
1747   while (next && INSN_DELETED_P (next))
1748     next = NEXT_INSN (next);
1749 
1750   /* This insn is already deleted => return first following nondeleted.  */
1751   if (INSN_DELETED_P (insn))
1752     return next;
1753 
1754   delete_insn (insn);
1755 
1756   /* If instruction is followed by a barrier,
1757      delete the barrier too.  */
1758 
1759   if (next != 0 && GET_CODE (next) == BARRIER)
1760     delete_insn (next);
1761 
1762   /* If deleting a jump, decrement the count of the label,
1763      and delete the label if it is now unused.  */
1764 
1765   if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
1766     {
1767       rtx lab = JUMP_LABEL (insn), lab_next;
1768 
1769       if (LABEL_NUSES (lab) == 0)
1770 	{
1771 	  /* This can delete NEXT or PREV,
1772 	     either directly if NEXT is JUMP_LABEL (INSN),
1773 	     or indirectly through more levels of jumps.  */
1774 	  delete_related_insns (lab);
1775 
1776 	  /* I feel a little doubtful about this loop,
1777 	     but I see no clean and sure alternative way
1778 	     to find the first insn after INSN that is not now deleted.
1779 	     I hope this works.  */
1780 	  while (next && INSN_DELETED_P (next))
1781 	    next = NEXT_INSN (next);
1782 	  return next;
1783 	}
1784       else if ((lab_next = next_nonnote_insn (lab)) != NULL
1785 	       && GET_CODE (lab_next) == JUMP_INSN
1786 	       && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
1787 		   || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
1788 	{
1789 	  /* If we're deleting the tablejump, delete the dispatch table.
1790 	     We may not be able to kill the label immediately preceding
1791 	     just yet, as it might be referenced in code leading up to
1792 	     the tablejump.  */
1793 	  delete_related_insns (lab_next);
1794 	}
1795     }
1796 
1797   /* Likewise if we're deleting a dispatch table.  */
1798 
1799   if (GET_CODE (insn) == JUMP_INSN
1800       && (GET_CODE (PATTERN (insn)) == ADDR_VEC
1801 	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
1802     {
1803       rtx pat = PATTERN (insn);
1804       int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
1805       int len = XVECLEN (pat, diff_vec_p);
1806 
1807       for (i = 0; i < len; i++)
1808 	if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
1809 	  delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
1810       while (next && INSN_DELETED_P (next))
1811 	next = NEXT_INSN (next);
1812       return next;
1813     }
1814 
1815   /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note.  */
1816   if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
1817     for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1818       if (REG_NOTE_KIND (note) == REG_LABEL
1819 	  /* This could also be a NOTE_INSN_DELETED_LABEL note.  */
1820 	  && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
1821 	if (LABEL_NUSES (XEXP (note, 0)) == 0)
1822 	  delete_related_insns (XEXP (note, 0));
1823 
1824   while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
1825     prev = PREV_INSN (prev);
1826 
1827   /* If INSN was a label and a dispatch table follows it,
1828      delete the dispatch table.  The tablejump must have gone already.
1829      It isn't useful to fall through into a table.  */
1830 
1831   if (was_code_label
1832       && NEXT_INSN (insn) != 0
1833       && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
1834       && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
1835 	  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
1836     next = delete_related_insns (NEXT_INSN (insn));
1837 
1838   /* If INSN was a label, delete insns following it if now unreachable.  */
1839 
1840   if (was_code_label && prev && GET_CODE (prev) == BARRIER)
1841     {
1842       RTX_CODE code;
1843       while (next != 0
1844 	     && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
1845 		 || code == NOTE || code == BARRIER
1846 		 || (code == CODE_LABEL && INSN_DELETED_P (next))))
1847 	{
1848 	  if (code == NOTE
1849 	      && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
1850 	    next = NEXT_INSN (next);
1851 	  /* Keep going past other deleted labels to delete what follows.  */
1852 	  else if (code == CODE_LABEL && INSN_DELETED_P (next))
1853 	    next = NEXT_INSN (next);
1854 	  else
1855 	    /* Note: if this deletes a jump, it can cause more
1856 	       deletion of unreachable code, after a different label.
1857 	       As long as the value from this recursive call is correct,
1858 	       this invocation functions correctly.  */
1859 	    next = delete_related_insns (next);
1860 	}
1861     }
1862 
1863   return next;
1864 }
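
/* Usage sketch (illustrative only, kept out of the build): a caller that
   removes an unconditional jump and resumes scanning at the first insn
   that survives the deletion.  The surrounding pass logic is hypothetical;
   onlyjump_p and get_last_insn are assumed available from elsewhere.  */
#if 0
  if (GET_CODE (insn) == JUMP_INSN && onlyjump_p (insn))
    {
      /* This also removes the BARRIER after the jump and, if the target
	 label became unused, the label and the code behind it.  */
      rtx next = delete_related_insns (insn);

      /* Continue the scan from the first insn that is still live.  */
      insn = next ? next : get_last_insn ();
    }
#endif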
1865 
1866 /* Advance from INSN until reaching something not deleted,
1867    then return that insn.  May return INSN itself.  */
1868 
1869 rtx
next_nondeleted_insn(insn)1870 next_nondeleted_insn (insn)
1871      rtx insn;
1872 {
1873   while (INSN_DELETED_P (insn))
1874     insn = NEXT_INSN (insn);
1875   return insn;
1876 }
1877 
1878 /* Delete a range of insns from FROM to TO, inclusive.
1879    This is for the sake of peephole optimization, so assume
1880    that whatever these insns do will still be done by a new
1881    peephole insn that will replace them.  */
1882 
1883 void
delete_for_peephole(from,to)1884 delete_for_peephole (from, to)
1885      rtx from, to;
1886 {
1887   rtx insn = from;
1888 
1889   while (1)
1890     {
1891       rtx next = NEXT_INSN (insn);
1892       rtx prev = PREV_INSN (insn);
1893 
1894       if (GET_CODE (insn) != NOTE)
1895 	{
1896 	  INSN_DELETED_P (insn) = 1;
1897 
1898 	  /* Patch this insn out of the chain.  */
1899 	  /* We don't do this all at once, because we
1900 	     must preserve all NOTEs.  */
1901 	  if (prev)
1902 	    NEXT_INSN (prev) = next;
1903 
1904 	  if (next)
1905 	    PREV_INSN (next) = prev;
1906 	}
1907 
1908       if (insn == to)
1909 	break;
1910       insn = next;
1911     }
1912 
1913   /* Note that if TO is an unconditional jump
1914      we *do not* delete the BARRIER that follows,
1915      since the peephole that replaces this sequence
1916      is also an unconditional jump in that case.  */
1917 }
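
/* Usage sketch (illustrative only, kept out of the build): a peephole
   replacement emits its combined insn and then retires the matched range.
   REPLACEMENT_PAT is a hypothetical pattern built by the caller.  */
#if 0
  emit_insn_before (replacement_pat, from);
  delete_for_peephole (from, to);
  /* NOTEs inside FROM..TO stay linked into the chain; the other insns are
     marked deleted and spliced out, as described above.  */
#endif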
1918 
1919 /* We have determined that AVOIDED_INSN is never reached, and are
1920    about to delete it.  If the insn chain between AVOIDED_INSN and
1921    FINISH contains more than one line from the current function, and
1922    contains at least one operation, print a warning if the user asked
1923    for it.  If FINISH is NULL, look between AVOIDED_INSN and a LABEL.
1924 
1925    CSE and inlining can duplicate insns, so it's possible to get
1926    spurious warnings from this.  */
1927 
1928 void
never_reached_warning(avoided_insn,finish)1929 never_reached_warning (avoided_insn, finish)
1930      rtx avoided_insn, finish;
1931 {
1932   rtx insn;
1933   rtx a_line_note = NULL;
1934   int two_avoided_lines = 0, contains_insn = 0, reached_end = 0;
1935 
1936   if (!warn_notreached)
1937     return;
1938 
1939   /* Back up to the first of any NOTEs preceding avoided_insn; flow passes
1940      us the head of a block, a NOTE_INSN_BASIC_BLOCK, which often follows
1941      the line note.  */
1942   insn = avoided_insn;
1943   while (1)
1944     {
1945       rtx prev = PREV_INSN (insn);
1946       if (prev == NULL_RTX
1947 	  || GET_CODE (prev) != NOTE)
1948 	break;
1949       insn = prev;
1950     }
1951 
1952   /* Scan forwards, looking at LINE_NUMBER notes, until we hit a LABEL
1953      if FINISH is NULL, otherwise until we run out of insns.  */
1954 
1955   for (; insn != NULL; insn = NEXT_INSN (insn))
1956     {
1957       if ((finish == NULL && GET_CODE (insn) == CODE_LABEL)
1958 	  || GET_CODE (insn) == BARRIER)
1959 	break;
1960 
1961       if (GET_CODE (insn) == NOTE		/* A line number note?  */
1962 	  && NOTE_LINE_NUMBER (insn) >= 0)
1963 	{
1964 	  if (a_line_note == NULL)
1965 	    a_line_note = insn;
1966 	  else
1967 	    two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
1968 				  != NOTE_LINE_NUMBER (insn));
1969 	}
1970       else if (INSN_P (insn))
1971 	{
1972 	  if (reached_end)
1973 	    break;
1974 	  contains_insn = 1;
1975 	}
1976 
1977       if (insn == finish)
1978 	reached_end = 1;
1979     }
1980   if (two_avoided_lines && contains_insn)
1981     warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
1982 				NOTE_LINE_NUMBER (a_line_note),
1983 				"will never be executed");
1984 }
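
/* Usage sketch (illustrative only, kept out of the build): warn about a
   region that is about to be removed as unreachable.  The deletion step
   shown is hypothetical; only the warning call is defined above.  */
#if 0
  never_reached_warning (start, end);
  delete_unreachable_region (start, end);
#endif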
1985 
1986 /* Throughout LOC, redirect OLABEL to NLABEL.  Treat null OLABEL or
1987    NLABEL as a return.  Accrue modifications into the change group.  */
1988 
1989 static void
redirect_exp_1(loc,olabel,nlabel,insn)1990 redirect_exp_1 (loc, olabel, nlabel, insn)
1991      rtx *loc;
1992      rtx olabel, nlabel;
1993      rtx insn;
1994 {
1995   rtx x = *loc;
1996   RTX_CODE code = GET_CODE (x);
1997   int i;
1998   const char *fmt;
1999 
2000   if (code == LABEL_REF)
2001     {
2002       if (XEXP (x, 0) == olabel)
2003 	{
2004 	  rtx n;
2005 	  if (nlabel)
2006 	    n = gen_rtx_LABEL_REF (VOIDmode, nlabel);
2007 	  else
2008 	    n = gen_rtx_RETURN (VOIDmode);
2009 
2010 	  validate_change (insn, loc, n, 1);
2011 	  return;
2012 	}
2013     }
2014   else if (code == RETURN && olabel == 0)
2015     {
2016       x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
2017       if (loc == &PATTERN (insn))
2018 	x = gen_rtx_SET (VOIDmode, pc_rtx, x);
2019       validate_change (insn, loc, x, 1);
2020       return;
2021     }
2022 
2023   if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
2024       && GET_CODE (SET_SRC (x)) == LABEL_REF
2025       && XEXP (SET_SRC (x), 0) == olabel)
2026     {
2027       validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
2028       return;
2029     }
2030 
2031   fmt = GET_RTX_FORMAT (code);
2032   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2033     {
2034       if (fmt[i] == 'e')
2035 	redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
2036       else if (fmt[i] == 'E')
2037 	{
2038 	  int j;
2039 	  for (j = 0; j < XVECLEN (x, i); j++)
2040 	    redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
2041 	}
2042     }
2043 }
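
/* Example of the rewrite queued above (illustrative): given a conditional
   jump whose pattern is

	(set (pc) (if_then_else (eq (reg 0) (const_int 0))
				(label_ref OLABEL)
				(pc)))

   a call with a non-null NLABEL queues the replacement of (label_ref OLABEL)
   by (label_ref NLABEL); with NLABEL == 0 the label_ref becomes (return)
   instead.  Nothing is modified until the change group is applied.  */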
2044 
2045 /* Similar, but apply the change group and report success or failure.  */
2046 
2047 static int
redirect_exp(olabel,nlabel,insn)2048 redirect_exp (olabel, nlabel, insn)
2049      rtx olabel, nlabel;
2050      rtx insn;
2051 {
2052   rtx *loc;
2053 
2054   if (GET_CODE (PATTERN (insn)) == PARALLEL)
2055     loc = &XVECEXP (PATTERN (insn), 0, 0);
2056   else
2057     loc = &PATTERN (insn);
2058 
2059   redirect_exp_1 (loc, olabel, nlabel, insn);
2060   if (num_validated_changes () == 0)
2061     return 0;
2062 
2063   return apply_change_group ();
2064 }
2065 
2066 /* Make JUMP go to NLABEL instead of where it jumps now.  Accrue
2067    the modifications into the change group.  Return false if we did
2068    not see how to do that.  */
2069 
2070 int
redirect_jump_1(jump,nlabel)2071 redirect_jump_1 (jump, nlabel)
2072      rtx jump, nlabel;
2073 {
2074   int ochanges = num_validated_changes ();
2075   rtx *loc;
2076 
2077   if (GET_CODE (PATTERN (jump)) == PARALLEL)
2078     loc = &XVECEXP (PATTERN (jump), 0, 0);
2079   else
2080     loc = &PATTERN (jump);
2081 
2082   redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
2083   return num_validated_changes () > ochanges;
2084 }
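
/* Usage sketch (illustrative only, kept out of the build): queue a
   redirection together with other validate_change requests and commit
   them as one group.  The caller then does the JUMP_LABEL / LABEL_NUSES
   bookkeeping itself, as redirect_jump does below.  NEW_LABEL is a
   hypothetical label chosen by the caller.  */
#if 0
  if (redirect_jump_1 (jump, new_label) && apply_change_group ())
    {
      --LABEL_NUSES (JUMP_LABEL (jump));
      JUMP_LABEL (jump) = new_label;
      ++LABEL_NUSES (new_label);
    }
#endif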
2085 
2086 /* Make JUMP go to NLABEL instead of where it jumps now.  If the old
2087    jump target label is unused as a result, it and the code following
2088    it may be deleted.
2089 
2090    If NLABEL is zero, we are to turn the jump into a (possibly conditional)
2091    RETURN insn.
2092 
2093    The return value will be 1 if the change was made, 0 if it wasn't
2094    (this can only occur for NLABEL == 0).  */
2095 
2096 int
redirect_jump(jump,nlabel,delete_unused)2097 redirect_jump (jump, nlabel, delete_unused)
2098      rtx jump, nlabel;
2099      int delete_unused;
2100 {
2101   rtx olabel = JUMP_LABEL (jump);
2102 
2103   if (nlabel == olabel)
2104     return 1;
2105 
2106   if (! redirect_exp (olabel, nlabel, jump))
2107     return 0;
2108 
2109   JUMP_LABEL (jump) = nlabel;
2110   if (nlabel)
2111     ++LABEL_NUSES (nlabel);
2112 
2113   /* If we're eliding the jump over exception cleanups at the end of a
2114      function, move the function end note so that -Wreturn-type works.  */
2115   if (olabel && nlabel
2116       && NEXT_INSN (olabel)
2117       && GET_CODE (NEXT_INSN (olabel)) == NOTE
2118       && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
2119     emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);
2120 
2121   if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused
2122       /* Undefined labels will remain outside the insn stream.  */
2123       && INSN_UID (olabel))
2124     delete_related_insns (olabel);
2125 
2126   return 1;
2127 }
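
/* Usage sketch (illustrative only, kept out of the build): try to turn
   JUMP into a (possibly conditional) RETURN, which is the one case that
   can fail.  */
#if 0
  if (redirect_jump (jump, NULL_RTX, 1))
    {
      /* JUMP is now a return; its old target label, and any code that
	 became unreachable behind it, were deleted if no longer used.  */
    }
#endif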
2128 
2129 /* Invert the jump condition of rtx X contained in jump insn, INSN.
2130    Accrue the modifications into the change group.  */
2131 
2132 static void
invert_exp_1(insn)2133 invert_exp_1 (insn)
2134      rtx insn;
2135 {
2136   RTX_CODE code;
2137   rtx x = pc_set (insn);
2138 
2139   if (!x)
2140     abort ();
2141   x = SET_SRC (x);
2142 
2143   code = GET_CODE (x);
2144 
2145   if (code == IF_THEN_ELSE)
2146     {
2147       rtx comp = XEXP (x, 0);
2148       rtx tem;
2149       enum rtx_code reversed_code;
2150 
2151       /* We can do this in two ways: the preferable way is to reverse
2152 	 the comparison code, provided reversed_comparison_code can find a
2153 	 safe reversal.  Otherwise, swap the THEN-part and ELSE-part
2154 	 of the IF_THEN_ELSE.  If we can't do either, fail.  */
2155 
2156       reversed_code = reversed_comparison_code (comp, insn);
2157 
2158       if (reversed_code != UNKNOWN)
2159 	{
2160 	  validate_change (insn, &XEXP (x, 0),
2161 			   gen_rtx_fmt_ee (reversed_code,
2162 					   GET_MODE (comp), XEXP (comp, 0),
2163 					   XEXP (comp, 1)),
2164 			   1);
2165 	  return;
2166 	}
2167 
2168       tem = XEXP (x, 1);
2169       validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
2170       validate_change (insn, &XEXP (x, 2), tem, 1);
2171     }
2172   else
2173     abort ();
2174 }
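
/* Example of the two strategies above (illustrative): a pattern

	(set (pc) (if_then_else (COND X Y) (label_ref L) (pc)))

   is preferably rewritten with the comparison reversed,

	(set (pc) (if_then_else (REV-COND X Y) (label_ref L) (pc)))

   when reversed_comparison_code can find a safe reversal; otherwise the
   THEN and ELSE arms are exchanged:

	(set (pc) (if_then_else (COND X Y) (pc) (label_ref L)))  */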
2175 
2176 /* Invert the jump condition of conditional jump insn, INSN.
2177 
2178    Return 1 if we can do so, 0 if we cannot find a way to do so that
2179    matches a pattern.  */
2180 
2181 static int
invert_exp(insn)2182 invert_exp (insn)
2183      rtx insn;
2184 {
2185   invert_exp_1 (insn);
2186   if (num_validated_changes () == 0)
2187     return 0;
2188 
2189   return apply_change_group ();
2190 }
2191 
2192 /* Invert the condition of the jump JUMP, and make it jump to label
2193    NLABEL instead of where it jumps now.  Accrue changes into the
2194    change group.  Return false if we didn't see how to perform the
2195    inversion and redirection.  */
2196 
2197 int
invert_jump_1(jump,nlabel)2198 invert_jump_1 (jump, nlabel)
2199      rtx jump, nlabel;
2200 {
2201   int ochanges;
2202 
2203   ochanges = num_validated_changes ();
2204   invert_exp_1 (jump);
2205   if (num_validated_changes () == ochanges)
2206     return 0;
2207 
2208   return redirect_jump_1 (jump, nlabel);
2209 }
2210 
2211 /* Invert the condition of the jump JUMP, and make it jump to label
2212    NLABEL instead of where it jumps now.  Return true if successful.  */
2213 
2214 int
invert_jump(jump,nlabel,delete_unused)2215 invert_jump (jump, nlabel, delete_unused)
2216      rtx jump, nlabel;
2217      int delete_unused;
2218 {
2219   /* We have to either invert the condition and change the label or
2220      do neither.  Either operation could fail.  We first try to invert
2221      the jump. If that succeeds, we try changing the label.  If that fails,
2222      we invert the jump back to what it was.  */
2223 
2224   if (! invert_exp (jump))
2225     return 0;
2226 
2227   if (redirect_jump (jump, nlabel, delete_unused))
2228     {
2229       invert_br_probabilities (jump);
2230 
2231       return 1;
2232     }
2233 
2234   if (! invert_exp (jump))
2235     /* This should just be putting it back the way it was.  */
2236     abort ();
2237 
2238   return 0;
2239 }
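
/* Usage sketch (illustrative only, kept out of the build): reverse a
   conditional branch and send it to NEW_LABEL, e.g. so that the old
   fall-through block can be moved away.  NEW_LABEL is hypothetical;
   any_condjump_p is another jump.c predicate assumed available.  */
#if 0
  if (any_condjump_p (jump) && invert_jump (jump, new_label, 1))
    {
      /* The condition is reversed, JUMP now targets NEW_LABEL, and the
	 branch probability notes were flipped by invert_br_probabilities.  */
    }
#endif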
2240 
2241 
2242 /* Like rtx_equal_p except that it considers two REGs as equal
2243    if they renumber to the same value and considers two commutative
2244    operations to be the same if the order of the operands has been
2245    reversed.
2246 
2247    ??? Addition is not commutative on the PA due to the weird implicit
2248    space register selection rules for memory addresses.  Therefore, we
2249    don't consider a + b == b + a.
2250 
2251    We could/should make this test a little tighter, possibly by disabling
2252    it only on the PA via some backend macro, or by disabling this case
2253    only when the PLUS is inside a MEM.  */
2254 
2255 int
rtx_renumbered_equal_p(x,y)2256 rtx_renumbered_equal_p (x, y)
2257      rtx x, y;
2258 {
2259   int i;
2260   RTX_CODE code = GET_CODE (x);
2261   const char *fmt;
2262 
2263   if (x == y)
2264     return 1;
2265 
2266   if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
2267       && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
2268 				  && GET_CODE (SUBREG_REG (y)) == REG)))
2269     {
2270       int reg_x = -1, reg_y = -1;
2271       int byte_x = 0, byte_y = 0;
2272 
2273       if (GET_MODE (x) != GET_MODE (y))
2274 	return 0;
2275 
2276       /* If we haven't done any renumbering, don't
2277 	 make any assumptions.  */
2278       if (reg_renumber == 0)
2279 	return rtx_equal_p (x, y);
2280 
2281       if (code == SUBREG)
2282 	{
2283 	  reg_x = REGNO (SUBREG_REG (x));
2284 	  byte_x = SUBREG_BYTE (x);
2285 
2286 	  if (reg_renumber[reg_x] >= 0)
2287 	    {
2288 	      reg_x = subreg_regno_offset (reg_renumber[reg_x],
2289 					   GET_MODE (SUBREG_REG (x)),
2290 					   byte_x,
2291 					   GET_MODE (x));
2292 	      byte_x = 0;
2293 	    }
2294 	}
2295       else
2296 	{
2297 	  reg_x = REGNO (x);
2298 	  if (reg_renumber[reg_x] >= 0)
2299 	    reg_x = reg_renumber[reg_x];
2300 	}
2301 
2302       if (GET_CODE (y) == SUBREG)
2303 	{
2304 	  reg_y = REGNO (SUBREG_REG (y));
2305 	  byte_y = SUBREG_BYTE (y);
2306 
2307 	  if (reg_renumber[reg_y] >= 0)
2308 	    {
2309 	      reg_y = subreg_regno_offset (reg_renumber[reg_y],
2310 					   GET_MODE (SUBREG_REG (y)),
2311 					   byte_y,
2312 					   GET_MODE (y));
2313 	      byte_y = 0;
2314 	    }
2315 	}
2316       else
2317 	{
2318 	  reg_y = REGNO (y);
2319 	  if (reg_renumber[reg_y] >= 0)
2320 	    reg_y = reg_renumber[reg_y];
2321 	}
2322 
2323       return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
2324     }
2325 
2326   /* Now we have disposed of all the cases
2327      in which different rtx codes can match.  */
2328   if (code != GET_CODE (y))
2329     return 0;
2330 
2331   switch (code)
2332     {
2333     case PC:
2334     case CC0:
2335     case ADDR_VEC:
2336     case ADDR_DIFF_VEC:
2337       return 0;
2338 
2339     case CONST_INT:
2340       return INTVAL (x) == INTVAL (y);
2341 
2342     case LABEL_REF:
2343       /* We can't assume nonlocal labels have their following insns yet.  */
2344       if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
2345 	return XEXP (x, 0) == XEXP (y, 0);
2346 
2347       /* Two label-refs are equivalent if they point at labels
2348 	 in the same position in the instruction stream.  */
2349       return (next_real_insn (XEXP (x, 0))
2350 	      == next_real_insn (XEXP (y, 0)));
2351 
2352     case SYMBOL_REF:
2353       return XSTR (x, 0) == XSTR (y, 0);
2354 
2355     case CODE_LABEL:
2356       /* If we didn't match EQ equality above, they aren't the same.  */
2357       return 0;
2358 
2359     default:
2360       break;
2361     }
2362 
2363   /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
2364 
2365   if (GET_MODE (x) != GET_MODE (y))
2366     return 0;
2367 
2368   /* For commutative operations, the RTXs match if the operands match
2369      in either order.  Also handle simple binary and unary cases without a loop.
2370 
2371      ??? Don't consider PLUS a commutative operator; see comments above.  */
2372   if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
2373       && code != PLUS)
2374     return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
2375 	     && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
2376 	    || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
2377 		&& rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
2378   else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
2379     return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
2380 	    && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
2381   else if (GET_RTX_CLASS (code) == '1')
2382     return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
2383 
2384   /* Compare the elements.  If any pair of corresponding elements
2385      fails to match, return 0 for the whole thing.  */
2386 
2387   fmt = GET_RTX_FORMAT (code);
2388   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2389     {
2390       int j;
2391       switch (fmt[i])
2392 	{
2393 	case 'w':
2394 	  if (XWINT (x, i) != XWINT (y, i))
2395 	    return 0;
2396 	  break;
2397 
2398 	case 'i':
2399 	  if (XINT (x, i) != XINT (y, i))
2400 	    return 0;
2401 	  break;
2402 
2403 	case 't':
2404 	  if (XTREE (x, i) != XTREE (y, i))
2405 	    return 0;
2406 	  break;
2407 
2408 	case 's':
2409 	  if (strcmp (XSTR (x, i), XSTR (y, i)))
2410 	    return 0;
2411 	  break;
2412 
2413 	case 'e':
2414 	  if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
2415 	    return 0;
2416 	  break;
2417 
2418 	case 'u':
2419 	  if (XEXP (x, i) != XEXP (y, i))
2420 	    return 0;
2421 	  /* fall through.  */
2422 	case '0':
2423 	  break;
2424 
2425 	case 'E':
2426 	  if (XVECLEN (x, i) != XVECLEN (y, i))
2427 	    return 0;
2428 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2429 	    if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
2430 	      return 0;
2431 	  break;
2432 
2433 	default:
2434 	  abort ();
2435 	}
2436     }
2437   return 1;
2438 }
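
/* Usage sketch (illustrative only, kept out of the build): decide whether
   two insns compute the same thing once pseudos are renumbered into hard
   registers, as a cross-jumping style comparison might.  The merge step
   is hypothetical.  */
#if 0
  if (GET_CODE (i1) == GET_CODE (i2)
      && rtx_renumbered_equal_p (PATTERN (i1), PATTERN (i2)))
    merge_equivalent_insns (i1, i2);
#endif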
2439 
2440 /* If X is a hard register or equivalent to one or a subregister of one,
2441    return the hard register number.  If X is a pseudo register that was not
2442    assigned a hard register, return the pseudo register number.  Otherwise,
2443    return -1.  Any rtx is valid for X.  */
2444 
2445 int
true_regnum(x)2446 true_regnum (x)
2447      rtx x;
2448 {
2449   if (GET_CODE (x) == REG)
2450     {
2451       if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
2452 	return reg_renumber[REGNO (x)];
2453       return REGNO (x);
2454     }
2455   if (GET_CODE (x) == SUBREG)
2456     {
2457       int base = true_regnum (SUBREG_REG (x));
2458       if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
2459 	return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
2460 					   GET_MODE (SUBREG_REG (x)),
2461 					   SUBREG_BYTE (x), GET_MODE (x));
2462     }
2463   return -1;
2464 }
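
/* Usage sketch (illustrative only, kept out of the build): test whether
   operand OP ultimately lives in a particular hard register, seeing
   through a SUBREG of a pseudo that was assigned one.  */
#if 0
  if (true_regnum (op) == HARD_FRAME_POINTER_REGNUM)
    {
      /* OP is (a piece of) the frame pointer.  */
    }
#endif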
2465 
2466 /* Return regno of the register REG and handle subregs too.  */
2467 unsigned int
reg_or_subregno(reg)2468 reg_or_subregno (reg)
2469      rtx reg;
2470 {
2471   if (REG_P (reg))
2472     return REGNO (reg);
2473   if (GET_CODE (reg) == SUBREG)
2474     return REGNO (SUBREG_REG (reg));
2475   abort ();
2476 }
2477