xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/ifcvt.c (revision 413d532bcc3f62d122e56d92e13ac64825a40baf)
1 /* If-conversion support.
2    Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2010
3    Free Software Foundation, Inc.
4 
5    This file is part of GCC.
6 
7    GCC is free software; you can redistribute it and/or modify it
8    under the terms of the GNU General Public License as published by
9    the Free Software Foundation; either version 3, or (at your option)
10    any later version.
11 
12    GCC is distributed in the hope that it will be useful, but WITHOUT
13    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14    or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
15    License for more details.
16 
17    You should have received a copy of the GNU General Public License
18    along with GCC; see the file COPYING3.  If not see
19    <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 
26 #include "rtl.h"
27 #include "regs.h"
28 #include "function.h"
29 #include "flags.h"
30 #include "insn-config.h"
31 #include "recog.h"
32 #include "except.h"
33 #include "hard-reg-set.h"
34 #include "basic-block.h"
35 #include "expr.h"
36 #include "real.h"
37 #include "output.h"
38 #include "optabs.h"
39 #include "toplev.h"
40 #include "tm_p.h"
41 #include "cfgloop.h"
42 #include "target.h"
43 #include "timevar.h"
44 #include "tree-pass.h"
45 #include "df.h"
46 #include "vec.h"
47 #include "vecprim.h"
48 #include "dbgcnt.h"
49 
50 #ifndef HAVE_conditional_move
51 #define HAVE_conditional_move 0
52 #endif
53 #ifndef HAVE_incscc
54 #define HAVE_incscc 0
55 #endif
56 #ifndef HAVE_decscc
57 #define HAVE_decscc 0
58 #endif
59 #ifndef HAVE_trap
60 #define HAVE_trap 0
61 #endif
62 
63 #ifndef MAX_CONDITIONAL_EXECUTE
64 #define MAX_CONDITIONAL_EXECUTE \
65   (BRANCH_COST (optimize_function_for_speed_p (cfun), false) \
66    + 1)
67 #endif
68 
69 #define IFCVT_MULTIPLE_DUMPS 1
70 
71 #define NULL_BLOCK	((basic_block) NULL)
72 
73 /* # of IF-THEN or IF-THEN-ELSE blocks we looked at  */
74 static int num_possible_if_blocks;
75 
/* # of IF-THEN or IF-THEN-ELSE blocks that were converted to conditional
   execution.  */
78 static int num_updated_if_blocks;
79 
80 /* # of changes made.  */
81 static int num_true_changes;
82 
83 /* Whether conditional execution changes were made.  */
84 static int cond_exec_changed_p;
85 
86 /* Forward references.  */
87 static int count_bb_insns (const_basic_block);
88 static bool cheap_bb_rtx_cost_p (const_basic_block, int);
89 static rtx first_active_insn (basic_block);
90 static rtx last_active_insn (basic_block, int);
91 static basic_block block_fallthru (basic_block);
92 static int cond_exec_process_insns (ce_if_block_t *, rtx, rtx, rtx, rtx, int);
93 static rtx cond_exec_get_condition (rtx);
94 static rtx noce_get_condition (rtx, rtx *, bool);
95 static int noce_operand_ok (const_rtx);
96 static void merge_if_block (ce_if_block_t *);
97 static int find_cond_trap (basic_block, edge, edge);
98 static basic_block find_if_header (basic_block, int);
99 static int block_jumps_and_fallthru_p (basic_block, basic_block);
100 static int noce_find_if_block (basic_block, edge, edge, int);
101 static int cond_exec_find_if_block (ce_if_block_t *);
102 static int find_if_case_1 (basic_block, edge, edge);
103 static int find_if_case_2 (basic_block, edge, edge);
104 static int find_memory (rtx *, void *);
105 static int dead_or_predicable (basic_block, basic_block, basic_block,
106 			       basic_block, int);
107 static void noce_emit_move_insn (rtx, rtx);
108 static rtx block_has_only_trap (basic_block);
109 
110 /* Count the number of non-jump active insns in BB.  */
111 
112 static int
113 count_bb_insns (const_basic_block bb)
114 {
115   int count = 0;
116   rtx insn = BB_HEAD (bb);
117 
118   while (1)
119     {
120       if (CALL_P (insn) || NONJUMP_INSN_P (insn))
121 	count++;
122 
123       if (insn == BB_END (bb))
124 	break;
125       insn = NEXT_INSN (insn);
126     }
127 
128   return count;
129 }
130 
131 /* Determine whether the total insn_rtx_cost on non-jump insns in
132    basic block BB is less than MAX_COST.  This function returns
133    false if the cost of any instruction could not be estimated.  */
134 
135 static bool
136 cheap_bb_rtx_cost_p (const_basic_block bb, int max_cost)
137 {
138   int count = 0;
139   rtx insn = BB_HEAD (bb);
140   bool speed = optimize_bb_for_speed_p (bb);
141 
142   while (1)
143     {
144       if (NONJUMP_INSN_P (insn))
145 	{
146 	  int cost = insn_rtx_cost (PATTERN (insn), speed);
147 	  if (cost == 0)
148 	    return false;
149 
150 	  /* If this instruction is the load or set of a "stack" register,
151 	     such as a floating point register on x87, then the cost of
152 	     speculatively executing this insn may need to include
153 	     the additional cost of popping its result off of the
154 	     register stack.  Unfortunately, correctly recognizing and
155 	     accounting for this additional overhead is tricky, so for
156 	     now we simply prohibit such speculative execution.  */
157 #ifdef STACK_REGS
158 	  {
159 	    rtx set = single_set (insn);
160 	    if (set && STACK_REG_P (SET_DEST (set)))
161 	      return false;
162 	  }
163 #endif
164 
165 	  count += cost;
166 	  if (count >= max_cost)
167 	    return false;
168 	}
169       else if (CALL_P (insn))
170 	return false;
171 
172       if (insn == BB_END (bb))
173 	break;
174       insn = NEXT_INSN (insn);
175     }
176 
177   return true;
178 }
179 
180 /* Return the first non-jump active insn in the basic block.  */
181 
182 static rtx
183 first_active_insn (basic_block bb)
184 {
185   rtx insn = BB_HEAD (bb);
186 
187   if (LABEL_P (insn))
188     {
189       if (insn == BB_END (bb))
190 	return NULL_RTX;
191       insn = NEXT_INSN (insn);
192     }
193 
194   while (NOTE_P (insn) || DEBUG_INSN_P (insn))
195     {
196       if (insn == BB_END (bb))
197 	return NULL_RTX;
198       insn = NEXT_INSN (insn);
199     }
200 
201   if (JUMP_P (insn))
202     return NULL_RTX;
203 
204   return insn;
205 }
206 
/* Return the last active (non-jump) insn in the basic block.  */
208 
209 static rtx
210 last_active_insn (basic_block bb, int skip_use_p)
211 {
212   rtx insn = BB_END (bb);
213   rtx head = BB_HEAD (bb);
214 
215   while (NOTE_P (insn)
216 	 || JUMP_P (insn)
217 	 || DEBUG_INSN_P (insn)
218 	 || (skip_use_p
219 	     && NONJUMP_INSN_P (insn)
220 	     && GET_CODE (PATTERN (insn)) == USE))
221     {
222       if (insn == head)
223 	return NULL_RTX;
224       insn = PREV_INSN (insn);
225     }
226 
227   if (LABEL_P (insn))
228     return NULL_RTX;
229 
230   return insn;
231 }
232 
/* Return the basic block reached by falling through the basic block BB.  */
234 
235 static basic_block
236 block_fallthru (basic_block bb)
237 {
238   edge e;
239   edge_iterator ei;
240 
241   FOR_EACH_EDGE (e, ei, bb->succs)
242     if (e->flags & EDGE_FALLTHRU)
243       break;
244 
245   return (e) ? e->dest : NULL_BLOCK;
246 }
247 
248 /* Go through a bunch of insns, converting them to conditional
249    execution format if possible.  Return TRUE if all of the non-note
250    insns were processed.  */
251 
static int
cond_exec_process_insns (ce_if_block_t *ce_info ATTRIBUTE_UNUSED,
			 /* if block information */rtx start,
			 /* first insn to look at */rtx end,
			 /* last insn to look at */rtx test,
			 /* conditional execution test */rtx prob_val,
			 /* probability of branch taken. */int mod_ok)
{
  /* Set once an insn modifies TEST; after that, no further active insn
     may follow (unless MOD_OK), since the predicate would change under
     the remaining insns.  */
  int must_be_last = FALSE;
  rtx insn;
  rtx xtest;
  rtx pattern;

  if (!start || !end)
    return FALSE;

  for (insn = start; ; insn = NEXT_INSN (insn))
    {
      /* Notes and debug insns need no predication.  */
      if (NOTE_P (insn) || DEBUG_INSN_P (insn))
	goto insn_done;

      gcc_assert(NONJUMP_INSN_P (insn) || CALL_P (insn));

      /* Remove USE insns that get in the way.  */
      if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
	{
	  /* ??? Ug.  Actually unlinking the thing is problematic,
	     given what we'd have to coordinate with our callers.  */
	  SET_INSN_DELETED (insn);
	  goto insn_done;
	}

      /* Last insn wasn't last?  */
      if (must_be_last)
	return FALSE;

      if (modified_in_p (test, insn))
	{
	  if (!mod_ok)
	    return FALSE;
	  must_be_last = TRUE;
	}

      /* Now build the conditional form of the instruction.  */
      pattern = PATTERN (insn);
      xtest = copy_rtx (test);

      /* If this is already a COND_EXEC, rewrite the test to be an AND of the
         two conditions.  */
      if (GET_CODE (pattern) == COND_EXEC)
	{
	  /* Both predicates must live in the same mode to be combined.  */
	  if (GET_MODE (xtest) != GET_MODE (COND_EXEC_TEST (pattern)))
	    return FALSE;

	  xtest = gen_rtx_AND (GET_MODE (xtest), xtest,
			       COND_EXEC_TEST (pattern));
	  pattern = COND_EXEC_CODE (pattern);
	}

      pattern = gen_rtx_COND_EXEC (VOIDmode, xtest, pattern);

      /* If the machine needs to modify the insn being conditionally executed,
         say for example to force a constant integer operand into a temp
         register, do so here.  */
#ifdef IFCVT_MODIFY_INSN
      IFCVT_MODIFY_INSN (ce_info, pattern, insn);
      if (! pattern)
	return FALSE;
#endif

      /* Queue the change; the caller commits or cancels the whole group
	 via apply_change_group / cancel_changes.  */
      validate_change (insn, &PATTERN (insn), pattern, 1);

      /* A predicated call only executes when the test holds, so attach
	 the branch probability for downstream passes to weight it.  */
      if (CALL_P (insn) && prob_val)
	validate_change (insn, &REG_NOTES (insn),
			 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
					  REG_NOTES (insn)), 1);

    insn_done:
      if (insn == end)
	break;
    }

  return TRUE;
}
336 
337 /* Return the condition for a jump.  Do not do any special processing.  */
338 
339 static rtx
340 cond_exec_get_condition (rtx jump)
341 {
342   rtx test_if, cond;
343 
344   if (any_condjump_p (jump))
345     test_if = SET_SRC (pc_set (jump));
346   else
347     return NULL_RTX;
348   cond = XEXP (test_if, 0);
349 
350   /* If this branches to JUMP_LABEL when the condition is false,
351      reverse the condition.  */
352   if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
353       && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
354     {
355       enum rtx_code rev = reversed_comparison_code (cond, jump);
356       if (rev == UNKNOWN)
357 	return NULL_RTX;
358 
359       cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
360 			     XEXP (cond, 1));
361     }
362 
363   return cond;
364 }
365 
366 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
367    to conditional execution.  Return TRUE if we were successful at
368    converting the block.  */
369 
static int
cond_exec_process_if_block (ce_if_block_t * ce_info,
			    /* if block information */int do_multiple_p)
{
  basic_block test_bb = ce_info->test_bb;	/* last test block */
  basic_block then_bb = ce_info->then_bb;	/* THEN */
  basic_block else_bb = ce_info->else_bb;	/* ELSE or NULL */
  rtx test_expr;		/* expression in IF_THEN_ELSE that is tested */
  rtx then_start;		/* first insn in THEN block */
  rtx then_end;			/* last insn in THEN block */
  rtx else_start = NULL_RTX;	/* first insn in ELSE block or NULL */
  rtx else_end = NULL_RTX;	/* last insn in ELSE block */
  int max;			/* max # of insns to convert.  */
  int then_mod_ok;		/* whether conditional mods are ok in THEN */
  rtx true_expr;		/* test for else block insns */
  rtx false_expr;		/* test for then block insns */
  rtx true_prob_val;		/* probability of else block */
  rtx false_prob_val;		/* probability of then block */
  int n_insns;
  enum rtx_code false_code;

  /* If test is comprised of && or || elements, and we've failed at handling
     all of them together, just use the last test if it is the special case of
     && elements without an ELSE block.  */
  if (!do_multiple_p && ce_info->num_multiple_test_blocks)
    {
      if (else_bb || ! ce_info->and_and_p)
	return FALSE;

      ce_info->test_bb = test_bb = ce_info->last_test_bb;
      ce_info->num_multiple_test_blocks = 0;
      ce_info->num_and_and_blocks = 0;
      ce_info->num_or_or_blocks = 0;
    }

  /* Find the conditional jump to the ELSE or JOIN part, and isolate
     the test.  */
  test_expr = cond_exec_get_condition (BB_END (test_bb));
  if (! test_expr)
    return FALSE;

  /* If the conditional jump is more than just a conditional jump,
     then we can not do conditional execution conversion on this block.  */
  if (! onlyjump_p (BB_END (test_bb)))
    return FALSE;

  /* Collect the bounds of where we're to search, skipping any labels, jumps
     and notes at the beginning and end of the block.  Then count the total
     number of insns and see if it is small enough to convert.  */
  then_start = first_active_insn (then_bb);
  then_end = last_active_insn (then_bb, TRUE);
  n_insns = ce_info->num_then_insns = count_bb_insns (then_bb);
  max = MAX_CONDITIONAL_EXECUTE;

  if (else_bb)
    {
      /* An ELSE block doubles the budget, since each arm is predicated
	 separately.  */
      max *= 2;
      else_start = first_active_insn (else_bb);
      else_end = last_active_insn (else_bb, TRUE);
      n_insns += ce_info->num_else_insns = count_bb_insns (else_bb);
    }

  if (n_insns > max)
    return FALSE;

  /* Map test_expr/test_jump into the appropriate MD tests to use on
     the conditionally executed code.  */

  true_expr = test_expr;

  false_code = reversed_comparison_code (true_expr, BB_END (test_bb));
  if (false_code != UNKNOWN)
    false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
				 XEXP (true_expr, 0), XEXP (true_expr, 1));
  else
    false_expr = NULL_RTX;

#ifdef IFCVT_MODIFY_TESTS
  /* If the machine description needs to modify the tests, such as setting a
     conditional execution register from a comparison, it can do so here.  */
  IFCVT_MODIFY_TESTS (ce_info, true_expr, false_expr);

  /* See if the conversion failed.  */
  if (!true_expr || !false_expr)
    goto fail;
#endif

  /* Derive the probability for the inverted test from the branch
     probability note, if one is present.  */
  true_prob_val = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
  if (true_prob_val)
    {
      true_prob_val = XEXP (true_prob_val, 0);
      false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
    }
  else
    false_prob_val = NULL_RTX;

  /* If we have && or || tests, do them here.  These tests are in the adjacent
     blocks after the first block containing the test.  */
  if (ce_info->num_multiple_test_blocks > 0)
    {
      basic_block bb = test_bb;
      basic_block last_test_bb = ce_info->last_test_bb;

      if (! false_expr)
	goto fail;

      do
	{
	  rtx start, end;
	  rtx t, f;
	  enum rtx_code f_code;

	  bb = block_fallthru (bb);
	  start = first_active_insn (bb);
	  end = last_active_insn (bb, TRUE);
	  /* Predicate the insns of this intermediate test block with the
	     accumulated FALSE condition; modifications of the test are not
	     allowed here (last argument FALSE).  */
	  if (start
	      && ! cond_exec_process_insns (ce_info, start, end, false_expr,
					    false_prob_val, FALSE))
	    goto fail;

	  /* If the conditional jump is more than just a conditional jump, then
	     we can not do conditional execution conversion on this block.  */
	  if (! onlyjump_p (BB_END (bb)))
	    goto fail;

	  /* Find the conditional jump and isolate the test.  */
	  t = cond_exec_get_condition (BB_END (bb));
	  if (! t)
	    goto fail;

	  f_code = reversed_comparison_code (t, BB_END (bb));
	  if (f_code == UNKNOWN)
	    goto fail;

	  f = gen_rtx_fmt_ee (f_code, GET_MODE (t), XEXP (t, 0), XEXP (t, 1));
	  /* Combine this block's test with the accumulated condition:
	     for &&-chains the true side ANDs and the false side ORs
	     (De Morgan), and vice versa for ||-chains.  */
	  if (ce_info->and_and_p)
	    {
	      t = gen_rtx_AND (GET_MODE (t), true_expr, t);
	      f = gen_rtx_IOR (GET_MODE (t), false_expr, f);
	    }
	  else
	    {
	      t = gen_rtx_IOR (GET_MODE (t), true_expr, t);
	      f = gen_rtx_AND (GET_MODE (t), false_expr, f);
	    }

	  /* If the machine description needs to modify the tests, such as
	     setting a conditional execution register from a comparison, it can
	     do so here.  */
#ifdef IFCVT_MODIFY_MULTIPLE_TESTS
	  IFCVT_MODIFY_MULTIPLE_TESTS (ce_info, bb, t, f);

	  /* See if the conversion failed.  */
	  if (!t || !f)
	    goto fail;
#endif

	  true_expr = t;
	  false_expr = f;
	}
      while (bb != last_test_bb);
    }

  /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
     in the THEN block.  */
  then_mod_ok = (else_bb == NULL_BLOCK);

  /* Go through the THEN and ELSE blocks converting the insns if possible
     to conditional execution.  */

  if (then_end
      && (! false_expr
	  || ! cond_exec_process_insns (ce_info, then_start, then_end,
					false_expr, false_prob_val,
					then_mod_ok)))
    goto fail;

  if (else_bb && else_end
      && ! cond_exec_process_insns (ce_info, else_start, else_end,
				    true_expr, true_prob_val, TRUE))
    goto fail;

  /* If we cannot apply the changes, fail.  Do not go through the normal fail
     processing, since apply_change_group will call cancel_changes.  */
  if (! apply_change_group ())
    {
#ifdef IFCVT_MODIFY_CANCEL
      /* Cancel any machine dependent changes.  */
      IFCVT_MODIFY_CANCEL (ce_info);
#endif
      return FALSE;
    }

#ifdef IFCVT_MODIFY_FINAL
  /* Do any machine dependent final modifications.  */
  IFCVT_MODIFY_FINAL (ce_info);
#endif

  /* Conversion succeeded.  */
  if (dump_file)
    fprintf (dump_file, "%d insn%s converted to conditional execution.\n",
	     n_insns, (n_insns == 1) ? " was" : "s were");

  /* Merge the blocks!  */
  merge_if_block (ce_info);
  cond_exec_changed_p = TRUE;
  return TRUE;

 fail:
#ifdef IFCVT_MODIFY_CANCEL
  /* Cancel any machine dependent changes.  */
  IFCVT_MODIFY_CANCEL (ce_info);
#endif

  cancel_changes (0);
  return FALSE;
}
587 
588 /* Used by noce_process_if_block to communicate with its subroutines.
589 
590    The subroutines know that A and B may be evaluated freely.  They
591    know that X is a register.  They should insert new instructions
592    before cond_earliest.  */
593 
struct noce_if_info
{
  /* The basic blocks that make up the IF-THEN-{ELSE-,}JOIN block.  */
  basic_block test_bb, then_bb, else_bb, join_bb;

  /* The jump that ends TEST_BB.  */
  rtx jump;

  /* The jump condition.  */
  rtx cond;

  /* New insns should be inserted before this one.  */
  rtx cond_earliest;

  /* Insns in the THEN and ELSE block.  There is always just this
     one insn in those blocks.  The insns are single_set insns.
     If there was no ELSE block, INSN_B is the last insn before
     COND_EARLIEST, or NULL_RTX.  In the former case, the insn
     operands are still valid, as if INSN_B was moved down below
     the jump.  */
  rtx insn_a, insn_b;

  /* The SET_SRC of INSN_A and INSN_B.  */
  rtx a, b;

  /* The SET_DEST of INSN_A.  */
  rtx x;

  /* True if this if block is not canonical.  In the canonical form of
     if blocks, the THEN_BB is the block reached via the fallthru edge
     from TEST_BB.  For the noce transformations, we allow the symmetric
     form as well.  */
  bool then_else_reversed;

  /* Estimated cost of the particular branch instruction.  */
  int branch_cost;
};
631 
632 static rtx noce_emit_store_flag (struct noce_if_info *, rtx, int, int);
633 static int noce_try_move (struct noce_if_info *);
634 static int noce_try_store_flag (struct noce_if_info *);
635 static int noce_try_addcc (struct noce_if_info *);
636 static int noce_try_store_flag_constants (struct noce_if_info *);
637 static int noce_try_store_flag_mask (struct noce_if_info *);
638 static rtx noce_emit_cmove (struct noce_if_info *, rtx, enum rtx_code, rtx,
639 			    rtx, rtx, rtx);
640 static int noce_try_cmove (struct noce_if_info *);
641 static int noce_try_cmove_arith (struct noce_if_info *);
642 static rtx noce_get_alt_condition (struct noce_if_info *, rtx, rtx *);
643 static int noce_try_minmax (struct noce_if_info *);
644 static int noce_try_abs (struct noce_if_info *);
645 static int noce_try_sign_mask (struct noce_if_info *);
646 
647 /* Helper function for noce_try_store_flag*.  */
648 
static rtx
noce_emit_store_flag (struct noce_if_info *if_info, rtx x, int reversep,
		      int normalize)
{
  rtx cond = if_info->cond;
  int cond_complex;
  enum rtx_code code;

  /* The condition is "complex" when either comparison operand is not a
     general operand (e.g. a non-trivial expression).  */
  cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
		  || ! general_operand (XEXP (cond, 1), VOIDmode));

  /* If earliest == jump, or when the condition is complex, try to
     build the store_flag insn directly.  */

  if (cond_complex)
    {
      /* Re-derive the condition from the jump itself, and track how many
	 times the sense of the comparison must be flipped.  */
      rtx set = pc_set (if_info->jump);
      cond = XEXP (SET_SRC (set), 0);
      if (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
	  && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump))
	reversep = !reversep;
      if (if_info->then_else_reversed)
	reversep = !reversep;
    }

  if (reversep)
    code = reversed_comparison_code (cond, if_info->jump);
  else
    code = GET_CODE (cond);

  if ((if_info->cond_earliest == if_info->jump || cond_complex)
      && (normalize == 0 || STORE_FLAG_VALUE == normalize))
    {
      rtx tmp;

      /* Build (set X (code op0 op1)) and see if the backend recognizes
	 it as-is.  */
      tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
			    XEXP (cond, 1));
      tmp = gen_rtx_SET (VOIDmode, x, tmp);

      start_sequence ();
      tmp = emit_insn (tmp);

      if (recog_memoized (tmp) >= 0)
	{
	  tmp = get_insns ();
	  end_sequence ();
	  emit_insn (tmp);

	  if_info->cond_earliest = if_info->jump;

	  return x;
	}

      end_sequence ();
    }

  /* Don't even try if the comparison operands or the mode of X are weird.  */
  if (cond_complex || !SCALAR_INT_MODE_P (GET_MODE (x)))
    return NULL_RTX;

  /* Fall back to the generic expander; the fifth argument flags an
     unsigned comparison.  */
  return emit_store_flag (x, code, XEXP (cond, 0),
			  XEXP (cond, 1), VOIDmode,
			  (code == LTU || code == LEU
			   || code == GEU || code == GTU), normalize);
}
714 
715 /* Emit instruction to move an rtx, possibly into STRICT_LOW_PART.
716    X is the destination/target and Y is the value to copy.  */
717 
static void
noce_emit_move_insn (rtx x, rtx y)
{
  enum machine_mode outmode;
  rtx outer, inner;
  int bitpos;

  if (GET_CODE (x) != STRICT_LOW_PART)
    {
      rtx seq, insn, target;
      optab ot;

      start_sequence ();
      /* Check that the SET_SRC is reasonable before calling emit_move_insn,
	 otherwise construct a suitable SET pattern ourselves.  */
      insn = (OBJECT_P (y) || CONSTANT_P (y) || GET_CODE (y) == SUBREG)
	     ? emit_move_insn (x, y)
	     : emit_insn (gen_rtx_SET (VOIDmode, x, y));
      seq = get_insns ();
      end_sequence ();

      /* If the simple move is not recognizable, try harder below.  */
      if (recog_memoized (insn) <= 0)
	{
	  if (GET_CODE (x) == ZERO_EXTRACT)
	    {
	      rtx op = XEXP (x, 0);
	      unsigned HOST_WIDE_INT size = INTVAL (XEXP (x, 1));
	      unsigned HOST_WIDE_INT start = INTVAL (XEXP (x, 2));

	      /* store_bit_field expects START to be relative to
		 BYTES_BIG_ENDIAN and adjusts this value for machines with
		 BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN.  In order to be able to
		 invoke store_bit_field again it is necessary to have the START
		 value from the first call.  */
	      if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
		{
		  if (MEM_P (op))
		    start = BITS_PER_UNIT - start - size;
		  else
		    {
		      gcc_assert (REG_P (op));
		      start = BITS_PER_WORD - start - size;
		    }
		}

	      gcc_assert (start < (MEM_P (op) ? BITS_PER_UNIT : BITS_PER_WORD));
	      store_bit_field (op, size, start, GET_MODE (x), y);
	      return;
	    }

	  /* Otherwise retry via the optab machinery, letting expand_*op
	     pick a recognizable instruction sequence.  */
	  switch (GET_RTX_CLASS (GET_CODE (y)))
	    {
	    case RTX_UNARY:
	      ot = code_to_optab[GET_CODE (y)];
	      if (ot)
		{
		  start_sequence ();
		  target = expand_unop (GET_MODE (y), ot, XEXP (y, 0), x, 0);
		  if (target != NULL_RTX)
		    {
		      if (target != x)
			emit_move_insn (x, target);
		      seq = get_insns ();
		    }
		  end_sequence ();
		}
	      break;

	    case RTX_BIN_ARITH:
	    case RTX_COMM_ARITH:
	      ot = code_to_optab[GET_CODE (y)];
	      if (ot)
		{
		  start_sequence ();
		  target = expand_binop (GET_MODE (y), ot,
					 XEXP (y, 0), XEXP (y, 1),
					 x, 0, OPTAB_DIRECT);
		  if (target != NULL_RTX)
		    {
		      if (target != x)
			  emit_move_insn (x, target);
		      seq = get_insns ();
		    }
		  end_sequence ();
		}
	      break;

	    default:
	      break;
	    }
	}

      /* Emit whichever sequence was captured last (the original move if
	 no optab path replaced it).  */
      emit_insn (seq);
      return;
    }

  /* STRICT_LOW_PART: store only the narrow low part of INNER via a
     bit-field store.  */
  outer = XEXP (x, 0);
  inner = XEXP (outer, 0);
  outmode = GET_MODE (outer);
  bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
  store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos, outmode, y);
}
820 
/* Return the sequence of instructions generated by if conversion.  This
   function calls end_sequence() to end the current stream, and ensures
   that all instructions are unshared, recognizable non-jump insns.
   On failure, this function returns a NULL_RTX.  */
825 
826 static rtx
827 end_ifcvt_sequence (struct noce_if_info *if_info)
828 {
829   rtx insn;
830   rtx seq = get_insns ();
831 
832   set_used_flags (if_info->x);
833   set_used_flags (if_info->cond);
834   unshare_all_rtl_in_chain (seq);
835   end_sequence ();
836 
837   /* Make sure that all of the instructions emitted are recognizable,
838      and that we haven't introduced a new jump instruction.
839      As an exercise for the reader, build a general mechanism that
840      allows proper placement of required clobbers.  */
841   for (insn = seq; insn; insn = NEXT_INSN (insn))
842     if (JUMP_P (insn)
843 	|| recog_memoized (insn) == -1)
844       return NULL_RTX;
845 
846   return seq;
847 }
848 
849 /* Convert "if (a != b) x = a; else x = b" into "x = a" and
850    "if (a == b) x = a; else x = b" into "x = b".  */
851 
852 static int
853 noce_try_move (struct noce_if_info *if_info)
854 {
855   rtx cond = if_info->cond;
856   enum rtx_code code = GET_CODE (cond);
857   rtx y, seq;
858 
859   if (code != NE && code != EQ)
860     return FALSE;
861 
862   /* This optimization isn't valid if either A or B could be a NaN
863      or a signed zero.  */
864   if (HONOR_NANS (GET_MODE (if_info->x))
865       || HONOR_SIGNED_ZEROS (GET_MODE (if_info->x)))
866     return FALSE;
867 
868   /* Check whether the operands of the comparison are A and in
869      either order.  */
870   if ((rtx_equal_p (if_info->a, XEXP (cond, 0))
871        && rtx_equal_p (if_info->b, XEXP (cond, 1)))
872       || (rtx_equal_p (if_info->a, XEXP (cond, 1))
873 	  && rtx_equal_p (if_info->b, XEXP (cond, 0))))
874     {
875       y = (code == EQ) ? if_info->a : if_info->b;
876 
877       /* Avoid generating the move if the source is the destination.  */
878       if (! rtx_equal_p (if_info->x, y))
879 	{
880 	  start_sequence ();
881 	  noce_emit_move_insn (if_info->x, y);
882 	  seq = end_ifcvt_sequence (if_info);
883 	  if (!seq)
884 	    return FALSE;
885 
886 	  emit_insn_before_setloc (seq, if_info->jump,
887 				   INSN_LOCATOR (if_info->insn_a));
888 	}
889       return TRUE;
890     }
891   return FALSE;
892 }
893 
894 /* Convert "if (test) x = 1; else x = 0".
895 
896    Only try 0 and STORE_FLAG_VALUE here.  Other combinations will be
897    tried in noce_try_store_flag_constants after noce_try_cmove has had
898    a go at the conversion.  */
899 
900 static int
901 noce_try_store_flag (struct noce_if_info *if_info)
902 {
903   int reversep;
904   rtx target, seq;
905 
906   if (CONST_INT_P (if_info->b)
907       && INTVAL (if_info->b) == STORE_FLAG_VALUE
908       && if_info->a == const0_rtx)
909     reversep = 0;
910   else if (if_info->b == const0_rtx
911 	   && CONST_INT_P (if_info->a)
912 	   && INTVAL (if_info->a) == STORE_FLAG_VALUE
913 	   && (reversed_comparison_code (if_info->cond, if_info->jump)
914 	       != UNKNOWN))
915     reversep = 1;
916   else
917     return FALSE;
918 
919   start_sequence ();
920 
921   target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
922   if (target)
923     {
924       if (target != if_info->x)
925 	noce_emit_move_insn (if_info->x, target);
926 
927       seq = end_ifcvt_sequence (if_info);
928       if (! seq)
929 	return FALSE;
930 
931       emit_insn_before_setloc (seq, if_info->jump,
932 			       INSN_LOCATOR (if_info->insn_a));
933       return TRUE;
934     }
935   else
936     {
937       end_sequence ();
938       return FALSE;
939     }
940 }
941 
942 /* Convert "if (test) x = a; else x = b", for A and B constant.  */
943 
static int
noce_try_store_flag_constants (struct noce_if_info *if_info)
{
  rtx target, seq;
  int reversep;
  HOST_WIDE_INT itrue, ifalse, diff, tmp;
  int normalize, can_reverse;
  enum machine_mode mode;

  if (CONST_INT_P (if_info->a)
      && CONST_INT_P (if_info->b))
    {
      mode = GET_MODE (if_info->x);
      ifalse = INTVAL (if_info->a);
      itrue = INTVAL (if_info->b);

      /* Make sure we can represent the difference between the two values.  */
      if ((itrue - ifalse > 0)
	  != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
	return FALSE;

      diff = trunc_int_for_mode (itrue - ifalse, mode);

      can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
		     != UNKNOWN);

      /* Pick a normalization for the store-flag result (0, 1 or -1) and
	 whether the condition must be reversed, depending on which of the
	 special-case transforms below applies and on the branch cost.  */
      reversep = 0;
      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
	normalize = 0;
      else if (ifalse == 0 && exact_log2 (itrue) >= 0
	       && (STORE_FLAG_VALUE == 1
		   || if_info->branch_cost >= 2))
	normalize = 1;
      else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
	       && (STORE_FLAG_VALUE == 1 || if_info->branch_cost >= 2))
	normalize = 1, reversep = 1;
      else if (itrue == -1
	       && (STORE_FLAG_VALUE == -1
		   || if_info->branch_cost >= 2))
	normalize = -1;
      else if (ifalse == -1 && can_reverse
	       && (STORE_FLAG_VALUE == -1 || if_info->branch_cost >= 2))
	normalize = -1, reversep = 1;
      else if ((if_info->branch_cost >= 2 && STORE_FLAG_VALUE == -1)
	       || if_info->branch_cost >= 3)
	normalize = -1;
      else
	return FALSE;

      /* When reversing, swap the arms and negate the difference.  */
      if (reversep)
	{
	  tmp = itrue; itrue = ifalse; ifalse = tmp;
	  diff = trunc_int_for_mode (-diff, mode);
	}

      start_sequence ();
      target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
      if (! target)
	{
	  end_sequence ();
	  return FALSE;
	}

      /* if (test) x = 3; else x = 4;
	 =>   x = 3 + (test == 0);  */
      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
	{
	  target = expand_simple_binop (mode,
					(diff == STORE_FLAG_VALUE
					 ? PLUS : MINUS),
					GEN_INT (ifalse), target, if_info->x, 0,
					OPTAB_WIDEN);
	}

      /* if (test) x = 8; else x = 0;
	 =>   x = (test != 0) << 3;  */
      else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
	{
	  target = expand_simple_binop (mode, ASHIFT,
					target, GEN_INT (tmp), if_info->x, 0,
					OPTAB_WIDEN);
	}

      /* if (test) x = -1; else x = b;
	 =>   x = -(test != 0) | b;  */
      else if (itrue == -1)
	{
	  target = expand_simple_binop (mode, IOR,
					target, GEN_INT (ifalse), if_info->x, 0,
					OPTAB_WIDEN);
	}

      /* if (test) x = a; else x = b;
	 =>   x = (-(test != 0) & (b - a)) + a;  */
      else
	{
	  target = expand_simple_binop (mode, AND,
					target, GEN_INT (diff), if_info->x, 0,
					OPTAB_WIDEN);
	  if (target)
	    target = expand_simple_binop (mode, PLUS,
					  target, GEN_INT (ifalse),
					  if_info->x, 0, OPTAB_WIDEN);
	}

      if (! target)
	{
	  end_sequence ();
	  return FALSE;
	}

      if (target != if_info->x)
	noce_emit_move_insn (if_info->x, target);

      seq = end_ifcvt_sequence (if_info);
      if (!seq)
	return FALSE;

      emit_insn_before_setloc (seq, if_info->jump,
			       INSN_LOCATOR (if_info->insn_a));
      return TRUE;
    }

  return FALSE;
}
1069 
1070 /* Convert "if (test) foo++" into "foo += (test != 0)", and
1071    similarly for "foo--".  */
1072 
static int
noce_try_addcc (struct noce_if_info *if_info)
{
  rtx target, seq;
  int subtract, normalize;

  /* We need "x = x + C" (B being the common operand X), and the
     condition must be reversible, since both strategies below emit
     code keyed to the reversed test.  */
  if (GET_CODE (if_info->a) == PLUS
      && rtx_equal_p (XEXP (if_info->a, 0), if_info->b)
      && (reversed_comparison_code (if_info->cond, if_info->jump)
	  != UNKNOWN))
    {
      rtx cond = if_info->cond;
      enum rtx_code code = reversed_comparison_code (cond, if_info->jump);

      /* First try to use addcc pattern.  */
      if (general_operand (XEXP (cond, 0), VOIDmode)
	  && general_operand (XEXP (cond, 1), VOIDmode))
	{
	  start_sequence ();
	  target = emit_conditional_add (if_info->x, code,
					 XEXP (cond, 0),
					 XEXP (cond, 1),
					 VOIDmode,
					 if_info->b,
					 XEXP (if_info->a, 1),
					 GET_MODE (if_info->x),
					 (code == LTU || code == GEU
					  || code == LEU || code == GTU));
	  if (target)
	    {
	      if (target != if_info->x)
		noce_emit_move_insn (if_info->x, target);

	      /* end_ifcvt_sequence may reject the sequence (returns
		 NULL); in that case nothing has been emitted.  */
	      seq = end_ifcvt_sequence (if_info);
	      if (!seq)
		return FALSE;

	      emit_insn_before_setloc (seq, if_info->jump,
				       INSN_LOCATOR (if_info->insn_a));
	      return TRUE;
	    }
	  end_sequence ();
	}

      /* If that fails, construct conditional increment or decrement using
	 setcc.  */
      if (if_info->branch_cost >= 2
	  && (XEXP (if_info->a, 1) == const1_rtx
	      || XEXP (if_info->a, 1) == constm1_rtx))
        {
	  start_sequence ();
	  /* Decide how to combine the store-flag value with X so the net
	     adjustment equals the +/-1 addend: add it, subtract it, or
	     have noce_emit_store_flag normalize it to the addend.  */
	  if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
	    subtract = 0, normalize = 0;
	  else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
	    subtract = 1, normalize = 0;
	  else
	    subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));


	  target = noce_emit_store_flag (if_info,
					 gen_reg_rtx (GET_MODE (if_info->x)),
					 1, normalize);

	  if (target)
	    target = expand_simple_binop (GET_MODE (if_info->x),
					  subtract ? MINUS : PLUS,
					  if_info->b, target, if_info->x,
					  0, OPTAB_WIDEN);
	  if (target)
	    {
	      if (target != if_info->x)
		noce_emit_move_insn (if_info->x, target);

	      seq = end_ifcvt_sequence (if_info);
	      if (!seq)
		return FALSE;

	      emit_insn_before_setloc (seq, if_info->jump,
				       INSN_LOCATOR (if_info->insn_a));
	      return TRUE;
	    }
	  end_sequence ();
	}
    }

  return FALSE;
}
1160 
1161 /* Convert "if (test) x = 0;" to "x &= -(test == 0);"  */
1162 
1163 static int
1164 noce_try_store_flag_mask (struct noce_if_info *if_info)
1165 {
1166   rtx target, seq;
1167   int reversep;
1168 
1169   reversep = 0;
1170   if ((if_info->branch_cost >= 2
1171        || STORE_FLAG_VALUE == -1)
1172       && ((if_info->a == const0_rtx
1173 	   && rtx_equal_p (if_info->b, if_info->x))
1174 	  || ((reversep = (reversed_comparison_code (if_info->cond,
1175 						     if_info->jump)
1176 			   != UNKNOWN))
1177 	      && if_info->b == const0_rtx
1178 	      && rtx_equal_p (if_info->a, if_info->x))))
1179     {
1180       start_sequence ();
1181       target = noce_emit_store_flag (if_info,
1182 				     gen_reg_rtx (GET_MODE (if_info->x)),
1183 				     reversep, -1);
1184       if (target)
1185         target = expand_simple_binop (GET_MODE (if_info->x), AND,
1186 				      if_info->x,
1187 				      target, if_info->x, 0,
1188 				      OPTAB_WIDEN);
1189 
1190       if (target)
1191 	{
1192 	  if (target != if_info->x)
1193 	    noce_emit_move_insn (if_info->x, target);
1194 
1195 	  seq = end_ifcvt_sequence (if_info);
1196 	  if (!seq)
1197 	    return FALSE;
1198 
1199 	  emit_insn_before_setloc (seq, if_info->jump,
1200 				   INSN_LOCATOR (if_info->insn_a));
1201 	  return TRUE;
1202 	}
1203 
1204       end_sequence ();
1205     }
1206 
1207   return FALSE;
1208 }
1209 
1210 /* Helper function for noce_try_cmove and noce_try_cmove_arith.  */
1211 
1212 static rtx
1213 noce_emit_cmove (struct noce_if_info *if_info, rtx x, enum rtx_code code,
1214 		 rtx cmp_a, rtx cmp_b, rtx vfalse, rtx vtrue)
1215 {
1216   /* If earliest == jump, try to build the cmove insn directly.
1217      This is helpful when combine has created some complex condition
1218      (like for alpha's cmovlbs) that we can't hope to regenerate
1219      through the normal interface.  */
1220 
1221   if (if_info->cond_earliest == if_info->jump)
1222     {
1223       rtx tmp;
1224 
1225       tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
1226       tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
1227       tmp = gen_rtx_SET (VOIDmode, x, tmp);
1228 
1229       start_sequence ();
1230       tmp = emit_insn (tmp);
1231 
1232       if (recog_memoized (tmp) >= 0)
1233 	{
1234 	  tmp = get_insns ();
1235 	  end_sequence ();
1236 	  emit_insn (tmp);
1237 
1238 	  return x;
1239 	}
1240 
1241       end_sequence ();
1242     }
1243 
1244   /* Don't even try if the comparison operands are weird.  */
1245   if (! general_operand (cmp_a, GET_MODE (cmp_a))
1246       || ! general_operand (cmp_b, GET_MODE (cmp_b)))
1247     return NULL_RTX;
1248 
1249 #if HAVE_conditional_move
1250   return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
1251 				vtrue, vfalse, GET_MODE (x),
1252 			        (code == LTU || code == GEU
1253 				 || code == LEU || code == GTU));
1254 #else
1255   /* We'll never get here, as noce_process_if_block doesn't call the
1256      functions involved.  Ifdef code, however, should be discouraged
1257      because it leads to typos in the code not selected.  However,
1258      emit_conditional_move won't exist either.  */
1259   return NULL_RTX;
1260 #endif
1261 }
1262 
1263 /* Try only simple constants and registers here.  More complex cases
1264    are handled in noce_try_cmove_arith after noce_try_store_flag_arith
1265    has had a go at it.  */
1266 
1267 static int
1268 noce_try_cmove (struct noce_if_info *if_info)
1269 {
1270   enum rtx_code code;
1271   rtx target, seq;
1272 
1273   if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
1274       && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
1275     {
1276       start_sequence ();
1277 
1278       code = GET_CODE (if_info->cond);
1279       target = noce_emit_cmove (if_info, if_info->x, code,
1280 				XEXP (if_info->cond, 0),
1281 				XEXP (if_info->cond, 1),
1282 				if_info->a, if_info->b);
1283 
1284       if (target)
1285 	{
1286 	  if (target != if_info->x)
1287 	    noce_emit_move_insn (if_info->x, target);
1288 
1289 	  seq = end_ifcvt_sequence (if_info);
1290 	  if (!seq)
1291 	    return FALSE;
1292 
1293 	  emit_insn_before_setloc (seq, if_info->jump,
1294 				   INSN_LOCATOR (if_info->insn_a));
1295 	  return TRUE;
1296 	}
1297       else
1298 	{
1299 	  end_sequence ();
1300 	  return FALSE;
1301 	}
1302     }
1303 
1304   return FALSE;
1305 }
1306 
1307 /* Try more complex cases involving conditional_move.  */
1308 
1309 static int
1310 noce_try_cmove_arith (struct noce_if_info *if_info)
1311 {
1312   rtx a = if_info->a;
1313   rtx b = if_info->b;
1314   rtx x = if_info->x;
1315   rtx orig_a, orig_b;
1316   rtx insn_a, insn_b;
1317   rtx tmp, target;
1318   int is_mem = 0;
1319   int insn_cost;
1320   enum rtx_code code;
1321 
1322   /* A conditional move from two memory sources is equivalent to a
1323      conditional on their addresses followed by a load.  Don't do this
1324      early because it'll screw alias analysis.  Note that we've
1325      already checked for no side effects.  */
1326   /* ??? FIXME: Magic number 5.  */
1327   if (cse_not_expected
1328       && MEM_P (a) && MEM_P (b)
1329       && MEM_ADDR_SPACE (a) == MEM_ADDR_SPACE (b)
1330       && if_info->branch_cost >= 5)
1331     {
1332       enum machine_mode address_mode
1333 	= targetm.addr_space.address_mode (MEM_ADDR_SPACE (a));
1334 
1335       a = XEXP (a, 0);
1336       b = XEXP (b, 0);
1337       x = gen_reg_rtx (address_mode);
1338       is_mem = 1;
1339     }
1340 
1341   /* ??? We could handle this if we knew that a load from A or B could
1342      not fault.  This is also true if we've already loaded
1343      from the address along the path from ENTRY.  */
1344   else if (may_trap_p (a) || may_trap_p (b))
1345     return FALSE;
1346 
1347   /* if (test) x = a + b; else x = c - d;
1348      => y = a + b;
1349         x = c - d;
1350 	if (test)
1351 	  x = y;
1352   */
1353 
1354   code = GET_CODE (if_info->cond);
1355   insn_a = if_info->insn_a;
1356   insn_b = if_info->insn_b;
1357 
1358   /* Total insn_rtx_cost should be smaller than branch cost.  Exit
1359      if insn_rtx_cost can't be estimated.  */
1360   if (insn_a)
1361     {
1362       insn_cost = insn_rtx_cost (PATTERN (insn_a),
1363       				 optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn_a)));
1364       if (insn_cost == 0 || insn_cost > COSTS_N_INSNS (if_info->branch_cost))
1365 	return FALSE;
1366     }
1367   else
1368     insn_cost = 0;
1369 
1370   if (insn_b)
1371     {
1372       insn_cost += insn_rtx_cost (PATTERN (insn_b),
1373       				 optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn_b)));
1374       if (insn_cost == 0 || insn_cost > COSTS_N_INSNS (if_info->branch_cost))
1375         return FALSE;
1376     }
1377 
1378   /* Possibly rearrange operands to make things come out more natural.  */
1379   if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
1380     {
1381       int reversep = 0;
1382       if (rtx_equal_p (b, x))
1383 	reversep = 1;
1384       else if (general_operand (b, GET_MODE (b)))
1385 	reversep = 1;
1386 
1387       if (reversep)
1388 	{
1389 	  code = reversed_comparison_code (if_info->cond, if_info->jump);
1390 	  tmp = a, a = b, b = tmp;
1391 	  tmp = insn_a, insn_a = insn_b, insn_b = tmp;
1392 	}
1393     }
1394 
1395   start_sequence ();
1396 
1397   orig_a = a;
1398   orig_b = b;
1399 
1400   /* If either operand is complex, load it into a register first.
1401      The best way to do this is to copy the original insn.  In this
1402      way we preserve any clobbers etc that the insn may have had.
1403      This is of course not possible in the IS_MEM case.  */
1404   if (! general_operand (a, GET_MODE (a)))
1405     {
1406       rtx set;
1407 
1408       if (is_mem)
1409 	{
1410 	  tmp = gen_reg_rtx (GET_MODE (a));
1411 	  tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
1412 	}
1413       else if (! insn_a)
1414 	goto end_seq_and_fail;
1415       else
1416 	{
1417 	  a = gen_reg_rtx (GET_MODE (a));
1418 	  tmp = copy_rtx (insn_a);
1419 	  set = single_set (tmp);
1420 	  SET_DEST (set) = a;
1421 	  tmp = emit_insn (PATTERN (tmp));
1422 	}
1423       if (recog_memoized (tmp) < 0)
1424 	goto end_seq_and_fail;
1425     }
1426   if (! general_operand (b, GET_MODE (b)))
1427     {
1428       rtx set, last;
1429 
1430       if (is_mem)
1431 	{
1432           tmp = gen_reg_rtx (GET_MODE (b));
1433 	  tmp = gen_rtx_SET (VOIDmode, tmp, b);
1434 	}
1435       else if (! insn_b)
1436 	goto end_seq_and_fail;
1437       else
1438 	{
1439           b = gen_reg_rtx (GET_MODE (b));
1440 	  tmp = copy_rtx (insn_b);
1441 	  set = single_set (tmp);
1442 	  SET_DEST (set) = b;
1443 	  tmp = PATTERN (tmp);
1444 	}
1445 
1446       /* If insn to set up A clobbers any registers B depends on, try to
1447 	 swap insn that sets up A with the one that sets up B.  If even
1448 	 that doesn't help, punt.  */
1449       last = get_last_insn ();
1450       if (last && modified_in_p (orig_b, last))
1451 	{
1452 	  tmp = emit_insn_before (tmp, get_insns ());
1453 	  if (modified_in_p (orig_a, tmp))
1454 	    goto end_seq_and_fail;
1455 	}
1456       else
1457 	tmp = emit_insn (tmp);
1458 
1459       if (recog_memoized (tmp) < 0)
1460 	goto end_seq_and_fail;
1461     }
1462 
1463   target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
1464 			    XEXP (if_info->cond, 1), a, b);
1465 
1466   if (! target)
1467     goto end_seq_and_fail;
1468 
1469   /* If we're handling a memory for above, emit the load now.  */
1470   if (is_mem)
1471     {
1472       tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);
1473 
1474       /* Copy over flags as appropriate.  */
1475       if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
1476 	MEM_VOLATILE_P (tmp) = 1;
1477       if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
1478 	MEM_IN_STRUCT_P (tmp) = 1;
1479       if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
1480 	MEM_SCALAR_P (tmp) = 1;
1481       if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
1482 	set_mem_alias_set (tmp, MEM_ALIAS_SET (if_info->a));
1483       set_mem_align (tmp,
1484 		     MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));
1485 
1486       gcc_assert (MEM_ADDR_SPACE (if_info->a) == MEM_ADDR_SPACE (if_info->b));
1487       set_mem_addr_space (tmp, MEM_ADDR_SPACE (if_info->a));
1488 
1489       noce_emit_move_insn (if_info->x, tmp);
1490     }
1491   else if (target != x)
1492     noce_emit_move_insn (x, target);
1493 
1494   tmp = end_ifcvt_sequence (if_info);
1495   if (!tmp)
1496     return FALSE;
1497 
1498   emit_insn_before_setloc (tmp, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1499   return TRUE;
1500 
1501  end_seq_and_fail:
1502   end_sequence ();
1503   return FALSE;
1504 }
1505 
1506 /* For most cases, the simplified condition we found is the best
1507    choice, but this is not the case for the min/max/abs transforms.
1508    For these we wish to know that it is A or B in the condition.  */
1509 
static rtx
noce_get_alt_condition (struct noce_if_info *if_info, rtx target,
			rtx *earliest)
{
  rtx cond, set, insn;
  int reverse;

  /* If target is already mentioned in the known condition, return it.  */
  if (reg_mentioned_p (target, if_info->cond))
    {
      *earliest = if_info->cond_earliest;
      return if_info->cond;
    }

  /* Pull the raw comparison out of the jump.  REVERSE records whether
     the jump's taken edge is its "else" arm (label in operand 2),
     further flipped if the caller's then/else blocks are swapped.  */
  set = pc_set (if_info->jump);
  cond = XEXP (SET_SRC (set), 0);
  reverse
    = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
      && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);
  if (if_info->then_else_reversed)
    reverse = !reverse;

  /* If we're looking for a constant, try to make the conditional
     have that constant in it.  There are two reasons why it may
     not have the constant we want:

     1. GCC may have needed to put the constant in a register, because
        the target can't compare directly against that constant.  For
        this case, we look for a SET immediately before the comparison
        that puts a constant in that register.

     2. GCC may have canonicalized the conditional, for example
	replacing "if x < 4" with "if x <= 3".  We can undo that (or
	make equivalent types of changes) to get the constants we need
	if they're off by one in the right direction.  */

  if (CONST_INT_P (target))
    {
      enum rtx_code code = GET_CODE (if_info->cond);
      rtx op_a = XEXP (if_info->cond, 0);
      rtx op_b = XEXP (if_info->cond, 1);
      rtx prev_insn;

      /* First, look to see if we put a constant in a register.  */
      prev_insn = prev_nonnote_insn (if_info->cond_earliest);
      if (prev_insn
	  && BLOCK_FOR_INSN (prev_insn)
	     == BLOCK_FOR_INSN (if_info->cond_earliest)
	  && INSN_P (prev_insn)
	  && GET_CODE (PATTERN (prev_insn)) == SET)
	{
	  /* Prefer a REG_EQUAL/REG_EQUIV note over the raw SET source.  */
	  rtx src = find_reg_equal_equiv_note (prev_insn);
	  if (!src)
	    src = SET_SRC (PATTERN (prev_insn));
	  if (CONST_INT_P (src))
	    {
	      if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
		op_a = src;
	      else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
		op_b = src;

	      /* Keep the constant as the second operand; swapping the
		 operands requires swapping the comparison code too.  */
	      if (CONST_INT_P (op_a))
		{
		  rtx tmp = op_a;
		  op_a = op_b;
		  op_b = tmp;
		  code = swap_condition (code);
		}
	    }
	}

      /* Now, look to see if we can get the right constant by
	 adjusting the conditional.  */
      if (CONST_INT_P (op_b))
	{
	  HOST_WIDE_INT desired_val = INTVAL (target);
	  HOST_WIDE_INT actual_val = INTVAL (op_b);

	  /* Undo off-by-one canonicalizations: e.g. "x <= 3" back into
	     "x < 4" when 4 is the constant we want to see.  */
	  switch (code)
	    {
	    case LT:
	      if (actual_val == desired_val + 1)
		{
		  code = LE;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    case LE:
	      if (actual_val == desired_val - 1)
		{
		  code = LT;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    case GT:
	      if (actual_val == desired_val - 1)
		{
		  code = GE;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    case GE:
	      if (actual_val == desired_val + 1)
		{
		  code = GT;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    default:
	      break;
	    }
	}

      /* If we made any changes, generate a new conditional that is
	 equivalent to what we started with, but has the right
	 constants in it.  */
      if (code != GET_CODE (if_info->cond)
	  || op_a != XEXP (if_info->cond, 0)
	  || op_b != XEXP (if_info->cond, 1))
	{
	  cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
	  *earliest = if_info->cond_earliest;
	  return cond;
	}
    }

  /* Fall back: ask canonicalize_condition for a condition that
     mentions TARGET, updating *EARLIEST to where it becomes valid.  */
  cond = canonicalize_condition (if_info->jump, cond, reverse,
				 earliest, target, false, true);
  if (! cond || ! reg_mentioned_p (target, cond))
    return NULL;

  /* We almost certainly searched back to a different place.
     Need to re-verify correct lifetimes.  */

  /* X may not be mentioned in the range (cond_earliest, jump].  */
  for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
    if (INSN_P (insn) && reg_overlap_mentioned_p (if_info->x, PATTERN (insn)))
      return NULL;

  /* A and B may not be modified in the range [cond_earliest, jump).  */
  for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (modified_in_p (if_info->a, insn)
	    || modified_in_p (if_info->b, insn)))
      return NULL;

  return cond;
}
1658 
1659 /* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc.  */
1660 
1661 static int
1662 noce_try_minmax (struct noce_if_info *if_info)
1663 {
1664   rtx cond, earliest, target, seq;
1665   enum rtx_code code, op;
1666   int unsignedp;
1667 
1668   /* ??? Reject modes with NaNs or signed zeros since we don't know how
1669      they will be resolved with an SMIN/SMAX.  It wouldn't be too hard
1670      to get the target to tell us...  */
1671   if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x))
1672       || HONOR_NANS (GET_MODE (if_info->x)))
1673     return FALSE;
1674 
1675   cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
1676   if (!cond)
1677     return FALSE;
1678 
1679   /* Verify the condition is of the form we expect, and canonicalize
1680      the comparison code.  */
1681   code = GET_CODE (cond);
1682   if (rtx_equal_p (XEXP (cond, 0), if_info->a))
1683     {
1684       if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
1685 	return FALSE;
1686     }
1687   else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
1688     {
1689       if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
1690 	return FALSE;
1691       code = swap_condition (code);
1692     }
1693   else
1694     return FALSE;
1695 
1696   /* Determine what sort of operation this is.  Note that the code is for
1697      a taken branch, so the code->operation mapping appears backwards.  */
1698   switch (code)
1699     {
1700     case LT:
1701     case LE:
1702     case UNLT:
1703     case UNLE:
1704       op = SMAX;
1705       unsignedp = 0;
1706       break;
1707     case GT:
1708     case GE:
1709     case UNGT:
1710     case UNGE:
1711       op = SMIN;
1712       unsignedp = 0;
1713       break;
1714     case LTU:
1715     case LEU:
1716       op = UMAX;
1717       unsignedp = 1;
1718       break;
1719     case GTU:
1720     case GEU:
1721       op = UMIN;
1722       unsignedp = 1;
1723       break;
1724     default:
1725       return FALSE;
1726     }
1727 
1728   start_sequence ();
1729 
1730   target = expand_simple_binop (GET_MODE (if_info->x), op,
1731 				if_info->a, if_info->b,
1732 				if_info->x, unsignedp, OPTAB_WIDEN);
1733   if (! target)
1734     {
1735       end_sequence ();
1736       return FALSE;
1737     }
1738   if (target != if_info->x)
1739     noce_emit_move_insn (if_info->x, target);
1740 
1741   seq = end_ifcvt_sequence (if_info);
1742   if (!seq)
1743     return FALSE;
1744 
1745   emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1746   if_info->cond = cond;
1747   if_info->cond_earliest = earliest;
1748 
1749   return TRUE;
1750 }
1751 
1752 /* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);",
1753    "if (a < 0) x = ~a; else x = a;" to "x = one_cmpl_abs(a);",
1754    etc.  */
1755 
static int
noce_try_abs (struct noce_if_info *if_info)
{
  rtx cond, earliest, target, seq, a, b, c;
  int negate;
  bool one_cmpl = false;

  /* Reject modes with signed zeros.  */
  if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x)))
    return FALSE;

  /* Recognize A and B as constituting an ABS or NABS.  The canonical
     form is a branch around the negation, taken when the object is the
     first operand of a comparison against 0 that evaluates to true.  */
  a = if_info->a;
  b = if_info->b;
  if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
    negate = 0;
  else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
    {
      /* Swap so that A holds the (NEG/NOT ...) arm, remembering that
	 the overall result must be negated.  */
      c = a; a = b; b = c;
      negate = 1;
    }
  else if (GET_CODE (a) == NOT && rtx_equal_p (XEXP (a, 0), b))
    {
      negate = 0;
      one_cmpl = true;
    }
  else if (GET_CODE (b) == NOT && rtx_equal_p (XEXP (b, 0), a))
    {
      c = a; a = b; b = c;
      negate = 1;
      one_cmpl = true;
    }
  else
    return FALSE;

  cond = noce_get_alt_condition (if_info, b, &earliest);
  if (!cond)
    return FALSE;

  /* Verify the condition is of the form we expect.  */
  if (rtx_equal_p (XEXP (cond, 0), b))
    c = XEXP (cond, 1);
  else if (rtx_equal_p (XEXP (cond, 1), b))
    {
      c = XEXP (cond, 0);
      negate = !negate;
    }
  else
    return FALSE;

  /* Verify that C is zero.  Search one step backward for a
     REG_EQUAL note or a simple source if necessary.  */
  if (REG_P (c))
    {
      rtx set, insn = prev_nonnote_insn (earliest);
      if (insn
	  && BLOCK_FOR_INSN (insn) == BLOCK_FOR_INSN (earliest)
	  && (set = single_set (insn))
	  && rtx_equal_p (SET_DEST (set), c))
	{
	  rtx note = find_reg_equal_equiv_note (insn);
	  if (note)
	    c = XEXP (note, 0);
	  else
	    c = SET_SRC (set);
	}
      else
	return FALSE;
    }
  /* The zero may live in the constant pool; look through such a load.  */
  if (MEM_P (c)
      && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
    c = get_pool_constant (XEXP (c, 0));

  /* Work around funny ideas get_condition has wrt canonicalization.
     Note that these rtx constants are known to be CONST_INT, and
     therefore imply integer comparisons.  */
  if (c == constm1_rtx && GET_CODE (cond) == GT)
    ;
  else if (c == const1_rtx && GET_CODE (cond) == LT)
    ;
  else if (c != CONST0_RTX (GET_MODE (b)))
    return FALSE;

  /* Determine what sort of operation this is.  */
  switch (GET_CODE (cond))
    {
    case LT:
    case LE:
    case UNLT:
    case UNLE:
      negate = !negate;
      break;
    case GT:
    case GE:
    case UNGT:
    case UNGE:
      break;
    default:
      return FALSE;
    }

  start_sequence ();
  if (one_cmpl)
    target = expand_one_cmpl_abs_nojump (GET_MODE (if_info->x), b,
                                         if_info->x);
  else
    target = expand_abs_nojump (GET_MODE (if_info->x), b, if_info->x, 1);

  /* ??? It's a quandary whether cmove would be better here, especially
     for integers.  Perhaps combine will clean things up.  */
  if (target && negate)
    {
      if (one_cmpl)
        target = expand_simple_unop (GET_MODE (target), NOT, target,
                                     if_info->x, 0);
      else
        target = expand_simple_unop (GET_MODE (target), NEG, target,
                                     if_info->x, 0);
    }

  if (! target)
    {
      end_sequence ();
      return FALSE;
    }

  if (target != if_info->x)
    noce_emit_move_insn (if_info->x, target);

  seq = end_ifcvt_sequence (if_info);
  if (!seq)
    return FALSE;

  emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
  if_info->cond = cond;
  if_info->cond_earliest = earliest;

  return TRUE;
}
1898 
1899 /* Convert "if (m < 0) x = b; else x = 0;" to "x = (m >> C) & b;".  */
1900 
1901 static int
1902 noce_try_sign_mask (struct noce_if_info *if_info)
1903 {
1904   rtx cond, t, m, c, seq;
1905   enum machine_mode mode;
1906   enum rtx_code code;
1907   bool t_unconditional;
1908 
1909   cond = if_info->cond;
1910   code = GET_CODE (cond);
1911   m = XEXP (cond, 0);
1912   c = XEXP (cond, 1);
1913 
1914   t = NULL_RTX;
1915   if (if_info->a == const0_rtx)
1916     {
1917       if ((code == LT && c == const0_rtx)
1918 	  || (code == LE && c == constm1_rtx))
1919 	t = if_info->b;
1920     }
1921   else if (if_info->b == const0_rtx)
1922     {
1923       if ((code == GE && c == const0_rtx)
1924 	  || (code == GT && c == constm1_rtx))
1925 	t = if_info->a;
1926     }
1927 
1928   if (! t || side_effects_p (t))
1929     return FALSE;
1930 
1931   /* We currently don't handle different modes.  */
1932   mode = GET_MODE (t);
1933   if (GET_MODE (m) != mode)
1934     return FALSE;
1935 
1936   /* This is only profitable if T is unconditionally executed/evaluated in the
1937      original insn sequence or T is cheap.  The former happens if B is the
1938      non-zero (T) value and if INSN_B was taken from TEST_BB, or there was no
1939      INSN_B which can happen for e.g. conditional stores to memory.  For the
1940      cost computation use the block TEST_BB where the evaluation will end up
1941      after the transformation.  */
1942   t_unconditional =
1943     (t == if_info->b
1944      && (if_info->insn_b == NULL_RTX
1945 	 || BLOCK_FOR_INSN (if_info->insn_b) == if_info->test_bb));
1946   if (!(t_unconditional
1947 	|| (rtx_cost (t, SET, optimize_bb_for_speed_p (if_info->test_bb))
1948 	    < COSTS_N_INSNS (2))))
1949     return FALSE;
1950 
1951   start_sequence ();
1952   /* Use emit_store_flag to generate "m < 0 ? -1 : 0" instead of expanding
1953      "(signed) m >> 31" directly.  This benefits targets with specialized
1954      insns to obtain the signmask, but still uses ashr_optab otherwise.  */
1955   m = emit_store_flag (gen_reg_rtx (mode), LT, m, const0_rtx, mode, 0, -1);
1956   t = m ? expand_binop (mode, and_optab, m, t, NULL_RTX, 0, OPTAB_DIRECT)
1957 	: NULL_RTX;
1958 
1959   if (!t)
1960     {
1961       end_sequence ();
1962       return FALSE;
1963     }
1964 
1965   noce_emit_move_insn (if_info->x, t);
1966 
1967   seq = end_ifcvt_sequence (if_info);
1968   if (!seq)
1969     return FALSE;
1970 
1971   emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1972   return TRUE;
1973 }
1974 
1975 
1976 /* Optimize away "if (x & C) x |= C" and similar bit manipulation
1977    transformations.  */
1978 
static int
noce_try_bitop (struct noce_if_info *if_info)
{
  rtx cond, x, a, result, seq;
  enum machine_mode mode;
  enum rtx_code code;
  int bitnum;

  x = if_info->x;
  cond = if_info->cond;
  code = GET_CODE (cond);

  /* Check for no else condition.  */
  if (! rtx_equal_p (x, if_info->b))
    return FALSE;

  /* Check for a suitable condition.  */
  if (code != NE && code != EQ)
    return FALSE;
  if (XEXP (cond, 1) != const0_rtx)
    return FALSE;
  cond = XEXP (cond, 0);

  /* ??? We could also handle AND here.  */
  if (GET_CODE (cond) == ZERO_EXTRACT)
    {
      /* Require a single-bit extract of X itself at a constant
	 position: the test "(x >> bitnum) & 1".  */
      if (XEXP (cond, 1) != const1_rtx
	  || !CONST_INT_P (XEXP (cond, 2))
	  || ! rtx_equal_p (x, XEXP (cond, 0)))
	return FALSE;
      bitnum = INTVAL (XEXP (cond, 2));
      mode = GET_MODE (x);
      /* ZERO_EXTRACT bit positions count from the opposite end on
	 BITS_BIG_ENDIAN targets.  */
      if (BITS_BIG_ENDIAN)
	bitnum = GET_MODE_BITSIZE (mode) - 1 - bitnum;
      /* The shifts below are done in HOST_WIDE_INT arithmetic, so the
	 bit must fit in one.  */
      if (bitnum < 0 || bitnum >= HOST_BITS_PER_WIDE_INT)
	return FALSE;
    }
  else
    return FALSE;

  a = if_info->a;
  if (GET_CODE (a) == IOR || GET_CODE (a) == XOR)
    {
      /* Check for "if (X & C) x = x op C".  */
      if (! rtx_equal_p (x, XEXP (a, 0))
          || !CONST_INT_P (XEXP (a, 1))
	  || (INTVAL (XEXP (a, 1)) & GET_MODE_MASK (mode))
	     != (unsigned HOST_WIDE_INT) 1 << bitnum)
        return FALSE;

      /* if ((x & C) == 0) x |= C; is transformed to x |= C.   */
      /* if ((x & C) != 0) x |= C; is transformed to nothing.  */
      if (GET_CODE (a) == IOR)
	result = (code == NE) ? a : NULL_RTX;
      else if (code == NE)
	{
	  /* if ((x & C) == 0) x ^= C; is transformed to x |= C.   */
	  result = gen_int_mode ((HOST_WIDE_INT) 1 << bitnum, mode);
	  result = simplify_gen_binary (IOR, mode, x, result);
	}
      else
	{
	  /* if ((x & C) != 0) x ^= C; is transformed to x &= ~C.  */
	  result = gen_int_mode (~((HOST_WIDE_INT) 1 << bitnum), mode);
	  result = simplify_gen_binary (AND, mode, x, result);
	}
    }
  else if (GET_CODE (a) == AND)
    {
      /* Check for "if (X & C) x &= ~C".  */
      if (! rtx_equal_p (x, XEXP (a, 0))
	  || !CONST_INT_P (XEXP (a, 1))
	  || (INTVAL (XEXP (a, 1)) & GET_MODE_MASK (mode))
	     != (~((HOST_WIDE_INT) 1 << bitnum) & GET_MODE_MASK (mode)))
        return FALSE;

      /* if ((x & C) == 0) x &= ~C; is transformed to nothing.  */
      /* if ((x & C) != 0) x &= ~C; is transformed to x &= ~C.  */
      result = (code == EQ) ? a : NULL_RTX;
    }
  else
    return FALSE;

  /* A NULL RESULT means the conditional assignment is redundant;
     succeed without emitting anything.  */
  if (result)
    {
      start_sequence ();
      noce_emit_move_insn (x, result);
      seq = end_ifcvt_sequence (if_info);
      if (!seq)
	return FALSE;

      emit_insn_before_setloc (seq, if_info->jump,
			       INSN_LOCATOR (if_info->insn_a));
    }
  return TRUE;
}
2075 
2076 
2077 /* Similar to get_condition, only the resulting condition must be
2078    valid at JUMP, instead of at EARLIEST.
2079 
2080    If THEN_ELSE_REVERSED is true, the fallthrough does not go to the
2081    THEN block of the caller, and we have to reverse the condition.  */
2082 
2083 static rtx
2084 noce_get_condition (rtx jump, rtx *earliest, bool then_else_reversed)
2085 {
2086   rtx cond, set, tmp;
2087   bool reverse;
2088 
2089   if (! any_condjump_p (jump))
2090     return NULL_RTX;
2091 
2092   set = pc_set (jump);
2093 
2094   /* If this branches to JUMP_LABEL when the condition is false,
2095      reverse the condition.  */
2096   reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
2097 	     && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));
2098 
2099   /* We may have to reverse because the caller's if block is not canonical,
2100      i.e. the THEN block isn't the fallthrough block for the TEST block
2101      (see find_if_header).  */
2102   if (then_else_reversed)
2103     reverse = !reverse;
2104 
2105   /* If the condition variable is a register and is MODE_INT, accept it.  */
2106 
2107   cond = XEXP (SET_SRC (set), 0);
2108   tmp = XEXP (cond, 0);
2109   if (REG_P (tmp) && GET_MODE_CLASS (GET_MODE (tmp)) == MODE_INT)
2110     {
2111       *earliest = jump;
2112 
2113       if (reverse)
2114 	cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
2115 			       GET_MODE (cond), tmp, XEXP (cond, 1));
2116       return cond;
2117     }
2118 
2119   /* Otherwise, fall back on canonicalize_condition to do the dirty
2120      work of manipulating MODE_CC values and COMPARE rtx codes.  */
2121   tmp = canonicalize_condition (jump, cond, reverse, earliest,
2122 				NULL_RTX, false, true);
2123 
2124   /* We don't handle side-effects in the condition, like handling
2125      REG_INC notes and making sure no duplicate conditions are emitted.  */
2126   if (tmp != NULL_RTX && side_effects_p (tmp))
2127     return NULL_RTX;
2128 
2129   return tmp;
2130 }
2131 
2132 /* Return true if OP is ok for if-then-else processing.  */
2133 
2134 static int
2135 noce_operand_ok (const_rtx op)
2136 {
2137   /* We special-case memories, so handle any of them with
2138      no address side effects.  */
2139   if (MEM_P (op))
2140     return ! side_effects_p (XEXP (op, 0));
2141 
2142   if (side_effects_p (op))
2143     return FALSE;
2144 
2145   return ! may_trap_p (op);
2146 }
2147 
2148 /* Return true if a write into MEM may trap or fault.  */
2149 
2150 static bool
2151 noce_mem_write_may_trap_or_fault_p (const_rtx mem)
2152 {
2153   rtx addr;
2154 
2155   if (MEM_READONLY_P (mem))
2156     return true;
2157 
2158   if (may_trap_or_fault_p (mem))
2159     return true;
2160 
2161   addr = XEXP (mem, 0);
2162 
2163   /* Call target hook to avoid the effects of -fpic etc....  */
2164   addr = targetm.delegitimize_address (addr);
2165 
2166   while (addr)
2167     switch (GET_CODE (addr))
2168       {
2169       case CONST:
2170       case PRE_DEC:
2171       case PRE_INC:
2172       case POST_DEC:
2173       case POST_INC:
2174       case POST_MODIFY:
2175 	addr = XEXP (addr, 0);
2176 	break;
2177       case LO_SUM:
2178       case PRE_MODIFY:
2179 	addr = XEXP (addr, 1);
2180 	break;
2181       case PLUS:
2182 	if (CONST_INT_P (XEXP (addr, 1)))
2183 	  addr = XEXP (addr, 0);
2184 	else
2185 	  return false;
2186 	break;
2187       case LABEL_REF:
2188 	return true;
2189       case SYMBOL_REF:
2190 	if (SYMBOL_REF_DECL (addr)
2191 	    && decl_readonly_section (SYMBOL_REF_DECL (addr), 0))
2192 	  return true;
2193 	return false;
2194       default:
2195 	return false;
2196       }
2197 
2198   return false;
2199 }
2200 
2201 /* Return whether we can use store speculation for MEM.  TOP_BB is the
2202    basic block above the conditional block where we are considering
2203    doing the speculative store.  We look for whether MEM is set
2204    unconditionally later in the function.  */
2205 
2206 static bool
2207 noce_can_store_speculate_p (basic_block top_bb, const_rtx mem)
2208 {
2209   basic_block dominator;
2210 
2211   for (dominator = get_immediate_dominator (CDI_POST_DOMINATORS, top_bb);
2212        dominator != NULL;
2213        dominator = get_immediate_dominator (CDI_POST_DOMINATORS, dominator))
2214     {
2215       rtx insn;
2216 
2217       FOR_BB_INSNS (dominator, insn)
2218 	{
2219 	  /* If we see something that might be a memory barrier, we
2220 	     have to stop looking.  Even if the MEM is set later in
2221 	     the function, we still don't want to set it
2222 	     unconditionally before the barrier.  */
2223 	  if (INSN_P (insn)
2224 	      && (volatile_insn_p (PATTERN (insn))
2225 		  || (CALL_P (insn) && (!RTL_CONST_CALL_P (insn)))))
2226 	    return false;
2227 
2228 	  if (memory_modified_in_insn_p (mem, insn))
2229 	    return true;
2230 	  if (modified_in_p (XEXP (mem, 0), insn))
2231 	    return false;
2232 
2233 	}
2234     }
2235 
2236   return false;
2237 }
2238 
/* Given a simple IF-THEN-JOIN or IF-THEN-ELSE-JOIN block, attempt to convert
   it without using conditional execution.  Return TRUE if we were successful
   at converting the block.  */

static int
noce_process_if_block (struct noce_if_info *if_info)
{
  basic_block test_bb = if_info->test_bb;	/* test block */
  basic_block then_bb = if_info->then_bb;	/* THEN */
  basic_block else_bb = if_info->else_bb;	/* ELSE or NULL */
  basic_block join_bb = if_info->join_bb;	/* JOIN */
  rtx jump = if_info->jump;
  rtx cond = if_info->cond;
  rtx insn_a, insn_b;
  rtx set_a, set_b;
  rtx orig_x, x, a, b;

  /* We're looking for patterns of the form

     (1) if (...) x = a; else x = b;
     (2) x = b; if (...) x = a;
     (3) if (...) x = a;   // as if with an initial x = x.

     The later patterns require jumps to be more expensive.

     ??? For future expansion, look for multiple X in such patterns.  */

  /* Look for one of the potential sets.  */
  insn_a = first_active_insn (then_bb);
  if (! insn_a
      || insn_a != last_active_insn (then_bb, FALSE)
      || (set_a = single_set (insn_a)) == NULL_RTX)
    return FALSE;

  x = SET_DEST (set_a);
  a = SET_SRC (set_a);

  /* Look for the other potential set.  Make sure we've got equivalent
     destinations.  */
  /* ??? This is overconservative.  Storing to two different mems is
     as easy as conditionally computing the address.  Storing to a
     single mem merely requires a scratch memory to use as one of the
     destination addresses; often the memory immediately below the
     stack pointer is available for this.  */
  set_b = NULL_RTX;
  if (else_bb)
    {
      /* Pattern (1): the ELSE block must also be a single set of X.  */
      insn_b = first_active_insn (else_bb);
      if (! insn_b
	  || insn_b != last_active_insn (else_bb, FALSE)
	  || (set_b = single_set (insn_b)) == NULL_RTX
	  || ! rtx_equal_p (x, SET_DEST (set_b)))
	return FALSE;
    }
  else
    {
      /* Pattern (2): look for "x = b" just above the condition.  */
      insn_b = prev_nonnote_nondebug_insn (if_info->cond_earliest);
      /* We're going to be moving the evaluation of B down from above
	 COND_EARLIEST to JUMP.  Make sure the relevant data is still
	 intact.  */
      if (! insn_b
	  || BLOCK_FOR_INSN (insn_b) != BLOCK_FOR_INSN (if_info->cond_earliest)
	  || !NONJUMP_INSN_P (insn_b)
	  || (set_b = single_set (insn_b)) == NULL_RTX
	  || ! rtx_equal_p (x, SET_DEST (set_b))
	  || ! noce_operand_ok (SET_SRC (set_b))
	  || reg_overlap_mentioned_p (x, SET_SRC (set_b))
	  || modified_between_p (SET_SRC (set_b), insn_b, jump)
	  /* Likewise with X.  In particular this can happen when
	     noce_get_condition looks farther back in the instruction
	     stream than one might expect.  */
	  || reg_overlap_mentioned_p (x, cond)
	  || reg_overlap_mentioned_p (x, a)
	  || modified_between_p (x, insn_b, jump))
	insn_b = set_b = NULL_RTX;
    }

  /* If x has side effects then only the if-then-else form is safe to
     convert.  But even in that case we would need to restore any notes
     (such as REG_INC) at then end.  That can be tricky if
     noce_emit_move_insn expands to more than one insn, so disable the
     optimization entirely for now if there are side effects.  */
  if (side_effects_p (x))
    return FALSE;

  /* With no SET_B, the implicit else arm is "x = x" (pattern 3).  */
  b = (set_b ? SET_SRC (set_b) : x);

  /* Only operate on register destinations, and even then avoid extending
     the lifetime of hard registers on small register class machines.  */
  orig_x = x;
  if (!REG_P (x)
      || (SMALL_REGISTER_CLASSES
	  && REGNO (x) < FIRST_PSEUDO_REGISTER))
    {
      if (GET_MODE (x) == BLKmode)
	return FALSE;

      if (GET_CODE (x) == ZERO_EXTRACT
	  && (!CONST_INT_P (XEXP (x, 1))
	      || !CONST_INT_P (XEXP (x, 2))))
	return FALSE;

      /* Compute into a fresh pseudo; the success path below emits the
	 final copy back into ORIG_X.  */
      x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
				 ? XEXP (x, 0) : x));
    }

  /* Don't operate on sources that may trap or are volatile.  */
  if (! noce_operand_ok (a) || ! noce_operand_ok (b))
    return FALSE;

 retry:
  /* Set up the info block for our subroutines.  */
  if_info->insn_a = insn_a;
  if_info->insn_b = insn_b;
  if_info->x = x;
  if_info->a = a;
  if_info->b = b;

  /* Try optimizations in some approximation of a useful order.  */
  /* ??? Should first look to see if X is live incoming at all.  If it
     isn't, we don't need anything but an unconditional set.  */

  /* Look and see if A and B are really the same.  Avoid creating silly
     cmove constructs that no one will fix up later.  */
  if (rtx_equal_p (a, b))
    {
      /* If we have an INSN_B, we don't have to create any new rtl.  Just
	 move the instruction that we already have.  If we don't have an
	 INSN_B, that means that A == X, and we've got a noop move.  In
	 that case don't do anything and let the code below delete INSN_A.  */
      if (insn_b && else_bb)
	{
	  rtx note;

	  if (else_bb && insn_b == BB_END (else_bb))
	    BB_END (else_bb) = PREV_INSN (insn_b);
	  reorder_insns (insn_b, insn_b, PREV_INSN (jump));

	  /* If there was a REG_EQUAL note, delete it since it may have been
	     true due to this insn being after a jump.  */
	  if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
	    remove_note (insn_b, note);

	  insn_b = NULL_RTX;
	}
      /* If we have "x = b; if (...) x = a;", and x has side-effects, then
	 x must be executed twice.  */
      else if (insn_b && side_effects_p (orig_x))
	return FALSE;

      x = orig_x;
      goto success;
    }

  if (!set_b && MEM_P (orig_x))
    {
      /* Disallow the "if (...) x = a;" form (implicit "else x = x;")
	 for optimizations if writing to x may trap or fault,
	 i.e. it's a memory other than a static var or a stack slot,
	 is misaligned on strict aligned machines or is read-only.  If
	 x is a read-only memory, then the program is valid only if we
	 avoid the store into it.  If there are stores on both the
	 THEN and ELSE arms, then we can go ahead with the conversion;
	 either the program is broken, or the condition is always
	 false such that the other memory is selected.  */
      if (noce_mem_write_may_trap_or_fault_p (orig_x))
	return FALSE;

      /* Avoid store speculation: given "if (...) x = a" where x is a
	 MEM, we only want to do the store if x is always set
	 somewhere in the function.  This avoids cases like
	   if (pthread_mutex_trylock(mutex))
	     ++global_variable;
	 where we only want global_variable to be changed if the mutex
	 is held.  FIXME: This should ideally be expressed directly in
	 RTL somehow.  */
      if (!noce_can_store_speculate_p (test_bb, orig_x))
	return FALSE;
    }

  if (noce_try_move (if_info))
    goto success;
  if (noce_try_store_flag (if_info))
    goto success;
  if (noce_try_bitop (if_info))
    goto success;
  if (noce_try_minmax (if_info))
    goto success;
  if (noce_try_abs (if_info))
    goto success;
  if (HAVE_conditional_move
      && noce_try_cmove (if_info))
    goto success;
  if (! targetm.have_conditional_execution ())
    {
      if (noce_try_store_flag_constants (if_info))
	goto success;
      if (noce_try_addcc (if_info))
	goto success;
      if (noce_try_store_flag_mask (if_info))
	goto success;
      if (HAVE_conditional_move
	  && noce_try_cmove_arith (if_info))
	goto success;
      if (noce_try_sign_mask (if_info))
	goto success;
    }

  /* Nothing matched for pattern (2); retry as pattern (3), dropping
     the "x = b" insn found above the condition.  */
  if (!else_bb && set_b)
    {
      insn_b = set_b = NULL_RTX;
      b = orig_x;
      goto retry;
    }

  return FALSE;

 success:

  /* If we used a temporary, fix it up now.  */
  if (orig_x != x)
    {
      rtx seq;

      start_sequence ();
      noce_emit_move_insn (orig_x, x);
      seq = get_insns ();
      set_used_flags (orig_x);
      unshare_all_rtl_in_chain (seq);
      end_sequence ();

      emit_insn_before_setloc (seq, BB_END (test_bb), INSN_LOCATOR (insn_a));
    }

  /* The original THEN and ELSE blocks may now be removed.  The test block
     must now jump to the join block.  If the test block and the join block
     can be merged, do so.  */
  if (else_bb)
    {
      delete_basic_block (else_bb);
      num_true_changes++;
    }
  else
    remove_edge (find_edge (test_bb, join_bb));

  remove_edge (find_edge (then_bb, join_bb));
  redirect_edge_and_branch_force (single_succ_edge (test_bb), join_bb);
  delete_basic_block (then_bb);
  num_true_changes++;

  if (can_merge_blocks_p (test_bb, join_bb))
    {
      merge_blocks (test_bb, join_bb);
      num_true_changes++;
    }

  num_updated_if_blocks++;
  return TRUE;
}
2498 
/* Check whether a block is suitable for conditional move conversion.
   Every insn must be a simple set of a register to a constant or a
   register.  For each assignment, store the value in the array VALS,
   indexed by register number, then store the register number in
   REGS.  COND is the condition we will test.  Returns TRUE if every
   insn in BB qualifies, FALSE otherwise.  */

static int
check_cond_move_block (basic_block bb, rtx *vals, VEC (int, heap) **regs, rtx cond)
{
  rtx insn;

   /* We can only handle simple jumps at the end of the basic block.
      It is almost impossible to update the CFG otherwise.  */
  insn = BB_END (bb);
  if (JUMP_P (insn) && !onlyjump_p (insn))
    return FALSE;

  FOR_BB_INSNS (bb, insn)
    {
      rtx set, dest, src;

      /* Skip notes, debug insns and the final jump.  */
      if (!NONDEBUG_INSN_P (insn) || JUMP_P (insn))
	continue;
      set = single_set (insn);
      if (!set)
	return FALSE;

      dest = SET_DEST (set);
      src = SET_SRC (set);
      /* Only register destinations, and no hard registers when
	 register classes are small.  */
      if (!REG_P (dest)
	  || (SMALL_REGISTER_CLASSES && HARD_REGISTER_P (dest)))
	return FALSE;

      if (!CONSTANT_P (src) && !register_operand (src, VOIDmode))
	return FALSE;

      if (side_effects_p (src) || side_effects_p (dest))
	return FALSE;

      if (may_trap_p (src) || may_trap_p (dest))
	return FALSE;

      /* Don't try to handle this if the source register was
	 modified earlier in the block.  */
      if ((REG_P (src)
	   && vals[REGNO (src)] != NULL)
	  || (GET_CODE (src) == SUBREG && REG_P (SUBREG_REG (src))
	      && vals[REGNO (SUBREG_REG (src))] != NULL))
	return FALSE;

      /* Don't try to handle this if the destination register was
	 modified earlier in the block.  */
      if (vals[REGNO (dest)] != NULL)
	return FALSE;

      /* Don't try to handle this if the condition uses the
	 destination register.  */
      if (reg_overlap_mentioned_p (dest, cond))
	return FALSE;

      /* Don't try to handle this if the source register is modified
	 later in the block.  */
      if (!CONSTANT_P (src)
	  && modified_between_p (src, insn, NEXT_INSN (BB_END (bb))))
	return FALSE;

      /* All checks passed; record the assignment for the caller.  */
      vals[REGNO (dest)] = src;

      VEC_safe_push (int, heap, *regs, REGNO (dest));
    }

  return TRUE;
}
2572 
/* Given a basic block BB suitable for conditional move conversion,
   a condition COND, and arrays THEN_VALS and ELSE_VALS containing the
   register values depending on COND, emit the insns in the block as
   conditional moves.  If ELSE_BLOCK is true, THEN_BB was already
   processed.  The caller has started a sequence for the conversion.
   Return true if successful, false if something goes wrong.  */

static bool
cond_move_convert_if_block (struct noce_if_info *if_infop,
			    basic_block bb, rtx cond,
			    rtx *then_vals, rtx *else_vals,
			    bool else_block_p)
{
  enum rtx_code code;
  rtx insn, cond_arg0, cond_arg1;

  code = GET_CODE (cond);
  cond_arg0 = XEXP (cond, 0);
  cond_arg1 = XEXP (cond, 1);

  FOR_BB_INSNS (bb, insn)
    {
      rtx set, target, dest, t, e;
      unsigned int regno;

      /* ??? Maybe emit conditional debug insn?  */
      if (!NONDEBUG_INSN_P (insn) || JUMP_P (insn))
	continue;
      set = single_set (insn);
      /* check_cond_move_block already validated every insn.  */
      gcc_assert (set && REG_P (SET_DEST (set)));

      dest = SET_DEST (set);
      regno = REGNO (dest);

      /* T and E are the values DEST takes in the THEN and ELSE arms.  */
      t = then_vals[regno];
      e = else_vals[regno];

      if (else_block_p)
	{
	  /* If this register was set in the then block, we already
	     handled this case there.  */
	  if (t)
	    continue;
	  /* Set only in the ELSE arm: the THEN value is DEST itself.  */
	  t = dest;
	  gcc_assert (e);
	}
      else
	{
	  gcc_assert (t);
	  /* Set only in the THEN arm: the ELSE value is DEST itself.  */
	  if (!e)
	    e = dest;
	}

      target = noce_emit_cmove (if_infop, dest, code, cond_arg0, cond_arg1,
				t, e);
      if (!target)
	return false;

      if (target != dest)
	noce_emit_move_insn (dest, target);
    }

  return true;
}
2637 
2638 /* Given a simple IF-THEN-JOIN or IF-THEN-ELSE-JOIN block, attempt to convert
2639    it using only conditional moves.  Return TRUE if we were successful at
2640    converting the block.  */
2641 
2642 static int
2643 cond_move_process_if_block (struct noce_if_info *if_info)
2644 {
2645   basic_block test_bb = if_info->test_bb;
2646   basic_block then_bb = if_info->then_bb;
2647   basic_block else_bb = if_info->else_bb;
2648   basic_block join_bb = if_info->join_bb;
2649   rtx jump = if_info->jump;
2650   rtx cond = if_info->cond;
2651   rtx seq, loc_insn;
2652   int max_reg, size, c, reg;
2653   rtx *then_vals;
2654   rtx *else_vals;
2655   VEC (int, heap) *then_regs = NULL;
2656   VEC (int, heap) *else_regs = NULL;
2657   unsigned int i;
2658 
2659   /* Build a mapping for each block to the value used for each
2660      register.  */
2661   max_reg = max_reg_num ();
2662   size = (max_reg + 1) * sizeof (rtx);
2663   then_vals = (rtx *) alloca (size);
2664   else_vals = (rtx *) alloca (size);
2665   memset (then_vals, 0, size);
2666   memset (else_vals, 0, size);
2667 
2668   /* Make sure the blocks are suitable.  */
2669   if (!check_cond_move_block (then_bb, then_vals, &then_regs, cond)
2670       || (else_bb && !check_cond_move_block (else_bb, else_vals, &else_regs, cond)))
2671     {
2672       VEC_free (int, heap, then_regs);
2673       VEC_free (int, heap, else_regs);
2674       return FALSE;
2675     }
2676 
2677   /* Make sure the blocks can be used together.  If the same register
2678      is set in both blocks, and is not set to a constant in both
2679      cases, then both blocks must set it to the same register.  We
2680      have already verified that if it is set to a register, that the
2681      source register does not change after the assignment.  Also count
2682      the number of registers set in only one of the blocks.  */
2683   c = 0;
2684   for (i = 0; VEC_iterate (int, then_regs, i, reg); i++)
2685     {
2686       if (!then_vals[reg] && !else_vals[reg])
2687 	continue;
2688 
2689       if (!else_vals[reg])
2690 	++c;
2691       else
2692 	{
2693 	  if (!CONSTANT_P (then_vals[reg])
2694 	      && !CONSTANT_P (else_vals[reg])
2695 	      && !rtx_equal_p (then_vals[reg], else_vals[reg]))
2696 	    {
2697 	      VEC_free (int, heap, then_regs);
2698 	      VEC_free (int, heap, else_regs);
2699 	      return FALSE;
2700 	    }
2701 	}
2702     }
2703 
2704   /* Finish off c for MAX_CONDITIONAL_EXECUTE.  */
2705   for (i = 0; VEC_iterate (int, else_regs, i, reg); ++i)
2706     if (!then_vals[reg])
2707       ++c;
2708 
2709   /* Make sure it is reasonable to convert this block.  What matters
2710      is the number of assignments currently made in only one of the
2711      branches, since if we convert we are going to always execute
2712      them.  */
2713   if (c > MAX_CONDITIONAL_EXECUTE)
2714     {
2715       VEC_free (int, heap, then_regs);
2716       VEC_free (int, heap, else_regs);
2717       return FALSE;
2718     }
2719 
2720   /* Try to emit the conditional moves.  First do the then block,
2721      then do anything left in the else blocks.  */
2722   start_sequence ();
2723   if (!cond_move_convert_if_block (if_info, then_bb, cond,
2724 				   then_vals, else_vals, false)
2725       || (else_bb
2726 	  && !cond_move_convert_if_block (if_info, else_bb, cond,
2727 					  then_vals, else_vals, true)))
2728     {
2729       end_sequence ();
2730       VEC_free (int, heap, then_regs);
2731       VEC_free (int, heap, else_regs);
2732       return FALSE;
2733     }
2734   seq = end_ifcvt_sequence (if_info);
2735   if (!seq)
2736     {
2737       VEC_free (int, heap, then_regs);
2738       VEC_free (int, heap, else_regs);
2739       return FALSE;
2740     }
2741 
2742   loc_insn = first_active_insn (then_bb);
2743   if (!loc_insn)
2744     {
2745       loc_insn = first_active_insn (else_bb);
2746       gcc_assert (loc_insn);
2747     }
2748   emit_insn_before_setloc (seq, jump, INSN_LOCATOR (loc_insn));
2749 
2750   if (else_bb)
2751     {
2752       delete_basic_block (else_bb);
2753       num_true_changes++;
2754     }
2755   else
2756     remove_edge (find_edge (test_bb, join_bb));
2757 
2758   remove_edge (find_edge (then_bb, join_bb));
2759   redirect_edge_and_branch_force (single_succ_edge (test_bb), join_bb);
2760   delete_basic_block (then_bb);
2761   num_true_changes++;
2762 
2763   if (can_merge_blocks_p (test_bb, join_bb))
2764     {
2765       merge_blocks (test_bb, join_bb);
2766       num_true_changes++;
2767     }
2768 
2769   num_updated_if_blocks++;
2770 
2771   VEC_free (int, heap, then_regs);
2772   VEC_free (int, heap, else_regs);
2773   return TRUE;
2774 }
2775 
2776 
/* Determine if a given basic block heads a simple IF-THEN-JOIN or an
   IF-THEN-ELSE-JOIN block.

   If so, we'll try to convert the insns to not require the branch,
   using only transformations that do not require conditional execution.

   Return TRUE if we were successful at converting the block.  */

static int
noce_find_if_block (basic_block test_bb,
		    edge then_edge, edge else_edge,
		    int pass)
{
  basic_block then_bb, else_bb, join_bb;
  bool then_else_reversed = false;
  rtx jump, cond;
  rtx cond_earliest;
  struct noce_if_info if_info;

  /* We only ever should get here before reload.  */
  gcc_assert (!reload_completed);

  /* Recognize an IF-THEN-ELSE-JOIN block.  */
  if (single_pred_p (then_edge->dest)
      && single_succ_p (then_edge->dest)
      && single_pred_p (else_edge->dest)
      && single_succ_p (else_edge->dest)
      && single_succ (then_edge->dest) == single_succ (else_edge->dest))
    {
      then_bb = then_edge->dest;
      else_bb = else_edge->dest;
      join_bb = single_succ (then_bb);
    }
  /* Recognize an IF-THEN-JOIN block.  */
  else if (single_pred_p (then_edge->dest)
	   && single_succ_p (then_edge->dest)
	   && single_succ (then_edge->dest) == else_edge->dest)
    {
      then_bb = then_edge->dest;
      else_bb = NULL_BLOCK;
      join_bb = else_edge->dest;
    }
  /* Recognize an IF-ELSE-JOIN block.  We can have those because the order
     of basic blocks in cfglayout mode does not matter, so the fallthrough
     edge can go to any basic block (and not just to bb->next_bb, like in
     cfgrtl mode).  */
  else if (single_pred_p (else_edge->dest)
	   && single_succ_p (else_edge->dest)
	   && single_succ (else_edge->dest) == then_edge->dest)
    {
      /* The noce transformations do not apply to IF-ELSE-JOIN blocks.
	 To make this work, we have to invert the THEN and ELSE blocks
	 and reverse the jump condition.  */
      then_bb = else_edge->dest;
      else_bb = NULL_BLOCK;
      join_bb = single_succ (then_bb);
      then_else_reversed = true;
    }
  else
    /* Not a form we can handle.  */
    return FALSE;

  /* The edges of the THEN and ELSE blocks cannot have complex edges.  */
  if (single_succ_edge (then_bb)->flags & EDGE_COMPLEX)
    return FALSE;
  if (else_bb
      && single_succ_edge (else_bb)->flags & EDGE_COMPLEX)
    return FALSE;

  num_possible_if_blocks++;

  if (dump_file)
    {
      fprintf (dump_file,
	       "\nIF-THEN%s-JOIN block found, pass %d, test %d, then %d",
	       (else_bb) ? "-ELSE" : "",
	       pass, test_bb->index, then_bb->index);

      if (else_bb)
	fprintf (dump_file, ", else %d", else_bb->index);

      fprintf (dump_file, ", join %d\n", join_bb->index);
    }

  /* If the conditional jump is more than just a conditional
     jump, then we can not do if-conversion on this block.  */
  jump = BB_END (test_bb);
  if (! onlyjump_p (jump))
    return FALSE;

  /* If this is not a standard conditional jump, we can't parse it.  */
  cond = noce_get_condition (jump,
			     &cond_earliest,
			     then_else_reversed);
  if (!cond)
    return FALSE;

  /* We must be comparing objects whose modes imply the size.  */
  if (GET_MODE (XEXP (cond, 0)) == BLKmode)
    return FALSE;

  /* Initialize an IF_INFO struct to pass around.  */
  memset (&if_info, 0, sizeof if_info);
  if_info.test_bb = test_bb;
  if_info.then_bb = then_bb;
  if_info.else_bb = else_bb;
  if_info.join_bb = join_bb;
  if_info.cond = cond;
  if_info.cond_earliest = cond_earliest;
  if_info.jump = jump;
  if_info.then_else_reversed = then_else_reversed;
  /* The branch cost depends on whether this block is optimized for
     speed and on how predictable the branch is.  */
  if_info.branch_cost = BRANCH_COST (optimize_bb_for_speed_p (test_bb),
				     predictable_edge_p (then_edge));

  /* Do the real work.  */

  if (noce_process_if_block (&if_info))
    return TRUE;

  /* Fall back to converting whole blocks of simple sets into
     conditional moves.  */
  if (HAVE_conditional_move
      && cond_move_process_if_block (&if_info))
    return TRUE;

  return FALSE;
}
2902 
2903 
/* Merge the blocks and mark for local life update.  Collapses the
   TEST, THEN, ELSE (and, when adjacent, JOIN) blocks of CE_INFO into
   a single combined block after conversion has succeeded.  */

static void
merge_if_block (struct ce_if_block * ce_info)
{
  basic_block test_bb = ce_info->test_bb;	/* last test block */
  basic_block then_bb = ce_info->then_bb;	/* THEN */
  basic_block else_bb = ce_info->else_bb;	/* ELSE or NULL */
  basic_block join_bb = ce_info->join_bb;	/* join block */
  basic_block combo_bb;

  /* All block merging is done into the lower block numbers.  */

  combo_bb = test_bb;
  df_set_bb_dirty (test_bb);

  /* Merge any basic blocks to handle && and || subtests.  Each of
     the blocks are on the fallthru path from the predecessor block.  */
  if (ce_info->num_multiple_test_blocks > 0)
    {
      basic_block bb = test_bb;
      basic_block last_test_bb = ce_info->last_test_bb;
      basic_block fallthru = block_fallthru (bb);

      do
	{
	  bb = fallthru;
	  fallthru = block_fallthru (bb);
	  merge_blocks (combo_bb, bb);
	  num_true_changes++;
	}
      while (bb != last_test_bb);
    }

  /* Merge TEST block into THEN block.  Normally the THEN block won't have a
     label, but it might if there were || tests.  That label's count should be
     zero, and it normally should be removed.  */

  if (then_bb)
    {
      merge_blocks (combo_bb, then_bb);
      num_true_changes++;
    }

  /* The ELSE block, if it existed, had a label.  That label count
     will almost always be zero, but odd things can happen when labels
     get their addresses taken.  */
  if (else_bb)
    {
      merge_blocks (combo_bb, else_bb);
      num_true_changes++;
    }

  /* If there was no join block reported, that means it was not adjacent
     to the others, and so we cannot merge them.  */

  if (! join_bb)
    {
      rtx last = BB_END (combo_bb);

      /* The outgoing edge for the current COMBO block should already
	 be correct.  Verify this.  */
      if (EDGE_COUNT (combo_bb->succs) == 0)
	/* No successors at all: the block must end in a noreturn call
	   or an unconditional trap.  */
	gcc_assert (find_reg_note (last, REG_NORETURN, NULL)
		    || (NONJUMP_INSN_P (last)
			&& GET_CODE (PATTERN (last)) == TRAP_IF
			&& (TRAP_CONDITION (PATTERN (last))
			    == const_true_rtx)));

      else
      /* There should still be something at the end of the THEN or ELSE
         blocks taking us to our final destination.  */
	gcc_assert (JUMP_P (last)
		    || (EDGE_SUCC (combo_bb, 0)->dest == EXIT_BLOCK_PTR
			&& CALL_P (last)
			&& SIBLING_CALL_P (last))
		    || ((EDGE_SUCC (combo_bb, 0)->flags & EDGE_EH)
			&& can_throw_internal (last)));
    }

  /* The JOIN block may have had quite a number of other predecessors too.
     Since we've already merged the TEST, THEN and ELSE blocks, we should
     have only one remaining edge from our if-then-else diamond.  If there
     is more than one remaining edge, it must come from elsewhere.  There
     may be zero incoming edges if the THEN block didn't actually join
     back up (as with a call to a non-return function).  */
  else if (EDGE_COUNT (join_bb->preds) < 2
	   && join_bb != EXIT_BLOCK_PTR)
    {
      /* We can merge the JOIN cleanly and update the dataflow try
	 again on this pass.*/
      merge_blocks (combo_bb, join_bb);
      num_true_changes++;
    }
  else
    {
      /* We cannot merge the JOIN.  */

      /* The outgoing edge for the current COMBO block should already
	 be correct.  Verify this.  */
      gcc_assert (single_succ_p (combo_bb)
		  && single_succ (combo_bb) == join_bb);

      /* Remove the jump and cruft from the end of the COMBO block.  */
      if (join_bb != EXIT_BLOCK_PTR)
	tidy_fallthru_edge (single_succ_edge (combo_bb));
    }

  num_updated_if_blocks++;
}
3014 
3015 /* Find a block ending in a simple IF condition and try to transform it
3016    in some way.  When converting a multi-block condition, put the new code
3017    in the first such block and delete the rest.  Return a pointer to this
3018    first block if some transformation was done.  Return NULL otherwise.  */
3019 
static basic_block
find_if_header (basic_block test_bb, int pass)
{
  ce_if_block_t ce_info;	/* State handed to the cond_exec machinery.  */
  edge then_edge;
  edge else_edge;

  /* The kind of block we're looking for has exactly two successors.  */
  if (EDGE_COUNT (test_bb->succs) != 2)
    return NULL;

  then_edge = EDGE_SUCC (test_bb, 0);
  else_edge = EDGE_SUCC (test_bb, 1);

  /* Destination blocks flagged dirty were already modified during this
     pass; their dataflow info is stale, so don't touch them again until
     it has been recomputed.  */
  if (df_get_bb_dirty (then_edge->dest))
    return NULL;
  if (df_get_bb_dirty (else_edge->dest))
    return NULL;

  /* Neither edge should be abnormal.  */
  if ((then_edge->flags & EDGE_COMPLEX)
      || (else_edge->flags & EDGE_COMPLEX))
    return NULL;

  /* Nor exit the loop.  */
  if ((then_edge->flags & EDGE_LOOP_EXIT)
      || (else_edge->flags & EDGE_LOOP_EXIT))
    return NULL;

  /* The THEN edge is canonically the one that falls through.  */
  if (then_edge->flags & EDGE_FALLTHRU)
    ;
  else if (else_edge->flags & EDGE_FALLTHRU)
    {
      /* Swap so that THEN_EDGE is the fallthru edge.  */
      edge e = else_edge;
      else_edge = then_edge;
      then_edge = e;
    }
  else
    /* Otherwise this must be a multiway branch of some sort.  */
    return NULL;

  memset (&ce_info, '\0', sizeof (ce_info));
  ce_info.test_bb = test_bb;
  ce_info.then_bb = then_edge->dest;
  ce_info.else_bb = else_edge->dest;
  ce_info.pass = pass;

#ifdef IFCVT_INIT_EXTRA_FIELDS
  IFCVT_INIT_EXTRA_FIELDS (&ce_info);
#endif

  /* Try each transformation in turn; the first that succeeds wins.
     Before reload, try converting to straight-line code (conditional
     moves / store-flag sequences).  */
  if (! reload_completed
      && noce_find_if_block (test_bb, then_edge, else_edge, pass))
    goto success;

  /* After reload, on targets with conditional execution, try
     predicating the THEN/ELSE insns.  */
  if (targetm.have_conditional_execution () && reload_completed
      && cond_exec_find_if_block (&ce_info))
    goto success;

  /* Try turning a branch over/to a trap into a conditional trap.  */
  if (HAVE_trap
      && optab_handler (ctrap_optab, word_mode)->insn_code != CODE_FOR_nothing
      && find_cond_trap (test_bb, then_edge, else_edge))
    goto success;

  /* The case-1/case-2 transforms need usable post-dominator info.  */
  if (dom_info_state (CDI_POST_DOMINATORS) >= DOM_NO_FAST_QUERY
      && (! targetm.have_conditional_execution () || reload_completed))
    {
      if (find_if_case_1 (test_bb, then_edge, else_edge))
	goto success;
      if (find_if_case_2 (test_bb, then_edge, else_edge))
	goto success;
    }

  return NULL;

 success:
  if (dump_file)
    fprintf (dump_file, "Conversion succeeded on pass %d.\n", pass);
  /* Set this so we continue looking.  */
  cond_exec_changed_p = TRUE;
  return ce_info.test_bb;
}
3103 
3104 /* Return true if a block has two edges, one of which falls through to the next
3105    block, and the other jumps to a specific block, so that we can tell if the
3106    block is part of an && test or an || test.  Returns either -1 or the number
3107    of non-note, non-jump, non-USE/CLOBBER insns in the block.  */
3108 
3109 static int
3110 block_jumps_and_fallthru_p (basic_block cur_bb, basic_block target_bb)
3111 {
3112   edge cur_edge;
3113   int fallthru_p = FALSE;
3114   int jump_p = FALSE;
3115   rtx insn;
3116   rtx end;
3117   int n_insns = 0;
3118   edge_iterator ei;
3119 
3120   if (!cur_bb || !target_bb)
3121     return -1;
3122 
3123   /* If no edges, obviously it doesn't jump or fallthru.  */
3124   if (EDGE_COUNT (cur_bb->succs) == 0)
3125     return FALSE;
3126 
3127   FOR_EACH_EDGE (cur_edge, ei, cur_bb->succs)
3128     {
3129       if (cur_edge->flags & EDGE_COMPLEX)
3130 	/* Anything complex isn't what we want.  */
3131 	return -1;
3132 
3133       else if (cur_edge->flags & EDGE_FALLTHRU)
3134 	fallthru_p = TRUE;
3135 
3136       else if (cur_edge->dest == target_bb)
3137 	jump_p = TRUE;
3138 
3139       else
3140 	return -1;
3141     }
3142 
3143   if ((jump_p & fallthru_p) == 0)
3144     return -1;
3145 
3146   /* Don't allow calls in the block, since this is used to group && and ||
3147      together for conditional execution support.  ??? we should support
3148      conditional execution support across calls for IA-64 some day, but
3149      for now it makes the code simpler.  */
3150   end = BB_END (cur_bb);
3151   insn = BB_HEAD (cur_bb);
3152 
3153   while (insn != NULL_RTX)
3154     {
3155       if (CALL_P (insn))
3156 	return -1;
3157 
3158       if (INSN_P (insn)
3159 	  && !JUMP_P (insn)
3160 	  && !DEBUG_INSN_P (insn)
3161 	  && GET_CODE (PATTERN (insn)) != USE
3162 	  && GET_CODE (PATTERN (insn)) != CLOBBER)
3163 	n_insns++;
3164 
3165       if (insn == end)
3166 	break;
3167 
3168       insn = NEXT_INSN (insn);
3169     }
3170 
3171   return n_insns;
3172 }
3173 
3174 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
3175    block.  If so, we'll try to convert the insns to not require the branch.
3176    Return TRUE if we were successful at converting the block.  */
3177 
static int
cond_exec_find_if_block (struct ce_if_block * ce_info)
{
  basic_block test_bb = ce_info->test_bb;	/* block ending in the branch */
  basic_block then_bb = ce_info->then_bb;	/* fallthru successor */
  basic_block else_bb = ce_info->else_bb;	/* branch target */
  basic_block join_bb = NULL_BLOCK;		/* where the two arms rejoin */
  edge cur_edge;
  basic_block next;
  edge_iterator ei;

  ce_info->last_test_bb = test_bb;

  /* We only ever should get here after reload,
     and only if we have conditional execution.  */
  gcc_assert (targetm.have_conditional_execution () && reload_completed);

  /* Discover if any fall through predecessors of the current test basic block
     were && tests (which jump to the else block) or || tests (which jump to
     the then block).  */
  if (single_pred_p (test_bb)
      && single_pred_edge (test_bb)->flags == EDGE_FALLTHRU)
    {
      basic_block bb = single_pred (test_bb);
      basic_block target_bb;
      int max_insns = MAX_CONDITIONAL_EXECUTE;
      int n_insns;

      /* Determine if the preceding block is an && or || block.  */
      if ((n_insns = block_jumps_and_fallthru_p (bb, else_bb)) >= 0)
	{
	  /* The predecessor jumps to ELSE: every test must pass, so
	     this is an && chain.  */
	  ce_info->and_and_p = TRUE;
	  target_bb = else_bb;
	}
      else if ((n_insns = block_jumps_and_fallthru_p (bb, then_bb)) >= 0)
	{
	  /* The predecessor jumps to THEN: any passing test suffices,
	     so this is an || chain.  */
	  ce_info->and_and_p = FALSE;
	  target_bb = then_bb;
	}
      else
	target_bb = NULL_BLOCK;

      if (target_bb && n_insns <= max_insns)
	{
	  int total_insns = 0;
	  int blocks = 0;

	  ce_info->last_test_bb = test_bb;

	  /* Found at least one && or || block, look for more.  Walk up
	     the chain of single fallthru predecessors, widening the
	     region as long as the total insn budget is not exceeded.  */
	  do
	    {
	      ce_info->test_bb = test_bb = bb;
	      total_insns += n_insns;
	      blocks++;

	      if (!single_pred_p (bb))
		break;

	      bb = single_pred (bb);
	      n_insns = block_jumps_and_fallthru_p (bb, target_bb);
	    }
	  while (n_insns >= 0 && (total_insns + n_insns) <= max_insns);

	  ce_info->num_multiple_test_blocks = blocks;
	  ce_info->num_multiple_test_insns = total_insns;

	  if (ce_info->and_and_p)
	    ce_info->num_and_and_blocks = blocks;
	  else
	    ce_info->num_or_or_blocks = blocks;
	}
    }

  /* The THEN block of an IF-THEN combo must have exactly one predecessor,
     other than any || blocks which jump to the THEN block.  */
  if ((EDGE_COUNT (then_bb->preds) - ce_info->num_or_or_blocks) != 1)
    return FALSE;

  /* The edges of the THEN and ELSE blocks cannot have complex edges.  */
  FOR_EACH_EDGE (cur_edge, ei, then_bb->preds)
    {
      if (cur_edge->flags & EDGE_COMPLEX)
	return FALSE;
    }

  FOR_EACH_EDGE (cur_edge, ei, else_bb->preds)
    {
      if (cur_edge->flags & EDGE_COMPLEX)
	return FALSE;
    }

  /* The THEN block of an IF-THEN combo must have zero or one successors.  */
  if (EDGE_COUNT (then_bb->succs) > 0
      && (!single_succ_p (then_bb)
          || (single_succ_edge (then_bb)->flags & EDGE_COMPLEX)
	  || (epilogue_completed && tablejump_p (BB_END (then_bb), NULL, NULL))))
    return FALSE;

  /* If the THEN block has no successors, conditional execution can still
     make a conditional call.  Don't do this unless the ELSE block has
     only one incoming edge -- the CFG manipulation is too ugly otherwise.
     Check for the last insn of the THEN block being an indirect jump, which
     is listed as not having any successors, but confuses the rest of the CE
     code processing.  ??? we should fix this in the future.  */
  if (EDGE_COUNT (then_bb->succs) == 0)
    {
      if (single_pred_p (else_bb))
	{
	  rtx last_insn = BB_END (then_bb);

	  /* Skip trailing notes to find the real last insn.  */
	  while (last_insn
		 && NOTE_P (last_insn)
		 && last_insn != BB_HEAD (then_bb))
	    last_insn = PREV_INSN (last_insn);

	  /* A non-simple jump (e.g. an indirect jump) at the end of a
	     zero-successor THEN block cannot be handled; bail out.  */
	  if (last_insn
	      && JUMP_P (last_insn)
	      && ! simplejump_p (last_insn))
	    return FALSE;

	  /* Treat ELSE as the join point; there is no real ELSE arm.  */
	  join_bb = else_bb;
	  else_bb = NULL_BLOCK;
	}
      else
	return FALSE;
    }

  /* If the THEN block's successor is the other edge out of the TEST block,
     then we have an IF-THEN combo without an ELSE.  */
  else if (single_succ (then_bb) == else_bb)
    {
      join_bb = else_bb;
      else_bb = NULL_BLOCK;
    }

  /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
     has exactly one predecessor and one successor, and the outgoing edge
     is not complex, then we have an IF-THEN-ELSE combo.  */
  else if (single_succ_p (else_bb)
	   && single_succ (then_bb) == single_succ (else_bb)
	   && single_pred_p (else_bb)
	   && ! (single_succ_edge (else_bb)->flags & EDGE_COMPLEX)
	   && ! (epilogue_completed && tablejump_p (BB_END (else_bb), NULL, NULL)))
    join_bb = single_succ (else_bb);

  /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination.  */
  else
    return FALSE;

  num_possible_if_blocks++;

  if (dump_file)
    {
      fprintf (dump_file,
	       "\nIF-THEN%s block found, pass %d, start block %d "
	       "[insn %d], then %d [%d]",
	       (else_bb) ? "-ELSE" : "",
	       ce_info->pass,
	       test_bb->index,
	       BB_HEAD (test_bb) ? (int)INSN_UID (BB_HEAD (test_bb)) : -1,
	       then_bb->index,
	       BB_HEAD (then_bb) ? (int)INSN_UID (BB_HEAD (then_bb)) : -1);

      if (else_bb)
	fprintf (dump_file, ", else %d [%d]",
		 else_bb->index,
		 BB_HEAD (else_bb) ? (int)INSN_UID (BB_HEAD (else_bb)) : -1);

      fprintf (dump_file, ", join %d [%d]",
	       join_bb->index,
	       BB_HEAD (join_bb) ? (int)INSN_UID (BB_HEAD (join_bb)) : -1);

      if (ce_info->num_multiple_test_blocks > 0)
	fprintf (dump_file, ", %d %s block%s last test %d [%d]",
		 ce_info->num_multiple_test_blocks,
		 (ce_info->and_and_p) ? "&&" : "||",
		 (ce_info->num_multiple_test_blocks == 1) ? "" : "s",
		 ce_info->last_test_bb->index,
		 ((BB_HEAD (ce_info->last_test_bb))
		  ? (int)INSN_UID (BB_HEAD (ce_info->last_test_bb))
		  : -1));

      fputc ('\n', dump_file);
    }

  /* Make sure IF, THEN, and ELSE, blocks are adjacent.  Actually, we get the
     first condition for free, since we've already asserted that there's a
     fallthru edge from IF to THEN.  Likewise for the && and || blocks, since
     we checked the FALLTHRU flag, those are already adjacent to the last IF
     block.  */
  /* ??? As an enhancement, move the ELSE block.  Have to deal with
     BLOCK notes, if by no other means than backing out the merge if they
     exist.  Sticky enough I don't want to think about it now.  */
  next = then_bb;
  if (else_bb && (next = next->next_bb) != else_bb)
    return FALSE;
  if ((next = next->next_bb) != join_bb && join_bb != EXIT_BLOCK_PTR)
    {
      /* A non-adjacent join is tolerable only for IF-THEN-ELSE; record
	 that there is no mergeable join block in that case.  */
      if (else_bb)
	join_bb = NULL;
      else
	return FALSE;
    }

  /* Do the real work.  */

  ce_info->else_bb = else_bb;
  ce_info->join_bb = join_bb;

  /* If we have && and || tests, try to first handle combining the && and ||
     tests into the conditional code, and if that fails, go back and handle
     it without the && and ||, which at present handles the && case if there
     was no ELSE block.  */
  if (cond_exec_process_if_block (ce_info, TRUE))
    return TRUE;

  if (ce_info->num_multiple_test_blocks)
    {
      /* Undo the tentative changes from the failed attempt before
	 retrying without the multiple-test handling.  */
      cancel_changes (0);

      if (cond_exec_process_if_block (ce_info, FALSE))
	return TRUE;
    }

  return FALSE;
}
3405 
3406 /* Convert a branch over a trap, or a branch
3407    to a trap, into a conditional trap.  */
3408 
static int
find_cond_trap (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  basic_block other_bb, trap_bb;	/* trap_bb holds the trap insn */
  rtx trap, jump, cond, cond_earliest, seq;
  enum rtx_code code;

  /* Locate the block with the trap instruction.  */
  /* ??? While we look for no successors, we really ought to allow
     EH successors.  Need to fix merge_if_block for that to work.  */
  if ((trap = block_has_only_trap (then_bb)) != NULL)
    trap_bb = then_bb, other_bb = else_bb;
  else if ((trap = block_has_only_trap (else_bb)) != NULL)
    trap_bb = else_bb, other_bb = then_bb;
  else
    return FALSE;

  if (dump_file)
    {
      fprintf (dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
	       test_bb->index, trap_bb->index);
    }

  /* If this is not a standard conditional jump, we can't parse it.  */
  jump = BB_END (test_bb);
  cond = noce_get_condition (jump, &cond_earliest, false);
  if (! cond)
    return FALSE;

  /* If the conditional jump is more than just a conditional jump, then
     we can not do if-conversion on this block.  */
  if (! onlyjump_p (jump))
    return FALSE;

  /* We must be comparing objects whose modes imply the size.  */
  if (GET_MODE (XEXP (cond, 0)) == BLKmode)
    return FALSE;

  /* Reverse the comparison code, if necessary.  The conditional trap
     must fire when control would have reached TRAP_BB; when THEN (the
     branch-taken arm under the original condition semantics here) holds
     the trap, the sense of the test must be flipped.  */
  code = GET_CODE (cond);
  if (then_bb == trap_bb)
    {
      code = reversed_comparison_code (cond, jump);
      if (code == UNKNOWN)
	return FALSE;
    }

  /* Attempt to generate the conditional trap.  */
  seq = gen_cond_trap (code, copy_rtx (XEXP (cond, 0)),
		       copy_rtx (XEXP (cond, 1)),
		       TRAP_CODE (PATTERN (trap)));
  if (seq == NULL)
    return FALSE;

  /* Emit the new insns before cond_earliest.  */
  emit_insn_before_setloc (seq, cond_earliest, INSN_LOCATOR (trap));

  /* Delete the trap block if possible.  */
  remove_edge (trap_bb == then_bb ? then_edge : else_edge);
  df_set_bb_dirty (test_bb);
  df_set_bb_dirty (then_bb);
  df_set_bb_dirty (else_bb);

  if (EDGE_COUNT (trap_bb->preds) == 0)
    {
      delete_basic_block (trap_bb);
      num_true_changes++;
    }

  /* Wire together the blocks again.  In cfglayout mode a fallthru edge
     flag is enough; otherwise an explicit unconditional jump (plus
     barrier) must replace the deleted conditional jump.  */
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    single_succ_edge (test_bb)->flags |= EDGE_FALLTHRU;
  else
    {
      rtx lab, newjump;

      lab = JUMP_LABEL (jump);
      newjump = emit_jump_insn_after (gen_jump (lab), jump);
      LABEL_NUSES (lab) += 1;
      JUMP_LABEL (newjump) = lab;
      emit_barrier_after (newjump);
    }
  delete_insn (jump);

  if (can_merge_blocks_p (test_bb, other_bb))
    {
      merge_blocks (test_bb, other_bb);
      num_true_changes++;
    }

  num_updated_if_blocks++;
  return TRUE;
}
3504 
3505 /* Subroutine of find_cond_trap: if BB contains only a trap insn,
3506    return it.  */
3507 
3508 static rtx
3509 block_has_only_trap (basic_block bb)
3510 {
3511   rtx trap;
3512 
3513   /* We're not the exit block.  */
3514   if (bb == EXIT_BLOCK_PTR)
3515     return NULL_RTX;
3516 
3517   /* The block must have no successors.  */
3518   if (EDGE_COUNT (bb->succs) > 0)
3519     return NULL_RTX;
3520 
3521   /* The only instruction in the THEN block must be the trap.  */
3522   trap = first_active_insn (bb);
3523   if (! (trap == BB_END (bb)
3524 	 && GET_CODE (PATTERN (trap)) == TRAP_IF
3525          && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
3526     return NULL_RTX;
3527 
3528   return trap;
3529 }
3530 
3531 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
3532    transformable, but not necessarily the other.  There need be no
3533    JOIN block.
3534 
3535    Return TRUE if we were successful at converting the block.
3536 
3537    Cases we'd like to look at:
3538 
3539    (1)
3540 	if (test) goto over; // x not live
3541 	x = a;
3542 	goto label;
3543 	over:
3544 
3545    becomes
3546 
3547 	x = a;
3548 	if (! test) goto label;
3549 
3550    (2)
3551 	if (test) goto E; // x not live
3552 	x = big();
3553 	goto L;
3554 	E:
3555 	x = b;
3556 	goto M;
3557 
3558    becomes
3559 
3560 	x = b;
3561 	if (test) goto M;
3562 	x = big();
3563 	goto L;
3564 
3565    (3) // This one's really only interesting for targets that can do
3566        // multiway branching, e.g. IA-64 BBB bundles.  For other targets
3567        // it results in multiple branches on a cache line, which often
3568        // does not sit well with predictors.
3569 
3570 	if (test1) goto E; // predicted not taken
3571 	x = a;
3572 	if (test2) goto F;
3573 	...
3574 	E:
3575 	x = b;
3576 	J:
3577 
3578    becomes
3579 
3580 	x = a;
3581 	if (test1) goto E;
3582 	if (test2) goto F;
3583 
3584    Notes:
3585 
3586    (A) Don't do (2) if the branch is predicted against the block we're
3587    eliminating.  Do it anyway if we can eliminate a branch; this requires
3588    that the sole successor of the eliminated block postdominate the other
3589    side of the if.
3590 
3591    (B) With CE, on (3) we can steal from both sides of the if, creating
3592 
3593 	if (test1) x = a;
3594 	if (!test1) x = b;
3595 	if (test1) goto J;
3596 	if (test2) goto F;
3597 	...
3598 	J:
3599 
3600    Again, this is most useful if J postdominates.
3601 
3602    (C) CE substitutes for helpful life information.
3603 
3604    (D) These heuristics need a lot of work.  */
3605 
3606 /* Tests for case 1 above.  */
3607 
static int
find_if_case_1 (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  basic_block new_bb;
  int then_bb_index;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See  the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if ((BB_END (then_bb)
       && find_reg_note (BB_END (then_bb), REG_CROSSING_JUMP, NULL_RTX))
      || (BB_END (test_bb)
	  && find_reg_note (BB_END (test_bb), REG_CROSSING_JUMP, NULL_RTX))
      || (BB_END (else_bb)
	  && find_reg_note (BB_END (else_bb), REG_CROSSING_JUMP,
			    NULL_RTX)))
    return FALSE;

  /* THEN has one successor.  */
  if (!single_succ_p (then_bb))
    return FALSE;

  /* THEN does not fall through, but is not strange either.  */
  if (single_succ_edge (then_bb)->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
    return FALSE;

  /* THEN has one predecessor.  */
  if (!single_pred_p (then_bb))
    return FALSE;

  /* THEN must do something.  */
  if (forwarder_block_p (then_bb))
    return FALSE;

  num_possible_if_blocks++;
  if (dump_file)
    fprintf (dump_file,
	     "\nIF-CASE-1 found, start %d, then %d\n",
	     test_bb->index, then_bb->index);

  /* THEN is small: only profitable if its cost is below that of the
     branch it replaces.  */
  if (! cheap_bb_rtx_cost_p (then_bb,
	COSTS_N_INSNS (BRANCH_COST (optimize_bb_for_speed_p (then_edge->src),
				    predictable_edge_p (then_edge)))))
    return FALSE;

  /* Registers set are dead, or are predicable.  This also moves THEN's
     insns into TEST_BB and fixes up the jump when it succeeds.  */
  if (! dead_or_predicable (test_bb, then_bb, else_bb,
			    single_succ (then_bb), 1))
    return FALSE;

  /* Conversion went ok, including moving the insns and fixing up the
     jump.  Adjust the CFG to match.  */

  /* We can avoid creating a new basic block if then_bb is immediately
     followed by else_bb, i.e. deleting then_bb allows test_bb to fall
     thru to else_bb.  */

  if (then_bb->next_bb == else_bb
      && then_bb->prev_bb == test_bb
      && else_bb != EXIT_BLOCK_PTR)
    {
      redirect_edge_succ (FALLTHRU_EDGE (test_bb), else_bb);
      new_bb = 0;
    }
  else
    new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb),
					     else_bb);

  df_set_bb_dirty (test_bb);
  df_set_bb_dirty (else_bb);

  then_bb_index = then_bb->index;
  delete_basic_block (then_bb);

  /* Make rest of code believe that the newly created block is the THEN_BB
     block we removed.  */
  if (new_bb)
    {
      df_bb_replace (then_bb_index, new_bb);
      /* Since the fallthru edge was redirected from test_bb to new_bb,
         we need to ensure that new_bb is in the same partition as
         test bb (you can not fall through across section boundaries).  */
      BB_COPY_PARTITION (new_bb, test_bb);
    }

  num_true_changes++;
  num_updated_if_blocks++;

  return TRUE;
}
3708 
3709 /* Test for case 2 above.  */
3710 
static int
find_if_case_2 (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  edge else_succ;
  rtx note;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See  the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if ((BB_END (then_bb)
       && find_reg_note (BB_END (then_bb), REG_CROSSING_JUMP, NULL_RTX))
      || (BB_END (test_bb)
	  && find_reg_note (BB_END (test_bb), REG_CROSSING_JUMP, NULL_RTX))
      || (BB_END (else_bb)
	  && find_reg_note (BB_END (else_bb), REG_CROSSING_JUMP,
			    NULL_RTX)))
    return FALSE;

  /* ELSE has one successor.  */
  if (!single_succ_p (else_bb))
    return FALSE;
  else
    else_succ = single_succ_edge (else_bb);

  /* ELSE outgoing edge is not complex.  */
  if (else_succ->flags & EDGE_COMPLEX)
    return FALSE;

  /* ELSE has one predecessor.  */
  if (!single_pred_p (else_bb))
    return FALSE;

  /* THEN is not EXIT.  */
  if (then_bb->index < NUM_FIXED_BLOCKS)
    return FALSE;

  /* ELSE is predicted or SUCC(ELSE) postdominates THEN.  The branch
     probability note, if present, tells us whether ELSE is the
     predicted path; otherwise require the postdominance condition so
     the transformation cannot lengthen the hot path.  */
  note = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
  if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
    ;
  else if (else_succ->dest->index < NUM_FIXED_BLOCKS
	   || dominated_by_p (CDI_POST_DOMINATORS, then_bb,
			      else_succ->dest))
    ;
  else
    return FALSE;

  num_possible_if_blocks++;
  if (dump_file)
    fprintf (dump_file,
	     "\nIF-CASE-2 found, start %d, else %d\n",
	     test_bb->index, else_bb->index);

  /* ELSE is small: only profitable if its cost is below that of the
     branch it replaces.  */
  if (! cheap_bb_rtx_cost_p (else_bb,
	COSTS_N_INSNS (BRANCH_COST (optimize_bb_for_speed_p (else_edge->src),
				    predictable_edge_p (else_edge)))))
    return FALSE;

  /* Registers set are dead, or are predicable.  This also moves ELSE's
     insns into TEST_BB and fixes up the jump when it succeeds.  */
  if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ->dest, 0))
    return FALSE;

  /* Conversion went ok, including moving the insns and fixing up the
     jump.  Adjust the CFG to match.  */

  df_set_bb_dirty (test_bb);
  df_set_bb_dirty (then_bb);
  delete_basic_block (else_bb);

  num_true_changes++;
  num_updated_if_blocks++;

  /* ??? We may now fallthru from one of THEN's successors into a join
     block.  Rerun cleanup_cfg?  Examine things manually?  Wait?  */

  return TRUE;
}
3798 
3799 /* A subroutine of dead_or_predicable called through for_each_rtx.
3800    Return 1 if a memory is found.  */
3801 
3802 static int
3803 find_memory (rtx *px, void *data ATTRIBUTE_UNUSED)
3804 {
3805   return MEM_P (*px);
3806 }
3807 
3808 /* Used by the code above to perform the actual rtl transformations.
3809    Return TRUE if successful.
3810 
3811    TEST_BB is the block containing the conditional branch.  MERGE_BB
3812    is the block containing the code to manipulate.  NEW_DEST is the
3813    label TEST_BB should be branching to after the conversion.
3814    REVERSEP is true if the sense of the branch should be reversed.  */
3815 
3816 static int
3817 dead_or_predicable (basic_block test_bb, basic_block merge_bb,
3818 		    basic_block other_bb, basic_block new_dest, int reversep)
3819 {
3820   rtx head, end, jump, earliest = NULL_RTX, old_dest, new_label = NULL_RTX;
3821   bitmap merge_set = NULL, merge_set_noclobber = NULL;
3822   /* Number of pending changes.  */
3823   int n_validated_changes = 0;
3824 
3825   jump = BB_END (test_bb);
3826 
3827   /* Find the extent of the real code in the merge block.  */
3828   head = BB_HEAD (merge_bb);
3829   end = BB_END (merge_bb);
3830 
3831   while (DEBUG_INSN_P (end) && end != head)
3832     end = PREV_INSN (end);
3833 
3834   /* If merge_bb ends with a tablejump, predicating/moving insn's
3835      into test_bb and then deleting merge_bb will result in the jumptable
3836      that follows merge_bb being removed along with merge_bb and then we
3837      get an unresolved reference to the jumptable.  */
3838   if (tablejump_p (end, NULL, NULL))
3839     return FALSE;
3840 
3841   if (LABEL_P (head))
3842     head = NEXT_INSN (head);
3843   while (DEBUG_INSN_P (head) && head != end)
3844     head = NEXT_INSN (head);
3845   if (NOTE_P (head))
3846     {
3847       if (head == end)
3848 	{
3849 	  head = end = NULL_RTX;
3850 	  goto no_body;
3851 	}
3852       head = NEXT_INSN (head);
3853       while (DEBUG_INSN_P (head) && head != end)
3854 	head = NEXT_INSN (head);
3855     }
3856 
3857   if (JUMP_P (end))
3858     {
3859       if (head == end)
3860 	{
3861 	  head = end = NULL_RTX;
3862 	  goto no_body;
3863 	}
3864       end = PREV_INSN (end);
3865       while (DEBUG_INSN_P (end) && end != head)
3866 	end = PREV_INSN (end);
3867     }
3868 
3869   /* Disable handling dead code by conditional execution if the machine needs
3870      to do anything funny with the tests, etc.  */
3871 #ifndef IFCVT_MODIFY_TESTS
3872   if (targetm.have_conditional_execution ())
3873     {
3874       /* In the conditional execution case, we have things easy.  We know
3875 	 the condition is reversible.  We don't have to check life info
3876 	 because we're going to conditionally execute the code anyway.
3877 	 All that's left is making sure the insns involved can actually
3878 	 be predicated.  */
3879 
3880       rtx cond, prob_val;
3881 
3882       cond = cond_exec_get_condition (jump);
3883       if (! cond)
3884 	return FALSE;
3885 
3886       prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
3887       if (prob_val)
3888 	prob_val = XEXP (prob_val, 0);
3889 
3890       if (reversep)
3891 	{
3892 	  enum rtx_code rev = reversed_comparison_code (cond, jump);
3893 	  if (rev == UNKNOWN)
3894 	    return FALSE;
3895 	  cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
3896 			         XEXP (cond, 1));
3897 	  if (prob_val)
3898 	    prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
3899 	}
3900 
3901       if (cond_exec_process_insns (NULL, head, end, cond, prob_val, 0)
3902 	  && verify_changes (0))
3903 	n_validated_changes = num_validated_changes ();
3904       else
3905 	cancel_changes (0);
3906 
3907       earliest = jump;
3908     }
3909 #endif
3910 
3911   /* Try the NCE path if the CE path did not result in any changes.  */
3912   if (n_validated_changes == 0)
3913     {
3914       /* In the non-conditional execution case, we have to verify that there
3915 	 are no trapping operations, no calls, no references to memory, and
3916 	 that any registers modified are dead at the branch site.  */
3917 
3918       rtx insn, cond, prev;
3919       bitmap test_live, test_set;
3920       bool intersect = false;
3921 
3922       /* Check for no calls or trapping operations.  */
3923       for (insn = head; ; insn = NEXT_INSN (insn))
3924 	{
3925 	  if (CALL_P (insn))
3926 	    return FALSE;
3927 	  if (NONDEBUG_INSN_P (insn))
3928 	    {
3929 	      if (may_trap_p (PATTERN (insn)))
3930 		return FALSE;
3931 
3932 	      /* ??? Even non-trapping memories such as stack frame
3933 		 references must be avoided.  For stores, we collect
3934 		 no lifetime info; for reads, we'd have to assert
3935 		 true_dependence false against every store in the
3936 		 TEST range.  */
3937 	      if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
3938 		return FALSE;
3939 	    }
3940 	  if (insn == end)
3941 	    break;
3942 	}
3943 
3944       if (! any_condjump_p (jump))
3945 	return FALSE;
3946 
3947       /* Find the extent of the conditional.  */
3948       cond = noce_get_condition (jump, &earliest, false);
3949       if (! cond)
3950 	return FALSE;
3951 
3952       /* Collect:
3953 	   MERGE_SET = set of registers set in MERGE_BB
3954 	   MERGE_SET_NOCLOBBER = like MERGE_SET, but only includes registers
3955 	     that are really set, not just clobbered.
3956 	   TEST_LIVE = set of registers live at EARLIEST
3957 	   TEST_SET = set of registers set between EARLIEST and the
3958 	     end of the block.  */
3959 
3960       merge_set = BITMAP_ALLOC (&reg_obstack);
3961       merge_set_noclobber = BITMAP_ALLOC (&reg_obstack);
3962 
3963       /* If we allocated new pseudos (e.g. in the conditional move
3964 	 expander called from noce_emit_cmove), we must resize the
3965 	 array first.  */
3966       if (max_regno < max_reg_num ())
3967 	max_regno = max_reg_num ();
3968 
3969       FOR_BB_INSNS (merge_bb, insn)
3970 	{
3971 	  if (NONDEBUG_INSN_P (insn))
3972 	    {
3973 	      df_simulate_find_defs (insn, merge_set);
3974 	      df_simulate_find_noclobber_defs (insn, merge_set_noclobber);
3975 	    }
3976 	}
3977 
3978       /* For small register class machines, don't lengthen lifetimes of
3979 	 hard registers before reload.  */
3980       if (SMALL_REGISTER_CLASSES && ! reload_completed)
3981 	{
3982 	  unsigned i;
3983 	  bitmap_iterator bi;
3984 
3985           EXECUTE_IF_SET_IN_BITMAP (merge_set_noclobber, 0, i, bi)
3986 	    {
3987 	      if (i < FIRST_PSEUDO_REGISTER
3988 		  && ! fixed_regs[i]
3989 		  && ! global_regs[i])
3990 		goto fail;
3991 	    }
3992 	}
3993 
3994       /* For TEST, we're interested in a range of insns, not a whole block.
3995 	 Moreover, we're interested in the insns live from OTHER_BB.  */
3996       test_live = BITMAP_ALLOC (&reg_obstack);
3997       test_set = BITMAP_ALLOC (&reg_obstack);
3998 
3999       /* The loop below takes the set of live registers
4000          after JUMP, and calculates the live set before EARLIEST. */
4001       bitmap_copy (test_live, df_get_live_in (other_bb));
4002       df_simulate_initialize_backwards (test_bb, test_live);
4003       for (insn = jump; ; insn = prev)
4004 	{
4005 	  if (INSN_P (insn))
4006 	    {
4007 	      df_simulate_find_defs (insn, test_set);
4008 	      df_simulate_one_insn_backwards (test_bb, insn, test_live);
4009 	    }
4010 	  prev = PREV_INSN (insn);
4011 	  if (insn == earliest)
4012 	    break;
4013 	}
4014 
4015       /* We can perform the transformation if
4016 	   MERGE_SET_NOCLOBBER & TEST_SET
4017 	 and
4018 	   MERGE_SET & TEST_LIVE
4019 	 and
4020 	   TEST_SET & DF_LIVE_IN (merge_bb)
4021 	 are empty.  */
4022 
4023       if (bitmap_intersect_p (merge_set_noclobber, test_set)
4024 	  || bitmap_intersect_p (merge_set, test_live)
4025 	  || bitmap_intersect_p (test_set, df_get_live_in (merge_bb)))
4026 	intersect = true;
4027 
4028       BITMAP_FREE (test_live);
4029       BITMAP_FREE (test_set);
4030 
4031       if (intersect)
4032 	goto fail;
4033     }
4034 
4035  no_body:
4036   /* We don't want to use normal invert_jump or redirect_jump because
4037      we don't want to delete_insn called.  Also, we want to do our own
4038      change group management.  */
4039 
4040   old_dest = JUMP_LABEL (jump);
4041   if (other_bb != new_dest)
4042     {
4043       new_label = block_label (new_dest);
4044       if (reversep
4045 	  ? ! invert_jump_1 (jump, new_label)
4046 	  : ! redirect_jump_1 (jump, new_label))
4047 	goto cancel;
4048     }
4049 
4050   if (verify_changes (n_validated_changes))
4051     confirm_change_group ();
4052   else
4053     goto cancel;
4054 
4055   if (other_bb != new_dest)
4056     {
4057       redirect_jump_2 (jump, old_dest, new_label, 0, reversep);
4058 
4059       redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
4060       if (reversep)
4061 	{
4062 	  gcov_type count, probability;
4063 	  count = BRANCH_EDGE (test_bb)->count;
4064 	  BRANCH_EDGE (test_bb)->count = FALLTHRU_EDGE (test_bb)->count;
4065 	  FALLTHRU_EDGE (test_bb)->count = count;
4066 	  probability = BRANCH_EDGE (test_bb)->probability;
4067 	  BRANCH_EDGE (test_bb)->probability
4068 	    = FALLTHRU_EDGE (test_bb)->probability;
4069 	  FALLTHRU_EDGE (test_bb)->probability = probability;
4070 	  update_br_prob_note (test_bb);
4071 	}
4072     }
4073 
4074   /* Move the insns out of MERGE_BB to before the branch.  */
4075   if (head != NULL)
4076     {
4077       rtx insn;
4078 
4079       if (end == BB_END (merge_bb))
4080 	BB_END (merge_bb) = PREV_INSN (head);
4081 
4082       /* PR 21767: when moving insns above a conditional branch, the REG_EQUAL
4083 	 notes being moved might become invalid.  */
4084       insn = head;
4085       do
4086 	{
4087 	  rtx note, set;
4088 
4089 	  if (! INSN_P (insn))
4090 	    continue;
4091 	  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
4092 	  if (! note)
4093 	    continue;
4094 	  set = single_set (insn);
4095 	  if (!set || !function_invariant_p (SET_SRC (set))
4096 	      || !function_invariant_p (XEXP (note, 0)))
4097 	    remove_note (insn, note);
4098 	} while (insn != end && (insn = NEXT_INSN (insn)));
4099 
4100       /* PR46315: when moving insns above a conditional branch, the REG_EQUAL
4101 	 notes referring to the registers being set might become invalid.  */
4102       if (merge_set)
4103 	{
4104 	  unsigned i;
4105 	  bitmap_iterator bi;
4106 
4107 	  EXECUTE_IF_SET_IN_BITMAP (merge_set_noclobber, 0, i, bi)
4108 	    remove_reg_equal_equiv_notes_for_regno (i);
4109 
4110 	  BITMAP_FREE (merge_set);
4111 	  BITMAP_FREE (merge_set_noclobber);
4112 	}
4113 
4114       reorder_insns (head, end, PREV_INSN (earliest));
4115     }
4116 
4117   /* Remove the jump and edge if we can.  */
4118   if (other_bb == new_dest)
4119     {
4120       delete_insn (jump);
4121       remove_edge (BRANCH_EDGE (test_bb));
4122       /* ??? Can't merge blocks here, as then_bb is still in use.
4123 	 At minimum, the merge will get done just before bb-reorder.  */
4124     }
4125 
4126   return TRUE;
4127 
4128  cancel:
4129   cancel_changes (0);
4130  fail:
4131   if (merge_set)
4132     {
4133       BITMAP_FREE (merge_set);
4134       BITMAP_FREE (merge_set_noclobber);
4135     }
4136   return FALSE;
4137 }
4138 
4139 /* Main entry point for all if-conversion.  */
4140 
4141 static void
4142 if_convert (void)
4143 {
4144   basic_block bb;
4145   int pass;
4146 
4147   if (optimize == 1)
4148     {
4149       df_live_add_problem ();
4150       df_live_set_all_dirty ();
4151     }
4152 
4153   num_possible_if_blocks = 0;
4154   num_updated_if_blocks = 0;
4155   num_true_changes = 0;
4156 
4157   loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
4158   mark_loop_exit_edges ();
4159   loop_optimizer_finalize ();
4160   free_dominance_info (CDI_DOMINATORS);
4161 
4162   /* Compute postdominators.  */
4163   calculate_dominance_info (CDI_POST_DOMINATORS);
4164 
4165   df_set_flags (DF_LR_RUN_DCE);
4166 
4167   /* Go through each of the basic blocks looking for things to convert.  If we
4168      have conditional execution, we make multiple passes to allow us to handle
4169      IF-THEN{-ELSE} blocks within other IF-THEN{-ELSE} blocks.  */
4170   pass = 0;
4171   do
4172     {
4173       df_analyze ();
4174       /* Only need to do dce on the first pass.  */
4175       df_clear_flags (DF_LR_RUN_DCE);
4176       cond_exec_changed_p = FALSE;
4177       pass++;
4178 
4179 #ifdef IFCVT_MULTIPLE_DUMPS
4180       if (dump_file && pass > 1)
4181 	fprintf (dump_file, "\n\n========== Pass %d ==========\n", pass);
4182 #endif
4183 
4184       FOR_EACH_BB (bb)
4185 	{
4186           basic_block new_bb;
4187           while (!df_get_bb_dirty (bb)
4188                  && (new_bb = find_if_header (bb, pass)) != NULL)
4189             bb = new_bb;
4190 	}
4191 
4192 #ifdef IFCVT_MULTIPLE_DUMPS
4193       if (dump_file && cond_exec_changed_p)
4194 	{
4195 	  if (dump_flags & TDF_SLIM)
4196 	    print_rtl_slim_with_bb (dump_file, get_insns (), dump_flags);
4197 	  else
4198 	    print_rtl_with_bb (dump_file, get_insns ());
4199 	}
4200 #endif
4201     }
4202   while (cond_exec_changed_p);
4203 
4204 #ifdef IFCVT_MULTIPLE_DUMPS
4205   if (dump_file)
4206     fprintf (dump_file, "\n\n========== no more changes\n");
4207 #endif
4208 
4209   free_dominance_info (CDI_POST_DOMINATORS);
4210 
4211   if (dump_file)
4212     fflush (dump_file);
4213 
4214   clear_aux_for_blocks ();
4215 
4216   /* If we allocated new pseudos, we must resize the array for sched1.  */
4217   if (max_regno < max_reg_num ())
4218     max_regno = max_reg_num ();
4219 
4220   /* Write the final stats.  */
4221   if (dump_file && num_possible_if_blocks > 0)
4222     {
4223       fprintf (dump_file,
4224 	       "\n%d possible IF blocks searched.\n",
4225 	       num_possible_if_blocks);
4226       fprintf (dump_file,
4227 	       "%d IF blocks converted.\n",
4228 	       num_updated_if_blocks);
4229       fprintf (dump_file,
4230 	       "%d true changes made.\n\n\n",
4231 	       num_true_changes);
4232     }
4233 
4234   if (optimize == 1)
4235     df_remove_problem (df_live);
4236 
4237 #ifdef ENABLE_CHECKING
4238   verify_flow_info ();
4239 #endif
4240 }
4241 
4242 static bool
4243 gate_handle_if_conversion (void)
4244 {
4245   return (optimize > 0)
4246     && dbg_cnt (if_conversion);
4247 }
4248 
4249 /* If-conversion and CFG cleanup.  */
4250 static unsigned int
4251 rest_of_handle_if_conversion (void)
4252 {
4253   if (flag_if_conversion)
4254     {
4255       if (dump_file)
4256         dump_flow_info (dump_file, dump_flags);
4257       cleanup_cfg (CLEANUP_EXPENSIVE);
4258       if_convert ();
4259     }
4260 
4261   cleanup_cfg (0);
4262   return 0;
4263 }
4264 
/* Pass descriptor for the first if-conversion pass ("ce1"), driven by
   gate_handle_if_conversion / rest_of_handle_if_conversion above.
   Fields are in declaration order of struct opt_pass; the trailing
   todo flags request a df finish and RTL-sharing verification plus a
   dump after the pass runs.  */
struct rtl_opt_pass pass_rtl_ifcvt =
{
 {
  RTL_PASS,
  "ce1",                                /* name */
  gate_handle_if_conversion,            /* gate */
  rest_of_handle_if_conversion,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_IFCVT,                             /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_dump_func                        /* todo_flags_finish */
 }
};
4284 
4285 static bool
4286 gate_handle_if_after_combine (void)
4287 {
4288   return optimize > 0 && flag_if_conversion
4289     && dbg_cnt (if_after_combine);
4290 }
4291 
4292 
4293 /* Rerun if-conversion, as combine may have simplified things enough
4294    to now meet sequence length restrictions.  */
/* Rerun if-conversion, as combine may have simplified things enough
   to now meet sequence length restrictions.  */
static unsigned int
rest_of_handle_if_after_combine (void)
{
  if_convert ();

  /* No additional TODO flags are requested via the return value.  */
  return 0;
}
4301 
/* Pass descriptor for the second if-conversion pass ("ce2"), run after
   combine; see gate_handle_if_after_combine /
   rest_of_handle_if_after_combine above.  Identical in shape to
   pass_rtl_ifcvt except for the gate/execute hooks and the extra
   TODO_ggc_collect in the finishing todo flags.  */
struct rtl_opt_pass pass_if_after_combine =
{
 {
  RTL_PASS,
  "ce2",                                /* name */
  gate_handle_if_after_combine,         /* gate */
  rest_of_handle_if_after_combine,      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_IFCVT,                             /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_dump_func |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};
4322 
4323 
4324 static bool
4325 gate_handle_if_after_reload (void)
4326 {
4327   return optimize > 0 && flag_if_conversion2
4328     && dbg_cnt (if_after_reload);
4329 }
4330 
/* Execute hook of the post-reload if-conversion pass ("ce3"): simply
   runs the shared if_convert driver one more time.  */
static unsigned int
rest_of_handle_if_after_reload (void)
{
  if_convert ();

  /* No additional TODO flags are requested via the return value.  */
  return 0;
}
4337 
4338 
/* Pass descriptor for the third if-conversion pass ("ce3"), run after
   reload; see gate_handle_if_after_reload /
   rest_of_handle_if_after_reload above.  Uses the separate TV_IFCVT2
   timevar so post-reload conversion is accounted independently.  */
struct rtl_opt_pass pass_if_after_reload =
{
 {
  RTL_PASS,
  "ce3",                                /* name */
  gate_handle_if_after_reload,          /* gate */
  rest_of_handle_if_after_reload,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_IFCVT2,                            /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_dump_func |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};
4359