xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/tree-ssa-ifcombine.c (revision bdc22b2e01993381dcefeff2bc9b56ca75a4235c)
1 /* Combining of if-expressions on trees.
2    Copyright (C) 2007-2015 Free Software Foundation, Inc.
3    Contributed by Richard Guenther <rguenther@suse.de>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 /* rtl is needed only because the arm back-end requires it for
26    BRANCH_COST.  */
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hash-set.h"
30 #include "machmode.h"
31 #include "vec.h"
32 #include "double-int.h"
33 #include "input.h"
34 #include "alias.h"
35 #include "symtab.h"
36 #include "wide-int.h"
37 #include "inchash.h"
38 #include "tree.h"
39 #include "fold-const.h"
40 #include "stor-layout.h"
41 #include "predict.h"
42 #include "hard-reg-set.h"
43 #include "input.h"
44 #include "function.h"
45 #include "dominance.h"
46 #include "cfg.h"
47 #include "cfganal.h"
48 #include "basic-block.h"
49 #include "tree-pretty-print.h"
50 #include "tree-ssa-alias.h"
51 #include "internal-fn.h"
52 #include "gimple-fold.h"
53 #include "gimple-expr.h"
54 #include "is-a.h"
55 #include "gimple.h"
56 #include "gimple-iterator.h"
57 #include "gimplify-me.h"
58 #include "gimple-ssa.h"
59 #include "tree-cfg.h"
60 #include "tree-phinodes.h"
61 #include "ssa-iterators.h"
62 #include "tree-pass.h"
63 #include "stringpool.h"
64 #include "tree-ssanames.h"
65 
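/* If the target does not define LOGICAL_OP_NON_SHORT_CIRCUIT, derive a
   default from BRANCH_COST: combining two conditions into a single
   branch is assumed to be worthwhile only when branches are
   sufficiently expensive.  */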
66 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
67 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
68   (BRANCH_COST (optimize_function_for_speed_p (cfun), \
69                 false) >= 2)
70 #endif
71 
72 /* This pass combines COND_EXPRs to simplify control flow.  It
73    currently recognizes bit tests and comparisons in chains that
74    represent a logical AND or logical OR of two COND_EXPRs.
75 
76    It does so by walking basic blocks in an approximate reverse
77    post-dominator order and trying to match CFG patterns that
78    represent a logical AND or logical OR of two COND_EXPRs.
79    Transformations are done if the COND_EXPR conditions match
80    either
81 
82      1. two single bit tests X & (1 << Yn) (for logical AND)
83 
84      2. two bit tests X & Yn (for logical OR)
85 
86      3. two comparisons X OPn Y (for logical OR)
87 
88    To simplify this pass, removing basic blocks and dead code is
89    left to CFG cleanup and DCE.  Pattern 1 is sketched below.  */
90 
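/* A minimal illustrative sketch of pattern 1 (placeholder names x, a, b
   and foo, not taken from any real testcase):

     if (x & (1 << a))
       if (x & (1 << b))
         foo ();

   is rewritten so that the inner condition tests both bits at once,

     t = (1 << a) | (1 << b);
     if ((x & t) == t)
       foo ();

   and the now redundant outer condition is left to CFG cleanup.  */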
91 
92 /* Recognize an if-then-else CFG pattern starting with the
93    COND_BB basic-block containing the COND_EXPR.  The recognized
94    then and else blocks are stored to *THEN_BB and *ELSE_BB.  If
95    *THEN_BB and/or *ELSE_BB are already set, they are required to
96    match the then and else basic-blocks to make the pattern match.
97    Returns true if the pattern matched, false otherwise.  */
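
/* A minimal sketch of the shape being matched (names are placeholders):

     <cond_bb>:  if (cond) goto <then_bb>; else goto <else_bb>;

   i.e. COND_BB must end in a two-way conditional whose true edge leads
   to *THEN_BB and whose false edge leads to *ELSE_BB.  */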
98 
99 static bool
100 recognize_if_then_else (basic_block cond_bb,
101 			basic_block *then_bb, basic_block *else_bb)
102 {
103   edge t, e;
104 
105   if (EDGE_COUNT (cond_bb->succs) != 2)
106     return false;
107 
108   /* Find the then/else edges.  */
109   t = EDGE_SUCC (cond_bb, 0);
110   e = EDGE_SUCC (cond_bb, 1);
111   if (!(t->flags & EDGE_TRUE_VALUE))
112     {
113       edge tmp = t;
114       t = e;
115       e = tmp;
116     }
117   if (!(t->flags & EDGE_TRUE_VALUE)
118       || !(e->flags & EDGE_FALSE_VALUE))
119     return false;
120 
121   /* Check if the edge destinations point to the required block.  */
122   if (*then_bb
123       && t->dest != *then_bb)
124     return false;
125   if (*else_bb
126       && e->dest != *else_bb)
127     return false;
128 
129   if (!*then_bb)
130     *then_bb = t->dest;
131   if (!*else_bb)
132     *else_bb = e->dest;
133 
134   return true;
135 }
136 
137 /* Return true if the basic block BB is free of side effects, i.e. no
138    statement in it may trap, access memory or have side effects.  */
139 
140 static bool
141 bb_no_side_effects_p (basic_block bb)
142 {
143   gimple_stmt_iterator gsi;
144 
145   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
146     {
147       gimple stmt = gsi_stmt (gsi);
148 
149       if (is_gimple_debug (stmt))
150 	continue;
151 
152       if (gimple_has_side_effects (stmt)
153 	  || gimple_could_trap_p (stmt)
154 	  || gimple_vuse (stmt))
155 	return false;
156     }
157 
158   return true;
159 }
160 
161 /* Return true if BB is an empty forwarder block to TO_BB.  */
162 
163 static bool
164 forwarder_block_to (basic_block bb, basic_block to_bb)
165 {
166   return empty_block_p (bb)
167 	 && single_succ_p (bb)
168 	 && single_succ (bb) == to_bb;
169 }
170 
171 /* Return true if all PHI node arguments in DEST for the edges from
172    BB1 and BB2 to DEST are the same, which makes the CFG merge point
173    free from side-effects.  Return false otherwise.  */
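
/* For instance (an assumed sketch, not from a real dump), if DEST
   starts with

     x_1 = PHI <a_2 (bb1), a_2 (bb2), b_3 (bb4)>

   the arguments for the edges from BB1 and BB2 are both a_2, so the
   check succeeds regardless of the other incoming edges.  */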
174 
175 static bool
176 same_phi_args_p (basic_block bb1, basic_block bb2, basic_block dest)
177 {
178   edge e1 = find_edge (bb1, dest);
179   edge e2 = find_edge (bb2, dest);
180   gphi_iterator gsi;
181   gphi *phi;
182 
183   for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
184     {
185       phi = gsi.phi ();
186       if (!operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi, e1),
187 			    PHI_ARG_DEF_FROM_EDGE (phi, e2), 0))
188         return false;
189     }
190 
191   return true;
192 }
193 
194 /* Return the best representative SSA name for CANDIDATE which is used
195    in a bit test.  */
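
/* For instance (an assumed GIMPLE-like sketch, not taken from the
   sources), if the single-use name _2 is defined as

     _2 = (unsigned char) c_1;
     _3 = _2 & 1;

   and c_1 has a type at least as wide as unsigned char (say int),
   then get_name_for_bit_test (_2) returns c_1 rather than _2.  */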
196 
197 static tree
198 get_name_for_bit_test (tree candidate)
199 {
200   /* Skip single-use names in favor of using the name from a
201      non-widening conversion definition.  */
202   if (TREE_CODE (candidate) == SSA_NAME
203       && has_single_use (candidate))
204     {
205       gimple def_stmt = SSA_NAME_DEF_STMT (candidate);
206       if (is_gimple_assign (def_stmt)
207 	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
208 	{
209 	  if (TYPE_PRECISION (TREE_TYPE (candidate))
210 	      <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (def_stmt))))
211 	    return gimple_assign_rhs1 (def_stmt);
212 	}
213     }
214 
215   return candidate;
216 }
217 
218 /* Recognize a single bit test pattern in the GIMPLE_COND COND and its
219    defining statements.  Store the name being tested in *NAME and the
220    bit in *BIT.  COND computes *NAME & (1 << *BIT), or its inverse if
221    INV is true.  Returns true if the pattern matched, false otherwise.  */
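
/* Typical source-level shapes matched here (illustrative only, with
   placeholder names x and b) are

     if ((x >> b) & 1)
       ...

   and

     if (x & (1 << b))
       ...

   both of which decompose into *NAME == x and *BIT == b.  */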
222 
223 static bool
224 recognize_single_bit_test (gcond *cond, tree *name, tree *bit, bool inv)
225 {
226   gimple stmt;
227 
228   /* Get at the definition of the result of the bit test.  */
229   if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
230       || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
231       || !integer_zerop (gimple_cond_rhs (cond)))
232     return false;
233   stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
234   if (!is_gimple_assign (stmt))
235     return false;
236 
237   /* Look at which bit is tested.  One form to recognize is
238      D.1985_5 = state_3(D) >> control1_4(D);
239      D.1986_6 = (int) D.1985_5;
240      D.1987_7 = D.1986_6 & 1;
241      if (D.1987_7 != 0)  */
242   if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
243       && integer_onep (gimple_assign_rhs2 (stmt))
244       && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
245     {
246       tree orig_name = gimple_assign_rhs1 (stmt);
247 
248       /* Look through copies and conversions to eventually
249 	 find the stmt that computes the shift.  */
250       stmt = SSA_NAME_DEF_STMT (orig_name);
251 
252       while (is_gimple_assign (stmt)
253 	     && ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
254 		  && (TYPE_PRECISION (TREE_TYPE (gimple_assign_lhs (stmt)))
255 		      <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (stmt))))
256 		  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
257 		 || gimple_assign_ssa_name_copy_p (stmt)))
258 	stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
259 
260       /* If we found such, decompose it.  */
261       if (is_gimple_assign (stmt)
262 	  && gimple_assign_rhs_code (stmt) == RSHIFT_EXPR)
263 	{
264 	  /* op0 & (1 << op1) */
265 	  *bit = gimple_assign_rhs2 (stmt);
266 	  *name = gimple_assign_rhs1 (stmt);
267 	}
268       else
269 	{
270 	  /* t & 1 */
271 	  *bit = integer_zero_node;
272 	  *name = get_name_for_bit_test (orig_name);
273 	}
274 
275       return true;
276     }
277 
278   /* Another form is
279      D.1987_7 = op0 & (1 << CST)
280      if (D.1987_7 != 0)  */
281   if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
282       && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
283       && integer_pow2p (gimple_assign_rhs2 (stmt)))
284     {
285       *name = gimple_assign_rhs1 (stmt);
286       *bit = build_int_cst (integer_type_node,
287 			    tree_log2 (gimple_assign_rhs2 (stmt)));
288       return true;
289     }
290 
291   /* Another form is
292      D.1986_6 = 1 << control1_4(D)
293      D.1987_7 = op0 & D.1986_6
294      if (D.1987_7 != 0)  */
295   if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
296       && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
297       && TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME)
298     {
299       gimple tmp;
300 
301       /* Both arguments of the BIT_AND_EXPR can be the single-bit
302 	 specifying expression.  */
303       tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
304       if (is_gimple_assign (tmp)
305 	  && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
306 	  && integer_onep (gimple_assign_rhs1 (tmp)))
307 	{
308 	  *name = gimple_assign_rhs2 (stmt);
309 	  *bit = gimple_assign_rhs2 (tmp);
310 	  return true;
311 	}
312 
313       tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
314       if (is_gimple_assign (tmp)
315 	  && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
316 	  && integer_onep (gimple_assign_rhs1 (tmp)))
317 	{
318 	  *name = gimple_assign_rhs1 (stmt);
319 	  *bit = gimple_assign_rhs2 (tmp);
320 	  return true;
321 	}
322     }
323 
324   return false;
325 }
326 
327 /* Recognize a bit test pattern in the GIMPLE_COND COND and its defining
328    statements.  Store the name being tested in *NAME and the bits
329    in *BITS.  COND computes *NAME & *BITS, or its inverse if INV is true.
330    Returns true if the pattern matched, false otherwise.  */
331 
332 static bool
333 recognize_bits_test (gcond *cond, tree *name, tree *bits, bool inv)
334 {
335   gimple stmt;
336 
337   /* Get at the definition of the result of the bit test.  */
338   if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
339       || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
340       || !integer_zerop (gimple_cond_rhs (cond)))
341     return false;
342   stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
343   if (!is_gimple_assign (stmt)
344       || gimple_assign_rhs_code (stmt) != BIT_AND_EXPR)
345     return false;
346 
347   *name = get_name_for_bit_test (gimple_assign_rhs1 (stmt));
348   *bits = gimple_assign_rhs2 (stmt);
349 
350   return true;
351 }
352 
353 /* If-convert an AND pattern with a common else block.  The inner
354    if is specified by its INNER_COND_BB, the outer by OUTER_COND_BB.
355    INNER_INV, OUTER_INV and RESULT_INV indicate whether the respective
356    conditions are inverted.
357    Returns true if the edges to the common else basic-block were merged.  */
358 
359 static bool
360 ifcombine_ifandif (basic_block inner_cond_bb, bool inner_inv,
361 		   basic_block outer_cond_bb, bool outer_inv, bool result_inv)
362 {
363   gimple_stmt_iterator gsi;
364   gimple inner_stmt, outer_stmt;
365   gcond *inner_cond, *outer_cond;
366   tree name1, name2, bit1, bit2, bits1, bits2;
367 
368   inner_stmt = last_stmt (inner_cond_bb);
369   if (!inner_stmt
370       || gimple_code (inner_stmt) != GIMPLE_COND)
371     return false;
372   inner_cond = as_a <gcond *> (inner_stmt);
373 
374   outer_stmt = last_stmt (outer_cond_bb);
375   if (!outer_stmt
376       || gimple_code (outer_stmt) != GIMPLE_COND)
377     return false;
378   outer_cond = as_a <gcond *> (outer_stmt);
379 
380   /* See if we test a single bit of the same name in both tests.  In
381      that case remove the outer test, merging both else edges,
382      and change the inner one to test for
383      name & (bit1 | bit2) == (bit1 | bit2).  */
384   if (recognize_single_bit_test (inner_cond, &name1, &bit1, inner_inv)
385       && recognize_single_bit_test (outer_cond, &name2, &bit2, outer_inv)
386       && name1 == name2)
387     {
388       tree t, t2;
389 
390       /* Do it.  */
391       gsi = gsi_for_stmt (inner_cond);
392       t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
393 		       build_int_cst (TREE_TYPE (name1), 1), bit1);
394       t2 = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
395 		        build_int_cst (TREE_TYPE (name1), 1), bit2);
396       t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), t, t2);
397       t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
398 				    true, GSI_SAME_STMT);
399       t2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
400       t2 = force_gimple_operand_gsi (&gsi, t2, true, NULL_TREE,
401 				     true, GSI_SAME_STMT);
402       t = fold_build2 (result_inv ? NE_EXPR : EQ_EXPR,
403 		       boolean_type_node, t2, t);
404       t = canonicalize_cond_expr_cond (t);
405       if (!t)
406 	return false;
407       gimple_cond_set_condition_from_tree (inner_cond, t);
408       update_stmt (inner_cond);
409 
410       /* Leave CFG optimization to cfg_cleanup.  */
411       gimple_cond_set_condition_from_tree (outer_cond,
412 	outer_inv ? boolean_false_node : boolean_true_node);
413       update_stmt (outer_cond);
414 
415       if (dump_file)
416 	{
417 	  fprintf (dump_file, "optimizing double bit test to ");
418 	  print_generic_expr (dump_file, name1, 0);
419 	  fprintf (dump_file, " & T == T\nwith temporary T = (1 << ");
420 	  print_generic_expr (dump_file, bit1, 0);
421 	  fprintf (dump_file, ") | (1 << ");
422 	  print_generic_expr (dump_file, bit2, 0);
423 	  fprintf (dump_file, ")\n");
424 	}
425 
426       return true;
427     }
428 
429   /* See if we have two bit tests of the same name in both tests.
430      In that case remove the outer test and change the inner one to
431      test for name & (bits1 | bits2) != 0.  */
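  /* For illustration (assumed source level, placeholder names x, M1
     and M2): an || chain such as

       if (x & M1 || x & M2)
         ...

     ends up being tested as x & (M1 | M2) != 0 in a single condition.  */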
432   else if (recognize_bits_test (inner_cond, &name1, &bits1, !inner_inv)
433       && recognize_bits_test (outer_cond, &name2, &bits2, !outer_inv))
434     {
435       gimple_stmt_iterator gsi;
436       tree t;
437 
438       /* Find the common name which is bit-tested.  */
439       if (name1 == name2)
440 	;
441       else if (bits1 == bits2)
442 	{
443 	  t = name2;
444 	  name2 = bits2;
445 	  bits2 = t;
446 	  t = name1;
447 	  name1 = bits1;
448 	  bits1 = t;
449 	}
450       else if (name1 == bits2)
451 	{
452 	  t = name2;
453 	  name2 = bits2;
454 	  bits2 = t;
455 	}
456       else if (bits1 == name2)
457 	{
458 	  t = name1;
459 	  name1 = bits1;
460 	  bits1 = t;
461 	}
462       else
463 	return false;
464 
465       /* As we strip non-widening conversions in finding a common
466          name that is tested, make sure to end up with an integral
467 	 type for building the bit operations.  */
468       if (TYPE_PRECISION (TREE_TYPE (bits1))
469 	  >= TYPE_PRECISION (TREE_TYPE (bits2)))
470 	{
471 	  bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
472 	  name1 = fold_convert (TREE_TYPE (bits1), name1);
473 	  bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
474 	  bits2 = fold_convert (TREE_TYPE (bits1), bits2);
475 	}
476       else
477 	{
478 	  bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
479 	  name1 = fold_convert (TREE_TYPE (bits2), name1);
480 	  bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
481 	  bits1 = fold_convert (TREE_TYPE (bits2), bits1);
482 	}
483 
484       /* Do it.  */
485       gsi = gsi_for_stmt (inner_cond);
486       t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), bits1, bits2);
487       t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
488 				    true, GSI_SAME_STMT);
489       t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
490       t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
491 				    true, GSI_SAME_STMT);
492       t = fold_build2 (result_inv ? NE_EXPR : EQ_EXPR, boolean_type_node, t,
493 		       build_int_cst (TREE_TYPE (t), 0));
494       t = canonicalize_cond_expr_cond (t);
495       if (!t)
496 	return false;
497       gimple_cond_set_condition_from_tree (inner_cond, t);
498       update_stmt (inner_cond);
499 
500       /* Leave CFG optimization to cfg_cleanup.  */
501       gimple_cond_set_condition_from_tree (outer_cond,
502 	outer_inv ? boolean_false_node : boolean_true_node);
503       update_stmt (outer_cond);
504 
505       if (dump_file)
506 	{
507 	  fprintf (dump_file, "optimizing bits or bits test to ");
508 	  print_generic_expr (dump_file, name1, 0);
509 	  fprintf (dump_file, " & T != 0\nwith temporary T = ");
510 	  print_generic_expr (dump_file, bits1, 0);
511 	  fprintf (dump_file, " | ");
512 	  print_generic_expr (dump_file, bits2, 0);
513 	  fprintf (dump_file, "\n");
514 	}
515 
516       return true;
517     }
518 
519   /* See if we have two comparisons that we can merge into one.  */
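  /* An illustrative sketch (placeholder name a): for the && form

       if (a > 5)
         if (a > 3)
           ...

     maybe_fold_and_comparisons can fold the pair into the single test
     a > 5.  If it cannot fold them, but LOGICAL_OP_NON_SHORT_CIRCUIT is
     set and the inner block contains only the conditional, the two
     tests are still evaluated in a single branch.  */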
520   else if (TREE_CODE_CLASS (gimple_cond_code (inner_cond)) == tcc_comparison
521 	   && TREE_CODE_CLASS (gimple_cond_code (outer_cond)) == tcc_comparison)
522     {
523       tree t;
524       enum tree_code inner_cond_code = gimple_cond_code (inner_cond);
525       enum tree_code outer_cond_code = gimple_cond_code (outer_cond);
526 
527       /* Invert comparisons if necessary (and possible).  */
528       if (inner_inv)
529 	inner_cond_code = invert_tree_comparison (inner_cond_code,
530 	  HONOR_NANS (gimple_cond_lhs (inner_cond)));
531       if (inner_cond_code == ERROR_MARK)
532 	return false;
533       if (outer_inv)
534 	outer_cond_code = invert_tree_comparison (outer_cond_code,
535 	  HONOR_NANS (gimple_cond_lhs (outer_cond)));
536       if (outer_cond_code == ERROR_MARK)
537 	return false;
538       /* Don't return false so fast, try maybe_fold_or_comparisons?  */
539 
540       if (!(t = maybe_fold_and_comparisons (inner_cond_code,
541 					    gimple_cond_lhs (inner_cond),
542 					    gimple_cond_rhs (inner_cond),
543 					    outer_cond_code,
544 					    gimple_cond_lhs (outer_cond),
545 					    gimple_cond_rhs (outer_cond))))
546 	{
547 	  tree t1, t2;
548 	  gimple_stmt_iterator gsi;
549 	  if (!LOGICAL_OP_NON_SHORT_CIRCUIT)
550 	    return false;
551 	  /* Only do this optimization if the inner bb contains only the conditional. */
552 	  if (!gsi_one_before_end_p (gsi_start_nondebug_after_labels_bb (inner_cond_bb)))
553 	    return false;
554 	  t1 = fold_build2_loc (gimple_location (inner_cond),
555 				inner_cond_code,
556 				boolean_type_node,
557 				gimple_cond_lhs (inner_cond),
558 				gimple_cond_rhs (inner_cond));
559 	  t2 = fold_build2_loc (gimple_location (outer_cond),
560 				outer_cond_code,
561 				boolean_type_node,
562 				gimple_cond_lhs (outer_cond),
563 				gimple_cond_rhs (outer_cond));
564 	  t = fold_build2_loc (gimple_location (inner_cond),
565 			       TRUTH_AND_EXPR, boolean_type_node, t1, t2);
566 	  if (result_inv)
567 	    {
568 	      t = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (t), t);
569 	      result_inv = false;
570 	    }
571 	  gsi = gsi_for_stmt (inner_cond);
572 	  t = force_gimple_operand_gsi_1 (&gsi, t, is_gimple_condexpr, NULL, true,
573 					  GSI_SAME_STMT);
574         }
575       if (result_inv)
576 	t = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (t), t);
577       t = canonicalize_cond_expr_cond (t);
578       if (!t)
579 	return false;
580       gimple_cond_set_condition_from_tree (inner_cond, t);
581       update_stmt (inner_cond);
582 
583       /* Leave CFG optimization to cfg_cleanup.  */
584       gimple_cond_set_condition_from_tree (outer_cond,
585 	outer_inv ? boolean_false_node : boolean_true_node);
586       update_stmt (outer_cond);
587 
588       if (dump_file)
589 	{
590 	  fprintf (dump_file, "optimizing two comparisons to ");
591 	  print_generic_expr (dump_file, t, 0);
592 	  fprintf (dump_file, "\n");
593 	}
594 
595       return true;
596     }
597 
598   return false;
599 }
600 
601 /* Helper function for tree_ssa_ifcombine_bb.  Recognize a CFG pattern and
602    dispatch to the appropriate if-conversion helper for a particular
603    set of INNER_COND_BB, OUTER_COND_BB, THEN_BB and ELSE_BB.
604    PHI_PRED_BB should be one of INNER_COND_BB, THEN_BB or ELSE_BB.  */
605 
606 static bool
607 tree_ssa_ifcombine_bb_1 (basic_block inner_cond_bb, basic_block outer_cond_bb,
608 			 basic_block then_bb, basic_block else_bb,
609 			 basic_block phi_pred_bb)
610 {
611   /* The && form is characterized by a common else_bb with
612      the two edges leading to it mergeable.  The latter is
613      guaranteed by matching PHI arguments in the else_bb and
614      the inner cond_bb having no side-effects.  */
615   if (phi_pred_bb != else_bb
616       && recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &else_bb)
617       && same_phi_args_p (outer_cond_bb, phi_pred_bb, else_bb)
618       && bb_no_side_effects_p (inner_cond_bb))
619     {
620       /* We have
621 	   <outer_cond_bb>
622 	     if (q) goto inner_cond_bb; else goto else_bb;
623 	   <inner_cond_bb>
624 	     if (p) goto ...; else goto else_bb;
625 	     ...
626 	   <else_bb>
627 	     ...
628        */
629       return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, false,
630 				false);
631     }
632 
633   /* And a version where the outer condition is negated.  */
634   if (phi_pred_bb != else_bb
635       && recognize_if_then_else (outer_cond_bb, &else_bb, &inner_cond_bb)
636       && same_phi_args_p (outer_cond_bb, phi_pred_bb, else_bb)
637       && bb_no_side_effects_p (inner_cond_bb))
638     {
639       /* We have
640 	   <outer_cond_bb>
641 	     if (q) goto else_bb; else goto inner_cond_bb;
642 	   <inner_cond_bb>
643 	     if (p) goto ...; else goto else_bb;
644 	     ...
645 	   <else_bb>
646 	     ...
647        */
648       return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, true,
649 				false);
650     }
651 
652   /* The || form is characterized by a common then_bb with the
653      two edges leading to it mergeable.  The latter is guaranteed
654      by matching PHI arguments in the then_bb and the inner cond_bb
655      having no side-effects.  */
656   if (phi_pred_bb != then_bb
657       && recognize_if_then_else (outer_cond_bb, &then_bb, &inner_cond_bb)
658       && same_phi_args_p (outer_cond_bb, phi_pred_bb, then_bb)
659       && bb_no_side_effects_p (inner_cond_bb))
660     {
661       /* We have
662 	   <outer_cond_bb>
663 	     if (q) goto then_bb; else goto inner_cond_bb;
664 	   <inner_cond_bb>
665 	     if (p) goto then_bb; else goto ...;
666 	   <then_bb>
667 	     ...
668        */
669       return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, true,
670 				true);
671     }
672 
673   /* And a version where the outer condition is negated.  */
674   if (phi_pred_bb != then_bb
675       && recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &then_bb)
676       && same_phi_args_p (outer_cond_bb, phi_pred_bb, then_bb)
677       && bb_no_side_effects_p (inner_cond_bb))
678     {
679       /* We have
680 	   <outer_cond_bb>
681 	     if (q) goto inner_cond_bb; else goto then_bb;
682 	   <inner_cond_bb>
683 	     if (p) goto then_bb; else goto ...;
684 	   <then_bb>
685 	     ...
686        */
687       return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, false,
688 				true);
689     }
690 
691   return false;
692 }
693 
694 /* Recognize a CFG pattern and dispatch to the appropriate
695    if-conversion helper.  We start with INNER_COND_BB as the
696    innermost basic-block.  Returns true if a transformation was done.  */
697 
698 static bool
699 tree_ssa_ifcombine_bb (basic_block inner_cond_bb)
700 {
701   basic_block then_bb = NULL, else_bb = NULL;
702 
703   if (!recognize_if_then_else (inner_cond_bb, &then_bb, &else_bb))
704     return false;
705 
706   /* Recognize && and || of two conditions with a common
707      then/else block which entry edges we can merge.  That is:
708        if (a || b)
709 	 ;
710      and
711        if (a && b)
712 	 ;
713      This requires a single predecessor of the inner cond_bb.  */
714   if (single_pred_p (inner_cond_bb))
715     {
716       basic_block outer_cond_bb = single_pred (inner_cond_bb);
717 
718       if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb,
719 				   then_bb, else_bb, inner_cond_bb))
720 	return true;
721 
722       if (forwarder_block_to (else_bb, then_bb))
723 	{
724 	  /* Other possibilities for the && form, if else_bb is
725 	     empty forwarder block to then_bb.  Compared to the above simpler
726 	     forms this can be treated as if then_bb and else_bb were swapped,
727 	     and the corresponding inner_cond_bb not inverted because of that.
728 	     For same_phi_args_p we compare the arguments on the edge from
729 	     outer_cond_bb with those on the edge from the forwarder block.  */
730 	  if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb, else_bb,
731 				       then_bb, else_bb))
732 	    return true;
733 	}
734       else if (forwarder_block_to (then_bb, else_bb))
735 	{
736 	  /* Other possibilities for the || form, if then_bb is
737 	     empty forwarder block to else_bb.  Compared to the above simpler
738 	     forms this can be treated as if then_bb and else_bb were swapped,
739 	     and the corresponding inner_cond_bb not inverted because of that.
740 	     For same_phi_args_p we compare the arguments on the edge from
741 	     outer_cond_bb with those on the edge from the forwarder block.  */
742 	  if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb, else_bb,
743 				       then_bb, then_bb))
744 	    return true;
745 	}
746     }
747 
748   return false;
749 }
750 
751 /* Main entry for the tree if-combining pass.  */
752 
753 namespace {
754 
755 const pass_data pass_data_tree_ifcombine =
756 {
757   GIMPLE_PASS, /* type */
758   "ifcombine", /* name */
759   OPTGROUP_NONE, /* optinfo_flags */
760   TV_TREE_IFCOMBINE, /* tv_id */
761   ( PROP_cfg | PROP_ssa ), /* properties_required */
762   0, /* properties_provided */
763   0, /* properties_destroyed */
764   0, /* todo_flags_start */
765   TODO_update_ssa, /* todo_flags_finish */
766 };
767 
768 class pass_tree_ifcombine : public gimple_opt_pass
769 {
770 public:
771   pass_tree_ifcombine (gcc::context *ctxt)
772     : gimple_opt_pass (pass_data_tree_ifcombine, ctxt)
773   {}
774 
775   /* opt_pass methods: */
776   virtual unsigned int execute (function *);
777 
778 }; // class pass_tree_ifcombine
779 
780 unsigned int
781 pass_tree_ifcombine::execute (function *fun)
782 {
783   basic_block *bbs;
784   bool cfg_changed = false;
785   int i;
786 
787   bbs = single_pred_before_succ_order ();
788   calculate_dominance_info (CDI_DOMINATORS);
789 
790   /* Search every basic block for COND_EXPRs we may be able to optimize.
791 
792      We walk the blocks in an order that guarantees that a block with
793      a single predecessor is processed after the predecessor.
794      This ensures that we collapse outer ifs before visiting the
795      inner ones, and also that we do not try to visit a removed
796      block.  This is the opposite of PHI-OPT, because we cascade
797      the combining rather than cascading PHIs.  */
798   for (i = n_basic_blocks_for_fn (fun) - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
799     {
800       basic_block bb = bbs[i];
801       gimple stmt = last_stmt (bb);
802 
803       if (stmt
804 	  && gimple_code (stmt) == GIMPLE_COND)
805 	if (tree_ssa_ifcombine_bb (bb))
806 	  {
807 	    /* Clear range info from all stmts in BB, which is now executed
808 	       conditionally on an always true/false condition.  */
809 	    reset_flow_sensitive_info_in_bb (bb);
810 	    cfg_changed |= true;
811 	  }
812     }
813 
814   free (bbs);
815 
816   return cfg_changed ? TODO_cleanup_cfg : 0;
817 }
818 
819 } // anon namespace
820 
821 gimple_opt_pass *
822 make_pass_tree_ifcombine (gcc::context *ctxt)
823 {
824   return new pass_tree_ifcombine (ctxt);
825 }
826