/* Combining of if-expressions on trees.
   Copyright (C) 2007-2022 Free Software Foundation, Inc.
   Contributed by Richard Guenther <rguenther@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "memmodel.h"
#include "tm_p.h"
#include "ssa.h"
#include "tree-pretty-print.h"
/* rtl is needed only because arm back-end requires it for
   BRANCH_COST.  */
#include "fold-const.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "attribs.h"
#include "asan.h"

#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
                false) >= 2)
#endif

/* This pass combines COND_EXPRs to simplify control flow.  It
   currently recognizes bit tests and comparisons in chains that
   represent logical and or logical or of two COND_EXPRs.

   It does so by walking basic blocks in an approximate reverse
   post-dominator order and trying to match CFG patterns that
   represent logical and or logical or of two COND_EXPRs.
   Transformations are done if the COND_EXPR conditions match
   either

     1. two single bit tests X & (1 << Yn) (for logical and)

     2. two bit tests X & Yn (for logical or)

     3. two comparisons X OPn Y (for logical or)

   To simplify this pass, removing basic blocks and dead code
   is left to CFG cleanup and DCE.  */
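
/* As a rough illustration (a sketch, not taken from the GCC testsuite),
   a chain of single bit tests such as

     if (x & (1 << a))
       if (x & (1 << b))
         foo ();

   can be rewritten into the equivalent of

     tmp = (1 << a) | (1 << b);
     if ((x & tmp) == tmp)
       foo ();

   and, for the logical-or forms, "if (x & a) ... else if (x & b) ..."
   into a single test of x & (a | b).  */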


/* Recognize an if-then-else CFG pattern starting to match with the
   COND_BB basic-block containing the COND_EXPR.  The recognized
   then and else blocks are stored to *THEN_BB and *ELSE_BB.  If
   *THEN_BB and/or *ELSE_BB are already set, they are required to
   match the then and else basic-blocks to make the pattern match.
   Returns true if the pattern matched, false otherwise.  */

static bool
recognize_if_then_else (basic_block cond_bb,
                        basic_block *then_bb, basic_block *else_bb)
{
  edge t, e;

  if (EDGE_COUNT (cond_bb->succs) != 2)
    return false;

  /* Find the then/else edges.  */
  t = EDGE_SUCC (cond_bb, 0);
  e = EDGE_SUCC (cond_bb, 1);
  if (!(t->flags & EDGE_TRUE_VALUE))
    std::swap (t, e);
  if (!(t->flags & EDGE_TRUE_VALUE)
      || !(e->flags & EDGE_FALSE_VALUE))
    return false;

  /* Check if the edge destinations point to the required block.  */
  if (*then_bb
      && t->dest != *then_bb)
    return false;
  if (*else_bb
      && e->dest != *else_bb)
    return false;

  if (!*then_bb)
    *then_bb = t->dest;
  if (!*else_bb)
    *else_bb = e->dest;

  return true;
}

/* Verify if the basic block BB does not have side-effects.  Return
   true in this case, else false.  */

static bool
bb_no_side_effects_p (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
        continue;

      if (gimple_has_side_effects (stmt)
          || gimple_uses_undefined_value_p (stmt)
          || gimple_could_trap_p (stmt)
          || gimple_vuse (stmt)
          /* const calls don't match any of the above, yet they could
             still have some side-effects - they could contain
             gimple_could_trap_p statements, like floating point
             exceptions or integer division by zero.  See PR70586.
             FIXME: perhaps gimple_has_side_effects or gimple_could_trap_p
             should handle this.  */
          || is_gimple_call (stmt))
        return false;
    }

  return true;
}

/* Return true if BB is an empty forwarder block to TO_BB.  */

static bool
forwarder_block_to (basic_block bb, basic_block to_bb)
{
  return empty_block_p (bb)
         && single_succ_p (bb)
         && single_succ (bb) == to_bb;
}

/* Verify if all PHI node arguments in DEST for edges from BB1 or
   BB2 to DEST are the same.  This makes the CFG merge point
   free from side-effects.  Return true in this case, else false.  */

static bool
same_phi_args_p (basic_block bb1, basic_block bb2, basic_block dest)
{
  edge e1 = find_edge (bb1, dest);
  edge e2 = find_edge (bb2, dest);
  gphi_iterator gsi;
  gphi *phi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      phi = gsi.phi ();
      if (!operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi, e1),
                            PHI_ARG_DEF_FROM_EDGE (phi, e2), 0))
        return false;
    }

  return true;
}

/* Return the best representative SSA name for CANDIDATE which is used
   in a bit test.  */
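
/* For example (an illustrative sketch): if CANDIDATE is a single-use
   name _2 defined by "_2 = (int) c_5" and c_5 is at least as wide as
   _2, c_5 is returned instead; otherwise CANDIDATE itself is the
   result.  */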

static tree
get_name_for_bit_test (tree candidate)
{
  /* Skip single-use names in favor of using the name from a
     non-widening conversion definition.  */
  if (TREE_CODE (candidate) == SSA_NAME
      && has_single_use (candidate))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (candidate);
      if (is_gimple_assign (def_stmt)
          && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
        {
          if (TYPE_PRECISION (TREE_TYPE (candidate))
              <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (def_stmt))))
            return gimple_assign_rhs1 (def_stmt);
        }
    }

  return candidate;
}

/* Recognize a single bit test pattern in a GIMPLE_COND and its defining
   statements.  Store the name being tested in *NAME and the bit
   in *BIT.  The GIMPLE_COND computes *NAME & (1 << *BIT).
   Returns true if the pattern matched, false otherwise.  */

static bool
recognize_single_bit_test (gcond *cond, tree *name, tree *bit, bool inv)
{
  gimple *stmt;

  /* Get at the definition of the result of the bit test.  */
  if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
      || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
      || !integer_zerop (gimple_cond_rhs (cond)))
    return false;
  stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
  if (!is_gimple_assign (stmt))
    return false;

  /* Look at which bit is tested.  One form to recognize is
     D.1985_5 = state_3(D) >> control1_4(D);
     D.1986_6 = (int) D.1985_5;
     D.1987_7 = op0 & 1;
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && integer_onep (gimple_assign_rhs2 (stmt))
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
    {
      tree orig_name = gimple_assign_rhs1 (stmt);

      /* Look through copies and conversions to eventually
         find the stmt that computes the shift.  */
      stmt = SSA_NAME_DEF_STMT (orig_name);

      while (is_gimple_assign (stmt)
             && ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
                  && (TYPE_PRECISION (TREE_TYPE (gimple_assign_lhs (stmt)))
                      <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (stmt))))
                  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
                 || gimple_assign_ssa_name_copy_p (stmt)))
        stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));

      /* If we found such, decompose it.  */
      if (is_gimple_assign (stmt)
          && gimple_assign_rhs_code (stmt) == RSHIFT_EXPR)
        {
          /* op0 & (1 << op1) */
          *bit = gimple_assign_rhs2 (stmt);
          *name = gimple_assign_rhs1 (stmt);
        }
      else
        {
          /* t & 1 */
          *bit = integer_zero_node;
          *name = get_name_for_bit_test (orig_name);
        }

      return true;
    }

  /* Another form is
     D.1987_7 = op0 & (1 << CST)
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
      && integer_pow2p (gimple_assign_rhs2 (stmt)))
    {
      *name = gimple_assign_rhs1 (stmt);
      *bit = build_int_cst (integer_type_node,
                            tree_log2 (gimple_assign_rhs2 (stmt)));
      return true;
    }

  /* Another form is
     D.1986_6 = 1 << control1_4(D)
     D.1987_7 = op0 & D.1986_6
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
      && TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME)
    {
      gimple *tmp;

      /* Both arguments of the BIT_AND_EXPR can be the single-bit
         specifying expression.  */
      tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
      if (is_gimple_assign (tmp)
          && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
          && integer_onep (gimple_assign_rhs1 (tmp)))
        {
          *name = gimple_assign_rhs2 (stmt);
          *bit = gimple_assign_rhs2 (tmp);
          return true;
        }

      tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
      if (is_gimple_assign (tmp)
          && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
          && integer_onep (gimple_assign_rhs1 (tmp)))
        {
          *name = gimple_assign_rhs1 (stmt);
          *bit = gimple_assign_rhs2 (tmp);
          return true;
        }
    }

  return false;
}

/* Recognize a bit test pattern in a GIMPLE_COND and its defining
   statements.  Store the name being tested in *NAME and the bits
   in *BITS.  The COND_EXPR computes *NAME & *BITS.
   Returns true if the pattern matched, false otherwise.  */
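
/* For example (an illustrative sketch): for
     _1 = x_2 & 12;
     if (_1 != 0)
   this stores x_2 in *NAME and the constant 12 in *BITS.  */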

static bool
recognize_bits_test (gcond *cond, tree *name, tree *bits, bool inv)
{
  gimple *stmt;

  /* Get at the definition of the result of the bit test.  */
  if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
      || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
      || !integer_zerop (gimple_cond_rhs (cond)))
    return false;
  stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
  if (!is_gimple_assign (stmt)
      || gimple_assign_rhs_code (stmt) != BIT_AND_EXPR)
    return false;

  *name = get_name_for_bit_test (gimple_assign_rhs1 (stmt));
  *bits = gimple_assign_rhs2 (stmt);

  return true;
}


/* Update profile after code in outer_cond_bb was adjusted so
   outer_cond_bb has no condition.  */

static void
update_profile_after_ifcombine (basic_block inner_cond_bb,
                                basic_block outer_cond_bb)
{
  edge outer_to_inner = find_edge (outer_cond_bb, inner_cond_bb);
  edge outer2 = (EDGE_SUCC (outer_cond_bb, 0) == outer_to_inner
                 ? EDGE_SUCC (outer_cond_bb, 1)
                 : EDGE_SUCC (outer_cond_bb, 0));
  edge inner_taken = EDGE_SUCC (inner_cond_bb, 0);
  edge inner_not_taken = EDGE_SUCC (inner_cond_bb, 1);

  if (inner_taken->dest != outer2->dest)
    std::swap (inner_taken, inner_not_taken);
  gcc_assert (inner_taken->dest == outer2->dest);

  /* In the following we assume that inner_cond_bb has a single
     predecessor.  */
  gcc_assert (single_pred_p (inner_cond_bb));

  /* Path outer_cond_bb->(outer2) needs to be merged into path
     outer_cond_bb->(outer_to_inner)->inner_cond_bb->(inner_taken)
     and probability of inner_not_taken updated.  */
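
  /* For instance (an illustrative sketch, not from any testcase): with
     outer2->probability == 0.3, outer_to_inner->probability == 0.7 and
     inner_taken->probability == 0.5, the combined inner_taken
     probability becomes 0.3 + 0.7 * 0.5 == 0.65 and inner_not_taken
     gets the remaining 0.35.  */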

  inner_cond_bb->count = outer_cond_bb->count;

  /* Handle the special case where inner_taken probability is always.  In
     this case we know that the overall outcome will be always as well, but
     combining probabilities will be conservative because it does not know
     that outer2->probability is the inverse of
     outer_to_inner->probability.  */
  if (inner_taken->probability == profile_probability::always ())
    ;
  else
    inner_taken->probability = outer2->probability + outer_to_inner->probability
                               * inner_taken->probability;
  inner_not_taken->probability = profile_probability::always ()
                                 - inner_taken->probability;

  outer_to_inner->probability = profile_probability::always ();
  outer2->probability = profile_probability::never ();
}

/* If-convert on an and-pattern with a common else block.  The inner
   if is specified by its INNER_COND_BB, the outer by OUTER_COND_BB.
   inner_inv, outer_inv and result_inv indicate whether the conditions
   are inverted.
   Returns true if the edges to the common else basic-block were merged.  */

static bool
ifcombine_ifandif (basic_block inner_cond_bb, bool inner_inv,
                   basic_block outer_cond_bb, bool outer_inv, bool result_inv)
{
  gimple_stmt_iterator gsi;
  gimple *inner_stmt, *outer_stmt;
  gcond *inner_cond, *outer_cond;
  tree name1, name2, bit1, bit2, bits1, bits2;

  inner_stmt = last_stmt (inner_cond_bb);
  if (!inner_stmt
      || gimple_code (inner_stmt) != GIMPLE_COND)
    return false;
  inner_cond = as_a <gcond *> (inner_stmt);

  outer_stmt = last_stmt (outer_cond_bb);
  if (!outer_stmt
      || gimple_code (outer_stmt) != GIMPLE_COND)
    return false;
  outer_cond = as_a <gcond *> (outer_stmt);

  /* See if we test a single bit of the same name in both tests.  In
     that case remove the outer test, merging both else edges,
     and change the inner one to test for
     name & (bit1 | bit2) == (bit1 | bit2).  */
  if (recognize_single_bit_test (inner_cond, &name1, &bit1, inner_inv)
      && recognize_single_bit_test (outer_cond, &name2, &bit2, outer_inv)
      && name1 == name2)
    {
      tree t, t2;

      if (TREE_CODE (name1) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name1))
        return false;

      /* Do it.  */
      gsi = gsi_for_stmt (inner_cond);
      t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
                       build_int_cst (TREE_TYPE (name1), 1), bit1);
      t2 = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
                        build_int_cst (TREE_TYPE (name1), 1), bit2);
      t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), t, t2);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
                                    true, GSI_SAME_STMT);
      t2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
      t2 = force_gimple_operand_gsi (&gsi, t2, true, NULL_TREE,
                                     true, GSI_SAME_STMT);
      t = fold_build2 (result_inv ? NE_EXPR : EQ_EXPR,
                       boolean_type_node, t2, t);
      t = canonicalize_cond_expr_cond (t);
      if (!t)
        return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
        outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);

      update_profile_after_ifcombine (inner_cond_bb, outer_cond_bb);

      if (dump_file)
        {
          fprintf (dump_file, "optimizing double bit test to ");
          print_generic_expr (dump_file, name1);
          fprintf (dump_file, " & T == T\nwith temporary T = (1 << ");
          print_generic_expr (dump_file, bit1);
          fprintf (dump_file, ") | (1 << ");
          print_generic_expr (dump_file, bit2);
          fprintf (dump_file, ")\n");
        }

      return true;
    }

  /* See if we have two bit tests of the same name in both tests.
     In that case remove the outer test and change the inner one to
     test for name & (bits1 | bits2) != 0.  */
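  /* E.g. (an illustrative sketch): for the || form, two tests
     "if (x & 4) ... else if (x & 16) ..." with a common destination
     are merged into the single inner test (x & 20) != 0; the && form
     is handled the same way with the inverted (== 0) tests.  */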
  else if (recognize_bits_test (inner_cond, &name1, &bits1, !inner_inv)
           && recognize_bits_test (outer_cond, &name2, &bits2, !outer_inv))
    {
      gimple_stmt_iterator gsi;
      tree t;

      if ((TREE_CODE (name1) == SSA_NAME
           && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name1))
          || (TREE_CODE (name2) == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name2)))
        return false;

      /* Find the common name which is bit-tested.  */
      if (name1 == name2)
        ;
      else if (bits1 == bits2)
        {
          std::swap (name2, bits2);
          std::swap (name1, bits1);
        }
      else if (name1 == bits2)
        std::swap (name2, bits2);
      else if (bits1 == name2)
        std::swap (name1, bits1);
      else
        return false;

      /* As we strip non-widening conversions in finding a common
         name that is tested, make sure to end up with an integral
         type for building the bit operations.  */
      if (TYPE_PRECISION (TREE_TYPE (bits1))
          >= TYPE_PRECISION (TREE_TYPE (bits2)))
        {
          bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
          name1 = fold_convert (TREE_TYPE (bits1), name1);
          bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
          bits2 = fold_convert (TREE_TYPE (bits1), bits2);
        }
      else
        {
          bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
          name1 = fold_convert (TREE_TYPE (bits2), name1);
          bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
          bits1 = fold_convert (TREE_TYPE (bits2), bits1);
        }

      /* Do it.  */
      gsi = gsi_for_stmt (inner_cond);
      t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), bits1, bits2);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
                                    true, GSI_SAME_STMT);
      t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
                                    true, GSI_SAME_STMT);
      t = fold_build2 (result_inv ? NE_EXPR : EQ_EXPR, boolean_type_node, t,
                       build_int_cst (TREE_TYPE (t), 0));
      t = canonicalize_cond_expr_cond (t);
      if (!t)
        return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
        outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);
      update_profile_after_ifcombine (inner_cond_bb, outer_cond_bb);

      if (dump_file)
        {
          fprintf (dump_file, "optimizing bits or bits test to ");
          print_generic_expr (dump_file, name1);
          fprintf (dump_file, " & T != 0\nwith temporary T = ");
          print_generic_expr (dump_file, bits1);
          fprintf (dump_file, " | ");
          print_generic_expr (dump_file, bits2);
          fprintf (dump_file, "\n");
        }

      return true;
    }

  /* See if we have two comparisons that we can merge into one.  */
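  /* E.g. (an illustrative sketch): an inner test "a_1 > 5" and an
     outer test "a_1 > 3" for the && form can be folded by
     maybe_fold_and_comparisons into the single test "a_1 > 5".  */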
  else if (TREE_CODE_CLASS (gimple_cond_code (inner_cond)) == tcc_comparison
           && TREE_CODE_CLASS (gimple_cond_code (outer_cond)) == tcc_comparison)
    {
      tree t;
      enum tree_code inner_cond_code = gimple_cond_code (inner_cond);
      enum tree_code outer_cond_code = gimple_cond_code (outer_cond);

      /* Invert comparisons if necessary (and possible).  */
      if (inner_inv)
        inner_cond_code = invert_tree_comparison (inner_cond_code,
          HONOR_NANS (gimple_cond_lhs (inner_cond)));
      if (inner_cond_code == ERROR_MARK)
        return false;
      if (outer_inv)
        outer_cond_code = invert_tree_comparison (outer_cond_code,
          HONOR_NANS (gimple_cond_lhs (outer_cond)));
      if (outer_cond_code == ERROR_MARK)
        return false;
      /* Don't return false so fast, try maybe_fold_or_comparisons?  */

      if (!(t = maybe_fold_and_comparisons (boolean_type_node, inner_cond_code,
                                            gimple_cond_lhs (inner_cond),
                                            gimple_cond_rhs (inner_cond),
                                            outer_cond_code,
                                            gimple_cond_lhs (outer_cond),
                                            gimple_cond_rhs (outer_cond),
                                            gimple_bb (outer_cond))))
        {
          tree t1, t2;
          gimple_stmt_iterator gsi;
          bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
          if (param_logical_op_non_short_circuit != -1)
            logical_op_non_short_circuit
              = param_logical_op_non_short_circuit;
          if (!logical_op_non_short_circuit || sanitize_coverage_p ())
            return false;
          /* Only do this optimization if the inner bb contains only the
             conditional.  */
          if (!gsi_one_before_end_p (gsi_start_nondebug_after_labels_bb (inner_cond_bb)))
            return false;
          t1 = fold_build2_loc (gimple_location (inner_cond),
                                inner_cond_code,
                                boolean_type_node,
                                gimple_cond_lhs (inner_cond),
                                gimple_cond_rhs (inner_cond));
          t2 = fold_build2_loc (gimple_location (outer_cond),
                                outer_cond_code,
                                boolean_type_node,
                                gimple_cond_lhs (outer_cond),
                                gimple_cond_rhs (outer_cond));
          t = fold_build2_loc (gimple_location (inner_cond),
                               TRUTH_AND_EXPR, boolean_type_node, t1, t2);
          if (result_inv)
            {
              t = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (t), t);
              result_inv = false;
            }
          gsi = gsi_for_stmt (inner_cond);
          t = force_gimple_operand_gsi_1 (&gsi, t, is_gimple_condexpr, NULL, true,
                                          GSI_SAME_STMT);
        }
      if (result_inv)
        t = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (t), t);
      t = canonicalize_cond_expr_cond (t);
      if (!t)
        return false;
      if (!is_gimple_condexpr_for_cond (t))
        {
          gsi = gsi_for_stmt (inner_cond);
          t = force_gimple_operand_gsi_1 (&gsi, t, is_gimple_condexpr_for_cond,
                                          NULL, true, GSI_SAME_STMT);
        }
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
        outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);
      update_profile_after_ifcombine (inner_cond_bb, outer_cond_bb);

      if (dump_file)
        {
          fprintf (dump_file, "optimizing two comparisons to ");
          print_generic_expr (dump_file, t);
          fprintf (dump_file, "\n");
        }

      return true;
    }

  return false;
}

/* Helper function for tree_ssa_ifcombine_bb.  Recognize a CFG pattern and
   dispatch to the appropriate if-conversion helper for a particular
   set of INNER_COND_BB, OUTER_COND_BB, THEN_BB and ELSE_BB.
   PHI_PRED_BB should be one of INNER_COND_BB, THEN_BB or ELSE_BB.  */

static bool
tree_ssa_ifcombine_bb_1 (basic_block inner_cond_bb, basic_block outer_cond_bb,
                         basic_block then_bb, basic_block else_bb,
                         basic_block phi_pred_bb)
{
  /* The && form is characterized by a common else_bb with
     the two edges leading to it mergable.  The latter is
     guaranteed by matching PHI arguments in the else_bb and
     the inner cond_bb having no side-effects.  */
  if (phi_pred_bb != else_bb
      && recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &else_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, else_bb))
    {
      /* We have
           <outer_cond_bb>
             if (q) goto inner_cond_bb; else goto else_bb;
           <inner_cond_bb>
             if (p) goto ...; else goto else_bb;
             ...
           <else_bb>
             ...
       */
      return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, false,
                                false);
    }

  /* And a version where the outer condition is negated.  */
  if (phi_pred_bb != else_bb
      && recognize_if_then_else (outer_cond_bb, &else_bb, &inner_cond_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, else_bb))
    {
      /* We have
           <outer_cond_bb>
             if (q) goto else_bb; else goto inner_cond_bb;
           <inner_cond_bb>
             if (p) goto ...; else goto else_bb;
             ...
           <else_bb>
             ...
       */
      return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, true,
                                false);
    }

  /* The || form is characterized by a common then_bb with the
     two edges leading to it mergable.  The latter is guaranteed
     by matching PHI arguments in the then_bb and the inner cond_bb
     having no side-effects.  */
  if (phi_pred_bb != then_bb
      && recognize_if_then_else (outer_cond_bb, &then_bb, &inner_cond_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, then_bb))
    {
      /* We have
           <outer_cond_bb>
             if (q) goto then_bb; else goto inner_cond_bb;
           <inner_cond_bb>
             if (p) goto then_bb; else goto ...;
           <then_bb>
             ...
       */
      return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, true,
                                true);
    }

  /* And a version where the outer condition is negated.  */
  if (phi_pred_bb != then_bb
      && recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &then_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, then_bb))
    {
      /* We have
           <outer_cond_bb>
             if (q) goto inner_cond_bb; else goto then_bb;
           <inner_cond_bb>
             if (p) goto then_bb; else goto ...;
           <then_bb>
             ...
       */
      return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, false,
                                true);
    }

  return false;
}

/* Recognize a CFG pattern and dispatch to the appropriate
   if-conversion helper.  We start with BB as the innermost
   worker basic-block.  Returns true if a transformation was done.  */

static bool
tree_ssa_ifcombine_bb (basic_block inner_cond_bb)
{
  basic_block then_bb = NULL, else_bb = NULL;

  if (!recognize_if_then_else (inner_cond_bb, &then_bb, &else_bb))
    return false;

  /* Recognize && and || of two conditions with a common
     then/else block which entry edges we can merge.  That is:
       if (a || b)
         ;
     and
       if (a && b)
         ;
     This requires a single predecessor of the inner cond_bb.  */
  if (single_pred_p (inner_cond_bb)
      && bb_no_side_effects_p (inner_cond_bb))
    {
      basic_block outer_cond_bb = single_pred (inner_cond_bb);

      if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb,
                                   then_bb, else_bb, inner_cond_bb))
        return true;

      if (forwarder_block_to (else_bb, then_bb))
        {
          /* Other possibilities for the && form, if else_bb is an
             empty forwarder block to then_bb.  Compared to the above simpler
             forms this can be treated as if then_bb and else_bb were swapped,
             and the corresponding inner_cond_bb not inverted because of that.
             For same_phi_args_p we look at equality of arguments between
             edge from outer_cond_bb and the forwarder block.  */
          if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb, else_bb,
                                       then_bb, else_bb))
            return true;
        }
      else if (forwarder_block_to (then_bb, else_bb))
        {
          /* Other possibilities for the || form, if then_bb is an
             empty forwarder block to else_bb.  Compared to the above simpler
             forms this can be treated as if then_bb and else_bb were swapped,
             and the corresponding inner_cond_bb not inverted because of that.
             For same_phi_args_p we look at equality of arguments between
             edge from outer_cond_bb and the forwarder block.  */
          if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb, else_bb,
                                       then_bb, then_bb))
            return true;
        }
    }

  return false;
}

/* Main entry for the tree if-conversion pass.  */

namespace {

const pass_data pass_data_tree_ifcombine =
{
  GIMPLE_PASS, /* type */
  "ifcombine", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_IFCOMBINE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tree_ifcombine : public gimple_opt_pass
{
public:
  pass_tree_ifcombine (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tree_ifcombine, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_tree_ifcombine

unsigned int
pass_tree_ifcombine::execute (function *fun)
{
  basic_block *bbs;
  bool cfg_changed = false;
  int i;

  bbs = single_pred_before_succ_order ();
  calculate_dominance_info (CDI_DOMINATORS);

  /* Search every basic block for COND_EXPRs we may be able to optimize.

     We walk the blocks in an order that guarantees that a block with
     a single predecessor is processed after the predecessor.
     This ensures that we collapse outer ifs before visiting the
     inner ones, and also that we do not try to visit a removed
     block.  This is the opposite of PHI-OPT, because we cascade the
     combining rather than cascading PHIs.  */
  for (i = n_basic_blocks_for_fn (fun) - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
    {
      basic_block bb = bbs[i];
      gimple *stmt = last_stmt (bb);

      if (stmt
          && gimple_code (stmt) == GIMPLE_COND)
        if (tree_ssa_ifcombine_bb (bb))
          {
            /* Clear range info from all stmts in BB which is now executed
               conditional on an always true/false condition.  */
            reset_flow_sensitive_info_in_bb (bb);
            cfg_changed |= true;
          }
    }

  free (bbs);

  return cfg_changed ? TODO_cleanup_cfg : 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_tree_ifcombine (gcc::context *ctxt)
{
  return new pass_tree_ifcombine (ctxt);
}