1 /* Convert tree expression to rtl instructions, for GNU compiler.
2    Copyright (C) 1988-2020 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "predict.h"
28 #include "memmodel.h"
29 #include "tm_p.h"
30 #include "optabs.h"
31 #include "emit-rtl.h"
32 #include "fold-const.h"
33 #include "stor-layout.h"
34 /* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
35 #include "dojump.h"
36 #include "explow.h"
37 #include "expr.h"
38 #include "langhooks.h"
39 
40 static bool prefer_and_bit_test (scalar_int_mode, int);
41 static void do_jump (tree, rtx_code_label *, rtx_code_label *,
42 		     profile_probability);
43 static void do_jump_by_parts_greater (scalar_int_mode, tree, tree, int,
44 				      rtx_code_label *, rtx_code_label *,
45 				      profile_probability);
46 static void do_jump_by_parts_equality (scalar_int_mode, tree, tree,
47 				       rtx_code_label *, rtx_code_label *,
48 				       profile_probability);
49 static void do_compare_and_jump	(tree, tree, enum rtx_code, enum rtx_code,
50 				 rtx_code_label *, rtx_code_label *,
51 				 profile_probability);
52 
53 /* At the start of a function, record that we have no previously-pushed
54    arguments waiting to be popped.  */
55 
56 void
57 init_pending_stack_adjust (void)
58 {
59   pending_stack_adjust = 0;
60 }
61 
62 /* Discard any pending stack adjustment.  This avoids relying on the
63    RTL optimizers to remove useless adjustments when we know the
64    stack pointer value is dead.  */
65 void
66 discard_pending_stack_adjust (void)
67 {
68   stack_pointer_delta -= pending_stack_adjust;
69   pending_stack_adjust = 0;
70 }
71 
72 /* When exiting from a function, if safe, clear out any pending stack adjust
73    so the adjustment won't get done.
74 
75    Note, if the current function calls alloca, then it must have a
76    frame pointer regardless of the value of flag_omit_frame_pointer.  */
77 
78 void
79 clear_pending_stack_adjust (void)
80 {
81   if (optimize > 0
82       && (! flag_omit_frame_pointer || cfun->calls_alloca)
83       && EXIT_IGNORE_STACK)
84     discard_pending_stack_adjust ();
85 }
86 
87 /* Pop any previously-pushed arguments that have not been popped yet.  */
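/* (Sketch of the mechanism, inferred from the code below: while popping is
   deferred, the bytes to pop after each call accumulate in
   pending_stack_adjust instead of being popped immediately; this function
   then emits a single stack adjustment for the accumulated total.)  */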
88 
89 void
90 do_pending_stack_adjust (void)
91 {
92   if (inhibit_defer_pop == 0)
93     {
94       if (maybe_ne (pending_stack_adjust, 0))
95 	adjust_stack (gen_int_mode (pending_stack_adjust, Pmode));
96       pending_stack_adjust = 0;
97     }
98 }
99 
100 /* Remember pending_stack_adjust/stack_pointer_delta.
101    To be used around code that may call do_pending_stack_adjust (),
102    but the generated code could be discarded e.g. using delete_insns_since.  */
103 
104 void
105 save_pending_stack_adjust (saved_pending_stack_adjust *save)
106 {
107   save->x_pending_stack_adjust = pending_stack_adjust;
108   save->x_stack_pointer_delta = stack_pointer_delta;
109 }
110 
111 /* Restore the saved pending_stack_adjust/stack_pointer_delta.  */
112 
113 void
114 restore_pending_stack_adjust (saved_pending_stack_adjust *save)
115 {
116   if (inhibit_defer_pop == 0)
117     {
118       pending_stack_adjust = save->x_pending_stack_adjust;
119       stack_pointer_delta = save->x_stack_pointer_delta;
120     }
121 }
122 
123 /* Used internally by prefer_and_bit_test.  */
124 
125 static GTY(()) rtx and_reg;
126 static GTY(()) rtx and_test;
127 static GTY(()) rtx shift_test;
128 
129 /* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
130    where X is an arbitrary register of mode MODE.  Return true if the former
131    is preferred.  */
132 
133 static bool
134 prefer_and_bit_test (scalar_int_mode mode, int bitnum)
135 {
136   bool speed_p;
137   wide_int mask = wi::set_bit_in_zero (bitnum, GET_MODE_PRECISION (mode));
138 
139   if (and_test == 0)
140     {
141       /* Set up rtxes for the two variations.  Use NULL as a placeholder
142 	 for the BITNUM-based constants.  */
143       and_reg = gen_rtx_REG (mode, LAST_VIRTUAL_REGISTER + 1);
144       and_test = gen_rtx_AND (mode, and_reg, NULL);
145       shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
146 				const1_rtx);
147     }
148   else
149     {
150       /* Change the mode of the previously-created rtxes.  */
151       PUT_MODE (and_reg, mode);
152       PUT_MODE (and_test, mode);
153       PUT_MODE (shift_test, mode);
154       PUT_MODE (XEXP (shift_test, 0), mode);
155     }
156 
157   /* Fill in the integers.  */
158   XEXP (and_test, 1) = immed_wide_int_const (mask, mode);
159   XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);
160 
161   speed_p = optimize_insn_for_speed_p ();
162   return (rtx_cost (and_test, mode, IF_THEN_ELSE, 0, speed_p)
163 	  <= rtx_cost (shift_test, mode, IF_THEN_ELSE, 0, speed_p));
164 }
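/* As an illustration of the choice made above: for a test of bit 3 of an
   SImode register, the two rtxes costed against each other are roughly

     (and:SI (reg:SI R) (const_int 8))
     (and:SI (ashiftrt:SI (reg:SI R) (const_int 3)) (const_int 1))

   and the function reports whether the target rates the first form no more
   expensive than the second.  */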
165 
166 /* Subroutine of do_jump, dealing with exploded comparisons of the type
167    OP0 CODE OP1 .  IF_FALSE_LABEL and IF_TRUE_LABEL like in do_jump.
168    PROB is probability of jump to if_true_label.  */
169 
170 static void
171 do_jump_1 (enum tree_code code, tree op0, tree op1,
172 	   rtx_code_label *if_false_label, rtx_code_label *if_true_label,
173 	   profile_probability prob)
174 {
175   machine_mode mode;
176   rtx_code_label *drop_through_label = 0;
177   scalar_int_mode int_mode;
178 
179   switch (code)
180     {
181     case EQ_EXPR:
182       {
183         tree inner_type = TREE_TYPE (op0);
184 
185         gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
186 		    != MODE_COMPLEX_FLOAT);
187 	gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
188 		    != MODE_COMPLEX_INT);
189 
190         if (integer_zerop (op1))
191 	  do_jump (op0, if_true_label, if_false_label,
192 		   prob.invert ());
193 	else if (is_int_mode (TYPE_MODE (inner_type), &int_mode)
194 		 && !can_compare_p (EQ, int_mode, ccp_jump))
195 	  do_jump_by_parts_equality (int_mode, op0, op1, if_false_label,
196 				     if_true_label, prob);
197         else
198 	  do_compare_and_jump (op0, op1, EQ, EQ, if_false_label, if_true_label,
199 			       prob);
200         break;
201       }
202 
203     case NE_EXPR:
204       {
205         tree inner_type = TREE_TYPE (op0);
206 
207         gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
208 		    != MODE_COMPLEX_FLOAT);
209 	gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
210 		    != MODE_COMPLEX_INT);
211 
212         if (integer_zerop (op1))
213 	  do_jump (op0, if_false_label, if_true_label, prob);
214 	else if (is_int_mode (TYPE_MODE (inner_type), &int_mode)
215 		 && !can_compare_p (NE, int_mode, ccp_jump))
216 	  do_jump_by_parts_equality (int_mode, op0, op1, if_true_label,
217 				     if_false_label, prob.invert ());
218         else
219 	  do_compare_and_jump (op0, op1, NE, NE, if_false_label, if_true_label,
220 			       prob);
221         break;
222       }
223 
224     case LT_EXPR:
225       mode = TYPE_MODE (TREE_TYPE (op0));
226       if (is_int_mode (mode, &int_mode)
227 	  && ! can_compare_p (LT, int_mode, ccp_jump))
228 	do_jump_by_parts_greater (int_mode, op0, op1, 1, if_false_label,
229 				  if_true_label, prob);
230       else
231 	do_compare_and_jump (op0, op1, LT, LTU, if_false_label, if_true_label,
232 			     prob);
233       break;
234 
235     case LE_EXPR:
236       mode = TYPE_MODE (TREE_TYPE (op0));
237       if (is_int_mode (mode, &int_mode)
238 	  && ! can_compare_p (LE, int_mode, ccp_jump))
239 	do_jump_by_parts_greater (int_mode, op0, op1, 0, if_true_label,
240 				  if_false_label, prob.invert ());
241       else
242 	do_compare_and_jump (op0, op1, LE, LEU, if_false_label, if_true_label,
243 			     prob);
244       break;
245 
246     case GT_EXPR:
247       mode = TYPE_MODE (TREE_TYPE (op0));
248       if (is_int_mode (mode, &int_mode)
249 	  && ! can_compare_p (GT, int_mode, ccp_jump))
250 	do_jump_by_parts_greater (int_mode, op0, op1, 0, if_false_label,
251 				  if_true_label, prob);
252       else
253 	do_compare_and_jump (op0, op1, GT, GTU, if_false_label, if_true_label,
254 			     prob);
255       break;
256 
257     case GE_EXPR:
258       mode = TYPE_MODE (TREE_TYPE (op0));
259       if (is_int_mode (mode, &int_mode)
260 	  && ! can_compare_p (GE, int_mode, ccp_jump))
261 	do_jump_by_parts_greater (int_mode, op0, op1, 1, if_true_label,
262 				  if_false_label, prob.invert ());
263       else
264 	do_compare_and_jump (op0, op1, GE, GEU, if_false_label, if_true_label,
265 			     prob);
266       break;
267 
268     case ORDERED_EXPR:
269       do_compare_and_jump (op0, op1, ORDERED, ORDERED,
270 			   if_false_label, if_true_label, prob);
271       break;
272 
273     case UNORDERED_EXPR:
274       do_compare_and_jump (op0, op1, UNORDERED, UNORDERED,
275 			   if_false_label, if_true_label, prob);
276       break;
277 
278     case UNLT_EXPR:
279       do_compare_and_jump (op0, op1, UNLT, UNLT, if_false_label, if_true_label,
280 			   prob);
281       break;
282 
283     case UNLE_EXPR:
284       do_compare_and_jump (op0, op1, UNLE, UNLE, if_false_label, if_true_label,
285 			   prob);
286       break;
287 
288     case UNGT_EXPR:
289       do_compare_and_jump (op0, op1, UNGT, UNGT, if_false_label, if_true_label,
290 			   prob);
291       break;
292 
293     case UNGE_EXPR:
294       do_compare_and_jump (op0, op1, UNGE, UNGE, if_false_label, if_true_label,
295 			   prob);
296       break;
297 
298     case UNEQ_EXPR:
299       do_compare_and_jump (op0, op1, UNEQ, UNEQ, if_false_label, if_true_label,
300 			   prob);
301       break;
302 
303     case LTGT_EXPR:
304       do_compare_and_jump (op0, op1, LTGT, LTGT, if_false_label, if_true_label,
305 			   prob);
306       break;
307 
308     case TRUTH_ANDIF_EXPR:
309       {
310         /* Spread the probability that the expression is false evenly between
311            the two conditions, so the first condition accounts for half of the
312            total probability of being false.  The second condition accounts for
313            the other half, so its jump has a false probability of half the
314            total, relative to the probability that we reach it (i.e. the first
315            condition was true).  */
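        /* (Worked example: if the && is predicted true with probability 0.6,
           it is false with 0.4; the even split gives each operand 0.2 of
           that, so after the inversions below the first do_jump sees 0.8
           and the second sees 0.75, i.e. 0.6/0.8.)  */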
316         profile_probability op0_prob = profile_probability::uninitialized ();
317         profile_probability op1_prob = profile_probability::uninitialized ();
318         if (prob.initialized_p ())
319           {
320 	    op1_prob = prob.invert ();
321 	    op0_prob = op1_prob.split (profile_probability::even ());
322             /* Get the probability that each jump below is true.  */
323 	    op0_prob = op0_prob.invert ();
324 	    op1_prob = op1_prob.invert ();
325           }
326 	if (if_false_label == NULL)
327           {
328             drop_through_label = gen_label_rtx ();
329 	    do_jump (op0, drop_through_label, NULL, op0_prob);
330 	    do_jump (op1, NULL, if_true_label, op1_prob);
331           }
332         else
333           {
334 	    do_jump (op0, if_false_label, NULL, op0_prob);
335             do_jump (op1, if_false_label, if_true_label, op1_prob);
336           }
337         break;
338       }
339 
340     case TRUTH_ORIF_EXPR:
341       {
342         /* Spread the probability evenly between the two conditions. So
343            the first condition has half the total probability of being true.
344            The second condition has the other half of the total probability,
345            so its jump has a probability of half the total, relative to
346            the probability we reached it (i.e. the first condition was false).  */
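        /* (Worked example: with an overall true probability of 0.5, the
           first jump to the true label gets 0.25 and the second gets 1/3,
           i.e. 0.25 relative to the 0.75 chance of reaching it.)  */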
347         profile_probability op0_prob = profile_probability::uninitialized ();
348         profile_probability op1_prob = profile_probability::uninitialized ();
349         if (prob.initialized_p ())
350           {
351 	    op1_prob = prob;
352 	    op0_prob = op1_prob.split (profile_probability::even ());
353 	  }
354 	if (if_true_label == NULL)
355 	  {
356 	    drop_through_label = gen_label_rtx ();
357 	    do_jump (op0, NULL, drop_through_label, op0_prob);
358 	    do_jump (op1, if_false_label, NULL, op1_prob);
359 	  }
360 	else
361 	  {
362 	    do_jump (op0, NULL, if_true_label, op0_prob);
363 	    do_jump (op1, if_false_label, if_true_label, op1_prob);
364 	  }
365         break;
366       }
367 
368     default:
369       gcc_unreachable ();
370     }
371 
372   if (drop_through_label)
373     {
374       do_pending_stack_adjust ();
375       emit_label (drop_through_label);
376     }
377 }
378 
379 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
380    the result is zero, or IF_TRUE_LABEL if the result is one.
381    Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
382    meaning fall through in that case.
383 
384    do_jump always does any pending stack adjust except when it does not
385    actually perform a jump.  An example where there is no jump
386    is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
387 
388    PROB is probability of jump to if_true_label.  */
389 
390 static void
391 do_jump (tree exp, rtx_code_label *if_false_label,
392 	 rtx_code_label *if_true_label, profile_probability prob)
393 {
394   enum tree_code code = TREE_CODE (exp);
395   rtx temp;
396   int i;
397   tree type;
398   scalar_int_mode mode;
399   rtx_code_label *drop_through_label = NULL;
400 
401   switch (code)
402     {
403     case ERROR_MARK:
404       break;
405 
406     case INTEGER_CST:
407       {
408 	rtx_code_label *lab = integer_zerop (exp) ? if_false_label
409 						  : if_true_label;
410 	if (lab)
411 	  emit_jump (lab);
412 	break;
413       }
414 
415 #if 0
416       /* This is not true with #pragma weak  */
417     case ADDR_EXPR:
418       /* The address of something can never be zero.  */
419       if (if_true_label)
420         emit_jump (if_true_label);
421       break;
422 #endif
423 
424     case NOP_EXPR:
425       if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
426           || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
427           || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
428           || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
429         goto normal;
430       /* FALLTHRU */
431     case CONVERT_EXPR:
432       /* If we are narrowing the operand, we have to do the compare in the
433          narrower mode.  */
434       if ((TYPE_PRECISION (TREE_TYPE (exp))
435            < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
436         goto normal;
437       /* FALLTHRU */
438     case NON_LVALUE_EXPR:
439     case ABS_EXPR:
440     case ABSU_EXPR:
441     case NEGATE_EXPR:
442     case LROTATE_EXPR:
443     case RROTATE_EXPR:
444       /* These cannot change zero->nonzero or vice versa.  */
445       do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label, prob);
446       break;
447 
448     case TRUTH_NOT_EXPR:
449       do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label,
450 	       prob.invert ());
451       break;
452 
453     case COND_EXPR:
454       {
455 	rtx_code_label *label1 = gen_label_rtx ();
456 	if (!if_true_label || !if_false_label)
457 	  {
458 	    drop_through_label = gen_label_rtx ();
459 	    if (!if_true_label)
460 	      if_true_label = drop_through_label;
461 	    if (!if_false_label)
462 	      if_false_label = drop_through_label;
463 	  }
464 
465         do_pending_stack_adjust ();
466 	do_jump (TREE_OPERAND (exp, 0), label1, NULL,
467 		 profile_probability::uninitialized ());
468 	do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label, prob);
469         emit_label (label1);
470 	do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label, prob);
471 	break;
472       }
473 
474     case COMPOUND_EXPR:
475       /* Lowered by gimplify.c.  */
476       gcc_unreachable ();
477 
478     case MINUS_EXPR:
479       /* Nonzero iff operands of minus differ.  */
480       code = NE_EXPR;
481 
482       /* FALLTHRU */
483     case EQ_EXPR:
484     case NE_EXPR:
485     case LT_EXPR:
486     case LE_EXPR:
487     case GT_EXPR:
488     case GE_EXPR:
489     case ORDERED_EXPR:
490     case UNORDERED_EXPR:
491     case UNLT_EXPR:
492     case UNLE_EXPR:
493     case UNGT_EXPR:
494     case UNGE_EXPR:
495     case UNEQ_EXPR:
496     case LTGT_EXPR:
497     case TRUTH_ANDIF_EXPR:
498     case TRUTH_ORIF_EXPR:
499     other_code:
500       do_jump_1 (code, TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
501 		 if_false_label, if_true_label, prob);
502       break;
503 
504     case BIT_AND_EXPR:
505       /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
506 	 See if the former is preferred for jump tests and restore it
507 	 if so.  */
508       if (integer_onep (TREE_OPERAND (exp, 1)))
509 	{
510 	  tree exp0 = TREE_OPERAND (exp, 0);
511 	  rtx_code_label *set_label, *clr_label;
512 	  profile_probability setclr_prob = prob;
513 
514 	  /* Strip narrowing integral type conversions.  */
515 	  while (CONVERT_EXPR_P (exp0)
516 		 && TREE_OPERAND (exp0, 0) != error_mark_node
517 		 && TYPE_PRECISION (TREE_TYPE (exp0))
518 		    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
519 	    exp0 = TREE_OPERAND (exp0, 0);
520 
521 	  /* "exp0 ^ 1" inverts the sense of the single bit test.  */
522 	  if (TREE_CODE (exp0) == BIT_XOR_EXPR
523 	      && integer_onep (TREE_OPERAND (exp0, 1)))
524 	    {
525 	      exp0 = TREE_OPERAND (exp0, 0);
526 	      clr_label = if_true_label;
527 	      set_label = if_false_label;
528 	      setclr_prob = prob.invert ();
529 	    }
530 	  else
531 	    {
532 	      clr_label = if_false_label;
533 	      set_label = if_true_label;
534 	    }
535 
536 	  if (TREE_CODE (exp0) == RSHIFT_EXPR)
537 	    {
538 	      tree arg = TREE_OPERAND (exp0, 0);
539 	      tree shift = TREE_OPERAND (exp0, 1);
540 	      tree argtype = TREE_TYPE (arg);
541 	      if (TREE_CODE (shift) == INTEGER_CST
542 		  && compare_tree_int (shift, 0) >= 0
543 		  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
544 		  && prefer_and_bit_test (SCALAR_INT_TYPE_MODE (argtype),
545 					  TREE_INT_CST_LOW (shift)))
546 		{
547 		  unsigned HOST_WIDE_INT mask
548 		    = HOST_WIDE_INT_1U << TREE_INT_CST_LOW (shift);
549 		  do_jump (build2 (BIT_AND_EXPR, argtype, arg,
550 				   build_int_cstu (argtype, mask)),
551 			   clr_label, set_label, setclr_prob);
552 		  break;
553 		}
554 	    }
555 	}
556 
557       /* If we are AND'ing with a small constant, do this comparison in the
558          smallest type that fits.  If the machine doesn't have comparisons
559          that small, it will be converted back to the wider comparison.
560          This helps if we are testing the sign bit of a narrower object.
561          combine can't do this for us because it can't know whether a
562          ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */
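      /* (Illustration: a test such as "(x & 0x40) != 0" on a 32-bit int can
         be narrowed to a QImode comparison of (unsigned char) (x & 0x40)
         against zero, provided the target has QImode compare insns.)  */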
563 
564       if (! SLOW_BYTE_ACCESS
565           && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
566           && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
567           && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
568 	  && int_mode_for_size (i + 1, 0).exists (&mode)
569           && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
570           && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
571           && have_insn_for (COMPARE, TYPE_MODE (type)))
572         {
573 	  do_jump (fold_convert (type, exp), if_false_label, if_true_label,
574 		   prob);
575           break;
576         }
577 
578       if (TYPE_PRECISION (TREE_TYPE (exp)) > 1
579 	  || TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
580 	goto normal;
581 
582       /* Boolean comparisons can be compiled as TRUTH_AND_EXPR.  */
583       /* FALLTHRU */
584 
585     case TRUTH_AND_EXPR:
586       /* High branch cost, expand as the bitwise AND of the conditions.
587 	 Do the same if the RHS has side effects, because we're effectively
588 	 turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR.  */
589       if (BRANCH_COST (optimize_insn_for_speed_p (),
590 		       false) >= 4
591 	  || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
592 	goto normal;
593       code = TRUTH_ANDIF_EXPR;
594       goto other_code;
595 
596     case BIT_IOR_EXPR:
597     case TRUTH_OR_EXPR:
598       /* High branch cost, expand as the bitwise OR of the conditions.
599 	 Do the same if the RHS has side effects, because we're effectively
600 	 turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR.  */
601       if (BRANCH_COST (optimize_insn_for_speed_p (), false) >= 4
602 	  || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
603 	goto normal;
604       code = TRUTH_ORIF_EXPR;
605       goto other_code;
606 
607       /* Fall through and generate the normal code.  */
608     default:
609     normal:
610       temp = expand_normal (exp);
611       do_pending_stack_adjust ();
612       /* The RTL optimizers prefer comparisons against pseudos.  */
613       if (GET_CODE (temp) == SUBREG)
614 	{
615 	  /* Compare promoted variables in their promoted mode.  */
616 	  if (SUBREG_PROMOTED_VAR_P (temp)
617 	      && REG_P (XEXP (temp, 0)))
618 	    temp = XEXP (temp, 0);
619 	  else
620 	    temp = copy_to_reg (temp);
621 	}
622       do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
623 			       NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
624 			       GET_MODE (temp), NULL_RTX,
625 			       if_false_label, if_true_label, prob);
626     }
627 
628   if (drop_through_label)
629     {
630       do_pending_stack_adjust ();
631       emit_label (drop_through_label);
632     }
633 }
634 
635 /* Compare OP0 with OP1, word at a time, in mode MODE.
636    UNSIGNEDP says to do unsigned comparison.
637    Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */
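/* (For example, on a 32-bit target comparing two DImode values this emits a
   comparison of the high words first, and only if those are equal an
   unsigned comparison of the low words.)  */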
638 
639 static void
640 do_jump_by_parts_greater_rtx (scalar_int_mode mode, int unsignedp, rtx op0,
641 			      rtx op1, rtx_code_label *if_false_label,
642 			      rtx_code_label *if_true_label,
643 			      profile_probability prob)
644 {
645   int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
646   rtx_code_label *drop_through_label = 0;
647   bool drop_through_if_true = false, drop_through_if_false = false;
648   enum rtx_code code = GT;
649   int i;
650 
651   if (! if_true_label || ! if_false_label)
652     drop_through_label = gen_label_rtx ();
653   if (! if_true_label)
654     {
655       if_true_label = drop_through_label;
656       drop_through_if_true = true;
657     }
658   if (! if_false_label)
659     {
660       if_false_label = drop_through_label;
661       drop_through_if_false = true;
662     }
663 
664   /* Deal with the special case 0 > x: only one comparison is necessary and
665      we reverse it to avoid jumping to the drop-through label.  */
666   if (op0 == const0_rtx && drop_through_if_true && !drop_through_if_false)
667     {
668       code = LE;
669       if_true_label = if_false_label;
670       if_false_label = drop_through_label;
671       prob = prob.invert ();
672     }
673 
674   /* Compare a word at a time, high order first.  */
675   for (i = 0; i < nwords; i++)
676     {
677       rtx op0_word, op1_word;
678 
679       if (WORDS_BIG_ENDIAN)
680         {
681           op0_word = operand_subword_force (op0, i, mode);
682           op1_word = operand_subword_force (op1, i, mode);
683         }
684       else
685         {
686           op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
687           op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
688         }
689 
690       /* All but high-order word must be compared as unsigned.  */
691       do_compare_rtx_and_jump (op0_word, op1_word, code, (unsignedp || i > 0),
692 			       word_mode, NULL_RTX, NULL, if_true_label,
693 			       prob);
694 
695       /* Emit only one comparison for 0.  Do not emit the last cond jump.  */
696       if (op0 == const0_rtx || i == nwords - 1)
697 	break;
698 
699       /* Consider lower words only if these are equal.  */
700       do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
701 			       NULL_RTX, NULL, if_false_label,
702 			       prob.invert ());
703     }
704 
705   if (!drop_through_if_false)
706     emit_jump (if_false_label);
707   if (drop_through_label)
708     emit_label (drop_through_label);
709 }
710 
711 /* Given a comparison expression EXP for values too wide to be compared
712    with one insn, test the comparison and jump to the appropriate label.
713    The code of EXP is ignored; we always test GT if SWAP is 0,
714    and LT if SWAP is 1.  MODE is the mode of the two operands.  */
715 
716 static void
717 do_jump_by_parts_greater (scalar_int_mode mode, tree treeop0, tree treeop1,
718 			  int swap, rtx_code_label *if_false_label,
719 			  rtx_code_label *if_true_label,
720 			  profile_probability prob)
721 {
722   rtx op0 = expand_normal (swap ? treeop1 : treeop0);
723   rtx op1 = expand_normal (swap ? treeop0 : treeop1);
724   int unsignedp = TYPE_UNSIGNED (TREE_TYPE (treeop0));
725 
726   do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
727 				if_true_label, prob);
728 }
729 
730 /* Jump according to whether OP0 is 0.  We assume that OP0 has an integer
731    mode, MODE, that is too wide for the available compare insns.
732    Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL
733    to indicate drop through.  */
734 
735 static void
736 do_jump_by_parts_zero_rtx (scalar_int_mode mode, rtx op0,
737 			   rtx_code_label *if_false_label,
738 			   rtx_code_label *if_true_label,
739 			   profile_probability prob)
740 {
741   int nwords = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
742   rtx part;
743   int i;
744   rtx_code_label *drop_through_label = NULL;
745 
746   /* The fastest way of doing this comparison on almost any machine is to
747      "or" all the words and compare the result.  If all have to be loaded
748      from memory and this is a very wide item, it's possible this may
749      be slower, but that's highly unlikely.  */
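  /* (For instance, a TImode zero test on a 64-bit target becomes
     "(lo | hi) == 0": one IOR followed by a single word_mode compare.)  */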
750 
751   part = gen_reg_rtx (word_mode);
752   emit_move_insn (part, operand_subword_force (op0, 0, mode));
753   for (i = 1; i < nwords && part != 0; i++)
754     part = expand_binop (word_mode, ior_optab, part,
755                          operand_subword_force (op0, i, mode),
756                          part, 1, OPTAB_WIDEN);
757 
758   if (part != 0)
759     {
760       do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
761 			       NULL_RTX, if_false_label, if_true_label, prob);
762       return;
763     }
764 
765   /* If we couldn't do the "or" simply, do this with a series of compares.  */
766   if (! if_false_label)
767     if_false_label = drop_through_label = gen_label_rtx ();
768 
769   for (i = 0; i < nwords; i++)
770     do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
771                              const0_rtx, EQ, 1, word_mode, NULL_RTX,
772 			     if_false_label, NULL, prob);
773 
774   if (if_true_label)
775     emit_jump (if_true_label);
776 
777   if (drop_through_label)
778     emit_label (drop_through_label);
779 }
780 
781 /* Test for the equality of two RTX expressions OP0 and OP1 in mode MODE,
782    where MODE is an integer mode too wide to be compared with one insn.
783    Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
784    to indicate drop through.  */
785 
786 static void
787 do_jump_by_parts_equality_rtx (scalar_int_mode mode, rtx op0, rtx op1,
788 			       rtx_code_label *if_false_label,
789 			       rtx_code_label *if_true_label,
790 			       profile_probability prob)
791 {
792   int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
793   rtx_code_label *drop_through_label = NULL;
794   int i;
795 
796   if (op1 == const0_rtx)
797     {
798       do_jump_by_parts_zero_rtx (mode, op0, if_false_label, if_true_label,
799 				 prob);
800       return;
801     }
802   else if (op0 == const0_rtx)
803     {
804       do_jump_by_parts_zero_rtx (mode, op1, if_false_label, if_true_label,
805 				 prob);
806       return;
807     }
808 
809   if (! if_false_label)
810     drop_through_label = if_false_label = gen_label_rtx ();
811 
812   for (i = 0; i < nwords; i++)
813     do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
814                              operand_subword_force (op1, i, mode),
815                              EQ, 0, word_mode, NULL_RTX,
816 			     if_false_label, NULL, prob);
817 
818   if (if_true_label)
819     emit_jump (if_true_label);
820   if (drop_through_label)
821     emit_label (drop_through_label);
822 }
823 
824 /* Given an EQ_EXPR expression EXP for values too wide to be compared
825    with one insn, test the comparison and jump to the appropriate label.
826    MODE is the mode of the two operands.  */
827 
828 static void
829 do_jump_by_parts_equality (scalar_int_mode mode, tree treeop0, tree treeop1,
830 			   rtx_code_label *if_false_label,
831 			   rtx_code_label *if_true_label,
832 			   profile_probability prob)
833 {
834   rtx op0 = expand_normal (treeop0);
835   rtx op1 = expand_normal (treeop1);
836   do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
837 				 if_true_label, prob);
838 }
839 
840 /* Split a comparison into two others, the second of which has the other
841    "orderedness".  The first is always ORDERED or UNORDERED if MODE
842    does not honor NaNs (which means that it can be skipped in that case;
843    see do_compare_rtx_and_jump).
844 
845    The two conditions are written in *CODE1 and *CODE2.  Return true if
846    the conditions must be ANDed, false if they must be ORed.  */
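/* For example, LT is split into ORDERED && UNLT (ANDed, returns true),
   while UNGE is split into UNORDERED || GE (ORed, returns false).  */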
847 
848 bool
849 split_comparison (enum rtx_code code, machine_mode mode,
850 		  enum rtx_code *code1, enum rtx_code *code2)
851 {
852   switch (code)
853     {
854     case LT:
855       *code1 = ORDERED;
856       *code2 = UNLT;
857       return true;
858     case LE:
859       *code1 = ORDERED;
860       *code2 = UNLE;
861       return true;
862     case GT:
863       *code1 = ORDERED;
864       *code2 = UNGT;
865       return true;
866     case GE:
867       *code1 = ORDERED;
868       *code2 = UNGE;
869       return true;
870     case EQ:
871       *code1 = ORDERED;
872       *code2 = UNEQ;
873       return true;
874     case NE:
875       *code1 = UNORDERED;
876       *code2 = LTGT;
877       return false;
878     case UNLT:
879       *code1 = UNORDERED;
880       *code2 = LT;
881       return false;
882     case UNLE:
883       *code1 = UNORDERED;
884       *code2 = LE;
885       return false;
886     case UNGT:
887       *code1 = UNORDERED;
888       *code2 = GT;
889       return false;
890     case UNGE:
891       *code1 = UNORDERED;
892       *code2 = GE;
893       return false;
894     case UNEQ:
895       *code1 = UNORDERED;
896       *code2 = EQ;
897       return false;
898     case LTGT:
899       /* Do not turn a trapping comparison into a non-trapping one.  */
900       if (HONOR_NANS (mode))
901 	{
902           *code1 = LT;
903           *code2 = GT;
904           return false;
905 	}
906       else
907 	{
908 	  *code1 = ORDERED;
909 	  *code2 = NE;
910 	  return true;
911 	}
912     default:
913       gcc_unreachable ();
914     }
915 }
916 
917 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.
918    PROB is probability of jump to LABEL.  */
919 
920 void
921 jumpif (tree exp, rtx_code_label *label, profile_probability prob)
922 {
923   do_jump (exp, NULL, label, prob);
924 }
925 
926 /* Similar to jumpif but dealing with exploded comparisons of the type
927    OP0 CODE OP1 .  LABEL and PROB are like in jumpif.  */
928 
929 void
930 jumpif_1 (enum tree_code code, tree op0, tree op1, rtx_code_label *label,
931 	  profile_probability prob)
932 {
933   do_jump_1 (code, op0, op1, NULL, label, prob);
934 }
935 
936 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
937    PROB is probability of jump to LABEL.  */
938 
939 void
940 jumpifnot (tree exp, rtx_code_label *label, profile_probability prob)
941 {
942   do_jump (exp, label, NULL, prob.invert ());
943 }
944 
945 /* Similar to jumpifnot but dealing with exploded comparisons of the type
946    OP0 CODE OP1 .  LABEL and PROB are like in jumpifnot.  */
947 
948 void
949 jumpifnot_1 (enum tree_code code, tree op0, tree op1, rtx_code_label *label,
950 	     profile_probability prob)
951 {
952   do_jump_1 (code, op0, op1, label, NULL, prob.invert ());
953 }
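/* Illustrative use of the helpers above (a sketch only, not a quote from a
   particular caller): a condition such as "if (a < b) goto T; else goto F;"
   can be expanded as

     jumpif_1 (LT_EXPR, a, b, t_label, prob_of_T);
     emit_jump (f_label);

   leaving do_jump_1 to choose between a direct compare-and-branch and a
   by-parts expansion.  */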
954 
955 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
956    The decision as to signed or unsigned comparison must be made by the caller.
957 
958    If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
959    compared.  */
960 
961 void
962 do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
963 			 machine_mode mode, rtx size,
964 			 rtx_code_label *if_false_label,
965 			 rtx_code_label *if_true_label,
966 			 profile_probability prob)
967 {
968   rtx tem;
969   rtx_code_label *dummy_label = NULL;
970 
971   /* Reverse the comparison if that is safe and we want to jump if it is
972      false.  Also convert to the reverse comparison if the target can
973      implement it.  */
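  /* (For example, if the target cannot branch on LT in this mode but can
     branch on GE, the code below rewrites the test as GE, swaps the two
     labels and inverts PROB, which preserves the control flow.)  */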
974   if ((! if_true_label
975        || ! can_compare_p (code, mode, ccp_jump))
976       && (! FLOAT_MODE_P (mode)
977 	  || code == ORDERED || code == UNORDERED
978 	  || (! HONOR_NANS (mode) && (code == LTGT || code == UNEQ))
979 	  || (! HONOR_SNANS (mode) && (code == EQ || code == NE))))
980     {
981       enum rtx_code rcode;
982       if (FLOAT_MODE_P (mode))
983         rcode = reverse_condition_maybe_unordered (code);
984       else
985         rcode = reverse_condition (code);
986 
987       /* Canonicalize to UNORDERED for the libcall.  */
988       if (can_compare_p (rcode, mode, ccp_jump)
989 	  || (code == ORDERED && ! can_compare_p (ORDERED, mode, ccp_jump)))
990 	{
991 	  std::swap (if_true_label, if_false_label);
992 	  code = rcode;
993 	  prob = prob.invert ();
994 	}
995     }
996 
997   /* If one operand is constant, make it the second one.  Only do this
998      if the other operand is not constant as well.  */
999 
1000   if (swap_commutative_operands_p (op0, op1))
1001     {
1002       std::swap (op0, op1);
1003       code = swap_condition (code);
1004     }
1005 
1006   do_pending_stack_adjust ();
1007 
1008   code = unsignedp ? unsigned_condition (code) : code;
1009   if ((tem = simplify_relational_operation (code, mode, VOIDmode,
1010 					    op0, op1)) != 0)
1011     {
1012       if (CONSTANT_P (tem))
1013 	{
1014 	  rtx_code_label *label = (tem == const0_rtx
1015 				   || tem == CONST0_RTX (mode))
1016 					? if_false_label : if_true_label;
1017 	  if (label)
1018 	    emit_jump (label);
1019 	  return;
1020 	}
1021 
1022       code = GET_CODE (tem);
1023       mode = GET_MODE (tem);
1024       op0 = XEXP (tem, 0);
1025       op1 = XEXP (tem, 1);
1026       unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
1027     }
1028 
1029   if (! if_true_label)
1030     dummy_label = if_true_label = gen_label_rtx ();
1031 
1032   scalar_int_mode int_mode;
1033   if (is_int_mode (mode, &int_mode)
1034       && ! can_compare_p (code, int_mode, ccp_jump))
1035     {
1036       switch (code)
1037 	{
1038 	case LTU:
1039 	  do_jump_by_parts_greater_rtx (int_mode, 1, op1, op0,
1040 					if_false_label, if_true_label, prob);
1041 	  break;
1042 
1043 	case LEU:
1044 	  do_jump_by_parts_greater_rtx (int_mode, 1, op0, op1,
1045 					if_true_label, if_false_label,
1046 					prob.invert ());
1047 	  break;
1048 
1049 	case GTU:
1050 	  do_jump_by_parts_greater_rtx (int_mode, 1, op0, op1,
1051 					if_false_label, if_true_label, prob);
1052 	  break;
1053 
1054 	case GEU:
1055 	  do_jump_by_parts_greater_rtx (int_mode, 1, op1, op0,
1056 					if_true_label, if_false_label,
1057 					prob.invert ());
1058 	  break;
1059 
1060 	case LT:
1061 	  do_jump_by_parts_greater_rtx (int_mode, 0, op1, op0,
1062 					if_false_label, if_true_label, prob);
1063 	  break;
1064 
1065 	case LE:
1066 	  do_jump_by_parts_greater_rtx (int_mode, 0, op0, op1,
1067 					if_true_label, if_false_label,
1068 					prob.invert ());
1069 	  break;
1070 
1071 	case GT:
1072 	  do_jump_by_parts_greater_rtx (int_mode, 0, op0, op1,
1073 					if_false_label, if_true_label, prob);
1074 	  break;
1075 
1076 	case GE:
1077 	  do_jump_by_parts_greater_rtx (int_mode, 0, op1, op0,
1078 					if_true_label, if_false_label,
1079 					prob.invert ());
1080 	  break;
1081 
1082 	case EQ:
1083 	  do_jump_by_parts_equality_rtx (int_mode, op0, op1, if_false_label,
1084 					 if_true_label, prob);
1085 	  break;
1086 
1087 	case NE:
1088 	  do_jump_by_parts_equality_rtx (int_mode, op0, op1, if_true_label,
1089 					 if_false_label,
1090 					 prob.invert ());
1091 	  break;
1092 
1093 	default:
1094 	  gcc_unreachable ();
1095 	}
1096     }
1097   else
1098     {
1099       if (SCALAR_FLOAT_MODE_P (mode)
1100 	  && ! can_compare_p (code, mode, ccp_jump)
1101 	  && can_compare_p (swap_condition (code), mode, ccp_jump))
1102 	{
1103 	  code = swap_condition (code);
1104 	  std::swap (op0, op1);
1105 	}
1106       else if (SCALAR_FLOAT_MODE_P (mode)
1107 	       && ! can_compare_p (code, mode, ccp_jump)
1108 	       /* Never split ORDERED and UNORDERED.
1109 		  These must be implemented.  */
1110 	       && (code != ORDERED && code != UNORDERED)
1111                /* Split a floating-point comparison if
1112 		  we can jump on other conditions...  */
1113 	       && (have_insn_for (COMPARE, mode)
1114 	           /* ... or if there is no libcall for it.  */
1115 	           || code_to_optab (code) == unknown_optab))
1116         {
1117 	  enum rtx_code first_code;
1118 	  bool and_them = split_comparison (code, mode, &first_code, &code);
1119 
1120 	  /* If there are no NaNs, the first comparison should always fall
1121 	     through.  */
1122 	  if (!HONOR_NANS (mode))
1123 	    gcc_assert (first_code == (and_them ? ORDERED : UNORDERED));
1124 
1125 	  else
1126 	    {
1127 	      profile_probability cprob
1128 		= profile_probability::guessed_always ();
1129 	      if (first_code == UNORDERED)
1130 		cprob = cprob.apply_scale (1, 100);
1131 	      else if (first_code == ORDERED)
1132 		cprob = cprob.apply_scale (99, 100);
1133 	      else
1134 		cprob = profile_probability::even ();
1135 	      /* We want to split:
1136 		 if (x) goto t; // prob;
1137 		 into
1138 		 if (a) goto t; // first_prob;
1139 		 if (b) goto t; // prob;
1140 		 such that the overall probability of jumping to t
1141 		 remains the same and first_prob is prob * cprob.  */
1142 	      if (and_them)
1143 		{
1144 		  rtx_code_label *dest_label;
1145 		  prob = prob.invert ();
1146 		  profile_probability first_prob = prob.split (cprob).invert ();
1147 		  prob = prob.invert ();
1148 		  /* If we only jump if true, just bypass the second jump.  */
1149 		  if (! if_false_label)
1150 		    {
1151 		      if (! dummy_label)
1152 		        dummy_label = gen_label_rtx ();
1153 		      dest_label = dummy_label;
1154 		    }
1155 		  else
1156 		    dest_label = if_false_label;
1157                   do_compare_rtx_and_jump (op0, op1, first_code, unsignedp, mode,
1158 					   size, dest_label, NULL, first_prob);
1159 		}
1160               else
1161 		{
1162 		  profile_probability first_prob = prob.split (cprob);
1163 		  do_compare_rtx_and_jump (op0, op1, first_code, unsignedp, mode,
1164 					   size, NULL, if_true_label, first_prob);
1165 		}
1166 	    }
1167 	}
1168 
1169       emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
1170 			       if_true_label, prob);
1171     }
1172 
1173   if (if_false_label)
1174     emit_jump (if_false_label);
1175   if (dummy_label)
1176     emit_label (dummy_label);
1177 }
1178 
1179 /* Generate code for a comparison expression EXP (including code to compute
1180    the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
1181    IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
1182    generated code will drop through.
1183    SIGNED_CODE should be the rtx operation for this comparison for
1184    signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
1185 
1186    We force a stack adjustment unless there are currently
1187    things pushed on the stack that aren't yet used.  */
1188 
1189 static void
1190 do_compare_and_jump (tree treeop0, tree treeop1, enum rtx_code signed_code,
1191 		     enum rtx_code unsigned_code,
1192 		     rtx_code_label *if_false_label,
1193 		     rtx_code_label *if_true_label, profile_probability prob)
1194 {
1195   rtx op0, op1;
1196   tree type;
1197   machine_mode mode;
1198   int unsignedp;
1199   enum rtx_code code;
1200 
1201   /* Don't crash if the comparison was erroneous.  */
1202   op0 = expand_normal (treeop0);
1203   if (TREE_CODE (treeop0) == ERROR_MARK)
1204     return;
1205 
1206   op1 = expand_normal (treeop1);
1207   if (TREE_CODE (treeop1) == ERROR_MARK)
1208     return;
1209 
1210   type = TREE_TYPE (treeop0);
1211   if (TREE_CODE (treeop0) == INTEGER_CST
1212       && (TREE_CODE (treeop1) != INTEGER_CST
1213 	  || (GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type))
1214 	      > GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (treeop1))))))
1215     /* op0 might have been replaced by promoted constant, in which
1216        case the type of second argument should be used.  */
1217     type = TREE_TYPE (treeop1);
1218   mode = TYPE_MODE (type);
1219   unsignedp = TYPE_UNSIGNED (type);
1220   code = unsignedp ? unsigned_code : signed_code;
1221 
1222   /* If function pointers need to be "canonicalized" before they can
1223      be reliably compared, then canonicalize them.  Canonicalize the
1224      expression when one of the operands is a function pointer.  This
1225      handles the case where the other operand is a void pointer.  See
1226      PR middle-end/17564.  */
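  /* (This hook is provided by only a few targets -- historically PA, where
     a function pointer may refer to a descriptor rather than to code -- so
     on most machines the block below is skipped.)  */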
1227   if (targetm.have_canonicalize_funcptr_for_compare ()
1228       && ((POINTER_TYPE_P (TREE_TYPE (treeop0))
1229 	   && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0))))
1230 	  || (POINTER_TYPE_P (TREE_TYPE (treeop1))
1231 	      && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (treeop1))))))
1232     {
1233       rtx new_op0 = gen_reg_rtx (mode);
1234       rtx new_op1 = gen_reg_rtx (mode);
1235 
1236       emit_insn (targetm.gen_canonicalize_funcptr_for_compare (new_op0, op0));
1237       op0 = new_op0;
1238 
1239       emit_insn (targetm.gen_canonicalize_funcptr_for_compare (new_op1, op1));
1240       op1 = new_op1;
1241     }
1242 
1243   do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
1244                            ((mode == BLKmode)
1245                             ? expr_size (treeop0) : NULL_RTX),
1246 			   if_false_label, if_true_label, prob);
1247 }
1248 
1249 #include "gt-dojump.h"
1250