/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision etc. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant and a prior overflow indicator, and
   forces the value to fit the type.  It returns an overflow indicator.  */

#include "config.h"
#include "system.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"

static void encode PARAMS ((HOST_WIDE_INT *,
			    unsigned HOST_WIDE_INT,
			    HOST_WIDE_INT));
static void decode PARAMS ((HOST_WIDE_INT *,
			    unsigned HOST_WIDE_INT *,
			    HOST_WIDE_INT *));
static tree negate_expr PARAMS ((tree));
static tree split_tree PARAMS ((tree, enum tree_code, tree *, tree *,
				tree *, int));
static tree associate_trees PARAMS ((tree, tree, enum tree_code, tree));
static tree int_const_binop PARAMS ((enum tree_code, tree, tree, int));
static tree const_binop PARAMS ((enum tree_code, tree, tree, int));
static hashval_t size_htab_hash PARAMS ((const void *));
static int size_htab_eq PARAMS ((const void *, const void *));
static tree fold_convert PARAMS ((tree, tree));
static enum tree_code invert_tree_comparison PARAMS ((enum tree_code));
static enum tree_code swap_tree_comparison PARAMS ((enum tree_code));
static int comparison_to_compcode PARAMS ((enum tree_code));
static enum tree_code compcode_to_comparison PARAMS ((int));
static int truth_value_p PARAMS ((enum tree_code));
static int operand_equal_for_comparison_p PARAMS ((tree, tree, tree));
static int twoval_comparison_p PARAMS ((tree, tree *, tree *, int *));
static tree eval_subst PARAMS ((tree, tree, tree, tree, tree));
static tree omit_one_operand PARAMS ((tree, tree, tree));
static tree pedantic_omit_one_operand PARAMS ((tree, tree, tree));
static tree distribute_bit_expr PARAMS ((enum tree_code, tree, tree, tree));
static tree make_bit_field_ref PARAMS ((tree, tree, int, int, int));
static tree optimize_bit_field_compare PARAMS ((enum tree_code, tree,
						tree, tree));
static tree decode_field_reference PARAMS ((tree, HOST_WIDE_INT *,
					    HOST_WIDE_INT *,
					    enum machine_mode *, int *,
					    int *, tree *, tree *));
static int all_ones_mask_p PARAMS ((tree, int));
static tree sign_bit_p PARAMS ((tree, tree));
static int simple_operand_p PARAMS ((tree));
static tree range_binop PARAMS ((enum tree_code, tree, tree, int,
				 tree, int));
static tree make_range PARAMS ((tree, int *, tree *, tree *));
static tree build_range_check PARAMS ((tree, tree, int, tree, tree));
static int merge_ranges PARAMS ((int *, tree *, tree *, int, tree, tree,
				 int, tree, tree));
static tree fold_range_test PARAMS ((tree));
static tree unextend PARAMS ((tree, int, int, tree));
static tree fold_truthop PARAMS ((enum tree_code, tree, tree, tree));
static tree optimize_minmax_comparison PARAMS ((tree));
static tree extract_muldiv PARAMS ((tree, tree, enum tree_code, tree));
static tree extract_muldiv_1 PARAMS ((tree, tree, enum tree_code, tree));
static tree strip_compound_expr PARAMS ((tree, tree));
static int multiple_of_p PARAMS ((tree, tree, tree));
static tree constant_boolean_node PARAMS ((int, tree));
static int count_cond PARAMS ((tree, int));
static tree fold_binary_op_with_conditional_arg
  PARAMS ((enum tree_code, tree, tree, tree, int));
static bool fold_real_zero_addition_p PARAMS ((tree, tree, int));

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as combining them with
   AND and OR.  */
#define COMPCODE_FALSE   0
#define COMPCODE_LT      1
#define COMPCODE_EQ      2
#define COMPCODE_LE      3
#define COMPCODE_GT      4
#define COMPCODE_NE      5
#define COMPCODE_GE      6
#define COMPCODE_TRUE    7
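
/* Worked example (illustrative): each bit stands for one of the three
   possible orderings, LT (bit 0), EQ (bit 1) and GT (bit 2).  Thus
   (COMPCODE_LT | COMPCODE_EQ) == 3 == COMPCODE_LE, and the conjunction
   of a <= b and a >= b is (COMPCODE_LE & COMPCODE_GE) == 2 == COMPCODE_EQ,
   which is how ANDing and ORing comparisons reduces to bit arithmetic.  */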

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
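
/* Worked example (illustrative, using 8-bit values for brevity):
   adding 0x70 and 0x70 gives SUM = 0xE0, i.e. two positive operands
   produced a negative sum.  ~(A ^ B) has its sign bit set (the operands
   agree in sign) and so does A ^ SUM (operand and sum disagree), so the
   macro yields nonzero.  If A and B differ in sign, ~(A ^ B) clears the
   sign bit and the macro yields zero, since such additions cannot
   overflow.  */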

/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
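
/* Worked example (illustrative, assuming a 32-bit HOST_WIDE_INT, so
   BASE == 0x10000): LOWPART (0x12345678) == 0x5678 and
   HIGHPART (0x12345678) == 0x1234, and indeed
   0x5678 + 0x1234 * 0x10000 reconstructs 0x12345678.  */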

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (words, low, hi)
     HOST_WIDE_INT *words;
     unsigned HOST_WIDE_INT low;
     HOST_WIDE_INT hi;
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (words, low, hi)
     HOST_WIDE_INT *words;
     unsigned HOST_WIDE_INT *low;
     HOST_WIDE_INT *hi;
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
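
/* Usage sketch (illustrative only, not compiled): encode and decode are
   inverses, so a doubleword value survives a round trip through the
   4-word representation.  */
#if 0
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT lo;
  HOST_WIDE_INT hi;

  encode (words, 0xbeef, 42);
  decode (words, &lo, &hi);
  /* Now lo == 0xbeef and hi == 42.  */
#endif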

/* Make the integer constant T valid for its type by setting to 0 or 1 all
   the bits in the constant that don't belong in the type.

   Return 1 if a signed overflow occurs, 0 otherwise.  If OVERFLOW is
   nonzero, a signed overflow has already occurred in calculating T, so
   propagate it.  */

int
force_fit_type (t, overflow)
     tree t;
     int overflow;
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;

  if (TREE_CODE (t) == REAL_CST)
    {
      /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
	 Consider doing it via real_convert now.  */
      return overflow;
    }

  else if (TREE_CODE (t) != INTEGER_CST)
    return overflow;

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t)))
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    TREE_INT_CST_HIGH (t)
      &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
    }

  /* Unsigned types do not suffer sign extension or overflow unless they
     are a sizetype.  */
  if (TREE_UNSIGNED (TREE_TYPE (t))
      && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	    && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
    return overflow;

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
	  ? 0 != (TREE_INT_CST_HIGH (t)
		  & ((HOST_WIDE_INT) 1
		     << (prec - HOST_BITS_PER_WIDE_INT - 1)))
	  : 0 != (TREE_INT_CST_LOW (t)
		  & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
    {
      /* Value is negative:
	 set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
	TREE_INT_CST_HIGH (t)
	  |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      else
	{
	  TREE_INT_CST_HIGH (t) = -1;
	  if (prec < HOST_BITS_PER_WIDE_INT)
	    TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
	}
    }

  /* Return nonzero if signed overflow occurred.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}
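
/* Usage sketch (illustrative only, not compiled; `overflowed' is a
   hypothetical local): callers typically build a raw INTEGER_CST with
   build_int_2, attach the desired type, and let force_fit_type truncate
   or sign-extend the value and report overflow.  */
#if 0
  tree t = build_int_2 (0xff, 0);

  TREE_TYPE (t) = signed_char_type_node;
  /* 0xff does not fit in a signed char: the stored value becomes -1
     and force_fit_type returns nonzero to report the overflow.  */
  overflowed = force_fit_type (t, 0);
#endif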

/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to signed
   arithmetic.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (l1, h1, l2, h2, lv, hv)
     unsigned HOST_WIDE_INT l1, l2;
     HOST_WIDE_INT h1, h2;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
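
/* Worked example (illustrative, assuming a 64-bit HOST_WIDE_INT):
   adding the low words 0xffffffffffffffff and 1 wraps to 0, and the
   (l < l1) test detects the wrap and carries 1 into the high word.  */
#if 0
  unsigned HOST_WIDE_INT lo;
  HOST_WIDE_INT hi;

  add_double (~(unsigned HOST_WIDE_INT) 0, 0, 1, 0, &lo, &hi);
  /* Now lo == 0 and hi == 1, and no signed overflow is reported.  */
#endif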

/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (l1, h1, lv, hv)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
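
/* Worked example (illustrative): the only doubleword value whose
   negation overflows is the most negative one, where L1 == 0 and H1 has
   only its sign bit set; then -H1 == H1, so (*hv & h1) < 0 holds and
   the function returns nonzero.  For any other value with L1 == 0 the
   signs of H1 and -H1 differ and the test yields zero.  */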

/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (l1, h1, l2, h2, lv, hv)
     unsigned HOST_WIDE_INT l1, l2;
     HOST_WIDE_INT h1, h2;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset ((char *) prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1].  */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
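
/* Note on the overflow test (illustrative): after correcting for
   negative operands, the top four words of the 8-word product must be
   all zero bits when the doubleword result is non-negative and all one
   bits when it is negative; any other pattern means the true product
   needs more than two words, so the function reports overflow.  */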

/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (l1, h1, count, prec, lv, hv, arith)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1, count;
     unsigned int prec;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
     int arith;
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
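
/* Note on the shift above (illustrative): writing the contribution of
   L1 as `l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1' splits the
   shift in two so that when COUNT is zero the shift amount never equals
   the full word width, which C leaves undefined; the single shift
   `l1 >> (HOST_BITS_PER_WIDE_INT - count)' would do exactly that.  */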

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (l1, h1, count, prec, lv, hv, arith)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1, count;
     unsigned int prec;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
     int arith;
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1
		<< (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}

/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (l1, h1, count, prec, lv, hv)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1, count;
     unsigned int prec;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
529
530 /* Rotate the doubleword integer in L1, H1 left by COUNT places
531 keeping only PREC bits of result. COUNT must be positive.
532 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
533
534 void
rrotate_double(l1,h1,count,prec,lv,hv)535 rrotate_double (l1, h1, count, prec, lv, hv)
536 unsigned HOST_WIDE_INT l1;
537 HOST_WIDE_INT h1, count;
538 unsigned int prec;
539 unsigned HOST_WIDE_INT *lv;
540 HOST_WIDE_INT *hv;
541 {
542 unsigned HOST_WIDE_INT s1l, s2l;
543 HOST_WIDE_INT s1h, s2h;
544
545 count %= prec;
546 if (count < 0)
547 count += prec;
548
549 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
550 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
551 *lv = s1l | s2l;
552 *hv = s1h | s2h;
553 }
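
/* Worked example (illustrative, with arbitrary sample values): a
   rotation is the OR of two logical shifts that together cover all
   PREC bits, so rotating left by COUNT equals rotating right by
   PREC - COUNT.  */
#if 0
  unsigned HOST_WIDE_INT l1 = 123, l_a, l_b;
  HOST_WIDE_INT h1 = 456, h_a, h_b;
  unsigned int prec = 2 * HOST_BITS_PER_WIDE_INT;

  lrotate_double (l1, h1, 5, prec, &l_a, &h_a);
  rrotate_double (l1, h1, prec - 5, prec, &l_b, &h_b);
  /* Here l_a == l_b and h_a == h_b.  */
#endif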

/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (code, uns,
		      lnum_orig, hnum_orig, lden_orig, hden_orig,
		      lquo, hquo, lrem, hrem)
     enum tree_code code;
     int uns;
     unsigned HOST_WIDE_INT lnum_orig;	/* num == numerator == dividend */
     HOST_WIDE_INT hnum_orig;
     unsigned HOST_WIDE_INT lden_orig;	/* den == denominator == divisor */
     HOST_WIDE_INT hden_orig;
     unsigned HOST_WIDE_INT *lquo, *lrem;
     HOST_WIDE_INT *hquo, *hrem;
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset ((char *) quo, 0, sizeof quo);

  memset ((char *) num, 0, sizeof num);	/* to zero the extra scaling element */
  memset ((char *) den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }

      /* Ensure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{			/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0)
		den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	    {
	      quo_est--;
	      carry = 0;	/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num[num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values.  */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den < ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1;  */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1;  */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      abort ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
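
/* Worked example of the rounding modes (illustrative): dividing -8 by 3
   yields a trial quotient of -2 with remainder -2.  TRUNC_DIV_EXPR
   keeps -2; FLOOR_DIV_EXPR decrements it to -3; CEIL_DIV_EXPR leaves
   -2, since the ratio is negative; ROUND_DIV_EXPR sees 2 * |rem| == 4
   exceed |den| == 3 and also moves the quotient away from zero, to
   -3.  */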

/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (t)
     tree t;
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
    case REAL_CST:
      if (! TREE_UNSIGNED (type)
	  && 0 != (tem = fold (build1 (NEGATE_EXPR, type, t)))
	  && ! TREE_OVERFLOW (tem))
	return tem;
      break;

    case NEGATE_EXPR:
      return convert (type, TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	return convert (type,
			fold (build (MINUS_EXPR, TREE_TYPE (t),
				     TREE_OPERAND (t, 1),
				     TREE_OPERAND (t, 0))));
      break;

    default:
      break;
    }

  return convert (type, fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t)));
}

/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted, except if it is a
   literal, for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (in, code, conp, litp, minus_litp, negate_p)
     tree in;
     enum tree_code code;
     tree *conp, *litp, *minus_litp;
     int negate_p;
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || (! FLOAT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
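
/* Worked example (illustrative): splitting IN = `x - 5' with CODE
   PLUS_EXPR and NEGATE_P zero stores the literal 5 in *MINUS_LITP
   (it was subtracted), leaves *CONP and *LITP null, and returns `x'
   as the variable part, so the caller can reassemble the pieces with
   associate_trees below.  */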

/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (t1, t2, code, type)
     tree t1, t2;
     enum tree_code code;
     tree type;
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build (MINUS_EXPR, type, convert (type, t2),
			  convert (type, TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build (MINUS_EXPR, type, convert (type, t1),
			  convert (type, TREE_OPERAND (t2, 0)));
	}
      return build (code, type, convert (type, t1), convert (type, t2));
    }

  return fold (build (code, type, convert (type, t1), convert (type, t2)));
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
int_const_binop (code, arg1, arg2, notrunc)
     enum tree_code code;
     tree arg1, arg2;
     int notrunc;
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TREE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;
  int sizeof_flag = 0;

  if (SIZEOF_PTR_DERIVED (arg1) == 1 || SIZEOF_PTR_DERIVED (arg2) == 1)
    sizeof_flag = 1;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case BIT_ANDTC_EXPR:
      low = int1l & ~int2l, hi = int1h & ~int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
      /* Fall through: a right shift is a left shift by a negated count.  */
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
      /* Fall through.  */
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
	{
	  low = int1l, hi = int1h;
	  break;
	}
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }

  /* If this is for a sizetype, can be represented as one (signed)
     HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
     constants.  */
  if (is_sizetype
      && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
	  || (hi == -1 && (HOST_WIDE_INT) low < 0))
      && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
    return size_int_type_wide (low, type);
  else
    {
      t = build_int_2 (low, hi);
      TREE_TYPE (t) = TREE_TYPE (arg1);
    }

  TREE_OVERFLOW (t)
    = ((notrunc
	? (!uns || is_sizetype) && overflow
	: (force_fit_type (t, (!uns || is_sizetype) && overflow)
	   && ! no_overflow))
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));

  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (is_sizetype
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
	  || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;

  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
				| TREE_CONSTANT_OVERFLOW (arg1)
				| TREE_CONSTANT_OVERFLOW (arg2));

  if (sizeof_flag == 1)
    SIZEOF_PTR_DERIVED (t) = 1;

  return t;
}
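
/* Usage sketch (illustrative only, not compiled; `t' is a hypothetical
   local): folding the constant expression 6 * 7 at compile time.  */
#if 0
  tree six = build_int_2 (6, 0);
  tree seven = build_int_2 (7, 0);

  TREE_TYPE (six) = integer_type_node;
  TREE_TYPE (seven) = integer_type_node;
  /* Yields an INTEGER_CST of value 42 with no overflow flags set.  */
  t = int_const_binop (MULT_EXPR, six, seven, 0);
#endif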

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (code, arg1, arg2, notrunc)
     enum tree_code code;
     tree arg1, arg2;
     int notrunc;
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      tree t;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      REAL_ARITHMETIC (value, code, d1, d2);

      t = build_real (TREE_TYPE (arg1),
		      real_value_truncate (TYPE_MODE (TREE_TYPE (arg1)),
					   value));

      TREE_OVERFLOW (t)
	= (force_fit_type (t, 0)
	   | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
	= TREE_OVERFLOW (t)
	  | TREE_CONSTANT_OVERFLOW (arg1)
	  | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
	{
	case PLUS_EXPR:
	  t = build_complex (type,
			     const_binop (PLUS_EXPR, r1, r2, notrunc),
			     const_binop (PLUS_EXPR, i1, i2, notrunc));
	  break;

	case MINUS_EXPR:
	  t = build_complex (type,
			     const_binop (MINUS_EXPR, r1, r2, notrunc),
			     const_binop (MINUS_EXPR, i1, i2, notrunc));
	  break;

	case MULT_EXPR:
	  /* (r1 + i1*i) * (r2 + i2*i)
	     == (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i.  */
	  t = build_complex (type,
			     const_binop (MINUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, r2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, i2, notrunc),
					  notrunc),
			     const_binop (PLUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, i2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, r2, notrunc),
					  notrunc));
	  break;

	case RDIV_EXPR:
	  {
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2, notrunc),
			     const_binop (MULT_EXPR, i2, i2, notrunc),
			     notrunc);

	    t = build_complex (type,
			       const_binop
			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
				? TRUNC_DIV_EXPR : RDIV_EXPR,
				const_binop (PLUS_EXPR,
					     const_binop (MULT_EXPR, r1, r2,
							  notrunc),
					     const_binop (MULT_EXPR, i1, i2,
							  notrunc),
					     notrunc),
				magsquared, notrunc),
			       const_binop
			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
				? TRUNC_DIV_EXPR : RDIV_EXPR,
				const_binop (MINUS_EXPR,
					     const_binop (MULT_EXPR, i1, r2,
							  notrunc),
					     const_binop (MULT_EXPR, r1, i2,
							  notrunc),
					     notrunc),
				magsquared, notrunc));
	  }
	  break;

	default:
	  abort ();
	}
      return t;
    }
  return 0;
}

/* These are the hash table functions for the hash table of INTEGER_CST
   nodes of a sizetype.  */

/* Return the hash code for X, an INTEGER_CST.  */

static hashval_t
size_htab_hash (x)
     const void *x;
{
  tree t = (tree) x;

  return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
	  ^ htab_hash_pointer (TREE_TYPE (t))
	  ^ (TREE_OVERFLOW (t) << 20));
}

/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
   is the same as that given by *Y.  */

static int
size_htab_eq (x, y)
     const void *x;
     const void *y;
{
  tree xt = (tree) x;
  tree yt = (tree) y;

  return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
	  && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
	  && TREE_TYPE (xt) == TREE_TYPE (yt)
	  && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
}

/* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
   bits are given by NUMBER and of the sizetype represented by KIND.  */

tree
size_int_wide (number, kind)
     HOST_WIDE_INT number;
     enum size_type_kind kind;
{
  return size_int_type_wide (number, sizetype_tab[(int) kind]);
}

/* Likewise, but the desired type is specified explicitly.  */

static GTY (()) tree new_const;
static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
     htab_t size_htab;

tree
size_int_type_wide (number, type)
     HOST_WIDE_INT number;
     tree type;
{
  PTR *slot;

  if (size_htab == 0)
    {
      size_htab = htab_create (1024, size_htab_hash, size_htab_eq, NULL);
      new_const = make_node (INTEGER_CST);
    }

  /* Adjust NEW_CONST to be the constant we want.  If it's already in the
     hash table, we return the value from the hash table.  Otherwise, we
     place that in the hash table and make a new node for the next time.  */
  TREE_INT_CST_LOW (new_const) = number;
  TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
  TREE_TYPE (new_const) = type;
  TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
    = force_fit_type (new_const, 0);

  slot = htab_find_slot (size_htab, new_const, INSERT);
  if (*slot == 0)
    {
      tree t = new_const;

      *slot = (PTR) new_const;
      new_const = make_node (INTEGER_CST);
      return t;
    }
  else
    return (tree) *slot;
}

/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a sizetype.
   If the operands are constant, so is the result.  */

tree
size_binop (code, arg0, arg1)
     enum tree_code code;
     tree arg0, arg1;
{
  tree type = TREE_TYPE (arg0);

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
	return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
	       && integer_zerop (arg1))
	return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
	return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build (code, type, arg0, arg1));
}
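
/* Usage sketch (illustrative only, not compiled; `elt_size' is assumed
   to be a sizetype expression): layout code typically computes the size
   of a two-element pair from the size of one element like this.  */
#if 0
  tree pair_size = size_binop (MULT_EXPR, elt_size, size_int (2));
#endif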

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (arg0, arg1)
     tree arg0, arg1;
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* If the type is already signed, just do the simple thing.  */
  if (! TREE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = (type == bitsizetype || type == ubitsizetype
	   ? sbitsizetype : ssizetype);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, convert (ctype, arg0),
		       convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, convert (ctype, integer_zero_node),
		       convert (ctype, size_binop (MINUS_EXPR, arg1, arg0)));
}


/* Given T, a tree representing type conversion of ARG1, a constant,
   return a constant tree representing the result of conversion.  */

static tree
fold_convert (t, arg1)
     tree t;
     tree arg1;
{
  tree type = TREE_TYPE (t);
  int overflow = 0;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  /* If we would build a constant wider than GCC supports,
	     leave the conversion unfolded.  */
	  if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
	    return t;

	  /* If we are trying to make a sizetype for a small integer, use
	     size_int to pick up cached types to reduce duplicate nodes.  */
	  if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	      && !TREE_CONSTANT_OVERFLOW (arg1)
	      && compare_tree_int (arg1, 10000) < 0)
	    return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);

	  /* Given an integer constant, make new constant with new type,
	     appropriately sign-extended or truncated.  */
	  t = build_int_2 (TREE_INT_CST_LOW (arg1),
			   TREE_INT_CST_HIGH (arg1));
	  TREE_TYPE (t) = type;
	  /* Indicate an overflow if (1) ARG1 already overflowed,
	     or (2) force_fit_type indicates an overflow.
	     Tell force_fit_type that an overflow has already occurred
	     if ARG1 is a too-large unsigned value and T is signed.
	     But don't indicate an overflow if converting a pointer.  */
	  TREE_OVERFLOW (t)
	    = ((force_fit_type (t,
				(TREE_INT_CST_HIGH (arg1) < 0
				 && (TREE_UNSIGNED (type)
				     < TREE_UNSIGNED (TREE_TYPE (arg1)))))
		&& ! POINTER_TYPE_P (TREE_TYPE (arg1)))
	       || TREE_OVERFLOW (arg1));
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
	}
      else if (TREE_CODE (arg1) == REAL_CST)
	{
	  /* Don't initialize these, use assignments.
	     Initialized local aggregates don't work on old compilers.  */
	  REAL_VALUE_TYPE x;
	  REAL_VALUE_TYPE l;
	  REAL_VALUE_TYPE u;
	  tree type1 = TREE_TYPE (arg1);
	  int no_upper_bound;

	  x = TREE_REAL_CST (arg1);
	  l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));

	  no_upper_bound = (TYPE_MAX_VALUE (type) == NULL);
	  if (!no_upper_bound)
	    u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));

	  /* See if X will be in range after truncation towards 0.
	     To compensate for truncation, move the bounds away from 0,
	     but reject if X exactly equals the adjusted bounds.  */
	  REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
	  if (!no_upper_bound)
	    REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
	  /* If X is a NaN, use zero instead and show we have an overflow.
	     Otherwise, range check.  */
	  if (REAL_VALUE_ISNAN (x))
	    overflow = 1, x = dconst0;
	  else if (! (REAL_VALUES_LESS (l, x)
		      && !no_upper_bound
		      && REAL_VALUES_LESS (x, u)))
	    overflow = 1;

	  {
	    HOST_WIDE_INT low, high;
	    REAL_VALUE_TO_INT (&low, &high, x);
	    t = build_int_2 (low, high);
	  }
	  TREE_TYPE (t) = type;
	  TREE_OVERFLOW (t)
	    = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
	}
      TREE_TYPE (t) = type;
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
	    {
	      /* We make a copy of ARG1 so that we don't modify an
		 existing constant tree.  */
	      t = copy_node (arg1);
	      TREE_TYPE (t) = type;
	      return t;
	    }

	  t = build_real (type,
			  real_value_truncate (TYPE_MODE (type),
					       TREE_REAL_CST (arg1)));

	  TREE_OVERFLOW (t)
	    = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
	  return t;
	}
    }
  TREE_CONSTANT (t) = 1;
  return t;
}

/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (x)
     tree x;
{
  tree result;

  /* These things are certainly not lvalues.  */
  if (TREE_CODE (x) == NON_LVALUE_EXPR
      || TREE_CODE (x) == INTEGER_CST
      || TREE_CODE (x) == REAL_CST
      || TREE_CODE (x) == STRING_CST
      || TREE_CODE (x) == ADDR_EXPR)
    return x;

  result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
  TREE_CONSTANT (result) = TREE_CONSTANT (x);
  return result;
}

/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

tree
pedantic_non_lvalue (x)
     tree x;
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}

/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR.  */

static enum tree_code
invert_tree_comparison (code)
     enum tree_code code;
{
  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return LE_EXPR;
    case GE_EXPR:
      return LT_EXPR;
    case LT_EXPR:
      return GE_EXPR;
    case LE_EXPR:
      return GT_EXPR;
    default:
      abort ();
    }
}
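
/* Example of the floating-point hazard (illustrative): if X is a NaN,
   both X > 0.0 and X <= 0.0 are false, so LE_EXPR is not the logical
   inverse of GT_EXPR; only EQ_EXPR and NE_EXPR invert safely in the
   presence of NaNs.  */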

/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

static enum tree_code
swap_tree_comparison (code)
     enum tree_code code;
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    default:
      abort ();
    }
}


/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static int
comparison_to_compcode (code)
     enum tree_code code;
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    default:
      abort ();
    }
}

/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (code)
     int code;
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    default:
      abort ();
    }
}

/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (code)
     enum tree_code code;
{
  return (TREE_CODE_CLASS (code) == '<'
	  || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
	  || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
	  || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}

/* Return nonzero if two operands are necessarily equal.
   If ONLY_CONST is nonzero, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.  */

int
operand_equal_p (arg0, arg1, only_const)
     tree arg0, arg1;
     int only_const;
{
  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
	 Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! only_const
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return (! TREE_CONSTANT_OVERFLOW (arg0)
		&& ! TREE_CONSTANT_OVERFLOW (arg1)
		&& tree_int_cst_equal (arg0, arg1));

      case REAL_CST:
	return (! TREE_CONSTANT_OVERFLOW (arg0)
		&& ! TREE_CONSTANT_OVERFLOW (arg1)
		&& REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
					  TREE_REAL_CST (arg1)));

      case VECTOR_CST:
	{
	  tree v1, v2;

	  if (TREE_CONSTANT_OVERFLOW (arg0)
	      || TREE_CONSTANT_OVERFLOW (arg1))
	    return 0;

	  v1 = TREE_VECTOR_CST_ELTS (arg0);
	  v2 = TREE_VECTOR_CST_ELTS (arg1);
	  while (v1 && v2)
	    {
	      if (!operand_equal_p (v1, v2, only_const))
		return 0;
	      v1 = TREE_CHAIN (v1);
	      v2 = TREE_CHAIN (v2);
	    }

	  return 1;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 only_const)
		&& operand_equal_p (TREE_IMAGPART (arg0),
				    TREE_IMAGPART (arg1),
				    only_const));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			     TREE_STRING_POINTER (arg1),
			     TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	return operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0), 0);
      default:
	break;
      }

  if (only_const)
    return 0;

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case '1':
      /* Two conversions are equal only if signedness and modes match.  */
      if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
	  && (TREE_UNSIGNED (TREE_TYPE (arg0))
	      != TREE_UNSIGNED (TREE_TYPE (arg1))))
	return 0;

      return operand_equal_p (TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg1, 0), 0);

    case '<':
    case '2':
      if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
	  && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
			      0))
	return 1;

      /* For commutative ops, allow the other order.  */
      return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
	       || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
	       || TREE_CODE (arg0) == BIT_IOR_EXPR
	       || TREE_CODE (arg0) == BIT_XOR_EXPR
	       || TREE_CODE (arg0) == BIT_AND_EXPR
	       || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), 0));

    case 'r':
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contains a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
	  || TREE_SIDE_EFFECTS (arg1))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	  return operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0);

	case COMPONENT_REF:
	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 0), 0)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 1), 0));

	case BIT_FIELD_REF:
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 0), 0)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 1), 0)
		  && operand_equal_p (TREE_OPERAND (arg0, 2),
				      TREE_OPERAND (arg1, 2), 0));
	default:
	  return 0;
	}

    case 'e':
      if (TREE_CODE (arg0) == RTL_EXPR)
1958 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
1959 return 0;
1960
1961 default:
1962 return 0;
1963 }
1964 }
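
/* For example, the commutative '2' case above makes A + B and B + A
   compare equal, while the REAL_CST case keeps -0.0 and 0.0 distinct,
   since REAL_VALUES_IDENTICAL, unlike C's ==, separates them.  */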

/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (arg0, arg1, other)
     tree arg0, arg1;
     tree other;
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
         to match the longer operand.  */
      primarg1 = convert ((*lang_hooks.types.signed_or_unsigned_type)
                          (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, convert (type, primarg1), 0))
        return 1;
    }

  return 0;
}

/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (arg, cval1, cval2, save_p)
     tree arg;
     tree *cval1, *cval2;
     int *save_p;
{
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here.  */
  if (class == 'e' && code == TRUTH_NOT_EXPR)
    class = '1';
  else if (class == 'e'
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
               || code == COMPOUND_EXPR))
    class = '2';

  else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
           && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
         too complex to handle.  */
      if (*cval1 || *cval2)
        return 0;

      class = '1';
      *save_p = 1;
    }

  switch (class)
    {
    case '1':
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case '2':
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
              && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                      cval1, cval2, save_p));

    case 'c':
      return 1;

    case 'e':
      if (code == COND_EXPR)
        return (twoval_comparison_p (TREE_OPERAND (arg, 0),
                                     cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                        cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 2),
                                        cval1, cval2, save_p));
      return 0;

    case '<':
      /* First see if we can handle the first operand, then the second.  For
         the second operand, we know *CVAL1 can't be zero.  It must be that
         one side of the comparison is each of the values; test for the
         case where this isn't true by failing if the two operands
         are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
                           TREE_OPERAND (arg, 1), 0))
        return 0;

      if (*cval1 == 0)
        *cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
        ;
      else
        return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
        ;
      else
        return 0;

      return 1;

    default:
      return 0;
    }
}
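
/* As an illustration, for ARG of the form (a < b) | (a == b) the walk
   above records a in *CVAL1 and b in *CVAL2 and returns 1, while
   (a < b) | (c == d) fails because it mentions more than two values.  */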

/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (arg, old0, new0, old1, new1)
     tree arg;
     tree old0, new0, old1, new1;
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here.  */
  if (class == 'e' && code == TRUTH_NOT_EXPR)
    class = '1';
  else if (class == 'e'
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = '2';

  switch (class)
    {
    case '1':
      return fold (build1 (code, type,
                           eval_subst (TREE_OPERAND (arg, 0),
                                       old0, new0, old1, new1)));

    case '2':
      return fold (build (code, type,
                          eval_subst (TREE_OPERAND (arg, 0),
                                      old0, new0, old1, new1),
                          eval_subst (TREE_OPERAND (arg, 1),
                                      old0, new0, old1, new1)));

    case 'e':
      switch (code)
        {
        case SAVE_EXPR:
          return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

        case COMPOUND_EXPR:
          return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

        case COND_EXPR:
          return fold (build (code, type,
                              eval_subst (TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1),
                              eval_subst (TREE_OPERAND (arg, 1),
                                          old0, new0, old1, new1),
                              eval_subst (TREE_OPERAND (arg, 2),
                                          old0, new0, old1, new1)));
        default:
          break;
        }
      /* fall through - ??? */

    case '<':
      {
        tree arg0 = TREE_OPERAND (arg, 0);
        tree arg1 = TREE_OPERAND (arg, 1);

        /* We need to check both for exact equality and tree equality.  The
           former will be true if the operand has a side-effect.  In that
           case, we know the operand occurred exactly once.  */

        if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
          arg0 = new0;
        else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
          arg0 = new1;

        if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
          arg1 = new0;
        else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
          arg1 = new1;

        return fold (build (code, type, arg0, arg1));
      }

    default:
      return arg;
    }
}

/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

static tree
omit_one_operand (type, result, omitted)
     tree type, result, omitted;
{
  tree t = convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build (COMPOUND_EXPR, type, omitted, t);

  return non_lvalue (t);
}
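
/* For instance, when fold simplifies f () * 0 to 0, RESULT is
   integer_zero_node and OMITTED is the call f (); since the call has side
   effects, the result built here is the COMPOUND_EXPR (f (), 0) rather
   than a bare zero.  */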

/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand (type, result, omitted)
     tree type, result, omitted;
{
  tree t = convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build (COMPOUND_EXPR, type, omitted, t);

  return pedantic_non_lvalue (t);
}

/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).  */

tree
invert_truthvalue (arg)
     tree arg;
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  if (code == ERROR_MARK)
    return arg;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == '<')
    {
      if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
          && !flag_unsafe_math_optimizations
          && code != NE_EXPR
          && code != EQ_EXPR)
        return build1 (TRUTH_NOT_EXPR, type, arg);
      else
        return build (invert_tree_comparison (code), type,
                      TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return convert (type, build_int_2 (integer_zerop (arg), 0));

    case TRUTH_AND_EXPR:
      return build (TRUTH_OR_EXPR, type,
                    invert_truthvalue (TREE_OPERAND (arg, 0)),
                    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      return build (TRUTH_AND_EXPR, type,
                    invert_truthvalue (TREE_OPERAND (arg, 0)),
                    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
         unless the second operand is a TRUTH_NOT_EXPR in which case our
         result is the XOR of the first operand with the inside of the
         negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
        return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
                      TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
        return build (TRUTH_XOR_EXPR, type,
                      invert_truthvalue (TREE_OPERAND (arg, 0)),
                      TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      return build (TRUTH_ORIF_EXPR, type,
                    invert_truthvalue (TREE_OPERAND (arg, 0)),
                    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build (TRUTH_ANDIF_EXPR, type,
                    invert_truthvalue (TREE_OPERAND (arg, 0)),
                    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
                    invert_truthvalue (TREE_OPERAND (arg, 1)),
                    invert_truthvalue (TREE_OPERAND (arg, 2)));

    case COMPOUND_EXPR:
      return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
                    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case WITH_RECORD_EXPR:
      return build (WITH_RECORD_EXPR, type,
                    invert_truthvalue (TREE_OPERAND (arg, 0)),
                    TREE_OPERAND (arg, 1));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case FLOAT_EXPR:
      return build1 (TREE_CODE (arg), type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
        break;
      return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }
  if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
    abort ();
  return build1 (TRUTH_NOT_EXPR, type, arg);
}
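
/* The floating-point guard above matters because of NaNs: inverting
   x < y to x >= y would be wrong under IEEE semantics, since both
   comparisons are false when either operand is NaN, whereas the
   TRUTH_NOT_EXPR form stays correct.  */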

/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
        (A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (code, type, arg0, arg1)
     enum tree_code code;
     tree type;
     tree arg0, arg1;
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
          && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  return fold (build (TREE_CODE (arg0), type, common,
                      fold (build (code, type, left, right))));
}

/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
     tree inner;
     tree type;
     int bitsize, bitpos;
     int unsignedp;
{
  tree result = build (BIT_FIELD_REF, type, inner,
                       size_int (bitsize), bitsize_int (bitpos));

  TREE_UNSIGNED (result) = unsignedp;

  return result;
}

/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (code, compare_type, lhs, rhs)
     enum tree_code code;
     tree compare_type;
     tree lhs, rhs;
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit
     size is the same as the size of the underlying object, we aren't doing
     an extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
                                &lunsignedp, &lvolatilep);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
         sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
                                    &runsignedp, &rvolatilep);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
          || lunsignedp != runsignedp || offset != 0
          || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
        return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
                         const_p ? TYPE_ALIGN (TREE_TYPE (linner))
                         : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
                                TYPE_ALIGN (TREE_TYPE (rinner))),
                         word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
  unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = convert (unsigned_type, mask);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
                      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return build (code, compare_type,
                  build (BIT_AND_EXPR, unsigned_type,
                         make_bit_field_ref (linner, unsigned_type,
                                             nbitsize, nbitpos, 1),
                         mask),
                  build (BIT_AND_EXPR, unsigned_type,
                         make_bit_field_ref (rinner, unsigned_type,
                                             nbitsize, nbitpos, 1),
                         mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
                                        convert (unsigned_type, rhs),
                                        size_int (lbitsize), 0)))
        {
          warning ("comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return convert (compare_type,
                          (code == NE_EXPR
                           ? integer_one_node : integer_zero_node));
        }
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
                              size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
        {
          warning ("comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return convert (compare_type,
                          (code == NE_EXPR
                           ? integer_one_node : integer_zero_node));
        }
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = convert (type, integer_zero_node);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = fold (const_binop (BIT_AND_EXPR,
                           const_binop (LSHIFT_EXPR,
                                        convert (unsigned_type, rhs),
                                        size_int (lbitpos), 0),
                           mask, 0));

  return build (code, compare_type,
                build (BIT_AND_EXPR, unsigned_type, lhs, mask),
                rhs);
}
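
/* A sketch of the constant case above, for a hypothetical
   struct s { unsigned f : 3; } *p and the test p->f == 5, with the field
   at bit position 0: MASK works out to 7, LHS becomes the containing
   chunk (in whatever mode get_best_mode picks) ANDed with 7, and RHS
   becomes (5 << 0) & 7 == 5, so the extraction shift disappears.  */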

/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
                        pvolatilep, pmask, pand_mask)
     tree exp;
     HOST_WIDE_INT *pbitsize, *pbitpos;
     enum machine_mode *pmode;
     int *punsignedp, *pvolatilep;
     tree *pmask;
     tree *pand_mask;
{
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
        return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
                               punsignedp, pvolatilep);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* Compute the mask to access the bitfield.  */
  unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold (build (BIT_AND_EXPR, unsigned_type,
                        convert (unsigned_type, and_mask), mask));

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}

/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bit positions.  */

static int
all_ones_mask_p (mask, size)
     tree mask;
     int size;
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_2 (~0, ~0);
  TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
  force_fit_type (tmask, 0);
  return
    tree_int_cst_equal (mask,
                        const_binop (RSHIFT_EXPR,
                                     const_binop (LSHIFT_EXPR, tmask,
                                                  size_int (precision - size),
                                                  0),
                                     size_int (precision - size), 0));
}
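
/* In other words, all_ones_mask_p (MASK, 8) asks whether MASK is 0xff;
   fold_truthop uses this below to decide whether an explicit BIT_AND
   with the mask is still needed.  */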

/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (exp, val)
     tree exp;
     tree val;
{
  unsigned HOST_WIDE_INT lo;
  HOST_WIDE_INT hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
    }

  if (TREE_INT_CST_HIGH (val) == hi && TREE_INT_CST_LOW (val) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
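
/* For example, with a 32-bit EXP (and HOST_BITS_PER_WIDE_INT of at
   least 32), VAL must be 0x80000000; and via the narrowing case above,
   sign_bit_p matches VAL == 0x80 against (int) c when c has a signed
   8-bit type.  */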

/* Subroutine for fold_truthop: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (exp)
     tree exp;
{
  /* Strip any conversions that don't change the machine mode.  */
  while ((TREE_CODE (exp) == NOP_EXPR
          || TREE_CODE (exp) == CONVERT_EXPR)
         && (TYPE_MODE (TREE_TYPE (exp))
             == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
    exp = TREE_OPERAND (exp, 0);

  return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
          || (DECL_P (exp)
              && ! TREE_ADDRESSABLE (exp)
              && ! TREE_THIS_VOLATILE (exp)
              && ! DECL_NONLOCAL (exp)
              /* Don't regard global variables as simple.  They may be
                 allocated in ways unknown to the compiler (shared memory,
                 #pragma weak, etc).  */
              && ! TREE_PUBLIC (exp)
              && ! DECL_EXTERNAL (exp)
              /* Loading a static variable is unduly expensive, but global
                 registers aren't expensive.  */
              && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}

/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
        X == 2 || X == 3 || X == 4 || X == 5
   and
        X >= 2 && X <= 5
   are converted to
        (unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */

/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (code, type, arg0, upper0_p, arg1, upper1_p)
     enum tree_code code;
     tree type;
     tree arg0, arg1;
     int upper0_p, upper1_p;
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
                         arg0, convert (TREE_TYPE (arg0), arg1)));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != '<')
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      abort ();
    }

  return convert (type, result ? integer_one_node : integer_zero_node);
}

/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same type
   as the returned expression.  If EXP is not a comparison, we will most
   likely not be returning a useful value and range.  */

static tree
make_range (exp, pin_p, plow, phigh)
     tree exp;
     int *pin_p;
     tree *plow, *phigh;
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
  tree orig_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);

  while (1)
    {
      code = TREE_CODE (exp);

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
        {
          arg0 = TREE_OPERAND (exp, 0);
          if (TREE_CODE_CLASS (code) == '<'
              || TREE_CODE_CLASS (code) == '1'
              || TREE_CODE_CLASS (code) == '2')
            type = TREE_TYPE (arg0);
          if (TREE_CODE_CLASS (code) == '2'
              || TREE_CODE_CLASS (code) == '<'
              || (TREE_CODE_CLASS (code) == 'e'
                  && TREE_CODE_LENGTH (code) > 1))
            arg1 = TREE_OPERAND (exp, 1);
        }

      /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
         lose a cast by accident.  */
      if (type != NULL_TREE && orig_type == NULL_TREE)
        orig_type = type;

      switch (code)
        {
        case TRUTH_NOT_EXPR:
          in_p = ! in_p, exp = arg0;
          continue;

        case EQ_EXPR: case NE_EXPR:
        case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
          /* We can only do something if the range is testing for zero
             and if the second operand is an integer constant.  Note that
             saying something is "in" the range we make is done by
             complementing IN_P since it will set in the initial case of
             being not equal to zero; "out" is leaving it alone.  */
          if (low == 0 || high == 0
              || ! integer_zerop (low) || ! integer_zerop (high)
              || TREE_CODE (arg1) != INTEGER_CST)
            break;

          switch (code)
            {
            case NE_EXPR:  /* - [c, c]  */
              low = high = arg1;
              break;
            case EQ_EXPR:  /* + [c, c]  */
              in_p = ! in_p, low = high = arg1;
              break;
            case GT_EXPR:  /* - [-, c] */
              low = 0, high = arg1;
              break;
            case GE_EXPR:  /* + [c, -] */
              in_p = ! in_p, low = arg1, high = 0;
              break;
            case LT_EXPR:  /* - [c, -] */
              low = arg1, high = 0;
              break;
            case LE_EXPR:  /* + [-, c] */
              in_p = ! in_p, low = 0, high = arg1;
              break;
            default:
              abort ();
            }

          exp = arg0;

          /* If this is an unsigned comparison, we also know that EXP is
             greater than or equal to zero.  We base the range tests we make
             on that fact, so we record it here so we can parse existing
             range tests.  */
          if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
            {
              if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
                                  1, convert (type, integer_zero_node),
                                  NULL_TREE))
                break;

              in_p = n_in_p, low = n_low, high = n_high;

              /* If the high bound is missing, but we
                 have a low bound, reverse the range so
                 it goes from zero to the low bound minus 1.  */
              if (high == 0 && low)
                {
                  in_p = ! in_p;
                  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
                                      integer_one_node, 0);
                  low = convert (type, integer_zero_node);
                }
            }
          continue;

        case NEGATE_EXPR:
          /* (-x) IN [a,b] -> x in [-b, -a]  */
          n_low = range_binop (MINUS_EXPR, type,
                               convert (type, integer_zero_node), 0, high, 1);
          n_high = range_binop (MINUS_EXPR, type,
                                convert (type, integer_zero_node), 0, low, 0);
          low = n_low, high = n_high;
          exp = arg0;
          continue;

        case BIT_NOT_EXPR:
          /* ~ X -> -X - 1  */
          exp = build (MINUS_EXPR, type, negate_expr (arg0),
                       convert (type, integer_one_node));
          continue;

        case PLUS_EXPR:  case MINUS_EXPR:
          if (TREE_CODE (arg1) != INTEGER_CST)
            break;

          /* If EXP is signed, any overflow in the computation is undefined,
             so we don't worry about it so long as our computations on
             the bounds don't overflow.  For unsigned, overflow is defined
             and this is exactly the right thing.  */
          n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                               type, low, 0, arg1, 0);
          n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                                type, high, 1, arg1, 0);
          if ((n_low != 0 && TREE_OVERFLOW (n_low))
              || (n_high != 0 && TREE_OVERFLOW (n_high)))
            break;

          /* Check for an unsigned range which has wrapped around the maximum
             value thus making n_high < n_low, and normalize it.  */
          if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
            {
              low = range_binop (PLUS_EXPR, type, n_high, 0,
                                 integer_one_node, 0);
              high = range_binop (MINUS_EXPR, type, n_low, 0,
                                  integer_one_node, 0);

              /* If the range is of the form +/- [ x+1, x ], we won't
                 be able to normalize it.  But then, it represents the
                 whole range or the empty set, so make it
                 +/- [ -, - ].  */
              if (tree_int_cst_equal (n_low, low)
                  && tree_int_cst_equal (n_high, high))
                low = high = 0;
              else
                in_p = ! in_p;
            }
          else
            low = n_low, high = n_high;

          exp = arg0;
          continue;

        case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
          if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
            break;

          if (! INTEGRAL_TYPE_P (type)
              || (low != 0 && ! int_fits_type_p (low, type))
              || (high != 0 && ! int_fits_type_p (high, type)))
            break;

          n_low = low, n_high = high;

          if (n_low != 0)
            n_low = convert (type, n_low);

          if (n_high != 0)
            n_high = convert (type, n_high);

          /* If we're converting from an unsigned to a signed type,
             we will be doing the comparison as unsigned.  The tests above
             have already verified that LOW and HIGH are both positive.

             So we have to make sure that the original unsigned value will
             be interpreted as positive.  */
          if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
            {
              tree equiv_type = (*lang_hooks.types.type_for_mode)
                (TYPE_MODE (type), 1);
              tree high_positive;

              /* A range without an upper bound is, naturally, unbounded.
                 Since convert would have cropped a very large value, use
                 the max value for the destination type.  */
              high_positive
                = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
                  : TYPE_MAX_VALUE (type);

              if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
                high_positive = fold (build (RSHIFT_EXPR, type,
                                             convert (type, high_positive),
                                             convert (type,
                                                      integer_one_node)));

              /* If the low bound is specified, "and" the range with the
                 range for which the original unsigned value will be
                 positive.  */
              if (low != 0)
                {
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      1, n_low, n_high,
                                      1, convert (type, integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (n_in_p == in_p);
                }
              else
                {
                  /* Otherwise, "or" the range with the range of the input
                     that will be interpreted as negative.  */
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      0, n_low, n_high,
                                      1, convert (type, integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (in_p != n_in_p);
                }
            }

          exp = arg0;
          low = n_low, high = n_high;
          continue;

        default:
          break;
        }

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
                                                 exp, 0, low, 0))
                      && integer_onep (range_binop (LE_EXPR, integer_type_node,
                                                    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
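
/* Two small instances of the loop above: EXP = x > 10 comes back as x
   with *PIN_P == 0 and bounds [-, 10], i.e. "- [-, 10]" in the notation
   above; EXP = (x + 1) <= 10 goes through the PLUS_EXPR case, which
   shifts the bound, giving "+ [-, 9]" on x.  */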

/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of,
   depending on IN_P) the range.  */

static tree
build_range_check (type, exp, in_p, low, high)
     tree type;
     tree exp;
     int in_p;
     tree low, high;
{
  tree etype = TREE_TYPE (exp);
  tree value;

  if (! in_p
      && (0 != (value = build_range_check (type, exp, 1, low, high))))
    return invert_truthvalue (value);

  if (low == 0 && high == 0)
    return convert (type, integer_one_node);

  if (low == 0)
    return fold (build (LE_EXPR, type, exp, high));

  if (high == 0)
    return fold (build (GE_EXPR, type, exp, low));

  if (operand_equal_p (low, high, 0))
    return fold (build (EQ_EXPR, type, exp, low));

  if (integer_zerop (low))
    {
      if (! TREE_UNSIGNED (etype))
        {
          etype = (*lang_hooks.types.unsigned_type) (etype);
          high = convert (etype, high);
          exp = convert (etype, exp);
        }
      return build_range_check (type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
        {
          hi = 0;
          lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
        }
      else
        {
          hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
          lo = (unsigned HOST_WIDE_INT) -1;
        }

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
        {
          if (TREE_UNSIGNED (etype))
            {
              etype = (*lang_hooks.types.signed_type) (etype);
              exp = convert (etype, exp);
            }
          return fold (build (GT_EXPR, type, exp,
                              convert (etype, integer_zero_node)));
        }
    }

  if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
      && ! TREE_OVERFLOW (value))
    return build_range_check (type,
                              fold (build (MINUS_EXPR, etype, exp, low)),
                              1, convert (etype, integer_zero_node), value);

  return 0;
}
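
/* Tracing the final case above with + [2, 5]: VALUE is 3 and the
   recursive call tests EXP - 2 against [0, 3]; the zero-low-bound case
   then switches to the unsigned type, so the result is the canonical
   (unsigned) (EXP - 2) <= 3 form promised in the range-test overview.  */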

/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

static int
merge_ranges (pin_p, plow, phigh, in0_p, low0, high0, in1_p, low1, high1)
     int *pin_p;
     tree *plow, *phigh;
     int in0_p, in1_p;
     tree low0, high0, low1, high1;
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
                  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
                   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
                                 low0, 0, low1, 0))
      || (lowequal
          && integer_onep (range_binop (GT_EXPR, integer_type_node,
                                        high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
                                          high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
                                      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
         is a subset it is the result.  Otherwise, the range is from the start
         of the second to the end of the first.  */
      if (no_overlap)
        in_p = 0, low = high = 0;
      else if (subset)
        in_p = 1, low = low1, high = high1;
      else
        in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
         equal, the result is false.  If the second range is a subset of the
         first, and the ranges begin at the same place, we go from just after
         the end of the first range to the end of the second.  If the second
         range is not a subset of the first, or if it is a subset and both
         ranges end at the same place, the range starts at the start of the
         first range and ends just before the second range.
         Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
        in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
        in_p = 0, low = high = 0;
      else if (subset && lowequal)
        {
          in_p = 1, high = high0;
          low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
                             integer_one_node, 0);
        }
      else if (! subset || highequal)
        {
          in_p = 1, low = low0;
          high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
                              integer_one_node, 0);
        }
      else
        return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
         is a subset of the first, the result is false.  Otherwise,
         the range starts just after the first range and ends at the
         end of the second.  */
      if (no_overlap)
        in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
        in_p = 0, low = high = 0;
      else
        {
          in_p = 1, high = high1;
          low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
                             integer_one_node, 0);
        }
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
         is if they don't overlap.  In that case, the only time we have a
         range is if they are adjacent.  If the second is a subset of the
         first, the result is the first.  Otherwise, the range to exclude
         starts at the beginning of the first range and ends at the end of the
         second.  */
      if (no_overlap)
        {
          if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                         range_binop (PLUS_EXPR, NULL_TREE,
                                                      high0, 1,
                                                      integer_one_node, 1),
                                         1, low1, 0)))
            in_p = 0, low = low0, high = high1;
          else
            return 0;
        }
      else if (subset)
        in_p = 0, low = low0, high = high0;
      else
        in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
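
/* For example, merging + [2, 5] with + [4, 9]: the ranges overlap and
   neither subsumes the other, so the in0_p && in1_p case above yields
   their intersection, + [4, 5].  */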

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (exp)
     tree exp;
{
  int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
               || TREE_CODE (exp) == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
  tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
  tree tem;

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
                       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (TREE_TYPE (exp),
                                         lhs != 0 ? lhs
                                         : rhs != 0 ? rhs : integer_zero_node,
                                         in_p, low, high))))
    return or_op ? invert_truthvalue (tem) : tem;

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (BRANCH_COST >= 2
           && lhs != 0 && rhs != 0
           && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
               || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
           && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
         unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
         which cases we can't do this.  */
      if (simple_operand_p (lhs))
        return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
                      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
                      TREE_TYPE (exp), TREE_OPERAND (exp, 0),
                      TREE_OPERAND (exp, 1));

      else if ((*lang_hooks.decls.global_bindings_p) () == 0
               && ! contains_placeholder_p (lhs))
        {
          tree common = save_expr (lhs);

          if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
                                             or_op ? ! in0_p : in0_p,
                                             low0, high0))
              && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
                                                 or_op ? ! in1_p : in1_p,
                                                 low1, high1))))
            return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
                          ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
                          TREE_TYPE (exp), lhs, rhs);
        }
    }

  return 0;
}
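
/* The classic instance is ch >= '0' && ch <= '9': make_range gives
   + [48, -] and + [-, 57] (for ASCII), merge_ranges combines them into
   + [48, 57], and build_range_check emits (unsigned) (ch - 48) <= 9.  */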

/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (c, p, unsignedp, mask)
     tree c;
     int p;
     int unsignedp;
     tree mask;
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TREE_UNSIGNED (type))
    temp = convert ((*lang_hooks.types.signed_type) (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
  /* If necessary, convert the type back to match the type of C.  */
  if (TREE_UNSIGNED (type))
    temp = convert (type, temp);

  return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
}
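
/* A worked instance of the bit-twiddling above: with P == 8 in a 32-bit
   type, the properly sign-extended constant 0xffffff80 unextends to
   0x80, while a plain 0x80 becomes 0xffffff80; only constants that were
   sign-extended to full width end up with zero extra bits.  */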
3486
3487 /* Find ways of folding logical expressions of LHS and RHS:
3488 Try to merge two comparisons to the same innermost item.
3489 Look for range tests like "ch >= '0' && ch <= '9'".
3490 Look for combinations of simple terms on machines with expensive branches
3491 and evaluate the RHS unconditionally.
3492
3493 For example, if we have p->a == 2 && p->b == 4 and we can make an
3494 object large enough to span both A and B, we can do this with a comparison
3495 against the object ANDed with the a mask.
3496
3497 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3498 operations to do this with one comparison.
3499
3500 We check for both normal comparisons and the BIT_AND_EXPRs made this by
3501 function and the one above.
3502
3503 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3504 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3505
3506 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3507 two operands.
3508
3509 We return the simplified tree or 0 if no optimization is possible. */
3510
3511 static tree
fold_truthop(code,truth_type,lhs,rhs)3512 fold_truthop (code, truth_type, lhs, rhs)
3513 enum tree_code code;
3514 tree truth_type, lhs, rhs;
3515 {
3516 /* If this is the "or" of two comparisons, we can do something if
3517 the comparisons are NE_EXPR. If this is the "and", we can do something
3518 if the comparisons are EQ_EXPR. I.e.,
3519 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3520
3521 WANTED_CODE is this operation code. For single bit fields, we can
3522 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3523 comparison for one-bit fields. */
3524
3525 enum tree_code wanted_code;
3526 enum tree_code lcode, rcode;
3527 tree ll_arg, lr_arg, rl_arg, rr_arg;
3528 tree ll_inner, lr_inner, rl_inner, rr_inner;
3529 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3530 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3531 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3532 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3533 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3534 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3535 enum machine_mode lnmode, rnmode;
3536 tree ll_mask, lr_mask, rl_mask, rr_mask;
3537 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3538 tree l_const, r_const;
3539 tree lntype, rntype, result;
3540 int first_bit, end_bit;
3541 int volatilep;
3542
3543 /* Start by getting the comparison codes. Fail if anything is volatile.
3544 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3545 it were surrounded with a NE_EXPR. */
3546
3547 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3548 return 0;
3549
3550 lcode = TREE_CODE (lhs);
3551 rcode = TREE_CODE (rhs);
3552
3553 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3554 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3555
3556 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3557 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3558
3559 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3560 return 0;
3561
3562 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3563 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3564
3565 ll_arg = TREE_OPERAND (lhs, 0);
3566 lr_arg = TREE_OPERAND (lhs, 1);
3567 rl_arg = TREE_OPERAND (rhs, 0);
3568 rr_arg = TREE_OPERAND (rhs, 1);
3569
3570 /* Simplify ((x < y) || (x == y)) into (x <= y) and related optimizations. */
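/* A worked instance of the encoding used below, in which each of the
   outcomes <, == and > contributes one bit to the compcode: for
   (x < y) || (x == y), the OR case merges the two bits into the code
   for <= and we build (x <= y); for (x < y) && (x == y), the AND case
   leaves no bit set (COMPCODE_FALSE) and the whole test folds to
   constant zero.  */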
3571 if (simple_operand_p (ll_arg)
3572 && simple_operand_p (lr_arg)
3573 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
3574 {
3575 int compcode;
3576
3577 if (operand_equal_p (ll_arg, rl_arg, 0)
3578 && operand_equal_p (lr_arg, rr_arg, 0))
3579 {
3580 int lcompcode, rcompcode;
3581
3582 lcompcode = comparison_to_compcode (lcode);
3583 rcompcode = comparison_to_compcode (rcode);
3584 compcode = (code == TRUTH_AND_EXPR)
3585 ? lcompcode & rcompcode
3586 : lcompcode | rcompcode;
3587 }
3588 else if (operand_equal_p (ll_arg, rr_arg, 0)
3589 && operand_equal_p (lr_arg, rl_arg, 0))
3590 {
3591 int lcompcode, rcompcode;
3592
3593 rcode = swap_tree_comparison (rcode);
3594 lcompcode = comparison_to_compcode (lcode);
3595 rcompcode = comparison_to_compcode (rcode);
3596 compcode = (code == TRUTH_AND_EXPR)
3597 ? lcompcode & rcompcode
3598 : lcompcode | rcompcode;
3599 }
3600 else
3601 compcode = -1;
3602
3603 if (compcode == COMPCODE_TRUE)
3604 return convert (truth_type, integer_one_node);
3605 else if (compcode == COMPCODE_FALSE)
3606 return convert (truth_type, integer_zero_node);
3607 else if (compcode != -1)
3608 return build (compcode_to_comparison (compcode),
3609 truth_type, ll_arg, lr_arg);
3610 }
3611
3612 /* If the RHS can be evaluated unconditionally and its operands are
3613 simple, it wins to evaluate the RHS unconditionally on machines
3614 with expensive branches. In this case, this isn't a comparison
3615 that can be merged. Avoid doing this if the RHS is a floating-point
3616 comparison since those can trap. */
3617
3618 if (BRANCH_COST >= 2
3619 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3620 && simple_operand_p (rl_arg)
3621 && simple_operand_p (rr_arg))
3622 {
3623 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
3624 if (code == TRUTH_OR_EXPR
3625 && lcode == NE_EXPR && integer_zerop (lr_arg)
3626 && rcode == NE_EXPR && integer_zerop (rr_arg)
3627 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3628 return build (NE_EXPR, truth_type,
3629 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3630 ll_arg, rl_arg),
3631 integer_zero_node);
3632
3633 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
3634 if (code == TRUTH_AND_EXPR
3635 && lcode == EQ_EXPR && integer_zerop (lr_arg)
3636 && rcode == EQ_EXPR && integer_zerop (rr_arg)
3637 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3638 return build (EQ_EXPR, truth_type,
3639 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3640 ll_arg, rl_arg),
3641 integer_zero_node);
3642
3643 return build (code, truth_type, lhs, rhs);
3644 }
3645
3646 /* See if the comparisons can be merged. Then get all the parameters for
3647 each side. */
3648
3649 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3650 || (rcode != EQ_EXPR && rcode != NE_EXPR))
3651 return 0;
3652
3653 volatilep = 0;
3654 ll_inner = decode_field_reference (ll_arg,
3655 &ll_bitsize, &ll_bitpos, &ll_mode,
3656 &ll_unsignedp, &volatilep, &ll_mask,
3657 &ll_and_mask);
3658 lr_inner = decode_field_reference (lr_arg,
3659 &lr_bitsize, &lr_bitpos, &lr_mode,
3660 &lr_unsignedp, &volatilep, &lr_mask,
3661 &lr_and_mask);
3662 rl_inner = decode_field_reference (rl_arg,
3663 &rl_bitsize, &rl_bitpos, &rl_mode,
3664 &rl_unsignedp, &volatilep, &rl_mask,
3665 &rl_and_mask);
3666 rr_inner = decode_field_reference (rr_arg,
3667 &rr_bitsize, &rr_bitpos, &rr_mode,
3668 &rr_unsignedp, &volatilep, &rr_mask,
3669 &rr_and_mask);
3670
3671 /* The inner operation on the lhs of each comparison must be the
3672 same if we are to be able to do anything.
3673 Then see if we have constants. If not, the same must be true for
3674 the rhs's. */
3675 if (volatilep || ll_inner == 0 || rl_inner == 0
3676 || ! operand_equal_p (ll_inner, rl_inner, 0))
3677 return 0;
3678
3679 if (TREE_CODE (lr_arg) == INTEGER_CST
3680 && TREE_CODE (rr_arg) == INTEGER_CST)
3681 l_const = lr_arg, r_const = rr_arg;
3682 else if (lr_inner == 0 || rr_inner == 0
3683 || ! operand_equal_p (lr_inner, rr_inner, 0))
3684 return 0;
3685 else
3686 l_const = r_const = 0;
3687
3688 /* If either comparison code is not correct for our logical operation,
3689 fail. However, we can convert a one-bit comparison against zero into
3690 the opposite comparison against that bit being set in the field. */
3691
3692 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
3693 if (lcode != wanted_code)
3694 {
3695 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
3696 {
3697 /* Make the left operand unsigned, since we are only interested
3698 in the value of one bit. Otherwise we are doing the wrong
3699 thing below. */
3700 ll_unsignedp = 1;
3701 l_const = ll_mask;
3702 }
3703 else
3704 return 0;
3705 }
3706
3707 /* This is analogous to the code for l_const above. */
3708 if (rcode != wanted_code)
3709 {
3710 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
3711 {
3712 rl_unsignedp = 1;
3713 r_const = rl_mask;
3714 }
3715 else
3716 return 0;
3717 }
3718
3719 /* After this point all optimizations will generate bit-field
3720 references, which we might not want. */
3721 if (! (*lang_hooks.can_use_bit_fields_p) ())
3722 return 0;
3723
3724 /* See if we can find a mode that contains both fields being compared on
3725 the left. If we can't, fail. Otherwise, update all constants and masks
3726 to be relative to a field of that size. */
3727 first_bit = MIN (ll_bitpos, rl_bitpos);
3728 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
3729 lnmode = get_best_mode (end_bit - first_bit, first_bit,
3730 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
3731 volatilep);
3732 if (lnmode == VOIDmode)
3733 return 0;
3734
3735 lnbitsize = GET_MODE_BITSIZE (lnmode);
3736 lnbitpos = first_bit & ~ (lnbitsize - 1);
3737 lntype = (*lang_hooks.types.type_for_size) (lnbitsize, 1);
3738 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
3739
3740 if (BYTES_BIG_ENDIAN)
3741 {
3742 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
3743 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
3744 }
3745
3746 ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
3747 size_int (xll_bitpos), 0);
3748 rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
3749 size_int (xrl_bitpos), 0);
3750
3751 if (l_const)
3752 {
3753 l_const = convert (lntype, l_const);
3754 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
3755 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
3756 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
3757 fold (build1 (BIT_NOT_EXPR,
3758 lntype, ll_mask)),
3759 0)))
3760 {
3761 warning ("comparison is always %d", wanted_code == NE_EXPR);
3762
3763 return convert (truth_type,
3764 wanted_code == NE_EXPR
3765 ? integer_one_node : integer_zero_node);
3766 }
3767 }
3768 if (r_const)
3769 {
3770 r_const = convert (lntype, r_const);
3771 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
3772 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
3773 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
3774 fold (build1 (BIT_NOT_EXPR,
3775 lntype, rl_mask)),
3776 0)))
3777 {
3778 warning ("comparison is always %d", wanted_code == NE_EXPR);
3779
3780 return convert (truth_type,
3781 wanted_code == NE_EXPR
3782 ? integer_one_node : integer_zero_node);
3783 }
3784 }
3785
3786 /* If the right sides are not constant, do the same for them. Also,
3787 disallow this optimization if a size or signedness mismatch occurs
3788 between the left and right sides. */
3789 if (l_const == 0)
3790 {
3791 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
3792 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
3793 /* Make sure the two fields on the right
3794 correspond to the left without being swapped. */
3795 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
3796 return 0;
3797
3798 first_bit = MIN (lr_bitpos, rr_bitpos);
3799 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
3800 rnmode = get_best_mode (end_bit - first_bit, first_bit,
3801 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
3802 volatilep);
3803 if (rnmode == VOIDmode)
3804 return 0;
3805
3806 rnbitsize = GET_MODE_BITSIZE (rnmode);
3807 rnbitpos = first_bit & ~ (rnbitsize - 1);
3808 rntype = (*lang_hooks.types.type_for_size) (rnbitsize, 1);
3809 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
3810
3811 if (BYTES_BIG_ENDIAN)
3812 {
3813 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
3814 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
3815 }
3816
3817 lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
3818 size_int (xlr_bitpos), 0);
3819 rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
3820 size_int (xrr_bitpos), 0);
3821
3822 /* Make a mask that corresponds to both fields being compared.
3823 Do this for both items being compared. If the operands are the
3824 same size and the bits being compared are in the same position
3825 then we can do this by masking both and comparing the masked
3826 results. */
3827 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3828 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
3829 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
3830 {
3831 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3832 ll_unsignedp || rl_unsignedp);
3833 if (! all_ones_mask_p (ll_mask, lnbitsize))
3834 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
3835
3836 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
3837 lr_unsignedp || rr_unsignedp);
3838 if (! all_ones_mask_p (lr_mask, rnbitsize))
3839 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
3840
3841 return build (wanted_code, truth_type, lhs, rhs);
3842 }
3843
3844 /* There is still another way we can do something: If both pairs of
3845 fields being compared are adjacent, we may be able to make a wider
3846 field containing them both.
3847
3848 Note that we still must mask the lhs/rhs expressions. Furthermore,
3849 the mask must be shifted to account for the shift done by
3850 make_bit_field_ref. */
3851 if ((ll_bitsize + ll_bitpos == rl_bitpos
3852 && lr_bitsize + lr_bitpos == rr_bitpos)
3853 || (ll_bitpos == rl_bitpos + rl_bitsize
3854 && lr_bitpos == rr_bitpos + rr_bitsize))
3855 {
3856 tree type;
3857
3858 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
3859 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
3860 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
3861 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
3862
3863 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
3864 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
3865 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
3866 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
3867
3868 /* Convert to the smaller type before masking out unwanted bits. */
3869 type = lntype;
3870 if (lntype != rntype)
3871 {
3872 if (lnbitsize > rnbitsize)
3873 {
3874 lhs = convert (rntype, lhs);
3875 ll_mask = convert (rntype, ll_mask);
3876 type = rntype;
3877 }
3878 else if (lnbitsize < rnbitsize)
3879 {
3880 rhs = convert (lntype, rhs);
3881 lr_mask = convert (lntype, lr_mask);
3882 type = lntype;
3883 }
3884 }
3885
3886 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
3887 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
3888
3889 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
3890 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
3891
3892 return build (wanted_code, truth_type, lhs, rhs);
3893 }
3894
3895 return 0;
3896 }
3897
3898 /* Handle the case of comparisons with constants. If there is something in
3899 common between the masks, those bits of the constants must be the same.
3900 If not, the condition is always false. Test for this to avoid generating
3901 incorrect code below. */
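/* For instance, x == 1 && x == 3 tests the same bits against two
   different values, so the conjunction is detected here as always 0;
   correspondingly, x != 1 || x != 3 is always 1.  */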
3902 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
3903 if (! integer_zerop (result)
3904 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
3905 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
3906 {
3907 if (wanted_code == NE_EXPR)
3908 {
3909 warning ("`or' of unmatched not-equal tests is always 1");
3910 return convert (truth_type, integer_one_node);
3911 }
3912 else
3913 {
3914 warning ("`and' of mutually exclusive equal-tests is always 0");
3915 return convert (truth_type, integer_zero_node);
3916 }
3917 }
3918
3919 /* Construct the expression we will return. First get the component
3920 reference we will make. Unless the mask is all ones the width of
3921 that field, perform the mask operation. Then compare with the
3922 merged constant. */
3923 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3924 ll_unsignedp || rl_unsignedp);
3925
3926 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3927 if (! all_ones_mask_p (ll_mask, lnbitsize))
3928 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
3929
3930 return build (wanted_code, truth_type, result,
3931 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
3932 }
3933
3934 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
3935 constant. */
3936
3937 static tree
3938 optimize_minmax_comparison (t)
3939 tree t;
3940 {
3941 tree type = TREE_TYPE (t);
3942 tree arg0 = TREE_OPERAND (t, 0);
3943 enum tree_code op_code;
3944 tree comp_const = TREE_OPERAND (t, 1);
3945 tree minmax_const;
3946 int consts_equal, consts_lt;
3947 tree inner;
3948
3949 STRIP_SIGN_NOPS (arg0);
3950
3951 op_code = TREE_CODE (arg0);
3952 minmax_const = TREE_OPERAND (arg0, 1);
3953 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
3954 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
3955 inner = TREE_OPERAND (arg0, 0);
3956
3957 /* If something does not permit us to optimize, return the original tree. */
3958 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
3959 || TREE_CODE (comp_const) != INTEGER_CST
3960 || TREE_CONSTANT_OVERFLOW (comp_const)
3961 || TREE_CODE (minmax_const) != INTEGER_CST
3962 || TREE_CONSTANT_OVERFLOW (minmax_const))
3963 return t;
3964
3965 /* Now handle all the various comparison codes. We only handle EQ_EXPR
3966 and GT_EXPR, doing the rest with recursive calls using logical
3967 simplifications. */
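/* A worked example: MAX (X, 3) >= 3 is rewritten below as
   (MAX (X, 3) == 3) || (MAX (X, 3) > 3); the EQ_EXPR case then yields
   X <= 3 and the GT_EXPR case yields X > 3, so the disjunction is
   always true -- as it must be, since MAX (X, 3) is never below 3.  */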
3968 switch (TREE_CODE (t))
3969 {
3970 case NE_EXPR: case LT_EXPR: case LE_EXPR:
3971 return
3972 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
3973
3974 case GE_EXPR:
3975 return
3976 fold (build (TRUTH_ORIF_EXPR, type,
3977 optimize_minmax_comparison
3978 (build (EQ_EXPR, type, arg0, comp_const)),
3979 optimize_minmax_comparison
3980 (build (GT_EXPR, type, arg0, comp_const))));
3981
3982 case EQ_EXPR:
3983 if (op_code == MAX_EXPR && consts_equal)
3984 /* MAX (X, 0) == 0 -> X <= 0 */
3985 return fold (build (LE_EXPR, type, inner, comp_const));
3986
3987 else if (op_code == MAX_EXPR && consts_lt)
3988 /* MAX (X, 0) == 5 -> X == 5 */
3989 return fold (build (EQ_EXPR, type, inner, comp_const));
3990
3991 else if (op_code == MAX_EXPR)
3992 /* MAX (X, 0) == -1 -> false */
3993 return omit_one_operand (type, integer_zero_node, inner);
3994
3995 else if (consts_equal)
3996 /* MIN (X, 0) == 0 -> X >= 0 */
3997 return fold (build (GE_EXPR, type, inner, comp_const));
3998
3999 else if (consts_lt)
4000 /* MIN (X, 0) == 5 -> false */
4001 return omit_one_operand (type, integer_zero_node, inner);
4002
4003 else
4004 /* MIN (X, 0) == -1 -> X == -1 */
4005 return fold (build (EQ_EXPR, type, inner, comp_const));
4006
4007 case GT_EXPR:
4008 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4009 /* MAX (X, 0) > 0 -> X > 0
4010 MAX (X, 0) > 5 -> X > 5 */
4011 return fold (build (GT_EXPR, type, inner, comp_const));
4012
4013 else if (op_code == MAX_EXPR)
4014 /* MAX (X, 0) > -1 -> true */
4015 return omit_one_operand (type, integer_one_node, inner);
4016
4017 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4018 /* MIN (X, 0) > 0 -> false
4019 MIN (X, 0) > 5 -> false */
4020 return omit_one_operand (type, integer_zero_node, inner);
4021
4022 else
4023 /* MIN (X, 0) > -1 -> X > -1 */
4024 return fold (build (GT_EXPR, type, inner, comp_const));
4025
4026 default:
4027 return t;
4028 }
4029 }
4030
4031 /* T is an integer expression that is being multiplied, divided, or taken a
4032 modulus (CODE says which and what kind of divide or modulus) by a
4033 constant C. See if we can eliminate that operation by folding it with
4034 other operations already in T. WIDE_TYPE, if non-null, is a type that
4035 should be used for the computation if wider than our type.
4036
4037 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4038 (X * 2) + (Y * 4). We must, however, be assured that either the original
4039 expression would not overflow or that overflow is undefined for the type
4040 in the language in question.
4041
4042 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4043 the machine has a multiply-accumulate insn or that this is part of an
4044 addressing calculation.
4045
4046 If we return a non-null expression, it is an equivalent form of the
4047 original computation, but need not be in the original type. */
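/* As a worked instance of the first example: dividing (X * 8) + (Y * 16)
   by 4 reaches the PLUS_EXPR case below, which recurses into each addend;
   the MULT_EXPR case cancels each multiplier against the divisor (8 / 4
   and 16 / 4), and the pieces are reassembled as (X * 2) + (Y * 4).  */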
4048
4049 static tree
4050 extract_muldiv (t, c, code, wide_type)
4051 tree t;
4052 tree c;
4053 enum tree_code code;
4054 tree wide_type;
4055 {
4056 /* To avoid exponential search depth, refuse to allow recursion past
4057 three levels. Beyond that (1) it's highly unlikely that we'll find
4058 something interesting and (2) we've probably processed it before
4059 when we built the inner expression. */
4060
4061 static int depth;
4062 tree ret;
4063
4064 if (depth > 3)
4065 return NULL;
4066
4067 depth++;
4068 ret = extract_muldiv_1 (t, c, code, wide_type);
4069 depth--;
4070
4071 return ret;
4072 }
4073
4074 static tree
4075 extract_muldiv_1 (t, c, code, wide_type)
4076 tree t;
4077 tree c;
4078 enum tree_code code;
4079 tree wide_type;
4080 {
4081 tree type = TREE_TYPE (t);
4082 enum tree_code tcode = TREE_CODE (t);
4083 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4084 > GET_MODE_SIZE (TYPE_MODE (type)))
4085 ? wide_type : type);
4086 tree t1, t2;
4087 int same_p = tcode == code;
4088 tree op0 = NULL_TREE, op1 = NULL_TREE;
4089
4090 /* Don't deal with constants of zero here; they confuse the code below. */
4091 if (integer_zerop (c))
4092 return NULL_TREE;
4093
4094 if (TREE_CODE_CLASS (tcode) == '1')
4095 op0 = TREE_OPERAND (t, 0);
4096
4097 if (TREE_CODE_CLASS (tcode) == '2')
4098 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4099
4100 /* Note that we need not handle conditional operations here since fold
4101 already handles those cases. So just do arithmetic here. */
4102 switch (tcode)
4103 {
4104 case INTEGER_CST:
4105 /* For a constant, we can always simplify if we are a multiply
4106 or (for divide and modulus) if it is a multiple of our constant. */
4107 if (code == MULT_EXPR
4108 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4109 return const_binop (code, convert (ctype, t), convert (ctype, c), 0);
4110 break;
4111
4112 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4113 /* If op0 is an expression ... */
4114 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4115 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4116 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4117 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4118 /* ... and is unsigned, and its type is smaller than ctype,
4119 then we cannot pass through as widening. */
4120 && ((TREE_UNSIGNED (TREE_TYPE (op0))
4121 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4122 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4123 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4124 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4125 /* ... or this is a truncation (t is narrower than op0),
4126 then we cannot pass through this narrowing. */
4127 || (GET_MODE_SIZE (TYPE_MODE (type))
4128 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
4129 /* ... or signedness changes for division or modulus,
4130 then we cannot pass through this conversion. */
4131 || (code != MULT_EXPR
4132 && (TREE_UNSIGNED (ctype)
4133 != TREE_UNSIGNED (TREE_TYPE (op0))))))
4134 break;
4135
4136 /* Pass the constant down and see if we can make a simplification. If
4137 we can, replace this expression with the inner simplification for
4138 possible later conversion to our or some other type. */
4139 if (0 != (t1 = extract_muldiv (op0, convert (TREE_TYPE (op0), c), code,
4140 code == MULT_EXPR ? ctype : NULL_TREE)))
4141 return t1;
4142 break;
4143
4144 case NEGATE_EXPR: case ABS_EXPR:
4145 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4146 return fold (build1 (tcode, ctype, convert (ctype, t1)));
4147 break;
4148
4149 case MIN_EXPR: case MAX_EXPR:
4150 /* If widening the type changes the signedness, then we can't perform
4151 this optimization as that changes the result. */
4152 if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
4153 break;
4154
4155 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4156 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4157 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4158 {
4159 if (tree_int_cst_sgn (c) < 0)
4160 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4161
4162 return fold (build (tcode, ctype, convert (ctype, t1),
4163 convert (ctype, t2)));
4164 }
4165 break;
4166
4167 case WITH_RECORD_EXPR:
4168 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
4169 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
4170 TREE_OPERAND (t, 1));
4171 break;
4172
4173 case LSHIFT_EXPR: case RSHIFT_EXPR:
4174 /* If the second operand is constant, this is a multiplication
4175 or floor division, by a power of two, so we can treat it that
4176 way unless the multiplier or divisor overflows. */
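/* For example, with C == 4 and CODE a division, X << 3 is first
   rewritten as X * 8 here, which the multiply/divide cases can then
   reduce to X * 2.  */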
4177 if (TREE_CODE (op1) == INTEGER_CST
4178 /* const_binop may not detect overflow correctly,
4179 so check for it explicitly here. */
4180 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4181 && TREE_INT_CST_HIGH (op1) == 0
4182 && 0 != (t1 = convert (ctype,
4183 const_binop (LSHIFT_EXPR, size_one_node,
4184 op1, 0)))
4185 && ! TREE_OVERFLOW (t1))
4186 return extract_muldiv (build (tcode == LSHIFT_EXPR
4187 ? MULT_EXPR : FLOOR_DIV_EXPR,
4188 ctype, convert (ctype, op0), t1),
4189 c, code, wide_type);
4190 break;
4191
4192 case PLUS_EXPR: case MINUS_EXPR:
4193 /* See if we can eliminate the operation on both sides. If we can, we
4194 can return a new PLUS or MINUS. If we can't, the only remaining
4195 cases where we can do anything are if the second operand is a
4196 constant. */
4197 t1 = extract_muldiv (op0, c, code, wide_type);
4198 t2 = extract_muldiv (op1, c, code, wide_type);
4199 if (t1 != 0 && t2 != 0
4200 && (code == MULT_EXPR
4201 /* If not multiplication, we can only do this if both operands
4202 are divisible by c. */
4203 || (multiple_of_p (ctype, op0, c)
4204 && multiple_of_p (ctype, op1, c))))
4205 return fold (build (tcode, ctype, convert (ctype, t1),
4206 convert (ctype, t2)));
4207
4208 /* If this was a subtraction, negate OP1 and set it to be an addition.
4209 This simplifies the logic below. */
4210 if (tcode == MINUS_EXPR)
4211 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4212
4213 if (TREE_CODE (op1) != INTEGER_CST)
4214 break;
4215
4216 /* If either OP1 or C are negative, this optimization is not safe for
4217 some of the division and remainder types while for others we need
4218 to change the code. */
4219 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4220 {
4221 if (code == CEIL_DIV_EXPR)
4222 code = FLOOR_DIV_EXPR;
4223 else if (code == FLOOR_DIV_EXPR)
4224 code = CEIL_DIV_EXPR;
4225 else if (code != MULT_EXPR
4226 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4227 break;
4228 }
4229
4230 /* If it's a multiply or a division/modulus operation of a multiple
4231 of our constant, do the operation and verify it doesn't overflow. */
4232 if (code == MULT_EXPR
4233 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4234 {
4235 op1 = const_binop (code, convert (ctype, op1), convert (ctype, c), 0);
4236 if (op1 == 0 || TREE_OVERFLOW (op1))
4237 break;
4238 }
4239 else
4240 break;
4241
4242 /* If we have an unsigned type that is not a sizetype, we cannot widen
4243 the operation since it will change the result if the original
4244 computation overflowed. */
4245 if (TREE_UNSIGNED (ctype)
4246 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4247 && ctype != type)
4248 break;
4249
4250 /* If we were able to eliminate our operation from the first side,
4251 apply our operation to the second side and reform the PLUS. */
4252 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4253 return fold (build (tcode, ctype, convert (ctype, t1), op1));
4254
4255 /* The last case is if we are a multiply. In that case, we can
4256 apply the distributive law to commute the multiply and addition
4257 if the multiplication of the constants doesn't overflow. */
4258 if (code == MULT_EXPR)
4259 return fold (build (tcode, ctype, fold (build (code, ctype,
4260 convert (ctype, op0),
4261 convert (ctype, c))),
4262 op1));
4263
4264 break;
4265
4266 case MULT_EXPR:
4267 /* We have a special case here if we are doing something like
4268 (C * 8) % 4 since we know that's zero. */
4269 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4270 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4271 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4272 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4273 return omit_one_operand (type, integer_zero_node, op0);
4274
4275 /* ... fall through ... */
4276
4277 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4278 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4279 /* If we can extract our operation from the LHS, do so and return a
4280 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4281 do something only if the second operand is a constant. */
4282 if (same_p
4283 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4284 return fold (build (tcode, ctype, convert (ctype, t1),
4285 convert (ctype, op1)));
4286 else if (tcode == MULT_EXPR && code == MULT_EXPR
4287 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4288 return fold (build (tcode, ctype, convert (ctype, op0),
4289 convert (ctype, t1)));
4290 else if (TREE_CODE (op1) != INTEGER_CST)
4291 return 0;
4292
4293 /* If these are the same operation types, we can associate them
4294 assuming no overflow. */
4295 if (tcode == code
4296 && 0 != (t1 = const_binop (MULT_EXPR, convert (ctype, op1),
4297 convert (ctype, c), 0))
4298 && ! TREE_OVERFLOW (t1))
4299 return fold (build (tcode, ctype, convert (ctype, op0), t1));
4300
4301 /* If these operations "cancel" each other, we have the main
4302 optimizations of this pass, which occur when either constant is a
4303 multiple of the other, in which case we replace this with an
4304 operation of either CODE or TCODE.
4305
4306 If we have an unsigned type that is not a sizetype, we cannot do
4307 this since it will change the result if the original computation
4308 overflowed. */
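/* For instance, (X * 12) / 4 becomes X * 3, and (X * 4) / 12 becomes
   X / 3, assuming the original multiplication is known not to have
   overflowed.  */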
4309 if ((! TREE_UNSIGNED (ctype)
4310 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4311 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4312 || (tcode == MULT_EXPR
4313 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4314 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4315 {
4316 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4317 return fold (build (tcode, ctype, convert (ctype, op0),
4318 convert (ctype,
4319 const_binop (TRUNC_DIV_EXPR,
4320 op1, c, 0))));
4321 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4322 return fold (build (code, ctype, convert (ctype, op0),
4323 convert (ctype,
4324 const_binop (TRUNC_DIV_EXPR,
4325 c, op1, 0))));
4326 }
4327 break;
4328
4329 default:
4330 break;
4331 }
4332
4333 return 0;
4334 }
4335
4336 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4337 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4338 that we may sometimes modify the tree. */
4339
4340 static tree
4341 strip_compound_expr (t, s)
4342 tree t;
4343 tree s;
4344 {
4345 enum tree_code code = TREE_CODE (t);
4346
4347 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4348 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4349 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4350 return TREE_OPERAND (t, 1);
4351
4352 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4353 don't bother handling any other types. */
4354 else if (code == COND_EXPR)
4355 {
4356 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4357 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4358 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4359 }
4360 else if (TREE_CODE_CLASS (code) == '1')
4361 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4362 else if (TREE_CODE_CLASS (code) == '<'
4363 || TREE_CODE_CLASS (code) == '2')
4364 {
4365 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4366 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4367 }
4368
4369 return t;
4370 }
4371
4372 /* Return a node which has the indicated constant VALUE (either 0 or
4373 1), and is of the indicated TYPE. */
4374
4375 static tree
4376 constant_boolean_node (value, type)
4377 int value;
4378 tree type;
4379 {
4380 if (type == integer_type_node)
4381 return value ? integer_one_node : integer_zero_node;
4382 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4383 return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node :
4384 integer_zero_node);
4385 else
4386 {
4387 tree t = build_int_2 (value, 0);
4388
4389 TREE_TYPE (t) = type;
4390 return t;
4391 }
4392 }
4393
4394 /* Utility function for the following routine, to see how complex a nesting of
4395 COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4396 we don't care (to avoid spending too much time on complex expressions). */
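/* For example, count_cond applied to a ? (b ? x : y) : z with a generous
   limit returns 2: one for the outer COND_EXPR and one for the COND_EXPR
   nested in its true arm.  */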
4397
4398 static int
4399 count_cond (expr, lim)
4400 tree expr;
4401 int lim;
4402 {
4403 int ctrue, cfalse;
4404
4405 if (TREE_CODE (expr) != COND_EXPR)
4406 return 0;
4407 else if (lim <= 0)
4408 return 0;
4409
4410 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4411 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4412 return MIN (lim, 1 + ctrue + cfalse);
4413 }
4414
4415 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4416 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4417 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4418 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
4419 COND is the first argument to CODE; otherwise (as in the example
4420 given here), it is the second argument. TYPE is the type of the
4421 original expression. */
4422
4423 static tree
4424 fold_binary_op_with_conditional_arg (code, type, cond, arg, cond_first_p)
4425 enum tree_code code;
4426 tree type;
4427 tree cond;
4428 tree arg;
4429 int cond_first_p;
4430 {
4431 tree test, true_value, false_value;
4432 tree lhs = NULL_TREE;
4433 tree rhs = NULL_TREE;
4434 /* In the end, we'll produce a COND_EXPR. Both arms of the
4435 conditional expression will be binary operations. The left-hand
4436 side of the expression to be executed if the condition is true
4437 will be pointed to by TRUE_LHS. Similarly, the right-hand side
4438 of the expression to be executed if the condition is true will be
4439 pointed to by TRUE_RHS. FALSE_LHS and FALSE_RHS are analogous --
4440 but apply to the expression to be executed if the conditional is
4441 false. */
4442 tree *true_lhs;
4443 tree *true_rhs;
4444 tree *false_lhs;
4445 tree *false_rhs;
4446 /* These are the codes to use for the left-hand side and right-hand
4447 side of the COND_EXPR. Normally, they are the same as CODE. */
4448 enum tree_code lhs_code = code;
4449 enum tree_code rhs_code = code;
4450 /* And these are the types of the expressions. */
4451 tree lhs_type = type;
4452 tree rhs_type = type;
4453 int save = 0;
4454
4455 if (cond_first_p)
4456 {
4457 true_rhs = false_rhs = &arg;
4458 true_lhs = &true_value;
4459 false_lhs = &false_value;
4460 }
4461 else
4462 {
4463 true_lhs = false_lhs = &arg;
4464 true_rhs = &true_value;
4465 false_rhs = &false_value;
4466 }
4467
4468 if (TREE_CODE (cond) == COND_EXPR)
4469 {
4470 test = TREE_OPERAND (cond, 0);
4471 true_value = TREE_OPERAND (cond, 1);
4472 false_value = TREE_OPERAND (cond, 2);
4473 /* If this operand is a throw-expression (and hence has void type),
4474 it does not make sense to try to perform a logical or arithmetic
4475 operation involving it. Instead of building `a + throw 3' for
4476 example, we simply build `a, throw 3'. */
4477 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4478 {
4479 if (! cond_first_p)
4480 {
4481 lhs_code = COMPOUND_EXPR;
4482 lhs_type = void_type_node;
4483 }
4484 else
4485 lhs = true_value;
4486 }
4487 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4488 {
4489 if (! cond_first_p)
4490 {
4491 rhs_code = COMPOUND_EXPR;
4492 rhs_type = void_type_node;
4493 }
4494 else
4495 rhs = false_value;
4496 }
4497 }
4498 else
4499 {
4500 tree testtype = TREE_TYPE (cond);
4501 test = cond;
4502 true_value = convert (testtype, integer_one_node);
4503 false_value = convert (testtype, integer_zero_node);
4504 }
4505
4506 /* If ARG is complex we want to make sure we only evaluate
4507 it once. Though this is only required if it is volatile, it
4508 might be more efficient even if it is not. However, if we
4509 succeed in folding one part to a constant, we do not need
4510 to make this SAVE_EXPR. Since we do this optimization
4511 primarily to see if we do end up with constant and this
4512 SAVE_EXPR interferes with later optimizations, suppressing
4513 it when we can is important.
4514
4515 If we are not in a function, we can't make a SAVE_EXPR, so don't
4516 try to do so. Don't try to see if the result is a constant
4517 if an arm is a COND_EXPR since we get exponential behavior
4518 in that case. */
4519
4520 if (TREE_CODE (arg) == SAVE_EXPR)
4521 save = 1;
4522 else if (lhs == 0 && rhs == 0
4523 && !TREE_CONSTANT (arg)
4524 && (*lang_hooks.decls.global_bindings_p) () == 0
4525 && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
4526 || TREE_SIDE_EFFECTS (arg)))
4527 {
4528 if (TREE_CODE (true_value) != COND_EXPR)
4529 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4530
4531 if (TREE_CODE (false_value) != COND_EXPR)
4532 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4533
4534 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4535 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4536 {
4537 arg = save_expr (arg);
4538 lhs = rhs = 0;
4539 save = 1;
4540 }
4541 }
4542
4543 if (lhs == 0)
4544 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4545 if (rhs == 0)
4546 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4547
4548 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4549
4550 if (save)
4551 return build (COMPOUND_EXPR, type,
4552 convert (void_type_node, arg),
4553 strip_compound_expr (test, arg));
4554 else
4555 return convert (type, test);
4556 }
4557
4558
4559 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4560
4561 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4562 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4563 ADDEND is the same as X.
4564
4565 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4566 and finite. The problematic cases are when X is zero, and its mode
4567 has signed zeros. In the case of rounding towards -infinity,
4568 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4569 modes, X + 0 is not the same as X because -0 + 0 is 0. */
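/* Concretely, under IEEE semantics: X + 0.0 evaluates to +0.0 when X is
   -0.0, so folding it to X would wrongly yield -0.0; and when rounding
   towards -infinity, X - 0.0 evaluates to -0.0 when X is +0.0, so that
   fold would wrongly yield +0.0.  Hence the two checks below.  */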
4570
4571 static bool
4572 fold_real_zero_addition_p (type, addend, negate)
4573 tree type, addend;
4574 int negate;
4575 {
4576 if (!real_zerop (addend))
4577 return false;
4578
4579 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4580 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4581 return true;
4582
4583 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4584 if (TREE_CODE (addend) == REAL_CST
4585 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4586 negate = !negate;
4587
4588 /* The mode has signed zeros, and we have to honor their sign.
4589 In this situation, there is only one case we can return true for.
4590 X - 0 is the same as X unless rounding towards -infinity is
4591 supported. */
4592 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
4593 }
4594
4595
4596 /* Perform constant folding and related simplification of EXPR.
4597 The related simplifications include x*1 => x, x*0 => 0, etc.,
4598 and application of the associative law.
4599 NOP_EXPR conversions may be removed freely (as long as we
4600 are careful not to change the C type of the overall expression)
4601 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
4602 but we can constant-fold them if they have constant operands. */
4603
4604 tree
4605 fold (expr)
4606 tree expr;
4607 {
4608 tree t = expr;
4609 tree t1 = NULL_TREE;
4610 tree tem;
4611 tree type = TREE_TYPE (expr);
4612 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4613 enum tree_code code = TREE_CODE (t);
4614 int kind = TREE_CODE_CLASS (code);
4615 int invert;
4616 /* WINS will be nonzero when the switch is done
4617 if all operands are constant. */
4618 int wins = 1;
4619
4620 /* Don't try to process an RTL_EXPR since its operands aren't trees.
4621 Likewise for a SAVE_EXPR that's already been evaluated. */
4622 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
4623 return t;
4624
4625 /* Return right away if a constant. */
4626 if (kind == 'c')
4627 return t;
4628
4629 #ifdef MAX_INTEGER_COMPUTATION_MODE
4630 check_max_integer_computation_mode (expr);
4631 #endif
4632
4633 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
4634 {
4635 tree subop;
4636
4637 /* Special case for conversion ops that can have fixed point args. */
4638 arg0 = TREE_OPERAND (t, 0);
4639
4640 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
4641 if (arg0 != 0)
4642 STRIP_SIGN_NOPS (arg0);
4643
4644 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
4645 subop = TREE_REALPART (arg0);
4646 else
4647 subop = arg0;
4648
4649 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
4650 && TREE_CODE (subop) != REAL_CST)
4652 /* Note that TREE_CONSTANT isn't enough:
4653 static var addresses are constant but we can't
4654 do arithmetic on them. */
4655 wins = 0;
4656 }
4657 else if (IS_EXPR_CODE_CLASS (kind) || kind == 'r')
4658 {
4659 int len = first_rtl_op (code);
4660 int i;
4661 for (i = 0; i < len; i++)
4662 {
4663 tree op = TREE_OPERAND (t, i);
4664 tree subop;
4665
4666 if (op == 0)
4667 continue; /* Valid for CALL_EXPR, at least. */
4668
4669 if (kind == '<' || code == RSHIFT_EXPR)
4670 {
4671 /* Signedness matters here. Perhaps we can refine this
4672 later. */
4673 STRIP_SIGN_NOPS (op);
4674 }
4675 else
4676 /* Strip any conversions that don't change the mode. */
4677 STRIP_NOPS (op);
4678
4679 if (TREE_CODE (op) == COMPLEX_CST)
4680 subop = TREE_REALPART (op);
4681 else
4682 subop = op;
4683
4684 if (TREE_CODE (subop) != INTEGER_CST
4685 && TREE_CODE (subop) != REAL_CST)
4686 /* Note that TREE_CONSTANT isn't enough:
4687 static var addresses are constant but we can't
4688 do arithmetic on them. */
4689 wins = 0;
4690
4691 if (i == 0)
4692 arg0 = op;
4693 else if (i == 1)
4694 arg1 = op;
4695 }
4696 }
4697
4698 /* If this is a commutative operation, and ARG0 is a constant, move it
4699 to ARG1 to reduce the number of tests below. */
4700 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
4701 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
4702 || code == BIT_AND_EXPR)
4703 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
4704 {
4705 tem = arg0; arg0 = arg1; arg1 = tem;
4706
4707 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
4708 TREE_OPERAND (t, 1) = tem;
4709 }
4710
4711 /* Now WINS is set as described above,
4712 ARG0 is the first operand of EXPR,
4713 and ARG1 is the second operand (if it has more than one operand).
4714
4715 First check for cases where an arithmetic operation is applied to a
4716 compound, conditional, or comparison operation. Push the arithmetic
4717 operation inside the compound or conditional to see if any folding
4718 can then be done. Convert comparison to conditional for this purpose.
4719 This also optimizes non-constant cases that used to be done in
4720 expand_expr.
4721
4722 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
4723 one of the operands is a comparison and the other is a comparison, a
4724 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
4725 code below would make the expression more complex. Change it to a
4726 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
4727 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
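/* For example, (x < y) & (z < w) is rewritten here as the truth
   operation (x < y) && (z < w), and (x < y) == (z < w) becomes the
   inversion of (x < y) ^ (z < w).  */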
4728
4729 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
4730 || code == EQ_EXPR || code == NE_EXPR)
4731 && ((truth_value_p (TREE_CODE (arg0))
4732 && (truth_value_p (TREE_CODE (arg1))
4733 || (TREE_CODE (arg1) == BIT_AND_EXPR
4734 && integer_onep (TREE_OPERAND (arg1, 1)))))
4735 || (truth_value_p (TREE_CODE (arg1))
4736 && (truth_value_p (TREE_CODE (arg0))
4737 || (TREE_CODE (arg0) == BIT_AND_EXPR
4738 && integer_onep (TREE_OPERAND (arg0, 1)))))))
4739 {
4740 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
4741 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
4742 : TRUTH_XOR_EXPR,
4743 type, arg0, arg1));
4744
4745 if (code == EQ_EXPR)
4746 t = invert_truthvalue (t);
4747
4748 return t;
4749 }
4750
4751 if (TREE_CODE_CLASS (code) == '1')
4752 {
4753 if (TREE_CODE (arg0) == COMPOUND_EXPR)
4754 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4755 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
4756 else if (TREE_CODE (arg0) == COND_EXPR)
4757 {
4758 tree arg01 = TREE_OPERAND (arg0, 1);
4759 tree arg02 = TREE_OPERAND (arg0, 2);
4760 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
4761 arg01 = fold (build1 (code, type, arg01));
4762 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
4763 arg02 = fold (build1 (code, type, arg02));
4764 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
4765 arg01, arg02));
4766
4767 /* If this was a conversion, and all we did was to move into
4768 inside the COND_EXPR, bring it back out. But leave it if
4769 it is a conversion from integer to integer and the
4770 result precision is no wider than a word since such a
4771 conversion is cheap and may be optimized away by combine,
4772 while it couldn't if it were outside the COND_EXPR. Then return
4773 so we don't get into an infinite recursion loop taking the
4774 conversion out and then back in. */
4775
4776 if ((code == NOP_EXPR || code == CONVERT_EXPR
4777 || code == NON_LVALUE_EXPR)
4778 && TREE_CODE (t) == COND_EXPR
4779 && TREE_CODE (TREE_OPERAND (t, 1)) == code
4780 && TREE_CODE (TREE_OPERAND (t, 2)) == code
4781 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1)))
4782 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 2)))
4783 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
4784 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
4785 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
4786 && (INTEGRAL_TYPE_P
4787 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
4788 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
4789 t = build1 (code, type,
4790 build (COND_EXPR,
4791 TREE_TYPE (TREE_OPERAND
4792 (TREE_OPERAND (t, 1), 0)),
4793 TREE_OPERAND (t, 0),
4794 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
4795 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
4796 return t;
4797 }
4798 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
4799 return fold (build (COND_EXPR, type, arg0,
4800 fold (build1 (code, type, integer_one_node)),
4801 fold (build1 (code, type, integer_zero_node))));
4802 }
4803 else if (TREE_CODE_CLASS (code) == '2'
4804 || TREE_CODE_CLASS (code) == '<')
4805 {
4806 if (TREE_CODE (arg1) == COMPOUND_EXPR
4807 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg1, 0))
4808 && ! TREE_SIDE_EFFECTS (arg0))
4809 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
4810 fold (build (code, type,
4811 arg0, TREE_OPERAND (arg1, 1))));
4812 else if ((TREE_CODE (arg1) == COND_EXPR
4813 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
4814 && TREE_CODE_CLASS (code) != '<'))
4815 && (TREE_CODE (arg0) != COND_EXPR
4816 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4817 && (! TREE_SIDE_EFFECTS (arg0)
4818 || ((*lang_hooks.decls.global_bindings_p) () == 0
4819 && ! contains_placeholder_p (arg0))))
4820 return
4821 fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
4822 /*cond_first_p=*/0);
4823 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
4824 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4825 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4826 else if ((TREE_CODE (arg0) == COND_EXPR
4827 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
4828 && TREE_CODE_CLASS (code) != '<'))
4829 && (TREE_CODE (arg1) != COND_EXPR
4830 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4831 && (! TREE_SIDE_EFFECTS (arg1)
4832 || ((*lang_hooks.decls.global_bindings_p) () == 0
4833 && ! contains_placeholder_p (arg1))))
4834 return
4835 fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
4836 /*cond_first_p=*/1);
4837 }
4838 else if (TREE_CODE_CLASS (code) == '<'
4839 && TREE_CODE (arg0) == COMPOUND_EXPR)
4840 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4841 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4842 else if (TREE_CODE_CLASS (code) == '<'
4843 && TREE_CODE (arg1) == COMPOUND_EXPR)
4844 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
4845 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
4846
4847 switch (code)
4848 {
4849 case INTEGER_CST:
4850 case REAL_CST:
4851 case VECTOR_CST:
4852 case STRING_CST:
4853 case COMPLEX_CST:
4854 case CONSTRUCTOR:
4855 return t;
4856
4857 case CONST_DECL:
4858 return fold (DECL_INITIAL (t));
4859
4860 case NOP_EXPR:
4861 case FLOAT_EXPR:
4862 case CONVERT_EXPR:
4863 case FIX_TRUNC_EXPR:
4864 /* Other kinds of FIX are not handled properly by fold_convert. */
4865
4866 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
4867 return TREE_OPERAND (t, 0);
4868
4869 /* Handle cases of two conversions in a row. */
4870 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
4871 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
4872 {
4873 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4874 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
4875 tree final_type = TREE_TYPE (t);
4876 int inside_int = INTEGRAL_TYPE_P (inside_type);
4877 int inside_ptr = POINTER_TYPE_P (inside_type);
4878 int inside_float = FLOAT_TYPE_P (inside_type);
4879 unsigned int inside_prec = TYPE_PRECISION (inside_type);
4880 int inside_unsignedp = TREE_UNSIGNED (inside_type);
4881 int inter_int = INTEGRAL_TYPE_P (inter_type);
4882 int inter_ptr = POINTER_TYPE_P (inter_type);
4883 int inter_float = FLOAT_TYPE_P (inter_type);
4884 unsigned int inter_prec = TYPE_PRECISION (inter_type);
4885 int inter_unsignedp = TREE_UNSIGNED (inter_type);
4886 int final_int = INTEGRAL_TYPE_P (final_type);
4887 int final_ptr = POINTER_TYPE_P (final_type);
4888 int final_float = FLOAT_TYPE_P (final_type);
4889 unsigned int final_prec = TYPE_PRECISION (final_type);
4890 int final_unsignedp = TREE_UNSIGNED (final_type);
4891
4892 /* In addition to the cases of two conversions in a row
4893 handled below, if we are converting something to its own
4894 type via an object of identical or wider precision, neither
4895 conversion is needed. */
4896 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
4897 && ((inter_int && final_int) || (inter_float && final_float))
4898 && inter_prec >= final_prec)
4899 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4900
4901 /* Likewise, if the intermediate and final types are either both
4902 float or both integer, we don't need the middle conversion if
4903 it is wider than the final type and doesn't change the signedness
4904 (for integers). Avoid this if the final type is a pointer
4905 since then we sometimes need the inner conversion. Likewise if
4906 the outer has a precision not equal to the size of its mode. */
4907 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
4908 || (inter_float && inside_float))
4909 && inter_prec >= inside_prec
4910 && (inter_float || inter_unsignedp == inside_unsignedp)
4911 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4912 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4913 && ! final_ptr)
4914 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4915
4916 /* If we have a sign-extension of a zero-extended value, we can
4917 replace that by a single zero-extension. */
4918 if (inside_int && inter_int && final_int
4919 && inside_prec < inter_prec && inter_prec < final_prec
4920 && inside_unsignedp && !inter_unsignedp)
4921 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4922
4923 /* Two conversions in a row are not needed unless:
4924 - some conversion is floating-point (overstrict for now), or
4925 - the intermediate type is narrower than both initial and
4926 final, or
4927 - the intermediate type and innermost type differ in signedness,
4928 and the outermost type is wider than the intermediate, or
4929 - the initial type is a pointer type and the precisions of the
4930 intermediate and final types differ, or
4931 - the final type is a pointer type and the precisions of the
4932 initial and intermediate types differ. */
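/* For example (assuming the usual precisions), in
   (int) (long) (short) X the intermediate widening to long can be
   dropped, since converting the short directly to int gives the same
   value; but in (int) (char) (long) X the intermediate type is narrower
   than both its neighbors, so the truncation to char must stay.  */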
4933 if (! inside_float && ! inter_float && ! final_float
4934 && (inter_prec > inside_prec || inter_prec > final_prec)
4935 && ! (inside_int && inter_int
4936 && inter_unsignedp != inside_unsignedp
4937 && inter_prec < final_prec)
4938 && ((inter_unsignedp && inter_prec > inside_prec)
4939 == (final_unsignedp && final_prec > inter_prec))
4940 && ! (inside_ptr && inter_prec != final_prec)
4941 && ! (final_ptr && inside_prec != inter_prec)
4942 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4943 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4944 && ! final_ptr)
4945 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4946 }
4947
4948 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
4949 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
4950 /* Detect assigning a bitfield. */
4951 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
4952 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
4953 {
4954 /* Don't leave an assignment inside a conversion
4955 unless assigning a bitfield. */
4956 tree prev = TREE_OPERAND (t, 0);
4957 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
4958 /* First do the assignment, then return converted constant. */
4959 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
4960 TREE_USED (t) = 1;
4961 return t;
4962 }
4963
4964 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
4965 constant (if x has signed type, the sign bit cannot be set
4966 in c). This folds extension into the BIT_AND_EXPR. */
4967 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
4968 && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
4969 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
4970 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
4971 {
4972 tree and = TREE_OPERAND (t, 0);
4973 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
4974 int change = 0;
4975
4976 if (TREE_UNSIGNED (TREE_TYPE (and))
4977 || (TYPE_PRECISION (TREE_TYPE (t))
4978 <= TYPE_PRECISION (TREE_TYPE (and))))
4979 change = 1;
4980 else if (TYPE_PRECISION (TREE_TYPE (and1))
4981 <= HOST_BITS_PER_WIDE_INT
4982 && host_integerp (and1, 1))
4983 {
4984 unsigned HOST_WIDE_INT cst;
4985
4986 cst = tree_low_cst (and1, 1);
4987 cst &= (HOST_WIDE_INT) -1
4988 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
4989 change = (cst == 0);
4990 #ifdef LOAD_EXTEND_OP
4991 if (change
4992 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
4993 == ZERO_EXTEND))
4994 {
4995 tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
4996 and0 = convert (uns, and0);
4997 and1 = convert (uns, and1);
4998 }
4999 #endif
5000 }
5001 if (change)
5002 return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
5003 convert (TREE_TYPE (t), and0),
5004 convert (TREE_TYPE (t), and1)));
5005 }
5006
5007 if (!wins)
5008 {
5009 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
5010 return t;
5011 }
5012 return fold_convert (t, arg0);
5013
5014 case VIEW_CONVERT_EXPR:
5015 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
5016 return build1 (VIEW_CONVERT_EXPR, type,
5017 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5018 return t;
5019
5020 case COMPONENT_REF:
5021 if (TREE_CODE (arg0) == CONSTRUCTOR)
5022 {
5023 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5024 if (m)
5025 t = TREE_VALUE (m);
5026 }
5027 return t;
5028
5029 case RANGE_EXPR:
5030 TREE_CONSTANT (t) = wins;
5031 return t;
5032
5033 case NEGATE_EXPR:
5034 if (wins)
5035 {
5036 if (TREE_CODE (arg0) == INTEGER_CST)
5037 {
5038 unsigned HOST_WIDE_INT low;
5039 HOST_WIDE_INT high;
5040 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5041 TREE_INT_CST_HIGH (arg0),
5042 &low, &high);
5043 t = build_int_2 (low, high);
5044 TREE_TYPE (t) = type;
5045 TREE_OVERFLOW (t)
5046 = (TREE_OVERFLOW (arg0)
5047 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
5048 TREE_CONSTANT_OVERFLOW (t)
5049 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5050 /* If arg0 was calculated from sizeof(ptr), record this */
              if (SIZEOF_PTR_DERIVED (arg0))
                SIZEOF_PTR_DERIVED (t) = 1;
            }
          else if (TREE_CODE (arg0) == REAL_CST)
            t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
        }
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
        return TREE_OPERAND (arg0, 0);

      /* Convert - (a - b) to (b - a) for non-floating-point.  */
      else if (TREE_CODE (arg0) == MINUS_EXPR
               && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
        return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
                      TREE_OPERAND (arg0, 0));

      return t;

    case ABS_EXPR:
      if (wins)
        {
          if (TREE_CODE (arg0) == INTEGER_CST)
            {
              /* If the value is unsigned, then the absolute value is
                 the same as the ordinary value.  */
              if (TREE_UNSIGNED (type))
                return arg0;
              /* Similarly, if the value is non-negative.  */
              else if (INT_CST_LT (integer_minus_one_node, arg0))
                return arg0;
              /* If the value is negative, then the absolute value is
                 its negation.  */
              else
                {
                  unsigned HOST_WIDE_INT low;
                  HOST_WIDE_INT high;
                  int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                             TREE_INT_CST_HIGH (arg0),
                                             &low, &high);
                  t = build_int_2 (low, high);
                  TREE_TYPE (t) = type;
                  TREE_OVERFLOW (t)
                    = (TREE_OVERFLOW (arg0)
                       | force_fit_type (t, overflow));
                  TREE_CONSTANT_OVERFLOW (t)
                    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
                  /* If arg0 was calculated from sizeof(ptr), record this.  */
                  if (SIZEOF_PTR_DERIVED (arg0))
                    SIZEOF_PTR_DERIVED (t) = 1;
                }
            }
          else if (TREE_CODE (arg0) == REAL_CST)
            {
              if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
                t = build_real (type,
                                REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
            }
        }
      else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
        return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      return t;

    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return convert (type, arg0);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return build (COMPLEX_EXPR, type,
                      TREE_OPERAND (arg0, 0),
                      negate_expr (TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
        return build_complex (type, TREE_REALPART (arg0),
                              negate_expr (TREE_IMAGPART (arg0)));
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        return fold (build (TREE_CODE (arg0), type,
                            fold (build1 (CONJ_EXPR, type,
                                          TREE_OPERAND (arg0, 0))),
                            fold (build1 (CONJ_EXPR,
                                          type, TREE_OPERAND (arg0, 1)))));
      else if (TREE_CODE (arg0) == CONJ_EXPR)
        return TREE_OPERAND (arg0, 0);
      return t;

    case BIT_NOT_EXPR:
      if (wins)
        {
          t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
                           ~ TREE_INT_CST_HIGH (arg0));
          TREE_TYPE (t) = type;
          force_fit_type (t, 0);
          TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
          TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
        }
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
        return TREE_OPERAND (arg0, 0);
      return t;

    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR)
        return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
      else if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return non_lvalue (convert (type, arg0));

          /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
             with a constant, and the two constants have no bits in common,
             we should treat this as a BIT_IOR_EXPR since this may produce more
             simplifications.  */
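          /* Illustrative sketch (not part of the transformation itself):
             with disjoint masks the addition can never carry, so for

                 (x & 0xF0) + (x & 0x0F)

             the sum is safely treated as (x & 0xF0) | (x & 0x0F).  The
             masks here are hypothetical values chosen so that their
             BIT_AND_EXPR, computed by const_binop below, is zero.  */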
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
              && integer_zerop (const_binop (BIT_AND_EXPR,
                                             TREE_OPERAND (arg0, 1),
                                             TREE_OPERAND (arg1, 1), 0)))
            {
              code = BIT_IOR_EXPR;
              goto bit_ior;
            }

          /* Reassociate (plus (plus (mult) (foo)) (mult)) as
             (plus (plus (mult) (mult)) (foo)) so that we can
             take advantage of the factoring cases below.  */
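          /* For example (a hypothetical input): (a*4 + b) + c*4 is
             reassociated as (a*4 + c*4) + b, and the factoring case
             below can then rewrite the inner sum as (a + c)*4.  */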
          if ((TREE_CODE (arg0) == PLUS_EXPR
               && TREE_CODE (arg1) == MULT_EXPR)
              || (TREE_CODE (arg1) == PLUS_EXPR
                  && TREE_CODE (arg0) == MULT_EXPR))
            {
              tree parg0, parg1, parg, marg;

              if (TREE_CODE (arg0) == PLUS_EXPR)
                parg = arg0, marg = arg1;
              else
                parg = arg1, marg = arg0;
              parg0 = TREE_OPERAND (parg, 0);
              parg1 = TREE_OPERAND (parg, 1);
              STRIP_NOPS (parg0);
              STRIP_NOPS (parg1);

              if (TREE_CODE (parg0) == MULT_EXPR
                  && TREE_CODE (parg1) != MULT_EXPR)
                return fold (build (PLUS_EXPR, type,
                                    fold (build (PLUS_EXPR, type, parg0, marg)),
                                    parg1));
              if (TREE_CODE (parg0) != MULT_EXPR
                  && TREE_CODE (parg1) == MULT_EXPR)
                return fold (build (PLUS_EXPR, type,
                                    fold (build (PLUS_EXPR, type, parg1, marg)),
                                    parg0));
            }

          if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
            {
              tree arg00, arg01, arg10, arg11;
              tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

              /* (A * C) + (B * C) -> (A+B) * C.
                 We are most concerned about the case where C is a constant,
                 but other combinations show up during loop reduction.  Since
                 it is not difficult, try all four possibilities.  */
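              /* A minimal sketch of the intended effect, assuming
                 integer operands: row*stride + col*stride folds to
                 (row + col)*stride, a common pattern in array address
                 arithmetic.  */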

              arg00 = TREE_OPERAND (arg0, 0);
              arg01 = TREE_OPERAND (arg0, 1);
              arg10 = TREE_OPERAND (arg1, 0);
              arg11 = TREE_OPERAND (arg1, 1);
              same = NULL_TREE;

              if (operand_equal_p (arg01, arg11, 0))
                same = arg01, alt0 = arg00, alt1 = arg10;
              else if (operand_equal_p (arg00, arg10, 0))
                same = arg00, alt0 = arg01, alt1 = arg11;
              else if (operand_equal_p (arg00, arg11, 0))
                same = arg00, alt0 = arg01, alt1 = arg10;
              else if (operand_equal_p (arg01, arg10, 0))
                same = arg01, alt0 = arg00, alt1 = arg11;

              /* No identical multiplicands; see if we can find a common
                 power-of-two factor in non-power-of-two multiplies.  This
                 can help in multi-dimensional array access.  */
              else if (TREE_CODE (arg01) == INTEGER_CST
                       && TREE_CODE (arg11) == INTEGER_CST
                       && TREE_INT_CST_HIGH (arg01) == 0
                       && TREE_INT_CST_HIGH (arg11) == 0)
                {
                  HOST_WIDE_INT int01, int11, tmp;
                  int01 = TREE_INT_CST_LOW (arg01);
                  int11 = TREE_INT_CST_LOW (arg11);

                  /* Move min of absolute values to int11.  */
                  if ((int01 >= 0 ? int01 : -int01)
                      < (int11 >= 0 ? int11 : -int11))
                    {
                      tmp = int01, int01 = int11, int11 = tmp;
                      alt0 = arg00, arg00 = arg10, arg10 = alt0;
                      alt0 = arg01, arg01 = arg11, arg11 = alt0;
                    }

                  if (exact_log2 (int11) > 0 && int01 % int11 == 0)
                    {
                      alt0 = fold (build (MULT_EXPR, type, arg00,
                                          build_int_2 (int01 / int11, 0)));
                      alt1 = arg10;
                      same = arg11;
                    }
                }

              if (same)
                return fold (build (MULT_EXPR, type,
                                    fold (build (PLUS_EXPR, type, alt0, alt1)),
                                    same));
            }
        }

      /* See if ARG1 is zero and X + ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
        return non_lvalue (convert (type, arg0));

      /* Likewise if the operands are reversed.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
        return non_lvalue (convert (type, arg1));

    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
         is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
         is a rotate of A by B bits.  */
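      /* Illustrative sketch, assuming a 32-bit unsigned int x: both

             (x << 3) + (x >> 29)    and    (x << n) + (x >> (32 - n))

         match these patterns and fold to a left-rotate of x.  This label
         is also reached from the BIT_IOR_EXPR and BIT_XOR_EXPR cases,
         which spell the same rotate with | or ^ instead of +.  */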
      {
        enum tree_code code0, code1;
        code0 = TREE_CODE (arg0);
        code1 = TREE_CODE (arg1);
        if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
             || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
            && operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 0), 0)
            && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
          {
            tree tree01, tree11;
            enum tree_code code01, code11;

            tree01 = TREE_OPERAND (arg0, 1);
            tree11 = TREE_OPERAND (arg1, 1);
            STRIP_NOPS (tree01);
            STRIP_NOPS (tree11);
            code01 = TREE_CODE (tree01);
            code11 = TREE_CODE (tree11);
            if (code01 == INTEGER_CST
                && code11 == INTEGER_CST
                && TREE_INT_CST_HIGH (tree01) == 0
                && TREE_INT_CST_HIGH (tree11) == 0
                && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
                    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
              return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
                            code0 == LSHIFT_EXPR ? tree01 : tree11);
            else if (code11 == MINUS_EXPR)
              {
                tree tree110, tree111;
                tree110 = TREE_OPERAND (tree11, 0);
                tree111 = TREE_OPERAND (tree11, 1);
                STRIP_NOPS (tree110);
                STRIP_NOPS (tree111);
                if (TREE_CODE (tree110) == INTEGER_CST
                    && 0 == compare_tree_int (tree110,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree01, tree111, 0))
                  return build ((code0 == LSHIFT_EXPR
                                 ? LROTATE_EXPR
                                 : RROTATE_EXPR),
                                type, TREE_OPERAND (arg0, 0), tree01);
              }
            else if (code01 == MINUS_EXPR)
              {
                tree tree010, tree011;
                tree010 = TREE_OPERAND (tree01, 0);
                tree011 = TREE_OPERAND (tree01, 1);
                STRIP_NOPS (tree010);
                STRIP_NOPS (tree011);
                if (TREE_CODE (tree010) == INTEGER_CST
                    && 0 == compare_tree_int (tree010,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree11, tree011, 0))
                  return build ((code0 != LSHIFT_EXPR
                                 ? LROTATE_EXPR
                                 : RROTATE_EXPR),
                                type, TREE_OPERAND (arg0, 0), tree11);
              }
          }
      }

    associate:
      /* In most languages, we can't associate operations on floats
         through parentheses.  Rather than remember where the parentheses
         were, we don't associate floats at all.  It shouldn't matter much.
         However, associating multiplications is only very slightly
         inaccurate, so do that if -funsafe-math-optimizations is
         specified.  */

      if (! wins
          && (! FLOAT_TYPE_P (type)
              || (flag_unsafe_math_optimizations && code == MULT_EXPR)))
        {
          tree var0, con0, lit0, minus_lit0;
          tree var1, con1, lit1, minus_lit1;

          /* Split both trees into variables, constants, and literals.  Then
             associate each group together, the constants with literals,
             then the result with variables.  This increases the chances of
             literals being recombined later and of generating relocatable
             expressions for the sum of a constant and literal.  */
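          /* For instance (hypothetical operands): folding
             (x + 3) + (y + 4) splits into the variables x and y plus the
             literals 3 and 4, which recombine below so the result
             carries the single literal 7.  */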
          var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
          var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
                             code == MINUS_EXPR);

          /* Only do something if we found more than two objects.  Otherwise,
             nothing has changed and we risk infinite recursion.  */
          if (2 < ((var0 != 0) + (var1 != 0)
                   + (con0 != 0) + (con1 != 0)
                   + (lit0 != 0) + (lit1 != 0)
                   + (minus_lit0 != 0) + (minus_lit1 != 0)))
            {
              /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
              if (code == MINUS_EXPR)
                code = PLUS_EXPR;

              var0 = associate_trees (var0, var1, code, type);
              con0 = associate_trees (con0, con1, code, type);
              lit0 = associate_trees (lit0, lit1, code, type);
              minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);

              /* Preserve the MINUS_EXPR if the negative part of the literal
                 is greater than the positive part.  Otherwise, the
                 multiplicative folding code (i.e. extract_muldiv) may be
                 fooled in case unsigned constants are subtracted, like in
                 the following example: ((X*2 + 4) - 8U)/2.  */
              if (minus_lit0 && lit0)
                {
                  if (tree_int_cst_lt (lit0, minus_lit0))
                    {
                      minus_lit0 = associate_trees (minus_lit0, lit0,
                                                    MINUS_EXPR, type);
                      lit0 = 0;
                    }
                  else
                    {
                      lit0 = associate_trees (lit0, minus_lit0,
                                              MINUS_EXPR, type);
                      minus_lit0 = 0;
                    }
                }
              if (minus_lit0)
                {
                  if (con0 == 0)
                    return convert (type, associate_trees (var0, minus_lit0,
                                                           MINUS_EXPR, type));
                  else
                    {
                      con0 = associate_trees (con0, minus_lit0,
                                              MINUS_EXPR, type);
                      return convert (type, associate_trees (var0, con0,
                                                             PLUS_EXPR, type));
                    }
                }

              con0 = associate_trees (con0, lit0, code, type);
              return convert (type, associate_trees (var0, con0, code, type));
            }
        }

    binary:
      if (wins)
        t1 = const_binop (code, arg0, arg1, 0);
      if (t1 != NULL_TREE)
        {
          /* The return value should always have
             the same type as the original expression.  */
          if (TREE_TYPE (t1) != TREE_TYPE (t))
            t1 = convert (TREE_TYPE (t), t1);

          return t1;
        }
      return t;

    case MINUS_EXPR:
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
      /* (-A) - CST -> (-CST) - A for floating point (what about ints?)  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
        return
          fold (build (MINUS_EXPR, type,
                       build_real (TREE_TYPE (arg1),
                                   REAL_VALUE_NEGATE (TREE_REAL_CST (arg1))),
                       TREE_OPERAND (arg0, 0)));

      if (! FLOAT_TYPE_P (type))
        {
          if (! wins && integer_zerop (arg0))
            return negate_expr (convert (type, arg1));
          if (integer_zerop (arg1))
            return non_lvalue (convert (type, arg0));

          /* (A * C) - (B * C) -> (A-B) * C.  Since we are most concerned
             about the case where C is a constant, just try one of the
             four possibilities.  */
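          /* E.g. i*n - j*n becomes (i - j)*n.  Unlike the PLUS_EXPR
             case above, only the shared second operand is checked.  */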

          if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 1), 0))
            return fold (build (MULT_EXPR, type,
                                fold (build (MINUS_EXPR, type,
                                             TREE_OPERAND (arg0, 0),
                                             TREE_OPERAND (arg1, 0))),
                                TREE_OPERAND (arg0, 1)));
        }

      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
        return non_lvalue (convert (type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
         ARG0 is zero and X + ARG0 reduces to X, since that would mean
         (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
        return negate_expr (convert (type, arg1));

      /* Fold &x - &x.  This can happen from &x.foo - &x.
         This is unsafe for certain floats even in non-IEEE formats.
         In IEEE, it is unsafe because it does wrong for NaNs.
         Also note that operand_equal_p is always false if an operand
         is volatile.  */

      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && operand_equal_p (arg0, arg1, 0))
        return convert (type, integer_zero_node);

      goto associate;

    case MULT_EXPR:
      /* (-A) * (-B) -> A * B */
      if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
        return fold (build (MULT_EXPR, type, TREE_OPERAND (arg0, 0),
                            TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return omit_one_operand (type, arg1, arg0);
          if (integer_onep (arg1))
            return non_lvalue (convert (type, arg0));

          /* (a * (1 << b)) is (a << b)  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg1, 0)))
            return fold (build (LSHIFT_EXPR, type, arg0,
                                TREE_OPERAND (arg1, 1)));
          if (TREE_CODE (arg0) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg0, 0)))
            return fold (build (LSHIFT_EXPR, type, arg1,
                                TREE_OPERAND (arg0, 1)));

          if (TREE_CODE (arg1) == INTEGER_CST
              && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
                                             code, NULL_TREE)))
            return convert (type, tem);
        }
      else
        {
          /* Maybe fold x * 0 to 0.  The expressions aren't the same
             when x is NaN, since x * 0 is also NaN.  Nor are they the
             same in modes with signed zeros, since multiplying a
             negative value by 0 gives -0, not +0.  */
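          /* A sketch of why both guards are needed, assuming IEEE
             semantics (the values are hypothetical):

                 -3.0 * 0.0  is  -0.0, not +0.0   (signed zeros)
                  NaN * 0.0  is  NaN,  not  0.0   (NaNs)

             so folding x*0 to 0 is valid only when the mode honors
             neither NaNs nor signed zeros.  */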
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_zerop (arg1))
            return omit_one_operand (type, arg1, arg0);
          /* In IEEE floating point, x*1 is not equivalent to x for snans.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_onep (arg1))
            return non_lvalue (convert (type, arg0));

          /* Transform x * -1.0 into -x.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_minus_onep (arg1))
            return fold (build1 (NEGATE_EXPR, type, arg0));

          /* x*2 is x+x */
          if (! wins && real_twop (arg1)
              && (*lang_hooks.decls.global_bindings_p) () == 0
              && ! contains_placeholder_p (arg0))
            {
              tree arg = save_expr (arg0);
              return build (PLUS_EXPR, type, arg, arg);
            }
        }
      goto associate;

    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
        return omit_one_operand (type, arg1, arg0);
      if (integer_zerop (arg1))
        return non_lvalue (convert (type, arg0));
      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

         This results in more efficient code for machines without a NAND
         instruction.  Combine will canonicalize to the first form
         which will allow use of NAND instructions provided by the
         backend if they exist.  */
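      /* I.e. by De Morgan's law, ~a | ~b folds to ~(a & b).  */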
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return fold (build1 (BIT_NOT_EXPR, type,
                               build (BIT_AND_EXPR, type,
                                      TREE_OPERAND (arg0, 0),
                                      TREE_OPERAND (arg1, 0))));
        }

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
        return non_lvalue (convert (type, arg0));
      if (integer_all_onesp (arg1))
        return fold (build1 (BIT_NOT_EXPR, type, arg0));

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
         with a constant, and the two constants have no bits in common,
         we should treat this as a BIT_IOR_EXPR since this may produce more
         simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && integer_zerop (const_binop (BIT_AND_EXPR,
                                         TREE_OPERAND (arg0, 1),
                                         TREE_OPERAND (arg1, 1), 0)))
        {
          code = BIT_IOR_EXPR;
          goto bit_ior;
        }

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
    bit_and:
      if (integer_all_onesp (arg1))
        return non_lvalue (convert (type, arg0));
      if (integer_zerop (arg1))
        return omit_one_operand (type, arg1, arg0);
      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
          && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
        {
          unsigned int prec
            = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

          if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
              && (~TREE_INT_CST_LOW (arg1)
                  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
            return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
        }

      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

         This results in more efficient code for machines without a NOR
         instruction.  Combine will canonicalize to the first form
         which will allow use of NOR instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return fold (build1 (BIT_NOT_EXPR, type,
                               build (BIT_IOR_EXPR, type,
                                      TREE_OPERAND (arg0, 0),
                                      TREE_OPERAND (arg1, 0))));
        }

      goto associate;

    case BIT_ANDTC_EXPR:
      if (integer_all_onesp (arg0))
        return non_lvalue (convert (type, arg1));
      if (integer_zerop (arg0))
        return omit_one_operand (type, arg0, arg1);
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
          code = BIT_AND_EXPR;
          goto bit_and;
        }
      goto binary;

    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
         of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
          && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
          && real_zerop (arg1))
        return t;

      /* (-A) / (-B) -> A / B */
      if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
        return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
                            TREE_OPERAND (arg1, 0)));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_onep (arg1))
        return non_lvalue (convert (type, arg0));

      /* If ARG1 is a constant, we can convert this to a multiply by the
         reciprocal.  This does not have the same rounding properties,
         so only do this if -funsafe-math-optimizations.  We can actually
         always safely do it if ARG1 is a power of two, but it's hard to
         tell if it is or not in a portable manner.  */
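      /* Sketch: with -funsafe-math-optimizations, x / 5.0 becomes
         x * 0.2 even though 0.2 is inexact in binary.  Without the flag,
         x / 4.0 can still become x * 0.25, because 0.25 is an exact
         reciprocal and exact_real_inverse verifies that.  */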
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (flag_unsafe_math_optimizations
              && 0 != (tem = const_binop (code, build_real (type, dconst1),
                                          arg1, 0)))
            return fold (build (MULT_EXPR, type, arg0, tem));
          /* Find the reciprocal if optimizing and the result is exact.  */
          else if (optimize)
            {
              REAL_VALUE_TYPE r;
              r = TREE_REAL_CST (arg1);
              if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
                {
                  tem = build_real (type, r);
                  return fold (build (MULT_EXPR, type, arg0, tem));
                }
            }
        }
      /* Convert A/B/C to A/(B*C).  */
      if (flag_unsafe_math_optimizations
          && TREE_CODE (arg0) == RDIV_EXPR)
        {
          return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
                              build (MULT_EXPR, type, TREE_OPERAND (arg0, 1),
                                     arg1)));
        }
      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_unsafe_math_optimizations
          && TREE_CODE (arg1) == RDIV_EXPR)
        {
          return fold (build (MULT_EXPR, type,
                              build (RDIV_EXPR, type, arg0,
                                     TREE_OPERAND (arg1, 0)),
                              TREE_OPERAND (arg1, 1)));
        }
      goto binary;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
        return non_lvalue (convert (type, arg0));
      if (integer_zerop (arg1))
        return t;

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
         operation, EXACT_DIV_EXPR.

         Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
         At one time others generated faster code; it's not clear if they do
         after the last round of changes to the DIV code in expmed.c.  */
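      /* E.g. a CEIL_DIV_EXPR of n*4 by 4 is known to divide evenly, so
         it can use EXACT_DIV_EXPR, for which expmed.c may emit cheaper
         code (a shift, or a multiply by the divisor's modular inverse)
         instead of a general division.  */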
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
          && multiple_of_p (type, arg0, arg1))
        return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));

      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
                                         code, NULL_TREE)))
        return convert (type, tem);

      goto binary;

    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      if (integer_onep (arg1))
        return omit_one_operand (type, integer_zero_node, arg0);
      if (integer_zerop (arg1))
        return t;

      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
                                         code, NULL_TREE)))
        return convert (type, tem);

      goto binary;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_zerop (arg1))
        return non_lvalue (convert (type, arg0));
      /* Since negative shift count is not well-defined,
         don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
        return t;
      /* Rewrite an LROTATE_EXPR by a constant into an
         RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
        {
          TREE_SET_CODE (t, RROTATE_EXPR);
          code = RROTATE_EXPR;
          TREE_OPERAND (t, 1) = arg1
            = const_binop
              (MINUS_EXPR,
               convert (TREE_TYPE (arg1),
                        build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
               arg1, 0);
          if (tree_int_cst_sgn (arg1) < 0)
            return t;
        }

      /* If we have a rotate of a bit operation with the rotate count and
         the second operand of the bit operation both constant,
         permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == BIT_AND_EXPR
              || TREE_CODE (arg0) == BIT_ANDTC_EXPR
              || TREE_CODE (arg0) == BIT_IOR_EXPR
              || TREE_CODE (arg0) == BIT_XOR_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold (build (TREE_CODE (arg0), type,
                            fold (build (code, type,
                                         TREE_OPERAND (arg0, 0), arg1)),
                            fold (build (code, type,
                                         TREE_OPERAND (arg0, 1), arg1))));

      /* Two consecutive rotates adding up to the width of the mode can
         be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == RROTATE_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (arg1) == 0
          && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
          && ((TREE_INT_CST_LOW (arg1)
               + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
              == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
        return TREE_OPERAND (arg0, 0);

      goto binary;

    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
        return omit_one_operand (type, arg1, arg0);
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && TYPE_MAX_VALUE (type)
          && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
        return omit_one_operand (type, arg1, arg0);
      goto associate;

    case TRUTH_NOT_EXPR:
      /* Note that the operand of this must be an int
         and its values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language,
         but we don't handle values other than 1 correctly yet.)  */
      tem = invert_truthvalue (arg0);
      /* Avoid infinite recursion.  */
      if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
        return t;
      return convert (type, tem);

    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
        return convert (type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return non_lvalue (convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue (convert (type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
         must be evaluated.  */
      if (integer_zerop (arg1))
        return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
         case will be handled here.  */
      if (integer_zerop (arg0))
        return omit_one_operand (type, arg0, arg1);

    truth_andor:
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
        return t;

      /* Check for things like (A || B) && (A || C).  We can convert this
         to A || (B && C).  Note that either operator can be any of the four
         truth and/or operations and the transformation will still be
         valid.  Also note that we only care about order for the
         ANDIF and ORIF operators.  If B contains side effects, this
         might change the truth-value of A.  */
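      /* For instance, (a || b) && (a || c) folds to a || (b && c).
         The side-effect test below is what keeps this safe for the
         short-circuiting ANDIF/ORIF forms.  */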
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
          && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
              || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
              || TREE_CODE (arg0) == TRUTH_AND_EXPR
              || TREE_CODE (arg0) == TRUTH_OR_EXPR)
          && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
        {
          tree a00 = TREE_OPERAND (arg0, 0);
          tree a01 = TREE_OPERAND (arg0, 1);
          tree a10 = TREE_OPERAND (arg1, 0);
          tree a11 = TREE_OPERAND (arg1, 1);
          int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
                              || TREE_CODE (arg0) == TRUTH_AND_EXPR)
                             && (code == TRUTH_AND_EXPR
                                 || code == TRUTH_OR_EXPR));

          if (operand_equal_p (a00, a10, 0))
            return fold (build (TREE_CODE (arg0), type, a00,
                                fold (build (code, type, a01, a11))));
          else if (commutative && operand_equal_p (a00, a11, 0))
            return fold (build (TREE_CODE (arg0), type, a00,
                                fold (build (code, type, a01, a10))));
          else if (commutative && operand_equal_p (a01, a10, 0))
            return fold (build (TREE_CODE (arg0), type, a01,
                                fold (build (code, type, a00, a11))));

          /* This case is tricky because we must either have commutative
             operators or else A10 must not have side-effects.  */

          else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
                   && operand_equal_p (a01, a11, 0))
            return fold (build (TREE_CODE (arg0), type,
                                fold (build (code, type, a00, a10)),
                                a01));
        }

      /* See if we can build a range comparison.  */
      if (0 != (tem = fold_range_test (t)))
        return tem;

      /* Check for the possibility of merging component references.  If our
         lhs is another similar operation, try to merge its rhs with our
         rhs.  Then try to merge our lhs and rhs.  */
      if (TREE_CODE (arg0) == code
          && 0 != (tem = fold_truthop (code, type,
                                       TREE_OPERAND (arg0, 1), arg1)))
        return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));

      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
        return tem;

      return t;

    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or true.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return convert (type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
        return non_lvalue (convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue (convert (type, arg0));
      /* If second arg is constant true, result is true, but we must
         evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
        return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
         TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return omit_one_operand (type, arg0, arg1);
      goto truth_andor;

    case TRUTH_XOR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (integer_zerop (arg0))
        return non_lvalue (convert (type, arg1));
      if (integer_zerop (arg1))
        return non_lvalue (convert (type, arg0));
      /* If either arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg0))
        return non_lvalue (convert (type, invert_truthvalue (arg1)));
      if (integer_onep (arg1))
        return non_lvalue (convert (type, invert_truthvalue (arg0)));
      return t;

    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      /* If one arg is a real or integer constant, put it last.  */
      if ((TREE_CODE (arg0) == INTEGER_CST
           && TREE_CODE (arg1) != INTEGER_CST)
          || (TREE_CODE (arg0) == REAL_CST
              && TREE_CODE (arg1) != REAL_CST))
        {
          TREE_OPERAND (t, 0) = arg1;
          TREE_OPERAND (t, 1) = arg0;
          arg0 = TREE_OPERAND (t, 0);
          arg1 = TREE_OPERAND (t, 1);
          code = swap_tree_comparison (code);
          TREE_SET_CODE (t, code);
        }

      if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
        {
          /* (-a) CMP (-b) -> b CMP a */
          if (TREE_CODE (arg0) == NEGATE_EXPR
              && TREE_CODE (arg1) == NEGATE_EXPR)
            return fold (build (code, type, TREE_OPERAND (arg1, 0),
                                TREE_OPERAND (arg0, 0)));
          /* (-a) CMP CST -> a swap(CMP) (-CST) */
          if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
            return
              fold (build
                    (swap_tree_comparison (code), type,
                     TREE_OPERAND (arg0, 0),
                     build_real (TREE_TYPE (arg1),
                                 REAL_VALUE_NEGATE (TREE_REAL_CST (arg1)))));
          /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
          /* a CMP (-0) -> a CMP 0 */
          if (TREE_CODE (arg1) == REAL_CST
              && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (arg1)))
            return fold (build (code, type, arg0,
                                build_real (TREE_TYPE (arg1), dconst0)));

          /* If this is a comparison of a real constant with a PLUS_EXPR
             or a MINUS_EXPR of a real constant, we can convert it into a
             comparison with a revised real constant as long as no overflow
             occurs when unsafe_math_optimizations are enabled.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == REAL_CST
              && (TREE_CODE (arg0) == PLUS_EXPR
                  || TREE_CODE (arg0) == MINUS_EXPR)
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
              && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
                                          ? MINUS_EXPR : PLUS_EXPR,
                                          arg1, TREE_OPERAND (arg0, 1), 0))
              && ! TREE_CONSTANT_OVERFLOW (tem))
            return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
        }

      /* Convert foo++ == CONST into ++foo == CONST + INCR.  */
      if (TREE_CONSTANT (arg1)
          && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
              || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
          /* This optimization is invalid for ordered comparisons
             if CONST+INCR overflows or if foo+incr might overflow.
             This optimization is invalid for floating point due to rounding.
             For pointer types we assume overflow doesn't happen.  */
          && (POINTER_TYPE_P (TREE_TYPE (arg0))
              || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
                  && (code == EQ_EXPR || code == NE_EXPR))))
        {
          tree varop, newconst;

          if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
            {
              newconst = fold (build (PLUS_EXPR, TREE_TYPE (arg0),
                                      arg1, TREE_OPERAND (arg0, 1)));
              varop = build (PREINCREMENT_EXPR, TREE_TYPE (arg0),
                             TREE_OPERAND (arg0, 0),
                             TREE_OPERAND (arg0, 1));
            }
          else
            {
              newconst = fold (build (MINUS_EXPR, TREE_TYPE (arg0),
                                      arg1, TREE_OPERAND (arg0, 1)));
              varop = build (PREDECREMENT_EXPR, TREE_TYPE (arg0),
                             TREE_OPERAND (arg0, 0),
                             TREE_OPERAND (arg0, 1));
            }

          /* If VAROP is a reference to a bitfield, we must mask
             the constant by the width of the field.  */
          if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
              && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1)))
            {
              tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
              int size = TREE_INT_CST_LOW (DECL_SIZE (fielddecl));
              tree folded_compare, shift;

              /* First check whether the comparison would come out
                 always the same.  If we don't do that we would
                 change the meaning with the masking.  */
              folded_compare = fold (build (code, type,
                                            TREE_OPERAND (varop, 0),
                                            arg1));
              if (integer_zerop (folded_compare)
                  || integer_onep (folded_compare))
                return omit_one_operand (type, folded_compare, varop);

              shift = build_int_2 (TYPE_PRECISION (TREE_TYPE (varop)) - size,
                                   0);
              newconst = fold (build (LSHIFT_EXPR, TREE_TYPE (varop),
                                      newconst, shift));
              newconst = fold (build (RSHIFT_EXPR, TREE_TYPE (varop),
                                      newconst, shift));
            }

          return fold (build (code, type, varop, newconst));
        }

      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
         This transformation affects the cases which are handled in later
         optimizations involving comparisons with non-negative constants.  */
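      /* E.g. x >= 5 becomes x > 4 and x < 5 becomes x <= 4, the
         canonical forms assumed by the max/min tests below.  */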
      if (TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) != INTEGER_CST
          && tree_int_cst_sgn (arg1) > 0)
        {
          switch (code)
            {
            case GE_EXPR:
              code = GT_EXPR;
              arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
              t = build (code, type, TREE_OPERAND (t, 0), arg1);
              break;

            case LT_EXPR:
              code = LE_EXPR;
              arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
              t = build (code, type, TREE_OPERAND (t, 0), arg1);
              break;

            default:
              break;
            }
        }

      /* Comparisons with the highest or lowest possible integer of
         the specified size will have known values.  */
      {
        int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));

        if (TREE_CODE (arg1) == INTEGER_CST
            && ! TREE_CONSTANT_OVERFLOW (arg1)
            && width <= HOST_BITS_PER_WIDE_INT
            && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
                || POINTER_TYPE_P (TREE_TYPE (arg1))))
          {
            unsigned HOST_WIDE_INT signed_max;
            unsigned HOST_WIDE_INT max, min;

            signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;

            if (TREE_UNSIGNED (TREE_TYPE (arg1)))
              {
                max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
                min = 0;
              }
            else
              {
                max = signed_max;
                min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
              }

            if (TREE_INT_CST_HIGH (arg1) == 0
                && TREE_INT_CST_LOW (arg1) == max)
              switch (code)
                {
                case GT_EXPR:
                  return omit_one_operand (type,
                                           convert (type, integer_zero_node),
                                           arg0);
                case GE_EXPR:
                  code = EQ_EXPR;
                  TREE_SET_CODE (t, EQ_EXPR);
                  break;
                case LE_EXPR:
                  return omit_one_operand (type,
                                           convert (type, integer_one_node),
                                           arg0);
                case LT_EXPR:
                  code = NE_EXPR;
                  TREE_SET_CODE (t, NE_EXPR);
                  break;

                /* The GE_EXPR and LT_EXPR cases above are not normally
                   reached because of previous transformations.  */

                default:
                  break;
                }
            else if (TREE_INT_CST_HIGH (arg1) == 0
                     && TREE_INT_CST_LOW (arg1) == max - 1)
              switch (code)
                {
                case GT_EXPR:
                  code = EQ_EXPR;
                  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
                  t = build (code, type, TREE_OPERAND (t, 0), arg1);
                  break;
                case LE_EXPR:
                  code = NE_EXPR;
                  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
                  t = build (code, type, TREE_OPERAND (t, 0), arg1);
                  break;
                default:
                  break;
                }
            else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
                     && TREE_INT_CST_LOW (arg1) == min)
              switch (code)
                {
                case LT_EXPR:
                  return omit_one_operand (type,
                                           convert (type, integer_zero_node),
                                           arg0);
                case LE_EXPR:
                  code = EQ_EXPR;
                  TREE_SET_CODE (t, EQ_EXPR);
                  break;

                case GE_EXPR:
                  return omit_one_operand (type,
                                           convert (type, integer_one_node),
                                           arg0);
                case GT_EXPR:
                  code = NE_EXPR;
                  TREE_SET_CODE (t, NE_EXPR);
                  break;

                default:
                  break;
                }
            else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
                     && TREE_INT_CST_LOW (arg1) == min + 1)
              switch (code)
                {
                case GE_EXPR:
                  code = NE_EXPR;
                  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
                  t = build (code, type, TREE_OPERAND (t, 0), arg1);
                  break;
                case LT_EXPR:
                  code = EQ_EXPR;
                  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
                  t = build (code, type, TREE_OPERAND (t, 0), arg1);
                  break;
                default:
                  break;
                }

            else if (TREE_INT_CST_HIGH (arg1) == 0
                     && TREE_INT_CST_LOW (arg1) == signed_max
                     && TREE_UNSIGNED (TREE_TYPE (arg1))
                     /* signed_type does not work on pointer types.  */
                     && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
              {
                /* The following case also applies to X < signed_max+1
                   and X >= signed_max+1 because of previous
                   transformations.  */
                if (code == LE_EXPR || code == GT_EXPR)
                  {
                    tree st0, st1;
                    st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
                    st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
                    return fold
                      (build (code == LE_EXPR ? GE_EXPR : LT_EXPR,
                              type, convert (st0, arg0),
                              convert (st1, integer_zero_node)));
                  }
              }
          }
      }

      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
         a MINUS_EXPR of a constant, we can convert it into a comparison with
         a revised constant as long as no overflow occurs.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == PLUS_EXPR
              || TREE_CODE (arg0) == MINUS_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
                                      ? MINUS_EXPR : PLUS_EXPR,
                                      arg1, TREE_OPERAND (arg0, 1), 0))
          && ! TREE_CONSTANT_OVERFLOW (tem))
        return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));

      /* Similarly for a NEGATE_EXPR.  */
      else if ((code == EQ_EXPR || code == NE_EXPR)
               && TREE_CODE (arg0) == NEGATE_EXPR
               && TREE_CODE (arg1) == INTEGER_CST
               && 0 != (tem = negate_expr (arg1))
               && TREE_CODE (tem) == INTEGER_CST
               && ! TREE_CONSTANT_OVERFLOW (tem))
        return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));

      /* If we have X - Y == 0, we can convert that to X == Y and similarly
         for !=.  Don't do this for ordered comparisons due to overflow.  */
      else if ((code == NE_EXPR || code == EQ_EXPR)
               && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
        return fold (build (code, type,
                            TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));

      /* If we are widening one operand of an integer comparison,
         see if the other operand is similarly being widened.  Perhaps we
         can do the comparison in the narrower type.  */
      else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
               && TREE_CODE (arg0) == NOP_EXPR
               && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
               && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
               && (TREE_TYPE (t1) == TREE_TYPE (tem)
                   || (TREE_CODE (t1) == INTEGER_CST
                       && int_fits_type_p (t1, TREE_TYPE (tem)))))
        return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));

      /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
         constant, we can simplify it.  */
      else if (TREE_CODE (arg1) == INTEGER_CST
               && (TREE_CODE (arg0) == MIN_EXPR
                   || TREE_CODE (arg0) == MAX_EXPR)
               && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return optimize_minmax_comparison (t);

      /* If we are comparing an ABS_EXPR with a constant, we can
         convert all the cases into explicit comparisons, but they may
         well not be faster than doing the ABS and one comparison.
         But ABS (X) <= C is a range comparison, which becomes a subtraction
         and a comparison, and is probably faster.  */
      else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
               && TREE_CODE (arg0) == ABS_EXPR
               && ! TREE_SIDE_EFFECTS (arg0)
               && (0 != (tem = negate_expr (arg1)))
               && TREE_CODE (tem) == INTEGER_CST
               && ! TREE_CONSTANT_OVERFLOW (tem))
        return fold (build (TRUTH_ANDIF_EXPR, type,
                            build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
                            build (LE_EXPR, type,
                                   TREE_OPERAND (arg0, 0), arg1)));

      /* If this is an EQ or NE comparison with zero and ARG0 is
         (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
         two operations, but the latter can be done in one less insn
         on machines that have only two-operand insns or on which a
         constant cannot be the first operand.  */
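      /* Sketch: ((1 << n) & flags) == 0 is rewritten as
         ((flags >> n) & 1) == 0; both test bit n, but the latter keeps
         the constant 1 as the second operand.  */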
      if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg0) == BIT_AND_EXPR)
        {
          if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
            return
              fold (build (code, type,
                           build (BIT_AND_EXPR, TREE_TYPE (arg0),
                                  build (RSHIFT_EXPR,
                                         TREE_TYPE (TREE_OPERAND (arg0, 0)),
                                         TREE_OPERAND (arg0, 1),
                                         TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
                                  convert (TREE_TYPE (arg0),
                                           integer_one_node)),
                           arg1));
          else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
                   && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
            return
              fold (build (code, type,
                           build (BIT_AND_EXPR, TREE_TYPE (arg0),
                                  build (RSHIFT_EXPR,
                                         TREE_TYPE (TREE_OPERAND (arg0, 1)),
                                         TREE_OPERAND (arg0, 0),
                                         TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
                                  convert (TREE_TYPE (arg0),
                                           integer_one_node)),
                           arg1));
        }

      /* If this is an NE or EQ comparison of zero against the result of a
         signed MOD operation whose second operand is a power of 2, make
         the MOD operation unsigned since it is simpler and equivalent.  */
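      /* E.g. for int x, (x % 4) == 0 holds exactly when the low two
         bits of x are clear, whatever the sign of x, so it is computed
         as ((unsigned) x % 4U) == 0, which expands to a simple mask.  */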
      if ((code == NE_EXPR || code == EQ_EXPR)
          && integer_zerop (arg1)
          && ! TREE_UNSIGNED (TREE_TYPE (arg0))
          && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
              || TREE_CODE (arg0) == CEIL_MOD_EXPR
              || TREE_CODE (arg0) == FLOOR_MOD_EXPR
              || TREE_CODE (arg0) == ROUND_MOD_EXPR)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
          tree newmod = build (TREE_CODE (arg0), newtype,
                               convert (newtype, TREE_OPERAND (arg0, 0)),
                               convert (newtype, TREE_OPERAND (arg0, 1)));

          return build (code, type, newmod, convert (newtype, arg1));
        }

      /* If this is an NE comparison of zero with an AND of one, remove the
         comparison since the AND will give the correct value.  */
      if (code == NE_EXPR && integer_zerop (arg1)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1)))
        return convert (type, arg0);

      /* If we have (A & C) == C where C is a power of 2, convert this into
         (A & C) != 0.  Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                            arg0, integer_zero_node));

      /* If we have (A & C) != 0 where C is the sign bit of A, convert
         this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
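      /* Sketch, assuming a 32-bit int x: (x & 0x80000000) != 0 folds
         to x < 0, and the == 0 form folds to x >= 0, via the signed
         comparison with zero built below.  */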
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_zerop (arg1))
        {
          tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg0, 1));
          if (arg00 != NULL_TREE)
            {
              tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
              return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
                                  convert (stype, arg00),
                                  convert (stype, integer_zero_node)));
            }
        }

      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
         and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
          && TREE_UNSIGNED (TREE_TYPE (arg0))
          && TREE_CODE (arg1) == LSHIFT_EXPR
          && integer_onep (TREE_OPERAND (arg1, 0)))
        return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                      build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                             TREE_OPERAND (arg1, 1)),
                      convert (TREE_TYPE (arg0), integer_zero_node));

      else if ((code == LT_EXPR || code == GE_EXPR)
               && TREE_UNSIGNED (TREE_TYPE (arg0))
               && (TREE_CODE (arg1) == NOP_EXPR
                   || TREE_CODE (arg1) == CONVERT_EXPR)
               && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
               && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
        return
          build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                 convert (TREE_TYPE (arg0),
                          build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                                 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
                 convert (TREE_TYPE (arg0), integer_zero_node));

      /* Simplify comparison of something with itself.  (For IEEE
         floating-point, we can only do some of these simplifications.)  */
      if (operand_equal_p (arg0, arg1, 0))
        {
          switch (code)
            {
            case EQ_EXPR:
            case GE_EXPR:
            case LE_EXPR:
              if (! FLOAT_TYPE_P (TREE_TYPE (arg0)))
                return constant_boolean_node (1, type);
              code = EQ_EXPR;
              TREE_SET_CODE (t, code);
              break;

            case NE_EXPR:
              /* For NE, we can only do this simplification if integer.  */
              if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
                break;
              /* ... fall through ...  */
            case GT_EXPR:
            case LT_EXPR:
              return constant_boolean_node (0, type);
            default:
              abort ();
            }
        }

      /* If we are comparing an expression that just has comparisons
         of two integer values, arithmetic expressions of those comparisons,
         and constants, we can simplify it.  There are only three cases
         to check: the two values can either be equal, the first can be
         greater, or the second can be greater.  Fold the expression for
         those three values.  Since each value must be 0 or 1, we have
         eight possibilities, each of which corresponds to the constant 0
         or 1 or one of the six possible comparisons.

         This handles common cases like (a > b) == 0 but also handles
         expressions like ((x > y) - (y > x)) > 0, which supposedly
         occur in macroized code.  */

      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
        {
          tree cval1 = 0, cval2 = 0;
          int save_p = 0;

          if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
              /* Don't handle degenerate cases here; they should already
                 have been handled anyway.  */
              && cval1 != 0 && cval2 != 0
              && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
              && TREE_TYPE (cval1) == TREE_TYPE (cval2)
              && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
              && TYPE_MAX_VALUE (TREE_TYPE (cval1))
              && TYPE_MAX_VALUE (TREE_TYPE (cval2))
              && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
                                    TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
            {
              tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
              tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

              /* We can't just pass T to eval_subst in case cval1 or cval2
                 was the same as ARG1.  */

              tree high_result
                = fold (build (code, type,
                               eval_subst (arg0, cval1, maxval, cval2, minval),
                               arg1));
              tree equal_result
                = fold (build (code, type,
                               eval_subst (arg0, cval1, maxval, cval2, maxval),
                               arg1));
              tree low_result
                = fold (build (code, type,
                               eval_subst (arg0, cval1, minval, cval2, maxval),
                               arg1));

              /* All three of these results should be 0 or 1.  Confirm they
                 are.  Then use those values to select the proper code
                 to use.  */

              if ((integer_zerop (high_result)
                   || integer_onep (high_result))
                  && (integer_zerop (equal_result)
                      || integer_onep (equal_result))
                  && (integer_zerop (low_result)
                      || integer_onep (low_result)))
                {
                  /* Make a 3-bit mask with the high-order bit being the
                     value for `>', the next for '=', and the low for '<'.  */
                  switch ((integer_onep (high_result) * 4)
                          + (integer_onep (equal_result) * 2)
                          + integer_onep (low_result))
                    {
                    case 0:
                      /* Always false.  */
                      return omit_one_operand (type, integer_zero_node, arg0);
                    case 1:
                      code = LT_EXPR;
                      break;
                    case 2:
                      code = EQ_EXPR;
                      break;
                    case 3:
                      code = LE_EXPR;
                      break;
                    case 4:
                      code = GT_EXPR;
                      break;
                    case 5:
                      code = NE_EXPR;
                      break;
                    case 6:
                      code = GE_EXPR;
                      break;
                    case 7:
                      /* Always true.  */
                      return omit_one_operand (type, integer_one_node, arg0);
                    }

                  t = build (code, type, cval1, cval2);
                  if (save_p)
                    return save_expr (t);
                  else
                    return fold (t);
                }
            }
        }

      /* If this is a comparison of a field, we may be able to simplify it.  */
      if (((TREE_CODE (arg0) == COMPONENT_REF
            && (*lang_hooks.can_use_bit_fields_p) ())
           || TREE_CODE (arg0) == BIT_FIELD_REF)
          && (code == EQ_EXPR || code == NE_EXPR)
          /* Handle the constant case even without -O
             to make sure the warnings are given.  */
          && (optimize || TREE_CODE (arg1) == INTEGER_CST))
        {
          t1 = optimize_bit_field_compare (code, type, arg0, arg1);
          return t1 ? t1 : t;
        }

      /* If this is a comparison of complex values and either or both sides
         are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
         comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
         This may prevent needless evaluations.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
          && (TREE_CODE (arg0) == COMPLEX_EXPR
              || TREE_CODE (arg1) == COMPLEX_EXPR
              || TREE_CODE (arg0) == COMPLEX_CST
              || TREE_CODE (arg1) == COMPLEX_CST))
        {
          tree subtype = TREE_TYPE (TREE_TYPE (arg0));
          tree real0, imag0, real1, imag1;

          arg0 = save_expr (arg0);
          arg1 = save_expr (arg1);
          real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
          imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
          real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
          imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));

          return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
                               : TRUTH_ORIF_EXPR),
                              type,
                              fold (build (code, type, real0, real1)),
                              fold (build (code, type, imag0, imag1))));
        }

      /* Optimize comparisons of strlen vs zero to a compare of the
         first character of the string vs zero.  To wit,
                strlen(ptr) == 0   =>  *ptr == 0
                strlen(ptr) != 0   =>  *ptr != 0
         Other cases should reduce to one of these two (or a constant)
         due to the return value of strlen being unsigned.  */
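      /* E.g. a source test such as strlen (p) == 0, with p a char *,
         folds to *p == 0 and avoids the library call.  The arglist
         checks below make sure strlen was called with exactly one
         pointer argument.  */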
      if ((code == EQ_EXPR || code == NE_EXPR)
          && integer_zerop (arg1)
          && TREE_CODE (arg0) == CALL_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR)
        {
          tree fndecl = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
          tree arglist;

          if (TREE_CODE (fndecl) == FUNCTION_DECL
              && DECL_BUILT_IN (fndecl)
              && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
              && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
              && (arglist = TREE_OPERAND (arg0, 1))
              && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
              && ! TREE_CHAIN (arglist))
            return fold (build (code, type,
                                build1 (INDIRECT_REF, char_type_node,
                                        TREE_VALUE (arglist)),
                                integer_zero_node));
        }

      /* From here on, the only cases we handle are when the result is
         known to be a constant.

         To compute GT, swap the arguments and do LT.
         To compute GE, do LT and invert the result.
         To compute LE, swap the arguments, do LT and invert the result.
         To compute NE, do EQ and invert the result.

         Therefore, the code below must handle only EQ and LT.  */
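      /* E.g. a constant a > b is computed as b < a, and a >= b as the
         inverse of a < b; the `invert' flag below records the pending
         inversion.  */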

      if (code == LE_EXPR || code == GT_EXPR)
        {
          tem = arg0, arg0 = arg1, arg1 = tem;
          code = swap_tree_comparison (code);
        }

      /* Note that it is safe to invert for real values here because we
         will check below in the one case that it matters.  */

      t1 = NULL_TREE;
      invert = 0;
      if (code == NE_EXPR || code == GE_EXPR)
        {
          invert = 1;
          code = invert_tree_comparison (code);
        }

      /* Compute a result for LT or EQ if args permit;
         otherwise return T.  */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
        {
          if (code == EQ_EXPR)
            t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
          else
            t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
                               ? INT_CST_LT_UNSIGNED (arg0, arg1)
                               : INT_CST_LT (arg0, arg1)),
                              0);
        }

#if 0 /* This is no longer useful, but breaks some real code.  */
      /* Assume a nonexplicit constant cannot equal an explicit one,
         since such code would be undefined anyway.
         Exception: on sysvr4, using #pragma weak,
         a label can come out as 0.  */
      else if (TREE_CODE (arg1) == INTEGER_CST
               && !integer_zerop (arg1)
               && TREE_CONSTANT (arg0)
               && TREE_CODE (arg0) == ADDR_EXPR
               && code == EQ_EXPR)
        t1 = build_int_2 (0, 0);
#endif
      /* Two real constants can be compared explicitly.  */
      else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
        {
          /* If either operand is a NaN, the result is false with two
             exceptions: First, an NE_EXPR is true on NaNs, but that case
             is already handled correctly since we will be inverting the
             result for NE_EXPR.  Second, if we had inverted a LE_EXPR
             or a GE_EXPR into a LT_EXPR, we must return true so that it
             will be inverted into false.  */

          if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
              || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
            t1 = build_int_2 (invert && code == LT_EXPR, 0);

          else if (code == EQ_EXPR)
            t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
                                                 TREE_REAL_CST (arg1)),
                              0);
          else
            t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
                                                TREE_REAL_CST (arg1)),
                              0);
        }

      if (t1 == NULL_TREE)
        return t;

      if (invert)
        TREE_INT_CST_LOW (t1) ^= 1;

      TREE_TYPE (t1) = type;
      if (TREE_CODE (type) == BOOLEAN_TYPE)
        return (*lang_hooks.truthvalue_conversion) (t1);
      return t1;

    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
         so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
          /* Only optimize constant conditions when the selected branch
             has the same type as the COND_EXPR.  This avoids optimizing
             away "c ? x : throw", where the throw has a void type.  */
          if (! VOID_TYPE_P (TREE_TYPE (tem))
              || VOID_TYPE_P (TREE_TYPE (t)))
            return pedantic_non_lvalue (tem);
          return t;
        }
      if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
        return pedantic_omit_one_operand (type, arg1, arg0);

      /* If the second operand is zero, invert the comparison and swap
         the second and third operands.  Likewise if the second operand
         is constant and the third is not or if the third operand is
         equivalent to the first operand of the comparison.  */

      if (integer_zerop (arg1)
          || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
          || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
              && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                                 TREE_OPERAND (t, 2),
                                                 TREE_OPERAND (arg0, 1))))
        {
          /* See if this can be inverted.  If it can't, possibly because
             it was a floating-point inequality comparison, don't do
             anything.  */
          tem = invert_truthvalue (arg0);

          if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
            {
              t = build (code, type, tem,
                         TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
              arg0 = tem;
              /* arg1 should be the first argument of the new T.  */
              arg1 = TREE_OPERAND (t, 1);
              STRIP_NOPS (arg1);
            }
        }

      /* If we have A op B ? A : C, we may be able to convert this to a
         simpler expression, depending on the operation and the values
         of B and C.  Signed zeros prevent all of these transformations,
         for reasons given above each one.  */

      if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             arg1, TREE_OPERAND (arg0, 1))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
        {
          tree arg2 = TREE_OPERAND (t, 2);
          enum tree_code comp_code = TREE_CODE (arg0);

          STRIP_NOPS (arg2);

          /* If we have A op 0 ? A : -A, consider applying the following
             transformations:

             A == 0? A : -A    same as -A
             A != 0? A : -A    same as A
             A >= 0? A : -A    same as abs (A)
             A > 0?  A : -A    same as abs (A)
             A <= 0? A : -A    same as -abs (A)
             A < 0?  A : -A    same as -abs (A)

             None of these transformations work for modes with signed
             zeros.  If A is +/-0, the first two transformations will
             change the sign of the result (from +0 to -0, or vice
             versa).  The last four will fix the sign of the result,
             even though the original expressions could be positive or
             negative, depending on the sign of A.

             Note that all these transformations are correct if A is
             NaN, since the two alternatives (A and -A) are also NaNs.  */
          if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
               ? real_zerop (TREE_OPERAND (arg0, 1))
               : integer_zerop (TREE_OPERAND (arg0, 1)))
              && TREE_CODE (arg2) == NEGATE_EXPR
              && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
            switch (comp_code)
              {
              case EQ_EXPR:
                return
                  pedantic_non_lvalue
                    (convert (type,
                              negate_expr
                                (convert (TREE_TYPE (TREE_OPERAND (t, 1)),
                                          arg1))));
              case NE_EXPR:
                return pedantic_non_lvalue (convert (type, arg1));
              case GE_EXPR:
              case GT_EXPR:
                if (TREE_UNSIGNED (TREE_TYPE (arg1)))
                  arg1 = convert ((*lang_hooks.types.signed_type)
                                  (TREE_TYPE (arg1)), arg1);
                return pedantic_non_lvalue
                  (convert (type, fold (build1 (ABS_EXPR,
                                                TREE_TYPE (arg1), arg1))));
              case LE_EXPR:
              case LT_EXPR:
                if (TREE_UNSIGNED (TREE_TYPE (arg1)))
                  arg1 = convert ((*lang_hooks.types.signed_type)
                                  (TREE_TYPE (arg1)), arg1);
                return pedantic_non_lvalue
                  (negate_expr (convert (type,
                                         fold (build1 (ABS_EXPR,
                                                       TREE_TYPE (arg1),
                                                       arg1)))));
              default:
                abort ();
              }

          /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
             A == 0 ? A : 0 is always 0 unless A is -0.  Note that
             both transformations are correct when A is NaN: A != 0
             is then true, and A == 0 is false.  */

          if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
            {
              if (comp_code == NE_EXPR)
                return pedantic_non_lvalue (convert (type, arg1));
              else if (comp_code == EQ_EXPR)
                return pedantic_non_lvalue (convert (type, integer_zero_node));
            }

          /* Try some transformations of A op B ? A : B.

             A == B? A : B    same as B
             A != B? A : B    same as A
             A >= B? A : B    same as max (A, B)
             A > B?  A : B    same as max (B, A)
             A <= B? A : B    same as min (A, B)
             A < B?  A : B    same as min (B, A)

             As above, these transformations don't work in the presence
             of signed zeros.  For example, if A and B are zeros of
             opposite sign, the first two transformations will change
             the sign of the result.  In the last four, the original
             expressions give different results for (A=+0, B=-0) and
             (A=-0, B=+0), but the transformed expressions do not.

             The first two transformations are correct if either A or B
             is a NaN.  In the first transformation, the condition will
             be false, and B will indeed be chosen.  In the case of the
             second transformation, the condition A != B will be true,
             and A will be chosen.

             The conversions to max() and min() are not correct if B is
             a number and A is not.  The conditions in the original
             expressions will be false, so all four give B.  The min()
             and max() versions would give a NaN instead.  */
          if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
                                              arg2, TREE_OPERAND (arg0, 0)))
            {
              tree comp_op0 = TREE_OPERAND (arg0, 0);
              tree comp_op1 = TREE_OPERAND (arg0, 1);
              tree comp_type = TREE_TYPE (comp_op0);

              /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
              if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
                comp_type = type;

              switch (comp_code)
                {
                case EQ_EXPR:
                  return pedantic_non_lvalue (convert (type, arg2));
                case NE_EXPR:
                  return pedantic_non_lvalue (convert (type, arg1));
                case LE_EXPR:
                case LT_EXPR:
                  /* In C++ a ?: expression can be an lvalue, so put the
                     operand which will be used if they are equal first
                     so that we can convert this back to the
                     corresponding COND_EXPR.  */
                  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
                    return pedantic_non_lvalue
                      (convert (type, fold (build (MIN_EXPR, comp_type,
                                                   (comp_code == LE_EXPR
                                                    ? comp_op0 : comp_op1),
                                                   (comp_code == LE_EXPR
                                                    ? comp_op1 : comp_op0)))));
                  break;
                case GE_EXPR:
                case GT_EXPR:
                  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
                    return pedantic_non_lvalue
                      (convert (type, fold (build (MAX_EXPR, comp_type,
                                                   (comp_code == GE_EXPR
                                                    ? comp_op0 : comp_op1),
                                                   (comp_code == GE_EXPR
                                                    ? comp_op1 : comp_op0)))));
                  break;
                default:
                  abort ();
                }
            }

          /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
             we might still be able to simplify this.  For example,
             if C1 is one less or one more than C2, this might have started
             out as a MIN or MAX and been transformed by this function.
             Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */
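          /* For instance, "x < 4 ? x : 3" has C1 == 4 and C2 == 3, so
             the LT_EXPR case below rebuilds it as MIN_EXPR (x, 3).  */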

          if (INTEGRAL_TYPE_P (type)
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              && TREE_CODE (arg2) == INTEGER_CST)
            switch (comp_code)
              {
              case EQ_EXPR:
                /* We can replace A with C1 in this case.  */
                arg1 = convert (type, TREE_OPERAND (arg0, 1));
                t = build (code, type, TREE_OPERAND (t, 0), arg1,
                           TREE_OPERAND (t, 2));
                break;

              case LT_EXPR:
                /* If C1 is C2 + 1, this is min(A, C2).  */
                if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
                    && operand_equal_p (TREE_OPERAND (arg0, 1),
                                        const_binop (PLUS_EXPR, arg2,
                                                     integer_one_node, 0), 1))
                  return pedantic_non_lvalue
                    (fold (build (MIN_EXPR, type, arg1, arg2)));
                break;

              case LE_EXPR:
                /* If C1 is C2 - 1, this is min(A, C2).  */
                if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
                    && operand_equal_p (TREE_OPERAND (arg0, 1),
                                        const_binop (MINUS_EXPR, arg2,
                                                     integer_one_node, 0), 1))
                  return pedantic_non_lvalue
                    (fold (build (MIN_EXPR, type, arg1, arg2)));
                break;

              case GT_EXPR:
                /* If C1 is C2 - 1, this is max(A, C2).  */
                if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
                    && operand_equal_p (TREE_OPERAND (arg0, 1),
                                        const_binop (MINUS_EXPR, arg2,
                                                     integer_one_node, 0), 1))
                  return pedantic_non_lvalue
                    (fold (build (MAX_EXPR, type, arg1, arg2)));
                break;

              case GE_EXPR:
                /* If C1 is C2 + 1, this is max(A, C2).  */
                if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
                    && operand_equal_p (TREE_OPERAND (arg0, 1),
                                        const_binop (PLUS_EXPR, arg2,
                                                     integer_one_node, 0), 1))
                  return pedantic_non_lvalue
                    (fold (build (MAX_EXPR, type, arg1, arg2)));
                break;
              case NE_EXPR:
                break;
              default:
                abort ();
              }
        }

      /* If the second operand is simpler than the third, swap them
         since that produces better jump optimization results.  */
      if ((TREE_CONSTANT (arg1) || DECL_P (arg1)
           || TREE_CODE (arg1) == SAVE_EXPR)
          && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
                || DECL_P (TREE_OPERAND (t, 2))
                || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
        {
          /* See if this can be inverted.  If it can't, possibly because
             it was a floating-point inequality comparison, don't do
             anything.  */
          tem = invert_truthvalue (arg0);

          if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
            {
              t = build (code, type, tem,
                         TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
              arg0 = tem;
              /* arg1 should be the first argument of the new T.  */
              arg1 = TREE_OPERAND (t, 1);
              STRIP_NOPS (arg1);
            }
        }

      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (TREE_OPERAND (t, 1))
          && integer_zerop (TREE_OPERAND (t, 2))
          /* If we try to convert TREE_OPERAND (t, 0) to our type, the
             call to fold will try to move the conversion inside
             a COND, which will recurse.  In that case, the COND_EXPR
             is probably the best choice, so leave it alone.  */
          && type == TREE_TYPE (arg0))
        return pedantic_non_lvalue (arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
         over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (TREE_OPERAND (t, 1))
          && integer_onep (TREE_OPERAND (t, 2))
          && truth_value_p (TREE_CODE (arg0)))
        return pedantic_non_lvalue (convert (type,
                                             invert_truthvalue (arg0)));

      /* Look for expressions of the form A & 2 ? 2 : 0.  The result of this
         operation is simply A & 2.  */

      if (integer_zerop (TREE_OPERAND (t, 2))
          && TREE_CODE (arg0) == NE_EXPR
          && integer_zerop (TREE_OPERAND (arg0, 1))
          && integer_pow2p (arg1)
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              arg1, 1))
        return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));

      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (TREE_OPERAND (t, 2))
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (arg1)))
        return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
                                                 arg0, arg1)));

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (TREE_OPERAND (t, 2))
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (arg1)))
        {
          /* Only perform transformation if ARG0 is easily inverted.  */
          tem = invert_truthvalue (arg0);
          if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
            return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
                                                     tem, arg1)));
        }

      return t;

    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
         nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || pedantic)
        return t;
      /* Don't let (0, 0) be a null pointer constant.  */
      if (integer_zerop (arg1))
        return build1 (NOP_EXPR, type, arg1);
      return convert (type, arg1);

    case COMPLEX_EXPR:
      if (wins)
        return build_complex (type, arg0, arg1);
      return t;

    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return t;
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 0),
                                 TREE_OPERAND (arg0, 1));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
        return TREE_REALPART (arg0);
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        return fold (build (TREE_CODE (arg0), type,
                            fold (build1 (REALPART_EXPR, type,
                                          TREE_OPERAND (arg0, 0))),
                            fold (build1 (REALPART_EXPR, type,
                                          TREE_OPERAND (arg0, 1)))));
      return t;

    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return convert (type, integer_zero_node);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 1),
                                 TREE_OPERAND (arg0, 0));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
        return TREE_IMAGPART (arg0);
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        return fold (build (TREE_CODE (arg0), type,
                            fold (build1 (IMAGPART_EXPR, type,
                                          TREE_OPERAND (arg0, 0))),
                            fold (build1 (IMAGPART_EXPR, type,
                                          TREE_OPERAND (arg0, 1)))));
      return t;

    /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
       appropriate.  */
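    /* For example, CLEANUP_POINT_EXPR (X + 1) can become
       CLEANUP_POINT_EXPR (X) + 1, since the constant operand cannot
       involve cleanups.  */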
    case CLEANUP_POINT_EXPR:
      if (! has_cleanups (arg0))
        return TREE_OPERAND (t, 0);

      {
        enum tree_code code0 = TREE_CODE (arg0);
        int kind0 = TREE_CODE_CLASS (code0);
        tree arg00 = TREE_OPERAND (arg0, 0);
        tree arg01;

        if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
          return fold (build1 (code0, type,
                               fold (build1 (CLEANUP_POINT_EXPR,
                                             TREE_TYPE (arg00), arg00))));

        if (kind0 == '<' || kind0 == '2'
            || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
            || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
            || code0 == TRUTH_XOR_EXPR)
          {
            arg01 = TREE_OPERAND (arg0, 1);

            if (TREE_CONSTANT (arg00)
                || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
                    && ! has_cleanups (arg00)))
              return fold (build (code0, type, arg00,
                                  fold (build1 (CLEANUP_POINT_EXPR,
                                                TREE_TYPE (arg01), arg01))));

            if (TREE_CONSTANT (arg01))
              return fold (build (code0, type,
                                  fold (build1 (CLEANUP_POINT_EXPR,
                                                TREE_TYPE (arg00), arg00)),
                                  arg01));
          }

        return t;
      }

    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
              == FUNCTION_DECL)
          && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
        {
          tree tmp = fold_builtin (expr);
          if (tmp)
            return tmp;
        }
      return t;

    default:
      return t;
    } /* switch (code) */
}

/* Determine if the first argument is a multiple of the second argument.
   Return 0 if it is not, or if we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

static int
multiple_of_p (type, top, bottom)
     tree type;
     tree top;
     tree bottom;
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
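      /* TOP is X << C, which equals X * (1 << C); it is therefore a
         multiple of BOTTOM whenever (1 << C) is.  */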
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
        {
          tree op1, t1;

          op1 = TREE_OPERAND (top, 1);
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          if (TYPE_PRECISION (TREE_TYPE (size_one_node))
              > TREE_INT_CST_LOW (op1)
              && TREE_INT_CST_HIGH (op1) == 0
              && 0 != (t1 = convert (type,
                                     const_binop (LSHIFT_EXPR, size_one_node,
                                                  op1, 0)))
              && ! TREE_OVERFLOW (t1))
            return multiple_of_p (type, t1, bottom);
        }
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from a non-integral or wider integral
         type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
          || (TYPE_PRECISION (type)
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
        return 0;

      /* ... fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
          || (TREE_UNSIGNED (type)
              && (tree_int_cst_sgn (top) < 0
                  || tree_int_cst_sgn (bottom) < 0)))
        return 0;
      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
                                         top, bottom, 0));

    default:
      return 0;
    }
}

/* Return true if `t' is known to be non-negative.  */

int
tree_expr_nonnegative_p (t)
     tree t;
{
  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
    case FFS_EXPR:
      return 1;
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
        && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case COND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
        && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
    case COMPOUND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
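    /* A MIN is nonnegative only when both operands are, but a MAX is
       nonnegative as soon as either operand is, since its value is at
       least as large as each operand.  */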
    case MIN_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
        && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MAX_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
        || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MODIFY_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case BIND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case SAVE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case NON_LVALUE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case RTL_EXPR:
      return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));

    default:
      if (truth_value_p (TREE_CODE (t)))
        /* Truth values evaluate to 0 or 1, which is nonnegative.  */
        return 1;
      else
        /* We don't know the sign of `t', so be conservative and
           return false.  */
        return 0;
    }
}

/* Return true if `r' is known to be non-negative.
   Only handles constants at the moment.  */

int
rtl_expr_nonnegative_p (r)
     rtx r;
{
  switch (GET_CODE (r))
    {
    case CONST_INT:
      return INTVAL (r) >= 0;

    case CONST_DOUBLE:
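      /* A CONST_DOUBLE with VOIDmode represents a double-width integer,
         whose sign is the sign of the high word.  */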
      if (GET_MODE (r) == VOIDmode)
        return CONST_DOUBLE_HIGH (r) >= 0;
      return 0;

    case CONST_VECTOR:
      {
        int units, i;
        rtx elt;

        units = CONST_VECTOR_NUNITS (r);

        for (i = 0; i < units; ++i)
          {
            elt = CONST_VECTOR_ELT (r, i);
            if (!rtl_expr_nonnegative_p (elt))
              return 0;
          }

        return 1;
      }

    case SYMBOL_REF:
    case LABEL_REF:
      /* These are always nonnegative.  */
      return 1;

    default:
      return 0;
    }
}

#include "gt-fold-const.h"