xref: /openbsd-src/gnu/usr.bin/gcc/gcc/fold-const.c (revision 8500990981f885cbe5e6a4958549cacc238b5ae6)
1 /* Fold a constant sub-tree into a single node for C-compiler
2    Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3    1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING.  If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA.  */
21 
22 /*@@ This file should be rewritten to use an arbitrary precision
23   @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24   @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25   @@ The routines that translate from the ap rep should
26   @@ warn if precision et. al. is lost.
27   @@ This would also make life easier when this technology is used
28   @@ for cross-compilers.  */
29 
30 /* The entry points in this file are fold, size_int_wide, size_binop
31    and force_fit_type.
32 
33    fold takes a tree as argument and returns a simplified tree.
34 
35    size_binop takes a tree code for an arithmetic operation
36    and two operands that are trees, and produces a tree for the
37    result, assuming the type comes from `sizetype'.
38 
39    size_int takes an integer value, and creates a tree constant
40    with type from `sizetype'.
41 
42    force_fit_type takes a constant and prior overflow indicator, and
43    forces the value to fit the type.  It returns an overflow indicator.  */
44 
45 #include "config.h"
46 #include "system.h"
47 #include "flags.h"
48 #include "tree.h"
49 #include "real.h"
50 #include "rtl.h"
51 #include "expr.h"
52 #include "tm_p.h"
53 #include "toplev.h"
54 #include "ggc.h"
55 #include "hashtab.h"
56 #include "langhooks.h"
57 
/* Forward declarations for the file-local helpers defined below.  */

static void encode		PARAMS ((HOST_WIDE_INT *,
					 unsigned HOST_WIDE_INT,
					 HOST_WIDE_INT));
static void decode		PARAMS ((HOST_WIDE_INT *,
					 unsigned HOST_WIDE_INT *,
					 HOST_WIDE_INT *));
static tree negate_expr		PARAMS ((tree));
static tree split_tree		PARAMS ((tree, enum tree_code, tree *, tree *,
					 tree *, int));
static tree associate_trees	PARAMS ((tree, tree, enum tree_code, tree));
static tree int_const_binop	PARAMS ((enum tree_code, tree, tree, int));
static tree const_binop		PARAMS ((enum tree_code, tree, tree, int));
static hashval_t size_htab_hash	PARAMS ((const void *));
static int size_htab_eq		PARAMS ((const void *, const void *));
static tree fold_convert	PARAMS ((tree, tree));
static enum tree_code invert_tree_comparison PARAMS ((enum tree_code));
static enum tree_code swap_tree_comparison PARAMS ((enum tree_code));
static int comparison_to_compcode PARAMS ((enum tree_code));
static enum tree_code compcode_to_comparison PARAMS ((int));
static int truth_value_p	PARAMS ((enum tree_code));
static int operand_equal_for_comparison_p PARAMS ((tree, tree, tree));
static int twoval_comparison_p	PARAMS ((tree, tree *, tree *, int *));
static tree eval_subst		PARAMS ((tree, tree, tree, tree, tree));
static tree omit_one_operand	PARAMS ((tree, tree, tree));
static tree pedantic_omit_one_operand PARAMS ((tree, tree, tree));
static tree distribute_bit_expr PARAMS ((enum tree_code, tree, tree, tree));
static tree make_bit_field_ref	PARAMS ((tree, tree, int, int, int));
static tree optimize_bit_field_compare PARAMS ((enum tree_code, tree,
						tree, tree));
static tree decode_field_reference PARAMS ((tree, HOST_WIDE_INT *,
					    HOST_WIDE_INT *,
					    enum machine_mode *, int *,
					    int *, tree *, tree *));
static int all_ones_mask_p	PARAMS ((tree, int));
static tree sign_bit_p		PARAMS ((tree, tree));
static int simple_operand_p	PARAMS ((tree));
static tree range_binop		PARAMS ((enum tree_code, tree, tree, int,
					 tree, int));
static tree make_range		PARAMS ((tree, int *, tree *, tree *));
static tree build_range_check	PARAMS ((tree, tree, int, tree, tree));
static int merge_ranges		PARAMS ((int *, tree *, tree *, int, tree, tree,
				       int, tree, tree));
static tree fold_range_test	PARAMS ((tree));
static tree unextend		PARAMS ((tree, int, int, tree));
static tree fold_truthop	PARAMS ((enum tree_code, tree, tree, tree));
static tree optimize_minmax_comparison PARAMS ((tree));
static tree extract_muldiv	PARAMS ((tree, tree, enum tree_code, tree));
static tree extract_muldiv_1	PARAMS ((tree, tree, enum tree_code, tree));
static tree strip_compound_expr PARAMS ((tree, tree));
static int multiple_of_p	PARAMS ((tree, tree, tree));
static tree constant_boolean_node PARAMS ((int, tree));
static int count_cond		PARAMS ((tree, int));
static tree fold_binary_op_with_conditional_arg
  PARAMS ((enum tree_code, tree, tree, tree, int));
static bool fold_real_zero_addition_p	PARAMS ((tree, tree, int));
113 
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.
   Bit 0 means "less than", bit 1 means "equal" and bit 2 means
   "greater than"; e.g. COMPCODE_LE (3) == COMPCODE_LT | COMPCODE_EQ
   and COMPCODE_NE (5) == COMPCODE_LT | COMPCODE_GT.  */
#define COMPCODE_FALSE   0
#define COMPCODE_LT      1
#define COMPCODE_EQ      2
#define COMPCODE_LE      3
#define COMPCODE_GT      4
#define COMPCODE_NE      5
#define COMPCODE_GE      6
#define COMPCODE_TRUE    7

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
146 
147 /* Unpack a two-word integer into 4 words.
148    LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
149    WORDS points to the array of HOST_WIDE_INTs.  */
150 
151 static void
152 encode (words, low, hi)
153      HOST_WIDE_INT *words;
154      unsigned HOST_WIDE_INT low;
155      HOST_WIDE_INT hi;
156 {
157   words[0] = LOWPART (low);
158   words[1] = HIGHPART (low);
159   words[2] = LOWPART (hi);
160   words[3] = HIGHPART (hi);
161 }
162 
163 /* Pack an array of 4 words into a two-word integer.
164    WORDS points to the array of words.
165    The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */
166 
static void
decode (words, low, hi)
     HOST_WIDE_INT *words;
     unsigned HOST_WIDE_INT *low;
     HOST_WIDE_INT *hi;
{
  /* Each array element holds one half-word digit; recombine them as
     LOWPART + HIGHPART * BASE for each output piece.  */
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
176 
177 /* Make the integer constant T valid for its type by setting to 0 or 1 all
178    the bits in the constant that don't belong in the type.
179 
180    Return 1 if a signed overflow occurs, 0 otherwise.  If OVERFLOW is
181    nonzero, a signed overflow has already occurred in calculating T, so
182    propagate it.  */
183 
int
force_fit_type (t, overflow)
     tree t;
     int overflow;
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;

  if (TREE_CODE (t) == REAL_CST)
    {
      /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
	 Consider doing it via real_convert now.  */
      return overflow;
    }

  /* Anything that is neither a REAL_CST nor an INTEGER_CST is left
     untouched; just propagate the incoming overflow indication.  */
  else if (TREE_CODE (t) != INTEGER_CST)
    return overflow;

  /* Remember the original value so we can tell below whether truncation
     or sign extension changed it.  */
  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  /* Pointers are handled with POINTER_SIZE bits rather than the
     precision recorded in their type.  */
  if (POINTER_TYPE_P (TREE_TYPE (t)))
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    TREE_INT_CST_HIGH (t)
      &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
    }

  /* Unsigned types do not suffer sign extension or overflow unless they
     are a sizetype.  */
  if (TREE_UNSIGNED (TREE_TYPE (t))
      && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	    && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
    return overflow;

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
	  ? 0 != (TREE_INT_CST_HIGH (t)
		  & ((HOST_WIDE_INT) 1
		     << (prec - HOST_BITS_PER_WIDE_INT - 1)))
	  : 0 != (TREE_INT_CST_LOW (t)
		  & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
    {
      /* Value is negative:
	 set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
	TREE_INT_CST_HIGH (t)
	  |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      else
	{
	  TREE_INT_CST_HIGH (t) = -1;
	  if (prec < HOST_BITS_PER_WIDE_INT)
	    TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
	}
    }

  /* Return nonzero if signed overflow occurred: either it was passed in,
     or the truncation/extension above changed the stored value.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}
259 
260 /* Add two doubleword integers with doubleword result.
261    Each argument is given as two `HOST_WIDE_INT' pieces.
262    One argument is L1 and H1; the other, L2 and H2.
263    The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
264 
265 int
266 add_double (l1, h1, l2, h2, lv, hv)
267      unsigned HOST_WIDE_INT l1, l2;
268      HOST_WIDE_INT h1, h2;
269      unsigned HOST_WIDE_INT *lv;
270      HOST_WIDE_INT *hv;
271 {
272   unsigned HOST_WIDE_INT l;
273   HOST_WIDE_INT h;
274 
275   l = l1 + l2;
276   h = h1 + h2 + (l < l1);
277 
278   *lv = l;
279   *hv = h;
280   return OVERFLOW_SUM_SIGN (h1, h2, h);
281 }
282 
283 /* Negate a doubleword integer with doubleword result.
284    Return nonzero if the operation overflows, assuming it's signed.
285    The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
286    The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
287 
288 int
289 neg_double (l1, h1, lv, hv)
290      unsigned HOST_WIDE_INT l1;
291      HOST_WIDE_INT h1;
292      unsigned HOST_WIDE_INT *lv;
293      HOST_WIDE_INT *hv;
294 {
295   if (l1 == 0)
296     {
297       *lv = 0;
298       *hv = - h1;
299       return (*hv & h1) < 0;
300     }
301   else
302     {
303       *lv = -l1;
304       *hv = ~h1;
305       return 0;
306     }
307 }
308 
309 /* Multiply two doubleword integers with doubleword result.
310    Return nonzero if the operation overflows, assuming it's signed.
311    Each argument is given as two `HOST_WIDE_INT' pieces.
312    One argument is L1 and H1; the other, L2 and H2.
313    The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
314 
int
mul_double (l1, h1, l2, h2, lv, hv)
     unsigned HOST_WIDE_INT l1, l2;
     HOST_WIDE_INT h1, h2;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];	/* full 4-digit x 4-digit product */
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  /* Split each operand into four half-word digits.  */
  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset ((char *) prod, 0, sizeof prod);

  /* Schoolbook multiplication on the half-word digits; the comments
     below show why CARRY cannot itself overflow a half-word base.  */
  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  The digit loop above
     computed an unsigned product, so first correct the top half for each
     negative operand (subtracting the other operand from it).  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  /* No overflow iff the top half is just the sign extension of the
     low half: all-ones for a negative result, all-zeros otherwise.  */
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
368 
369 /* Shift the doubleword integer in L1, H1 left by COUNT places
370    keeping only PREC bits of result.
371    Shift right if COUNT is negative.
372    ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
373    Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
374 
void
lshift_double (l1, h1, count, prec, lv, hv, arith)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1, count;
     unsigned int prec;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
     int arith;
{
  unsigned HOST_WIDE_INT signmask;

  /* A negative count means shift right instead.  */
  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

#ifdef SHIFT_COUNT_TRUNCATED
  /* Mimic targets that truncate out-of-range shift counts.  */
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      /* The entire low word shifts into the high word.  */
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      /* The two-step right shift of L1 avoids an undefined shift by
	 HOST_BITS_PER_WIDE_INT when COUNT is zero.  */
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      /* Replace the bits above PREC in the high word with the sign.  */
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      /* PREC fits in the low word: the whole high word is the sign.  */
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
437 
438 /* Shift the doubleword integer in L1, H1 right by COUNT places
439    keeping only PREC bits of result.  COUNT must be positive.
440    ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
441    Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
442 
void
rshift_double (l1, h1, count, prec, lv, hv, arith)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1, count;
     unsigned int prec;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
     int arith;
{
  unsigned HOST_WIDE_INT signmask;

  /* All ones when doing an arithmetic shift of a negative value,
     zero otherwise; used to fill the vacated high-order bits.  */
  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

#ifdef SHIFT_COUNT_TRUNCATED
  /* Mimic targets that truncate out-of-range shift counts.  */
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      /* The entire high word shifts into the low word.  */
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      /* The two-step left shift of H1 avoids an undefined shift by
	 HOST_BITS_PER_WIDE_INT when COUNT is zero.  */
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      /* Every significant bit was shifted out; the result is all sign.  */
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      /* Fill the bits above PREC - COUNT in the high word.  */
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      /* PREC - COUNT fits in the low word: high word is all sign.  */
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
503 
504 /* Rotate the doubleword integer in L1, H1 left by COUNT places
505    keeping only PREC bits of result.
506    Rotate right if COUNT is negative.
507    Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
508 
509 void
510 lrotate_double (l1, h1, count, prec, lv, hv)
511      unsigned HOST_WIDE_INT l1;
512      HOST_WIDE_INT h1, count;
513      unsigned int prec;
514      unsigned HOST_WIDE_INT *lv;
515      HOST_WIDE_INT *hv;
516 {
517   unsigned HOST_WIDE_INT s1l, s2l;
518   HOST_WIDE_INT s1h, s2h;
519 
520   count %= prec;
521   if (count < 0)
522     count += prec;
523 
524   lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
525   rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
526   *lv = s1l | s2l;
527   *hv = s1h | s2h;
528 }
529 
530 /* Rotate the doubleword integer in L1, H1 right by COUNT places
531    keeping only PREC bits of result.  COUNT must be positive.
532    Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
533 
534 void
535 rrotate_double (l1, h1, count, prec, lv, hv)
536      unsigned HOST_WIDE_INT l1;
537      HOST_WIDE_INT h1, count;
538      unsigned int prec;
539      unsigned HOST_WIDE_INT *lv;
540      HOST_WIDE_INT *hv;
541 {
542   unsigned HOST_WIDE_INT s1l, s2l;
543   HOST_WIDE_INT s1h, s2h;
544 
545   count %= prec;
546   if (count < 0)
547     count += prec;
548 
549   rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
550   lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
551   *lv = s1l | s2l;
552   *hv = s1h | s2h;
553 }
554 
555 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
556    for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
557    CODE is a tree code for a kind of division, one of
558    TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
559    or EXACT_DIV_EXPR
560    It controls how the quotient is rounded to an integer.
561    Return nonzero if the operation overflows.
562    UNS nonzero says do unsigned division.  */
563 
int
div_and_round_double (code, uns,
		      lnum_orig, hnum_orig, lden_orig, hden_orig,
		      lquo, hquo, lrem, hrem)
     enum tree_code code;
     int uns;
     unsigned HOST_WIDE_INT lnum_orig; /* num == numerator == dividend */
     HOST_WIDE_INT hnum_orig;
     unsigned HOST_WIDE_INT lden_orig; /* den == denominator == divisor */
     HOST_WIDE_INT hden_orig;
     unsigned HOST_WIDE_INT *lquo, *lrem;
     HOST_WIDE_INT *hquo, *hrem;
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  /* Division by zero: flag overflow but divide by 1 so some
     well-defined result is still produced.  */
  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset ((char *) quo, 0, sizeof quo);

  memset ((char *) num, 0, sizeof num);	/* to zero the extra scaling element */
  memset ((char *) den, 0, sizeof den);

  /* Split both operands into half-word digits.  */
  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }

      /* Insure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{		/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop: compute one quotient digit per iteration, most
	 significant first.  */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	    {
	      quo_est--;
	      carry = 0;		/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num [num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* if result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  /* The quotient so far rounds toward zero; adjust it (and recompute
     the remainder below) for the requested rounding mode.  */
  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT)  -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  /* quo = quo + 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den < ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1;  */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1; */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      abort ();
    }

  /* compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
835 
836 /* Given T, an expression, return the negation of T.  Allow for T to be
837    null, in which case return null.  */
838 
static tree
negate_expr (t)
     tree t;
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  /* Remember the original type; the conversions stripped below may
     change TREE_TYPE (t).  */
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
    case REAL_CST:
      /* Fold the negated constant directly, but only if negation is
	 safe: skip unsigned types and constants whose negation
	 overflows, falling through to the generic case below.  */
      if (! TREE_UNSIGNED (type)
	  && 0 != (tem = fold (build1 (NEGATE_EXPR, type, t)))
	  && ! TREE_OVERFLOW (tem))
	return tem;
      break;

    case NEGATE_EXPR:
      /* - (- X) -> X.  */
      return convert (type, TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      /* For floats this is only valid with -funsafe-math-optimizations
	 (it can change the result for IEEE signed zeros).  */
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	return convert (type,
			fold (build (MINUS_EXPR, TREE_TYPE (t),
				     TREE_OPERAND (t, 1),
				     TREE_OPERAND (t, 0))));
      break;

    default:
      break;
    }

  /* Fall back to building an explicit NEGATE_EXPR.  */
  return convert (type, fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t)));
}
880 
881 /* Split a tree IN into a constant, literal and variable parts that could be
882    combined with CODE to make IN.  "constant" means an expression with
883    TREE_CONSTANT but that isn't an actual constant.  CODE must be a
884    commutative arithmetic operation.  Store the constant part into *CONP,
885    the literal in *LITP and return the variable part.  If a part isn't
886    present, set it to null.  If the tree does not decompose in this way,
887    return the entire tree as the variable part and the other parts as null.
888 
889    If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
890    case, we negate an operand that was subtracted.  Except if it is a
891    literal for which we use *MINUS_LITP instead.
892 
893    If NEGATE_P is true, we are negating all of IN, again except a literal
894    for which we use *MINUS_LITP instead.
895 
896    If IN is itself a literal or constant, return it as appropriate.
897 
898    Note that we do not guarantee that any of the three values will be the
899    same type as IN, but they will have the same signedness and mode.  */
900 
static tree
split_tree (in, code, conp, litp, minus_litp, negate_p)
     tree in;
     enum tree_code code;
     tree *conp, *litp, *minus_litp;
     int negate_p;
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || (! FLOAT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      /* NEG1_P: OP1 is subtracted, so whichever part it becomes must
	 be negated below.  */
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  A negated literal is recorded in
	 *MINUS_LITP rather than being wrapped in a NEGATE_EXPR.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  /* Negate every part if the caller asked for the negation of IN;
     again a negated literal moves between *LITP and *MINUS_LITP.  */
  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
978 
979 /* Re-associate trees split by the above function.  T1 and T2 are either
980    expressions to associate or null.  Return the new expression, if any.  If
981    we build an operation, do it in TYPE and with CODE.  */
982 
983 static tree
984 associate_trees (t1, t2, code, type)
985      tree t1, t2;
986      enum tree_code code;
987      tree type;
988 {
989   if (t1 == 0)
990     return t2;
991   else if (t2 == 0)
992     return t1;
993 
994   /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
995      try to fold this since we will have infinite recursion.  But do
996      deal with any NEGATE_EXPRs.  */
997   if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
998       || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
999     {
1000       if (code == PLUS_EXPR)
1001 	{
1002 	  if (TREE_CODE (t1) == NEGATE_EXPR)
1003 	    return build (MINUS_EXPR, type, convert (type, t2),
1004 			  convert (type, TREE_OPERAND (t1, 0)));
1005 	  else if (TREE_CODE (t2) == NEGATE_EXPR)
1006 	    return build (MINUS_EXPR, type, convert (type, t1),
1007 			  convert (type, TREE_OPERAND (t2, 0)));
1008 	}
1009       return build (code, type, convert (type, t1), convert (type, t2));
1010     }
1011 
1012   return fold (build (code, type, convert (type, t1), convert (type, t2)));
1013 }
1014 
1015 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1016    to produce a new constant.
1017 
1018    If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */
1019 
static tree
int_const_binop (code, arg1, arg2, notrunc)
     enum tree_code code;
     tree arg1, arg2;
     int notrunc;
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TREE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  /* Each operand is a double-word value: a low (unsigned) and a high
     (signed) HOST_WIDE_INT word.  */
  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case BIT_ANDTC_EXPR:
      /* AND with the one's complement of the second operand.  */
      low = int1l & ~int2l, hi = int1h & ~int2h;
      break;

    case RSHIFT_EXPR:
      /* A right shift is a left shift with a negated count.  */
      int2l = -int2l;
      /* ... fall through ...  */
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      /* A right rotate is a left rotate with a negated count.  */
      int2l = - int2l;
      /* ... fall through ...  */
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      /* Subtract by adding the negation of the second operand.  */
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case: both operands
	 fit in a single nonnegative word.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
	{
	  /* Division by one.  */
	  low = int1l, hi = int1h;
	  break;
	}
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  /* Division of a nonzero value by itself.  */
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case: both operands
	 fit in a single nonnegative word.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      /* Here the quotient is discarded into the garbage words; only the
	 remainder is wanted.  */
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      /* Set LOW to nonzero iff ARG1 < ARG2, honoring signedness.  */
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      /* Pick ARG1 when (ARG1 < ARG2) matches (we want the minimum).  */
      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }

  /* If this is for a sizetype, can be represented as one (signed)
     HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
     constants.  */
  if (is_sizetype
      && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
	  || (hi == -1 && (HOST_WIDE_INT) low < 0))
      && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
    return size_int_type_wide (low, type);
  else
    {
      t = build_int_2 (low, hi);
      TREE_TYPE (t) = TREE_TYPE (arg1);
    }

  /* Propagate overflow from the operands and, unless NOTRUNC, from
     force_fit_type truncating the result to the type's precision.  */
  TREE_OVERFLOW (t)
    = ((notrunc
	? (!uns || is_sizetype) && overflow
	: (force_fit_type (t, (!uns || is_sizetype) && overflow)
	   && ! no_overflow))
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));

  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (is_sizetype
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
	  || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;

  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
				| TREE_CONSTANT_OVERFLOW (arg1)
				| TREE_CONSTANT_OVERFLOW (arg2));
  return t;
}
1208 
1209 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1210    constant.  We assume ARG1 and ARG2 have the same data type, or at least
1211    are the same kind of constant and the same machine mode.
1212 
1213    If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */
1214 
static tree
const_binop (code, arg1, arg2, notrunc)
     enum tree_code code;
     tree arg1, arg2;
     int notrunc;
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* Integer constants are handled entirely by int_const_binop.  */
  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      tree t;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      REAL_ARITHMETIC (value, code, d1, d2);

      /* Round the result to the precision of ARG1's machine mode.  */
      t = build_real (TREE_TYPE (arg1),
		      real_value_truncate (TYPE_MODE (TREE_TYPE (arg1)),
					   value));

      TREE_OVERFLOW (t)
	= (force_fit_type (t, 0)
	   | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
	= TREE_OVERFLOW (t)
	  | TREE_CONSTANT_OVERFLOW (arg1)
	  | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
	{
	case PLUS_EXPR:
	  /* Complex addition is done component-wise.  */
	  t = build_complex (type,
			     const_binop (PLUS_EXPR, r1, r2, notrunc),
			     const_binop (PLUS_EXPR, i1, i2, notrunc));
	  break;

	case MINUS_EXPR:
	  /* Complex subtraction is done component-wise.  */
	  t = build_complex (type,
			     const_binop (MINUS_EXPR, r1, r2, notrunc),
			     const_binop (MINUS_EXPR, i1, i2, notrunc));
	  break;

	case MULT_EXPR:
	  /* (r1 + i1*i) * (r2 + i2*i)
	     = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i.  */
	  t = build_complex (type,
			     const_binop (MINUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, r2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, i2, notrunc),
					  notrunc),
			     const_binop (PLUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, i2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, r2, notrunc),
					  notrunc));
	  break;

	case RDIV_EXPR:
	  /* (r1 + i1*i) / (r2 + i2*i)
	     = ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*i) / (r2^2 + i2^2).
	     For integral components, use truncating division.  */
	  {
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2, notrunc),
			     const_binop (MULT_EXPR, i2, i2, notrunc),
			     notrunc);

	    t = build_complex (type,
			       const_binop
			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
				? TRUNC_DIV_EXPR : RDIV_EXPR,
				const_binop (PLUS_EXPR,
					     const_binop (MULT_EXPR, r1, r2,
							  notrunc),
					     const_binop (MULT_EXPR, i1, i2,
							  notrunc),
					     notrunc),
				magsquared, notrunc),
			       const_binop
			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
				? TRUNC_DIV_EXPR : RDIV_EXPR,
				const_binop (MINUS_EXPR,
					     const_binop (MULT_EXPR, i1, r2,
							  notrunc),
					     const_binop (MULT_EXPR, r1, i2,
							  notrunc),
					     notrunc),
				magsquared, notrunc));
	  }
	  break;

	default:
	  abort ();
	}
      return t;
    }
  /* Not a kind of constant we know how to combine.  */
  return 0;
}
1337 
1338 /* These are the hash table functions for the hash table of INTEGER_CST
1339    nodes of a sizetype.  */
1340 
/* Return the hash code of X, an INTEGER_CST.  */
1342 
1343 static hashval_t
1344 size_htab_hash (x)
1345      const void *x;
1346 {
1347   tree t = (tree) x;
1348 
1349   return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1350 	  ^ htab_hash_pointer (TREE_TYPE (t))
1351 	  ^ (TREE_OVERFLOW (t) << 20));
1352 }
1353 
/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
   is the same as that given by *Y, also an INTEGER_CST tree node.  */
1356 
1357 static int
1358 size_htab_eq (x, y)
1359      const void *x;
1360      const void *y;
1361 {
1362   tree xt = (tree) x;
1363   tree yt = (tree) y;
1364 
1365   return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1366 	  && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1367 	  && TREE_TYPE (xt) == TREE_TYPE (yt)
1368 	  && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1369 }
1370 
1371 /* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
1372    bits are given by NUMBER and of the sizetype represented by KIND.  */
1373 
1374 tree
1375 size_int_wide (number, kind)
1376      HOST_WIDE_INT number;
1377      enum size_type_kind kind;
1378 {
1379   return size_int_type_wide (number, sizetype_tab[(int) kind]);
1380 }
1381 
1382 /* Likewise, but the desired type is specified explicitly.  */
1383 
/* Scratch INTEGER_CST node reused by size_int_type_wide; replaced with a
   fresh node whenever it is entered into the hash table.  */
static GTY (()) tree new_const;

/* Hash table of sizetype INTEGER_CST nodes, so size_int_type_wide can
   return a cached node instead of building a duplicate; GTY if_marked
   controls which entries survive garbage collection.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
     htab_t size_htab;
1387 
tree
size_int_type_wide (number, type)
     HOST_WIDE_INT number;
     tree type;
{
  PTR *slot;

  /* Lazily create the hash table and the scratch node on first use.  */
  if (size_htab == 0)
    {
      size_htab = htab_create (1024, size_htab_hash, size_htab_eq, NULL);
      new_const = make_node (INTEGER_CST);
    }

  /* Adjust NEW_CONST to be the constant we want.  If it's already in the
     hash table, we return the value from the hash table.  Otherwise, we
     place that in the hash table and make a new node for the next time.  */
  TREE_INT_CST_LOW (new_const) = number;
  /* Sign-extend NUMBER into the high word.  */
  TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
  TREE_TYPE (new_const) = type;
  TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
    = force_fit_type (new_const, 0);

  slot = htab_find_slot (size_htab, new_const, INSERT);
  if (*slot == 0)
    {
      tree t = new_const;

      /* NEW_CONST now belongs to the hash table; allocate a fresh
	 scratch node for the next call.  */
      *slot = (PTR) new_const;
      new_const = make_node (INTEGER_CST);
      return t;
    }
  else
    return (tree) *slot;
}
1422 
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a size type.
   If the operands are constant, so is the result.  */
1427 
1428 tree
1429 size_binop (code, arg0, arg1)
1430      enum tree_code code;
1431      tree arg0, arg1;
1432 {
1433   tree type = TREE_TYPE (arg0);
1434 
1435   if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1436       || type != TREE_TYPE (arg1))
1437     abort ();
1438 
1439   /* Handle the special case of two integer constants faster.  */
1440   if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1441     {
1442       /* And some specific cases even faster than that.  */
1443       if (code == PLUS_EXPR && integer_zerop (arg0))
1444 	return arg1;
1445       else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1446 	       && integer_zerop (arg1))
1447 	return arg0;
1448       else if (code == MULT_EXPR && integer_onep (arg0))
1449 	return arg1;
1450 
1451       /* Handle general case of two integer constants.  */
1452       return int_const_binop (code, arg0, arg1, 0);
1453     }
1454 
1455   if (arg0 == error_mark_node || arg1 == error_mark_node)
1456     return error_mark_node;
1457 
1458   return fold (build (code, type, arg0, arg1));
1459 }
1460 
1461 /* Given two values, either both of sizetype or both of bitsizetype,
1462    compute the difference between the two values.  Return the value
1463    in signed type corresponding to the type of the operands.  */
1464 
1465 tree
1466 size_diffop (arg0, arg1)
1467      tree arg0, arg1;
1468 {
1469   tree type = TREE_TYPE (arg0);
1470   tree ctype;
1471 
1472   if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1473       || type != TREE_TYPE (arg1))
1474     abort ();
1475 
1476   /* If the type is already signed, just do the simple thing.  */
1477   if (! TREE_UNSIGNED (type))
1478     return size_binop (MINUS_EXPR, arg0, arg1);
1479 
1480   ctype = (type == bitsizetype || type == ubitsizetype
1481 	   ? sbitsizetype : ssizetype);
1482 
1483   /* If either operand is not a constant, do the conversions to the signed
1484      type and subtract.  The hardware will do the right thing with any
1485      overflow in the subtraction.  */
1486   if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1487     return size_binop (MINUS_EXPR, convert (ctype, arg0),
1488 		       convert (ctype, arg1));
1489 
1490   /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1491      Otherwise, subtract the other way, convert to CTYPE (we know that can't
1492      overflow) and negate (which can't either).  Special-case a result
1493      of zero while we're here.  */
1494   if (tree_int_cst_equal (arg0, arg1))
1495     return convert (ctype, integer_zero_node);
1496   else if (tree_int_cst_lt (arg1, arg0))
1497     return convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1498   else
1499     return size_binop (MINUS_EXPR, convert (ctype, integer_zero_node),
1500 		       convert (ctype, size_binop (MINUS_EXPR, arg1, arg0)));
1501 }
1502 
1503 
1504 /* Given T, a tree representing type conversion of ARG1, a constant,
1505    return a constant tree representing the result of conversion.  */
1506 
static tree
fold_convert (t, arg1)
     tree t;
     tree arg1;
{
  tree type = TREE_TYPE (t);
  int overflow = 0;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  /* If we would build a constant wider than GCC supports,
	     leave the conversion unfolded.  */
	  if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
	    return t;

	  /* If we are trying to make a sizetype for a small integer, use
	     size_int to pick up cached types to reduce duplicate nodes.  */
	  if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	      && !TREE_CONSTANT_OVERFLOW (arg1)
	      && compare_tree_int (arg1, 10000) < 0)
	    return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);

	  /* Given an integer constant, make new constant with new type,
	     appropriately sign-extended or truncated.  */
	  t = build_int_2 (TREE_INT_CST_LOW (arg1),
			   TREE_INT_CST_HIGH (arg1));
	  TREE_TYPE (t) = type;
	  /* Indicate an overflow if (1) ARG1 already overflowed,
	     or (2) force_fit_type indicates an overflow.
	     Tell force_fit_type that an overflow has already occurred
	     if ARG1 is a too-large unsigned value and T is signed.
	     But don't indicate an overflow if converting a pointer.  */
	  TREE_OVERFLOW (t)
	    = ((force_fit_type (t,
				(TREE_INT_CST_HIGH (arg1) < 0
				 && (TREE_UNSIGNED (type)
				    < TREE_UNSIGNED (TREE_TYPE (arg1)))))
		&& ! POINTER_TYPE_P (TREE_TYPE (arg1)))
	       || TREE_OVERFLOW (arg1));
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
	}
      else if (TREE_CODE (arg1) == REAL_CST)
	{
	  /* Don't initialize these, use assignments.
	     Initialized local aggregates don't work on old compilers.  */
	  REAL_VALUE_TYPE x;
	  REAL_VALUE_TYPE l;
	  REAL_VALUE_TYPE u;
	  tree type1 = TREE_TYPE (arg1);
	  int no_upper_bound;

	  x = TREE_REAL_CST (arg1);
	  l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));

	  no_upper_bound = (TYPE_MAX_VALUE (type) == NULL);
	  if (!no_upper_bound)
	    u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));

	  /* See if X will be in range after truncation towards 0.
	     To compensate for truncation, move the bounds away from 0,
	     but reject if X exactly equals the adjusted bounds.  */
	  REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
	  if (!no_upper_bound)
	    REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
	  /* If X is a NaN, use zero instead and show we have an overflow.
	     Otherwise, range check.
	     NOTE(review): when NO_UPPER_BOUND is set, the condition below
	     is always true (its middle conjunct is false), so OVERFLOW is
	     set even though X cannot exceed a nonexistent upper bound —
	     confirm this is intended.  */
	  if (REAL_VALUE_ISNAN (x))
	    overflow = 1, x = dconst0;
	  else if (! (REAL_VALUES_LESS (l, x)
		      && !no_upper_bound
		      && REAL_VALUES_LESS (x, u)))
	    overflow = 1;

	  {
	    HOST_WIDE_INT low, high;
	    REAL_VALUE_TO_INT (&low, &high, x);
	    t = build_int_2 (low, high);
	  }
	  TREE_TYPE (t) = type;
	  TREE_OVERFLOW (t)
	    = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
	}
      TREE_TYPE (t) = type;
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
	    {
	      /* We make a copy of ARG1 so that we don't modify an
		 existing constant tree.  */
	      t = copy_node (arg1);
	      TREE_TYPE (t) = type;
	      return t;
	    }

	  /* Round the value to the precision of the target mode.  */
	  t = build_real (type,
			  real_value_truncate (TYPE_MODE (type),
					       TREE_REAL_CST (arg1)));

	  TREE_OVERFLOW (t)
	    = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
	  return t;
	}
    }
  /* The result of converting a constant is itself constant.  */
  TREE_CONSTANT (t) = 1;
  return t;
}
1625 
1626 /* Return an expr equal to X but certainly not valid as an lvalue.  */
1627 
1628 tree
1629 non_lvalue (x)
1630      tree x;
1631 {
1632   tree result;
1633 
1634   /* These things are certainly not lvalues.  */
1635   if (TREE_CODE (x) == NON_LVALUE_EXPR
1636       || TREE_CODE (x) == INTEGER_CST
1637       || TREE_CODE (x) == REAL_CST
1638       || TREE_CODE (x) == STRING_CST
1639       || TREE_CODE (x) == ADDR_EXPR)
1640     return x;
1641 
1642   result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1643   TREE_CONSTANT (result) = TREE_CONSTANT (x);
1644   return result;
1645 }
1646 
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  Consulted by pedantic_non_lvalue
   below.  */

int pedantic_lvalues;
1651 
1652 /* When pedantic, return an expr equal to X but certainly not valid as a
1653    pedantic lvalue.  Otherwise, return X.  */
1654 
1655 tree
1656 pedantic_non_lvalue (x)
1657      tree x;
1658 {
1659   if (pedantic_lvalues)
1660     return non_lvalue (x);
1661   else
1662     return x;
1663 }
1664 
1665 /* Given a tree comparison code, return the code that is the logical inverse
1666    of the given code.  It is not safe to do this for floating-point
1667    comparisons, except for NE_EXPR and EQ_EXPR.  */
1668 
1669 static enum tree_code
1670 invert_tree_comparison (code)
1671      enum tree_code code;
1672 {
1673   switch (code)
1674     {
1675     case EQ_EXPR:
1676       return NE_EXPR;
1677     case NE_EXPR:
1678       return EQ_EXPR;
1679     case GT_EXPR:
1680       return LE_EXPR;
1681     case GE_EXPR:
1682       return LT_EXPR;
1683     case LT_EXPR:
1684       return GE_EXPR;
1685     case LE_EXPR:
1686       return GT_EXPR;
1687     default:
1688       abort ();
1689     }
1690 }
1691 
1692 /* Similar, but return the comparison that results if the operands are
1693    swapped.  This is safe for floating-point.  */
1694 
1695 static enum tree_code
1696 swap_tree_comparison (code)
1697      enum tree_code code;
1698 {
1699   switch (code)
1700     {
1701     case EQ_EXPR:
1702     case NE_EXPR:
1703       return code;
1704     case GT_EXPR:
1705       return LT_EXPR;
1706     case GE_EXPR:
1707       return LE_EXPR;
1708     case LT_EXPR:
1709       return GT_EXPR;
1710     case LE_EXPR:
1711       return GE_EXPR;
1712     default:
1713       abort ();
1714     }
1715 }
1716 
1717 
1718 /* Convert a comparison tree code from an enum tree_code representation
1719    into a compcode bit-based encoding.  This function is the inverse of
1720    compcode_to_comparison.  */
1721 
1722 static int
1723 comparison_to_compcode (code)
1724      enum tree_code code;
1725 {
1726   switch (code)
1727     {
1728     case LT_EXPR:
1729       return COMPCODE_LT;
1730     case EQ_EXPR:
1731       return COMPCODE_EQ;
1732     case LE_EXPR:
1733       return COMPCODE_LE;
1734     case GT_EXPR:
1735       return COMPCODE_GT;
1736     case NE_EXPR:
1737       return COMPCODE_NE;
1738     case GE_EXPR:
1739       return COMPCODE_GE;
1740     default:
1741       abort ();
1742     }
1743 }
1744 
1745 /* Convert a compcode bit-based encoding of a comparison operator back
1746    to GCC's enum tree_code representation.  This function is the
1747    inverse of comparison_to_compcode.  */
1748 
1749 static enum tree_code
1750 compcode_to_comparison (code)
1751      int code;
1752 {
1753   switch (code)
1754     {
1755     case COMPCODE_LT:
1756       return LT_EXPR;
1757     case COMPCODE_EQ:
1758       return EQ_EXPR;
1759     case COMPCODE_LE:
1760       return LE_EXPR;
1761     case COMPCODE_GT:
1762       return GT_EXPR;
1763     case COMPCODE_NE:
1764       return NE_EXPR;
1765     case COMPCODE_GE:
1766       return GE_EXPR;
1767     default:
1768       abort ();
1769     }
1770 }
1771 
1772 /* Return nonzero if CODE is a tree code that represents a truth value.  */
1773 
1774 static int
1775 truth_value_p (code)
1776      enum tree_code code;
1777 {
1778   return (TREE_CODE_CLASS (code) == '<'
1779 	  || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
1780 	  || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
1781 	  || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
1782 }
1783 
1784 /* Return nonzero if two operands are necessarily equal.
1785    If ONLY_CONST is nonzero, only return nonzero for constants.
1786    This function tests whether the operands are indistinguishable;
1787    it does not test whether they are equal using C's == operation.
1788    The distinction is important for IEEE floating point, because
1789    (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
1790    (2) two NaNs may be indistinguishable, but NaN!=NaN.  */
1791 
int
operand_equal_p (arg0, arg1, only_const)
     tree arg0, arg1;
     int only_const;
{
  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
	 Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us. In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! only_const
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return (! TREE_CONSTANT_OVERFLOW (arg0)
		&& ! TREE_CONSTANT_OVERFLOW (arg1)
		&& tree_int_cst_equal (arg0, arg1));

      case REAL_CST:
	return (! TREE_CONSTANT_OVERFLOW (arg0)
		&& ! TREE_CONSTANT_OVERFLOW (arg1)
		&& REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
					  TREE_REAL_CST (arg1)));

      case VECTOR_CST:
	{
	  tree v1, v2;

	  if (TREE_CONSTANT_OVERFLOW (arg0)
	      || TREE_CONSTANT_OVERFLOW (arg1))
	    return 0;

	  /* Compare the two element chains pairwise.  */
	  v1 = TREE_VECTOR_CST_ELTS (arg0);
	  v2 = TREE_VECTOR_CST_ELTS (arg1);
	  while (v1 && v2)
	    {
	      if (!operand_equal_p (v1, v2, only_const))
		return 0;
	      v1 = TREE_CHAIN (v1);
	      v2 = TREE_CHAIN (v2);
	    }

	  return 1;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 only_const)
		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
				    only_const));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			      TREE_STRING_POINTER (arg1),
			      TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				0);
      default:
	break;
      }

  /* From here on, equality requires examining subexpressions, which a
     constant-only comparison may not do.  */
  if (only_const)
    return 0;

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case '1':
      /* Two conversions are equal only if signedness and modes match.  */
      if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
	  && (TREE_UNSIGNED (TREE_TYPE (arg0))
	      != TREE_UNSIGNED (TREE_TYPE (arg1))))
	return 0;

      return operand_equal_p (TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg1, 0), 0);

    case '<':
    case '2':
      if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
	  && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
			      0))
	return 1;

      /* For commutative ops, allow the other order.  */
      return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
	       || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
	       || TREE_CODE (arg0) == BIT_IOR_EXPR
	       || TREE_CODE (arg0) == BIT_XOR_EXPR
	       || TREE_CODE (arg0) == BIT_AND_EXPR
	       || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), 0));

    case 'r':
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contains a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
	  || TREE_SIDE_EFFECTS (arg1))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	  return operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0);

	case COMPONENT_REF:
	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 0), 0)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 1), 0));

	case BIT_FIELD_REF:
	  /* Object, position, and size must all match.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 0), 0)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 1), 0)
		  && operand_equal_p (TREE_OPERAND (arg0, 2),
				      TREE_OPERAND (arg1, 2), 0));
	default:
	  return 0;
	}

    case 'e':
      /* RTL_EXPRs are compared by their generated RTL.  */
      if (TREE_CODE (arg0) == RTL_EXPR)
	return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
      return 0;

    default:
      return 0;
    }
}
1957 
1958 /* Similar to operand_equal_p, but see if ARG0 might have been made by
1959    shorten_compare from ARG1 when ARG1 was being compared with OTHER.
1960 
1961    When in doubt, return 0.  */
1962 
1963 static int
1964 operand_equal_for_comparison_p (arg0, arg1, other)
1965      tree arg0, arg1;
1966      tree other;
1967 {
1968   int unsignedp1, unsignedpo;
1969   tree primarg0, primarg1, primother;
1970   unsigned int correct_width;
1971 
1972   if (operand_equal_p (arg0, arg1, 0))
1973     return 1;
1974 
1975   if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
1976       || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
1977     return 0;
1978 
1979   /* Discard any conversions that don't change the modes of ARG0 and ARG1
1980      and see if the inner values are the same.  This removes any
1981      signedness comparison, which doesn't matter here.  */
1982   primarg0 = arg0, primarg1 = arg1;
1983   STRIP_NOPS (primarg0);
1984   STRIP_NOPS (primarg1);
1985   if (operand_equal_p (primarg0, primarg1, 0))
1986     return 1;
1987 
1988   /* Duplicate what shorten_compare does to ARG1 and see if that gives the
1989      actual comparison operand, ARG0.
1990 
1991      First throw away any conversions to wider types
1992      already present in the operands.  */
1993 
1994   primarg1 = get_narrower (arg1, &unsignedp1);
1995   primother = get_narrower (other, &unsignedpo);
1996 
1997   correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
1998   if (unsignedp1 == unsignedpo
1999       && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2000       && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2001     {
2002       tree type = TREE_TYPE (arg0);
2003 
2004       /* Make sure shorter operand is extended the right way
2005 	 to match the longer operand.  */
2006       primarg1 = convert ((*lang_hooks.types.signed_or_unsigned_type)
2007 			  (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2008 
2009       if (operand_equal_p (arg0, convert (type, primarg1), 0))
2010 	return 1;
2011     }
2012 
2013   return 0;
2014 }
2015 
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (arg, cval1, cval2, save_p)
     tree arg;
     tree *cval1, *cval2;
     int *save_p;
{
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here by reclassifying them as
     unary or binary arithmetic on comparisons.  */
  if (class == 'e' && code == TRUTH_NOT_EXPR)
    class = '1';
  else if (class == 'e'
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    class = '2';

  else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      class = '1';
      *save_p = 1;
    }

  switch (class)
    {
    case '1':
      /* A unary operation qualifies iff its operand does.  */
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case '2':
      /* A binary operation qualifies iff both operands do.  */
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case 'c':
      /* A constant mentions no variables at all.  */
      return 1;

    case 'e':
      /* The only remaining 'e' code handled is COND_EXPR; all three of
	 its operands must qualify.  */
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case '<':
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      /* Match the first operand against the values already recorded,
	 filling the first free slot if it is new.  */
      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      /* Likewise for the second operand, which may only match *CVAL1
	 or fill/match *CVAL2.  */
      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}
2115 
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (arg, old0, new0, old1, new1)
     tree arg;
     tree old0, new0, old1, new1;
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here by reclassifying them
     as unary or binary arithmetic.  */
  if (class == 'e' && code == TRUTH_NOT_EXPR)
    class = '1';
  else if (class == 'e'
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = '2';

  switch (class)
    {
    case '1':
      /* Rebuild (and re-fold) a unary node around the substituted
	 operand.  */
      return fold (build1 (code, type,
			   eval_subst (TREE_OPERAND (arg, 0),
				       old0, new0, old1, new1)));

    case '2':
      /* Rebuild a binary node around both substituted operands.  */
      return fold (build (code, type,
			  eval_subst (TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1),
			  eval_subst (TREE_OPERAND (arg, 1),
				      old0, new0, old1, new1)));

    case 'e':
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

	case COMPOUND_EXPR:
	  /* Only the second operand contributes the value.  */
	  return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

	case COND_EXPR:
	  return fold (build (code, type,
			      eval_subst (TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 2),
					  old0, new0, old1, new1)));
	default:
	  break;
	}
      /* Fall through - any other 'e' node is rebuilt below from its
	 first two operands, the same way a comparison is.  ??? */

    case '<':
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold (build (code, type, arg0, arg1));
      }

    default:
      /* Leaves (constants, decls, etc.) are returned unchanged; only
	 comparison operands are substituted.  */
      return arg;
    }
}
2199 
2200 /* Return a tree for the case when the result of an expression is RESULT
2201    converted to TYPE and OMITTED was previously an operand of the expression
2202    but is now not needed (e.g., we folded OMITTED * 0).
2203 
2204    If OMITTED has side effects, we must evaluate it.  Otherwise, just do
2205    the conversion of RESULT to TYPE.  */
2206 
2207 static tree
2208 omit_one_operand (type, result, omitted)
2209      tree type, result, omitted;
2210 {
2211   tree t = convert (type, result);
2212 
2213   if (TREE_SIDE_EFFECTS (omitted))
2214     return build (COMPOUND_EXPR, type, omitted, t);
2215 
2216   return non_lvalue (t);
2217 }
2218 
2219 /* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */
2220 
2221 static tree
2222 pedantic_omit_one_operand (type, result, omitted)
2223      tree type, result, omitted;
2224 {
2225   tree t = convert (type, result);
2226 
2227   if (TREE_SIDE_EFFECTS (omitted))
2228     return build (COMPOUND_EXPR, type, omitted, t);
2229 
2230   return pedantic_non_lvalue (t);
2231 }
2232 
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).  */

tree
invert_truthvalue (arg)
     tree arg;
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  /* Propagate errors unchanged.  */
  if (code == ERROR_MARK)
    return arg;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == '<')
    {
      if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
	  && !flag_unsafe_math_optimizations
	  && code != NE_EXPR
	  && code != EQ_EXPR)
	return build1 (TRUTH_NOT_EXPR, type, arg);
      else
	return build (invert_tree_comparison (code), type,
		      TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      /* A constant truth value inverts to 1 if it was zero, else 0.  */
      return convert (type, build_int_2 (integer_zerop (arg), 0));

    case TRUTH_AND_EXPR:
      /* De Morgan: !(a && b) == !a || !b.  */
      return build (TRUTH_OR_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      /* De Morgan: !(a || b) == !a && !b.  */
      return build (TRUTH_AND_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
		      TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build (TRUTH_XOR_EXPR, type,
		      invert_truthvalue (TREE_OPERAND (arg, 0)),
		      TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      /* De Morgan for the short-circuit forms; operand order (and thus
	 evaluation order) is preserved.  */
      return build (TRUTH_ORIF_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build (TRUTH_ANDIF_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      /* Double negation cancels.  */
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      /* Push the negation into both arms; the condition is untouched.  */
      return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
		    invert_truthvalue (TREE_OPERAND (arg, 1)),
		    invert_truthvalue (TREE_OPERAND (arg, 2)));

    case COMPOUND_EXPR:
      /* Only the second operand supplies the truth value.  */
      return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case WITH_RECORD_EXPR:
      return build (WITH_RECORD_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    TREE_OPERAND (arg, 1));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case FLOAT_EXPR:
      /* Invert underneath the conversion and rebuild the conversion.  */
      return build1 (TREE_CODE (arg), type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      /* !(x & 1) becomes (x & 1) == 0; only valid when the mask is 1.  */
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	break;
      return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));

    case SAVE_EXPR:
      /* Don't recurse into a SAVE_EXPR; wrap the whole thing instead.  */
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }
  /* Anything else must already be boolean-typed before the bare
     TRUTH_NOT_EXPR fallback is safe.  */
  if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
    abort ();
  return build1 (TRUTH_NOT_EXPR, type, arg);
}
2347 
2348 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2349    operands are another bit-wise operation with a common input.  If so,
2350    distribute the bit operations to save an operation and possibly two if
2351    constants are involved.  For example, convert
2352    	(A | B) & (A | C) into A | (B & C)
2353    Further simplification will occur if B and C are constants.
2354 
2355    If this optimization cannot be done, 0 will be returned.  */
2356 
2357 static tree
2358 distribute_bit_expr (code, type, arg0, arg1)
2359      enum tree_code code;
2360      tree type;
2361      tree arg0, arg1;
2362 {
2363   tree common;
2364   tree left, right;
2365 
2366   if (TREE_CODE (arg0) != TREE_CODE (arg1)
2367       || TREE_CODE (arg0) == code
2368       || (TREE_CODE (arg0) != BIT_AND_EXPR
2369 	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
2370     return 0;
2371 
2372   if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2373     {
2374       common = TREE_OPERAND (arg0, 0);
2375       left = TREE_OPERAND (arg0, 1);
2376       right = TREE_OPERAND (arg1, 1);
2377     }
2378   else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2379     {
2380       common = TREE_OPERAND (arg0, 0);
2381       left = TREE_OPERAND (arg0, 1);
2382       right = TREE_OPERAND (arg1, 0);
2383     }
2384   else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2385     {
2386       common = TREE_OPERAND (arg0, 1);
2387       left = TREE_OPERAND (arg0, 0);
2388       right = TREE_OPERAND (arg1, 1);
2389     }
2390   else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2391     {
2392       common = TREE_OPERAND (arg0, 1);
2393       left = TREE_OPERAND (arg0, 0);
2394       right = TREE_OPERAND (arg1, 0);
2395     }
2396   else
2397     return 0;
2398 
2399   return fold (build (TREE_CODE (arg0), type, common,
2400 		      fold (build (code, type, left, right))));
2401 }
2402 
2403 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2404    starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */
2405 
2406 static tree
2407 make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
2408      tree inner;
2409      tree type;
2410      int bitsize, bitpos;
2411      int unsignedp;
2412 {
2413   tree result = build (BIT_FIELD_REF, type, inner,
2414 		       size_int (bitsize), bitsize_int (bitpos));
2415 
2416   TREE_UNSIGNED (result) = unsignedp;
2417 
2418   return result;
2419 }
2420 
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (code, compare_type, lhs, rhs)
     enum tree_code code;
     tree compare_type;
     tree lhs, rhs;
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lvolatilep);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

 if (!const_p)
   {
     /* If this is not a constant, we can only do something if bit positions,
	sizes, and signedness are the same.  */
     rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
				   &runsignedp, &rvolatilep);

     if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	 || lunsignedp != runsignedp || offset != 0
	 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
       return 0;
   }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
				TYPE_ALIGN (TREE_TYPE (rinner))),
			 word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
  unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it. If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field: an all-ones
     constant narrowed (by shifting up and back down) to exactly LBITSIZE
     ones positioned at LBITPOS.  */
  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = convert (unsigned_type, mask);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return build (code, compare_type,
		  build (BIT_AND_EXPR, unsigned_type,
			 make_bit_field_ref (linner, unsigned_type,
					     nbitsize, nbitpos, 1),
			 mask),
		  build (BIT_AND_EXPR, unsigned_type,
			 make_bit_field_ref (rinner, unsigned_type,
					     nbitsize, nbitpos, 1),
			 mask));

  /* Otherwise, we are handling the constant case. See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
					convert (unsigned_type, rhs),
					size_int (lbitsize), 0)))
	{
	  warning ("comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return convert (compare_type,
			  (code == NE_EXPR
			   ? integer_one_node : integer_zero_node));
	}
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
			      size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
	{
	  warning ("comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return convert (compare_type,
			  (code == NE_EXPR
			   ? integer_one_node : integer_zero_node));
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = convert (type, integer_zero_node);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = fold (const_binop (BIT_AND_EXPR,
			   const_binop (LSHIFT_EXPR,
					convert (unsigned_type, rhs),
					size_int (lbitpos), 0),
			   mask, 0));

  return build (code, compare_type,
		build (BIT_AND_EXPR, unsigned_type, lhs, mask),
		rhs);
}
2594 
/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
			pvolatilep, pmask, pand_mask)
     tree exp;
     HOST_WIDE_INT *pbitsize, *pbitpos;
     enum machine_mode *pmode;
     int *punsignedp, *pvolatilep;
     tree *pmask;
     tree *pand_mask;
{
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  STRIP_NOPS (exp);

  /* Peel off an outer BIT_AND_EXPR with a constant mask, remembering
     the mask so it can be merged into the field mask below.  */
  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* Compute the mask to access the bitfield: *PBITSIZE low-order ones,
     made by shifting an all-ones constant up and back down.  */
  unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold (build (BIT_AND_EXPR, unsigned_type,
			convert (unsigned_type, and_mask), mask));

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
2676 
2677 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
2678    bit positions.  */
2679 
2680 static int
2681 all_ones_mask_p (mask, size)
2682      tree mask;
2683      int size;
2684 {
2685   tree type = TREE_TYPE (mask);
2686   unsigned int precision = TYPE_PRECISION (type);
2687   tree tmask;
2688 
2689   tmask = build_int_2 (~0, ~0);
2690   TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
2691   force_fit_type (tmask, 0);
2692   return
2693     tree_int_cst_equal (mask,
2694 			const_binop (RSHIFT_EXPR,
2695 				     const_binop (LSHIFT_EXPR, tmask,
2696 						  size_int (precision - size),
2697 						  0),
2698 				     size_int (precision - size), 0));
2699 }
2700 
2701 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
2702    represents the sign bit of EXP's type.  If EXP represents a sign
2703    or zero extension, also test VAL against the unextended type.
2704    The return value is the (sub)expression whose sign bit is VAL,
2705    or NULL_TREE otherwise.  */
2706 
2707 static tree
2708 sign_bit_p (exp, val)
2709      tree exp;
2710      tree val;
2711 {
2712   unsigned HOST_WIDE_INT lo;
2713   HOST_WIDE_INT hi;
2714   int width;
2715   tree t;
2716 
2717   /* Tree EXP must have an integral type.  */
2718   t = TREE_TYPE (exp);
2719   if (! INTEGRAL_TYPE_P (t))
2720     return NULL_TREE;
2721 
2722   /* Tree VAL must be an integer constant.  */
2723   if (TREE_CODE (val) != INTEGER_CST
2724       || TREE_CONSTANT_OVERFLOW (val))
2725     return NULL_TREE;
2726 
2727   width = TYPE_PRECISION (t);
2728   if (width > HOST_BITS_PER_WIDE_INT)
2729     {
2730       hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
2731       lo = 0;
2732     }
2733   else
2734     {
2735       hi = 0;
2736       lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
2737     }
2738 
2739   if (TREE_INT_CST_HIGH (val) == hi && TREE_INT_CST_LOW (val) == lo)
2740     return exp;
2741 
2742   /* Handle extension from a narrower type.  */
2743   if (TREE_CODE (exp) == NOP_EXPR
2744       && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
2745     return sign_bit_p (TREE_OPERAND (exp, 0), val);
2746 
2747   return NULL_TREE;
2748 }
2749 
2750 /* Subroutine for fold_truthop: determine if an operand is simple enough
2751    to be evaluated unconditionally.  */
2752 
2753 static int
2754 simple_operand_p (exp)
2755      tree exp;
2756 {
2757   /* Strip any conversions that don't change the machine mode.  */
2758   while ((TREE_CODE (exp) == NOP_EXPR
2759 	  || TREE_CODE (exp) == CONVERT_EXPR)
2760 	 && (TYPE_MODE (TREE_TYPE (exp))
2761 	     == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2762     exp = TREE_OPERAND (exp, 0);
2763 
2764   return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2765 	  || (DECL_P (exp)
2766 	      && ! TREE_ADDRESSABLE (exp)
2767 	      && ! TREE_THIS_VOLATILE (exp)
2768 	      && ! DECL_NONLOCAL (exp)
2769 	      /* Don't regard global variables as simple.  They may be
2770 		 allocated in ways unknown to the compiler (shared memory,
2771 		 #pragma weak, etc).  */
2772 	      && ! TREE_PUBLIC (exp)
2773 	      && ! DECL_EXTERNAL (exp)
2774 	      /* Loading a static variable is unduly expensive, but global
2775 		 registers aren't expensive.  */
2776 	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
2777 }
2778 
2779 /* The following functions are subroutines to fold_range_test and allow it to
2780    try to change a logical combination of comparisons into a range test.
2781 
2782    For example, both
2783    	X == 2 || X == 3 || X == 4 || X == 5
2784    and
2785    	X >= 2 && X <= 5
2786    are converted to
2787 	(unsigned) (X - 2) <= 3
2788 
2789    We describe each set of comparisons as being either inside or outside
2790    a range, using a variable named like IN_P, and then describe the
2791    range with a lower and upper bound.  If one of the bounds is omitted,
2792    it represents either the highest or lowest value of the type.
2793 
2794    In the comments below, we represent a range by two numbers in brackets
2795    preceded by a "+" to designate being inside that range, or a "-" to
2796    designate being outside that range, so the condition can be inverted by
2797    flipping the prefix.  An omitted bound is represented by a "-".  For
2798    example, "- [-, 10]" means being outside the range starting at the lowest
2799    possible value and ending at 10, in other words, being greater than 10.
2800    The range "+ [-, -]" is always true and hence the range "- [-, -]" is
2801    always false.
2802 
2803    We set up things so that the missing bounds are handled in a consistent
2804    manner so neither a missing bound nor "true" and "false" need to be
2805    handled using a special case.  */
2806 
2807 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
2808    of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
2809    and UPPER1_P are nonzero if the respective argument is an upper bound
2810    and zero for a lower.  TYPE, if nonzero, is the type of the result; it
2811    must be specified for a comparison.  ARG1 will be converted to ARG0's
2812    type if both are specified.  */
2813 
2814 static tree
2815 range_binop (code, type, arg0, upper0_p, arg1, upper1_p)
2816      enum tree_code code;
2817      tree type;
2818      tree arg0, arg1;
2819      int upper0_p, upper1_p;
2820 {
2821   tree tem;
2822   int result;
2823   int sgn0, sgn1;
2824 
2825   /* If neither arg represents infinity, do the normal operation.
2826      Else, if not a comparison, return infinity.  Else handle the special
2827      comparison rules. Note that most of the cases below won't occur, but
2828      are handled for consistency.  */
2829 
2830   if (arg0 != 0 && arg1 != 0)
2831     {
2832       tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
2833 			 arg0, convert (TREE_TYPE (arg0), arg1)));
2834       STRIP_NOPS (tem);
2835       return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
2836     }
2837 
2838   if (TREE_CODE_CLASS (code) != '<')
2839     return 0;
2840 
2841   /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
2842      for neither.  In real maths, we cannot assume open ended ranges are
2843      the same. But, this is computer arithmetic, where numbers are finite.
2844      We can therefore make the transformation of any unbounded range with
2845      the value Z, Z being greater than any representable number. This permits
2846      us to treat unbounded ranges as equal.  */
2847   sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
2848   sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
2849   switch (code)
2850     {
2851     case EQ_EXPR:
2852       result = sgn0 == sgn1;
2853       break;
2854     case NE_EXPR:
2855       result = sgn0 != sgn1;
2856       break;
2857     case LT_EXPR:
2858       result = sgn0 < sgn1;
2859       break;
2860     case LE_EXPR:
2861       result = sgn0 <= sgn1;
2862       break;
2863     case GT_EXPR:
2864       result = sgn0 > sgn1;
2865       break;
2866     case GE_EXPR:
2867       result = sgn0 >= sgn1;
2868       break;
2869     default:
2870       abort ();
2871     }
2872 
2873   return convert (type, result ? integer_one_node : integer_zero_node);
2874 }
2875 
2876 /* Given EXP, a logical expression, set the range it is testing into
2877    variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
2878    actually being tested.  *PLOW and *PHIGH will be made of the same type
2879    as the returned expression.  If EXP is not a comparison, we will most
2880    likely not be returning a useful value and range.  */
2881 
static tree
make_range (exp, pin_p, plow, phigh)
     tree exp;
     int *pin_p;
     tree *plow, *phigh;
{
  enum tree_code code;
  /* Operands of EXP and the type in which the range arithmetic is done.  */
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
  /* First non-null TYPE seen; used to refuse widening conversions below.  */
  tree orig_type = NULL_TREE;
  /* Current range (IN_P, LOW, HIGH); the N_* variables hold candidate
     updates before they are committed.  */
  int in_p, n_in_p;
  tree low, high, n_low, n_high;

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);

  while (1)
    {
      code = TREE_CODE (exp);

      /* Pick up the operand(s) and the operand type for the codes we
	 know how to refine.  */
      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == '<'
	      || TREE_CODE_CLASS (code) == '1'
	      || TREE_CODE_CLASS (code) == '2')
	    type = TREE_TYPE (arg0);
	  if (TREE_CODE_CLASS (code) == '2'
	      || TREE_CODE_CLASS (code) == '<'
	      || (TREE_CODE_CLASS (code) == 'e'
		  && TREE_CODE_LENGTH (code) > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}

      /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
	 lose a cast by accident.  */
      if (type != NULL_TREE && orig_type == NULL_TREE)
	orig_type = type;

      switch (code)
	{
	case TRUTH_NOT_EXPR:
	  /* Negation just flips whether we are in or out of the range.  */
	  in_p = ! in_p, exp = arg0;
	  continue;

	case EQ_EXPR: case NE_EXPR:
	case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
	  /* We can only do something if the range is testing for zero
	     and if the second operand is an integer constant.  Note that
	     saying something is "in" the range we make is done by
	     complementing IN_P since it will set in the initial case of
	     being not equal to zero; "out" is leaving it alone.  */
	  if (low == 0 || high == 0
	      || ! integer_zerop (low) || ! integer_zerop (high)
	      || TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  switch (code)
	    {
	    case NE_EXPR:  /* - [c, c]  */
	      low = high = arg1;
	      break;
	    case EQ_EXPR:  /* + [c, c]  */
	      in_p = ! in_p, low = high = arg1;
	      break;
	    case GT_EXPR:  /* - [-, c] */
	      low = 0, high = arg1;
	      break;
	    case GE_EXPR:  /* + [c, -] */
	      in_p = ! in_p, low = arg1, high = 0;
	      break;
	    case LT_EXPR:  /* - [c, -] */
	      low = arg1, high = 0;
	      break;
	    case LE_EXPR:  /* + [-, c] */
	      in_p = ! in_p, low = 0, high = arg1;
	      break;
	    default:
	      abort ();
	    }

	  exp = arg0;

	  /* If this is an unsigned comparison, we also know that EXP is
	     greater than or equal to zero.  We base the range tests we make
	     on that fact, so we record it here so we can parse existing
	     range tests.  */
	  if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
	    {
	      /* Intersect the half-open range with [0, +inf).  */
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
				  1, convert (type, integer_zero_node),
				  NULL_TREE))
		break;

	      in_p = n_in_p, low = n_low, high = n_high;

	      /* If the high bound is missing, but we
		 have a low bound, reverse the range so
		 it goes from zero to the low bound minus 1.  */
	      if (high == 0 && low)
		{
		  in_p = ! in_p;
		  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				      integer_one_node, 0);
		  low = convert (type, integer_zero_node);
		}
	    }
	  continue;

	case NEGATE_EXPR:
	  /* (-x) IN [a,b] -> x in [-b, -a]  */
	  n_low = range_binop (MINUS_EXPR, type,
			       convert (type, integer_zero_node), 0, high, 1);
	  n_high = range_binop (MINUS_EXPR, type,
				convert (type, integer_zero_node), 0, low, 0);
	  low = n_low, high = n_high;
	  exp = arg0;
	  continue;

	case BIT_NOT_EXPR:
	  /* ~ X -> -X - 1; rewrite and let the MINUS_EXPR case handle it.  */
	  exp = build (MINUS_EXPR, type, negate_expr (arg0),
		       convert (type, integer_one_node));
	  continue;

	case PLUS_EXPR:  case MINUS_EXPR:
	  if (TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  /* If EXP is signed, any overflow in the computation is undefined,
	     so we don't worry about it so long as our computations on
	     the bounds don't overflow.  For unsigned, overflow is defined
	     and this is exactly the right thing.  */
	  n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			       type, low, 0, arg1, 0);
	  n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
				type, high, 1, arg1, 0);
	  if ((n_low != 0 && TREE_OVERFLOW (n_low))
	      || (n_high != 0 && TREE_OVERFLOW (n_high)))
	    break;

	  /* Check for an unsigned range which has wrapped around the maximum
	     value thus making n_high < n_low, and normalize it.  */
	  if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	    {
	      low = range_binop (PLUS_EXPR, type, n_high, 0,
				 integer_one_node, 0);
	      high = range_binop (MINUS_EXPR, type, n_low, 0,
				  integer_one_node, 0);

	      /* If the range is of the form +/- [ x+1, x ], we won't
		 be able to normalize it.  But then, it represents the
		 whole range or the empty set, so make it
		 +/- [ -, - ].  */
	      if (tree_int_cst_equal (n_low, low)
		  && tree_int_cst_equal (n_high, high))
		low = high = 0;
	      else
		in_p = ! in_p;
	    }
	  else
	    low = n_low, high = n_high;

	  exp = arg0;
	  continue;

	case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
	  /* Refuse to look through a widening conversion: the inner
	     value's range cannot be described in the wider type's terms
	     without losing the cast.  */
	  if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
	    break;

	  if (! INTEGRAL_TYPE_P (type)
	      || (low != 0 && ! int_fits_type_p (low, type))
	      || (high != 0 && ! int_fits_type_p (high, type)))
	    break;

	  n_low = low, n_high = high;

	  if (n_low != 0)
	    n_low = convert (type, n_low);

	  if (n_high != 0)
	    n_high = convert (type, n_high);

	  /* If we're converting from an unsigned to a signed type,
	     we will be doing the comparison as unsigned.  The tests above
	     have already verified that LOW and HIGH are both positive.

	     So we have to make sure that the original unsigned value will
	     be interpreted as positive.  */
	  if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
	    {
	      tree equiv_type = (*lang_hooks.types.type_for_mode)
		(TYPE_MODE (type), 1);
	      tree high_positive;

	      /* A range without an upper bound is, naturally, unbounded.
		 Since convert would have cropped a very large value, use
		 the max value for the destination type.  */
	      high_positive
		= TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
		  : TYPE_MAX_VALUE (type);

	      if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
	        high_positive = fold (build (RSHIFT_EXPR, type,
					     convert (type, high_positive),
					     convert (type, integer_one_node)));

	      /* If the low bound is specified, "and" the range with the
		 range for which the original unsigned value will be
		 positive.  */
	      if (low != 0)
		{
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      1, n_low, n_high,
				      1, convert (type, integer_zero_node),
				      high_positive))
		    break;

		  in_p = (n_in_p == in_p);
		}
	      else
		{
		  /* Otherwise, "or" the range with the range of the input
		     that will be interpreted as negative.  */
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      0, n_low, n_high,
				      1, convert (type, integer_zero_node),
				      high_positive))
		    break;

		  in_p = (in_p != n_in_p);
		}
	    }

	  exp = arg0;
	  low = n_low, high = n_high;
	  continue;

	default:
	  break;
	}

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
3145 
3146 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3147    type, TYPE, return an expression to test if EXP is in (or out of, depending
3148    on IN_P) the range.  */
3149 
static tree
build_range_check (type, exp, in_p, low, high)
     tree type;
     tree exp;
     int in_p;
     tree low, high;
{
  tree etype = TREE_TYPE (exp);
  tree value;

  /* An "out of range" test is the inversion of the corresponding
     "in range" test.  */
  if (! in_p
      && (0 != (value = build_range_check (type, exp, 1, low, high))))
    return invert_truthvalue (value);

  /* No bounds at all: the test is always true.  */
  if (low == 0 && high == 0)
    return convert (type, integer_one_node);

  /* Only one bound: a single comparison suffices.  */
  if (low == 0)
    return fold (build (LE_EXPR, type, exp, high));

  if (high == 0)
    return fold (build (GE_EXPR, type, exp, low));

  /* A degenerate range [c, c] is an equality test.  */
  if (operand_equal_p (low, high, 0))
    return fold (build (EQ_EXPR, type, exp, low));

  /* [0, high] on a signed value: redo the check as unsigned so a single
     unsigned comparison covers both bounds.  */
  if (integer_zerop (low))
    {
      if (! TREE_UNSIGNED (etype))
	{
	  etype = (*lang_hooks.types.unsigned_type) (etype);
	  high = convert (etype, high);
	  exp = convert (etype, exp);
	}
      return build_range_check (type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      /* Build the constant 2**(prec-1) - 1, the largest value of the
	 signed type, as a (hi, lo) double-word pair.  */
      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
	}
      else
	{
	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	  lo = (unsigned HOST_WIDE_INT) -1;
	}

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
	{
	  /* The test must be done as signed for "> 0" to cover exactly
	     the range [1, 2**(prec-1) - 1].  */
	  if (TREE_UNSIGNED (etype))
	    {
	      etype = (*lang_hooks.types.signed_type) (etype);
	      exp = convert (etype, exp);
	    }
	  return fold (build (GT_EXPR, type, exp,
			      convert (etype, integer_zero_node)));
	}
    }

  /* General case: shift the range down to start at zero, i.e. test
     (exp - low) <= (high - low), provided high - low doesn't overflow.  */
  if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
      && ! TREE_OVERFLOW (value))
    return build_range_check (type,
			      fold (build (MINUS_EXPR, etype, exp, low)),
			      1, convert (etype, integer_zero_node), value);

  return 0;
}
3226 
3227 /* Given two ranges, see if we can merge them into one.  Return 1 if we
3228    can, 0 if we can't.  Set the output range into the specified parameters.  */
3229 
static int
merge_ranges (pin_p, plow, phigh, in0_p, low0, high0, in1_p, low1, high1)
     int *pin_p;
     tree *plow, *phigh;
     int in0_p, in1_p;
     tree low0, high0, low1, high1;
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  /* Whether the two lower (resp. upper) bounds are equal, treating a
     missing bound as equal only to another missing bound.  */
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the first range to the end of the second.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  in_p = 1, high = high0;
	  low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
			     integer_one_node, 0);
	}
      else if (! subset || highequal)
	{
	  in_p = 1, low = low0;
	  high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
			      integer_one_node, 0);
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  in_p = 1, high = high1;
	  low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
			     integer_one_node, 0);
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  /* Adjacency test: high0 + 1 == low1.  */
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_binop (PLUS_EXPR, NULL_TREE,
						      high0, 1,
						      integer_one_node, 1),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    return 0;
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
3362 
3363 /* EXP is some logical combination of boolean tests.  See if we can
3364    merge it into some range test.  Return the new tree if so.  */
3365 
static tree
fold_range_test (exp)
     tree exp;
{
  int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
	       || TREE_CODE (exp) == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  /* Decode each side of the logical operation into a range test.  */
  tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
  tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
  tree tem;

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (TREE_TYPE (exp),
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    return or_op ? invert_truthvalue (tem) : tem;

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (BRANCH_COST >= 2
	   && lhs != 0 && rhs != 0
	   && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
	       || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
		      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
		      TREE_TYPE (exp), TREE_OPERAND (exp, 0),
		      TREE_OPERAND (exp, 1));

      else if ((*lang_hooks.decls.global_bindings_p) () == 0
	       && ! contains_placeholder_p (lhs))
	{
	  /* Wrap the shared subexpression so it is evaluated once, then
	     rebuild both range checks against that common value.  */
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
			  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			  TREE_TYPE (exp), lhs, rhs);
	}
    }

  return 0;
}
3433 
3434 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3435    bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
3437    it is an INTEGER_CST that should be AND'ed with the extra bits.  */
3438 
static tree
unextend (c, p, unsignedp, mask)
     tree c;
     int p;
     int unsignedp;
     tree mask;
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  /* If the field fills its mode, or the value is unsigned, the extra
     bits are already zero and there is nothing to do.  */
  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TREE_UNSIGNED (type))
    temp = convert ((*lang_hooks.types.signed_type) (type), temp);

  /* Move the sign bit to the top and arithmetic-shift it back down,
     replicating it through the upper MODESIZE - P bits.  */
  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
  /* If necessary, convert the type back to match the type of C.  */
  if (TREE_UNSIGNED (type))
    temp = convert (type, temp);

  return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
}
3478 
3479 /* Find ways of folding logical expressions of LHS and RHS:
3480    Try to merge two comparisons to the same innermost item.
3481    Look for range tests like "ch >= '0' && ch <= '9'".
3482    Look for combinations of simple terms on machines with expensive branches
3483    and evaluate the RHS unconditionally.
3484 
3485    For example, if we have p->a == 2 && p->b == 4 and we can make an
3486    object large enough to span both A and B, we can do this with a comparison
3487    against the object ANDed with the a mask.
3488 
3489    If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3490    operations to do this with one comparison.
3491 
   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.
3494 
3495    CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
3496    TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3497 
3498    TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3499    two operands.
3500 
3501    We return the simplified tree or 0 if no optimization is possible.  */
3502 
3503 static tree
3504 fold_truthop (code, truth_type, lhs, rhs)
3505      enum tree_code code;
3506      tree truth_type, lhs, rhs;
3507 {
3508   /* If this is the "or" of two comparisons, we can do something if
3509      the comparisons are NE_EXPR.  If this is the "and", we can do something
3510      if the comparisons are EQ_EXPR.  I.e.,
3511      	(a->b == 2 && a->c == 4) can become (a->new == NEW).
3512 
3513      WANTED_CODE is this operation code.  For single bit fields, we can
3514      convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3515      comparison for one-bit fields.  */
3516 
3517   enum tree_code wanted_code;
3518   enum tree_code lcode, rcode;
3519   tree ll_arg, lr_arg, rl_arg, rr_arg;
3520   tree ll_inner, lr_inner, rl_inner, rr_inner;
3521   HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3522   HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3523   HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3524   HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3525   int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3526   enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3527   enum machine_mode lnmode, rnmode;
3528   tree ll_mask, lr_mask, rl_mask, rr_mask;
3529   tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3530   tree l_const, r_const;
3531   tree lntype, rntype, result;
3532   int first_bit, end_bit;
3533   int volatilep;
3534 
3535   /* Start by getting the comparison codes.  Fail if anything is volatile.
3536      If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3537      it were surrounded with a NE_EXPR.  */
3538 
3539   if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3540     return 0;
3541 
3542   lcode = TREE_CODE (lhs);
3543   rcode = TREE_CODE (rhs);
3544 
3545   if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3546     lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3547 
3548   if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3549     rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3550 
3551   if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3552     return 0;
3553 
3554   code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3555 	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3556 
3557   ll_arg = TREE_OPERAND (lhs, 0);
3558   lr_arg = TREE_OPERAND (lhs, 1);
3559   rl_arg = TREE_OPERAND (rhs, 0);
3560   rr_arg = TREE_OPERAND (rhs, 1);
3561 
3562   /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
3563   if (simple_operand_p (ll_arg)
3564       && simple_operand_p (lr_arg)
3565       && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
3566     {
3567       int compcode;
3568 
3569       if (operand_equal_p (ll_arg, rl_arg, 0)
3570           && operand_equal_p (lr_arg, rr_arg, 0))
3571         {
3572           int lcompcode, rcompcode;
3573 
3574           lcompcode = comparison_to_compcode (lcode);
3575           rcompcode = comparison_to_compcode (rcode);
3576           compcode = (code == TRUTH_AND_EXPR)
3577                      ? lcompcode & rcompcode
3578                      : lcompcode | rcompcode;
3579         }
3580       else if (operand_equal_p (ll_arg, rr_arg, 0)
3581                && operand_equal_p (lr_arg, rl_arg, 0))
3582         {
3583           int lcompcode, rcompcode;
3584 
3585           rcode = swap_tree_comparison (rcode);
3586           lcompcode = comparison_to_compcode (lcode);
3587           rcompcode = comparison_to_compcode (rcode);
3588           compcode = (code == TRUTH_AND_EXPR)
3589                      ? lcompcode & rcompcode
3590                      : lcompcode | rcompcode;
3591         }
3592       else
3593 	compcode = -1;
3594 
3595       if (compcode == COMPCODE_TRUE)
3596 	return convert (truth_type, integer_one_node);
3597       else if (compcode == COMPCODE_FALSE)
3598 	return convert (truth_type, integer_zero_node);
3599       else if (compcode != -1)
3600 	return build (compcode_to_comparison (compcode),
3601 		      truth_type, ll_arg, lr_arg);
3602     }
3603 
3604   /* If the RHS can be evaluated unconditionally and its operands are
3605      simple, it wins to evaluate the RHS unconditionally on machines
3606      with expensive branches.  In this case, this isn't a comparison
3607      that can be merged.  Avoid doing this if the RHS is a floating-point
3608      comparison since those can trap.  */
3609 
3610   if (BRANCH_COST >= 2
3611       && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3612       && simple_operand_p (rl_arg)
3613       && simple_operand_p (rr_arg))
3614     {
3615       /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
3616       if (code == TRUTH_OR_EXPR
3617 	  && lcode == NE_EXPR && integer_zerop (lr_arg)
3618 	  && rcode == NE_EXPR && integer_zerop (rr_arg)
3619 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3620 	return build (NE_EXPR, truth_type,
3621 		      build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3622 			     ll_arg, rl_arg),
3623 		      integer_zero_node);
3624 
3625       /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
3626       if (code == TRUTH_AND_EXPR
3627 	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
3628 	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
3629 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3630 	return build (EQ_EXPR, truth_type,
3631 		      build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3632 			     ll_arg, rl_arg),
3633 		      integer_zero_node);
3634 
3635       return build (code, truth_type, lhs, rhs);
3636     }
3637 
3638   /* See if the comparisons can be merged.  Then get all the parameters for
3639      each side.  */
3640 
3641   if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3642       || (rcode != EQ_EXPR && rcode != NE_EXPR))
3643     return 0;
3644 
3645   volatilep = 0;
3646   ll_inner = decode_field_reference (ll_arg,
3647 				     &ll_bitsize, &ll_bitpos, &ll_mode,
3648 				     &ll_unsignedp, &volatilep, &ll_mask,
3649 				     &ll_and_mask);
3650   lr_inner = decode_field_reference (lr_arg,
3651 				     &lr_bitsize, &lr_bitpos, &lr_mode,
3652 				     &lr_unsignedp, &volatilep, &lr_mask,
3653 				     &lr_and_mask);
3654   rl_inner = decode_field_reference (rl_arg,
3655 				     &rl_bitsize, &rl_bitpos, &rl_mode,
3656 				     &rl_unsignedp, &volatilep, &rl_mask,
3657 				     &rl_and_mask);
3658   rr_inner = decode_field_reference (rr_arg,
3659 				     &rr_bitsize, &rr_bitpos, &rr_mode,
3660 				     &rr_unsignedp, &volatilep, &rr_mask,
3661 				     &rr_and_mask);
3662 
3663   /* It must be true that the inner operation on the lhs of each
3664      comparison must be the same if we are to be able to do anything.
3665      Then see if we have constants.  If not, the same must be true for
3666      the rhs's.  */
3667   if (volatilep || ll_inner == 0 || rl_inner == 0
3668       || ! operand_equal_p (ll_inner, rl_inner, 0))
3669     return 0;
3670 
3671   if (TREE_CODE (lr_arg) == INTEGER_CST
3672       && TREE_CODE (rr_arg) == INTEGER_CST)
3673     l_const = lr_arg, r_const = rr_arg;
3674   else if (lr_inner == 0 || rr_inner == 0
3675 	   || ! operand_equal_p (lr_inner, rr_inner, 0))
3676     return 0;
3677   else
3678     l_const = r_const = 0;
3679 
3680   /* If either comparison code is not correct for our logical operation,
3681      fail.  However, we can convert a one-bit comparison against zero into
3682      the opposite comparison against that bit being set in the field.  */
3683 
3684   wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
3685   if (lcode != wanted_code)
3686     {
3687       if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
3688 	{
3689 	  /* Make the left operand unsigned, since we are only interested
3690 	     in the value of one bit.  Otherwise we are doing the wrong
3691 	     thing below.  */
3692 	  ll_unsignedp = 1;
3693 	  l_const = ll_mask;
3694 	}
3695       else
3696 	return 0;
3697     }
3698 
3699   /* This is analogous to the code for l_const above.  */
3700   if (rcode != wanted_code)
3701     {
3702       if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
3703 	{
3704 	  rl_unsignedp = 1;
3705 	  r_const = rl_mask;
3706 	}
3707       else
3708 	return 0;
3709     }
3710 
3711   /* After this point all optimizations will generate bit-field
3712      references, which we might not want.  */
3713   if (! (*lang_hooks.can_use_bit_fields_p) ())
3714     return 0;
3715 
3716   /* See if we can find a mode that contains both fields being compared on
3717      the left.  If we can't, fail.  Otherwise, update all constants and masks
3718      to be relative to a field of that size.  */
3719   first_bit = MIN (ll_bitpos, rl_bitpos);
3720   end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
3721   lnmode = get_best_mode (end_bit - first_bit, first_bit,
3722 			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
3723 			  volatilep);
3724   if (lnmode == VOIDmode)
3725     return 0;
3726 
3727   lnbitsize = GET_MODE_BITSIZE (lnmode);
3728   lnbitpos = first_bit & ~ (lnbitsize - 1);
3729   lntype = (*lang_hooks.types.type_for_size) (lnbitsize, 1);
3730   xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
3731 
3732   if (BYTES_BIG_ENDIAN)
3733     {
3734       xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
3735       xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
3736     }
3737 
3738   ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
3739 			 size_int (xll_bitpos), 0);
3740   rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
3741 			 size_int (xrl_bitpos), 0);
3742 
3743   if (l_const)
3744     {
3745       l_const = convert (lntype, l_const);
3746       l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
3747       l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
3748       if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
3749 					fold (build1 (BIT_NOT_EXPR,
3750 						      lntype, ll_mask)),
3751 					0)))
3752 	{
3753 	  warning ("comparison is always %d", wanted_code == NE_EXPR);
3754 
3755 	  return convert (truth_type,
3756 			  wanted_code == NE_EXPR
3757 			  ? integer_one_node : integer_zero_node);
3758 	}
3759     }
3760   if (r_const)
3761     {
3762       r_const = convert (lntype, r_const);
3763       r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
3764       r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
3765       if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
3766 					fold (build1 (BIT_NOT_EXPR,
3767 						      lntype, rl_mask)),
3768 					0)))
3769 	{
3770 	  warning ("comparison is always %d", wanted_code == NE_EXPR);
3771 
3772 	  return convert (truth_type,
3773 			  wanted_code == NE_EXPR
3774 			  ? integer_one_node : integer_zero_node);
3775 	}
3776     }
3777 
3778   /* If the right sides are not constant, do the same for it.  Also,
3779      disallow this optimization if a size or signedness mismatch occurs
3780      between the left and right sides.  */
3781   if (l_const == 0)
3782     {
3783       if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
3784 	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
3785 	  /* Make sure the two fields on the right
3786 	     correspond to the left without being swapped.  */
3787 	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
3788 	return 0;
3789 
3790       first_bit = MIN (lr_bitpos, rr_bitpos);
3791       end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
3792       rnmode = get_best_mode (end_bit - first_bit, first_bit,
3793 			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
3794 			      volatilep);
3795       if (rnmode == VOIDmode)
3796 	return 0;
3797 
3798       rnbitsize = GET_MODE_BITSIZE (rnmode);
3799       rnbitpos = first_bit & ~ (rnbitsize - 1);
3800       rntype = (*lang_hooks.types.type_for_size) (rnbitsize, 1);
3801       xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
3802 
3803       if (BYTES_BIG_ENDIAN)
3804 	{
3805 	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
3806 	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
3807 	}
3808 
3809       lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
3810 			     size_int (xlr_bitpos), 0);
3811       rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
3812 			     size_int (xrr_bitpos), 0);
3813 
3814       /* Make a mask that corresponds to both fields being compared.
3815 	 Do this for both items being compared.  If the operands are the
3816 	 same size and the bits being compared are in the same position
3817 	 then we can do this by masking both and comparing the masked
3818 	 results.  */
3819       ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3820       lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
3821       if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
3822 	{
3823 	  lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3824 				    ll_unsignedp || rl_unsignedp);
3825 	  if (! all_ones_mask_p (ll_mask, lnbitsize))
3826 	    lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
3827 
3828 	  rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
3829 				    lr_unsignedp || rr_unsignedp);
3830 	  if (! all_ones_mask_p (lr_mask, rnbitsize))
3831 	    rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
3832 
3833 	  return build (wanted_code, truth_type, lhs, rhs);
3834 	}
3835 
3836       /* There is still another way we can do something:  If both pairs of
3837 	 fields being compared are adjacent, we may be able to make a wider
3838 	 field containing them both.
3839 
3840 	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
3841 	 the mask must be shifted to account for the shift done by
3842 	 make_bit_field_ref.  */
3843       if ((ll_bitsize + ll_bitpos == rl_bitpos
3844 	   && lr_bitsize + lr_bitpos == rr_bitpos)
3845 	  || (ll_bitpos == rl_bitpos + rl_bitsize
3846 	      && lr_bitpos == rr_bitpos + rr_bitsize))
3847 	{
3848 	  tree type;
3849 
3850 	  lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
3851 				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
3852 	  rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
3853 				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
3854 
3855 	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
3856 				 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
3857 	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
3858 				 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
3859 
3860 	  /* Convert to the smaller type before masking out unwanted bits.  */
3861 	  type = lntype;
3862 	  if (lntype != rntype)
3863 	    {
3864 	      if (lnbitsize > rnbitsize)
3865 		{
3866 		  lhs = convert (rntype, lhs);
3867 		  ll_mask = convert (rntype, ll_mask);
3868 		  type = rntype;
3869 		}
3870 	      else if (lnbitsize < rnbitsize)
3871 		{
3872 		  rhs = convert (lntype, rhs);
3873 		  lr_mask = convert (lntype, lr_mask);
3874 		  type = lntype;
3875 		}
3876 	    }
3877 
3878 	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
3879 	    lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
3880 
3881 	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
3882 	    rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
3883 
3884 	  return build (wanted_code, truth_type, lhs, rhs);
3885 	}
3886 
3887       return 0;
3888     }
3889 
3890   /* Handle the case of comparisons with constants.  If there is something in
3891      common between the masks, those bits of the constants must be the same.
3892      If not, the condition is always false.  Test for this to avoid generating
3893      incorrect code below.  */
3894   result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
3895   if (! integer_zerop (result)
3896       && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
3897 			   const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
3898     {
3899       if (wanted_code == NE_EXPR)
3900 	{
3901 	  warning ("`or' of unmatched not-equal tests is always 1");
3902 	  return convert (truth_type, integer_one_node);
3903 	}
3904       else
3905 	{
3906 	  warning ("`and' of mutually exclusive equal-tests is always 0");
3907 	  return convert (truth_type, integer_zero_node);
3908 	}
3909     }
3910 
3911   /* Construct the expression we will return.  First get the component
3912      reference we will make.  Unless the mask is all ones the width of
3913      that field, perform the mask operation.  Then compare with the
3914      merged constant.  */
3915   result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3916 			       ll_unsignedp || rl_unsignedp);
3917 
3918   ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3919   if (! all_ones_mask_p (ll_mask, lnbitsize))
3920     result = build (BIT_AND_EXPR, lntype, result, ll_mask);
3921 
3922   return build (wanted_code, truth_type, result,
3923 		const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
3924 }
3925 
3926 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
3927    constant.  */
3928 
3929 static tree
3930 optimize_minmax_comparison (t)
3931      tree t;
3932 {
3933   tree type = TREE_TYPE (t);
3934   tree arg0 = TREE_OPERAND (t, 0);
3935   enum tree_code op_code;
3936   tree comp_const = TREE_OPERAND (t, 1);
3937   tree minmax_const;
3938   int consts_equal, consts_lt;
3939   tree inner;
3940 
3941   STRIP_SIGN_NOPS (arg0);
3942 
3943   op_code = TREE_CODE (arg0);
3944   minmax_const = TREE_OPERAND (arg0, 1);
3945   consts_equal = tree_int_cst_equal (minmax_const, comp_const);
3946   consts_lt = tree_int_cst_lt (minmax_const, comp_const);
3947   inner = TREE_OPERAND (arg0, 0);
3948 
3949   /* If something does not permit us to optimize, return the original tree.  */
3950   if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
3951       || TREE_CODE (comp_const) != INTEGER_CST
3952       || TREE_CONSTANT_OVERFLOW (comp_const)
3953       || TREE_CODE (minmax_const) != INTEGER_CST
3954       || TREE_CONSTANT_OVERFLOW (minmax_const))
3955     return t;
3956 
3957   /* Now handle all the various comparison codes.  We only handle EQ_EXPR
3958      and GT_EXPR, doing the rest with recursive calls using logical
3959      simplifications.  */
3960   switch (TREE_CODE (t))
3961     {
3962     case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
3963       return
3964 	invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
3965 
3966     case GE_EXPR:
3967       return
3968 	fold (build (TRUTH_ORIF_EXPR, type,
3969 		     optimize_minmax_comparison
3970 		     (build (EQ_EXPR, type, arg0, comp_const)),
3971 		     optimize_minmax_comparison
3972 		     (build (GT_EXPR, type, arg0, comp_const))));
3973 
3974     case EQ_EXPR:
3975       if (op_code == MAX_EXPR && consts_equal)
3976 	/* MAX (X, 0) == 0  ->  X <= 0  */
3977 	return fold (build (LE_EXPR, type, inner, comp_const));
3978 
3979       else if (op_code == MAX_EXPR && consts_lt)
3980 	/* MAX (X, 0) == 5  ->  X == 5   */
3981 	return fold (build (EQ_EXPR, type, inner, comp_const));
3982 
3983       else if (op_code == MAX_EXPR)
3984 	/* MAX (X, 0) == -1  ->  false  */
3985 	return omit_one_operand (type, integer_zero_node, inner);
3986 
3987       else if (consts_equal)
3988 	/* MIN (X, 0) == 0  ->  X >= 0  */
3989 	return fold (build (GE_EXPR, type, inner, comp_const));
3990 
3991       else if (consts_lt)
3992 	/* MIN (X, 0) == 5  ->  false  */
3993 	return omit_one_operand (type, integer_zero_node, inner);
3994 
3995       else
3996 	/* MIN (X, 0) == -1  ->  X == -1  */
3997 	return fold (build (EQ_EXPR, type, inner, comp_const));
3998 
3999     case GT_EXPR:
4000       if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4001 	/* MAX (X, 0) > 0  ->  X > 0
4002 	   MAX (X, 0) > 5  ->  X > 5  */
4003 	return fold (build (GT_EXPR, type, inner, comp_const));
4004 
4005       else if (op_code == MAX_EXPR)
4006 	/* MAX (X, 0) > -1  ->  true  */
4007 	return omit_one_operand (type, integer_one_node, inner);
4008 
4009       else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4010 	/* MIN (X, 0) > 0  ->  false
4011 	   MIN (X, 0) > 5  ->  false  */
4012 	return omit_one_operand (type, integer_zero_node, inner);
4013 
4014       else
4015 	/* MIN (X, 0) > -1  ->  X > -1  */
4016 	return fold (build (GT_EXPR, type, inner, comp_const));
4017 
4018     default:
4019       return t;
4020     }
4021 }
4022 
4023 /* T is an integer expression that is being multiplied, divided, or taken a
4024    modulus (CODE says which and what kind of divide or modulus) by a
4025    constant C.  See if we can eliminate that operation by folding it with
4026    other operations already in T.  WIDE_TYPE, if non-null, is a type that
4027    should be used for the computation if wider than our type.
4028 
4029    For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4030    (X * 2) + (Y * 4).  We must, however, be assured that either the original
4031    expression would not overflow or that overflow is undefined for the type
4032    in the language in question.
4033 
4034    We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4035    the machine has a multiply-accumulate insn or that this is part of an
4036    addressing calculation.
4037 
4038    If we return a non-null expression, it is an equivalent form of the
4039    original computation, but need not be in the original type.  */
4040 
4041 static tree
4042 extract_muldiv (t, c, code, wide_type)
4043      tree t;
4044      tree c;
4045      enum tree_code code;
4046      tree wide_type;
4047 {
4048   /* To avoid exponential search depth, refuse to allow recursion past
4049      three levels.  Beyond that (1) it's highly unlikely that we'll find
4050      something interesting and (2) we've probably processed it before
4051      when we built the inner expression.  */
4052 
4053   static int depth;
4054   tree ret;
4055 
4056   if (depth > 3)
4057     return NULL;
4058 
4059   depth++;
4060   ret = extract_muldiv_1 (t, c, code, wide_type);
4061   depth--;
4062 
4063   return ret;
4064 }
4065 
/* Worker routine for extract_muldiv.  T is being multiplied, divided,
   or taken a modulus (CODE says which and what kind of divide or
   modulus) by a constant C; try to fold that operation into the
   operations already contained in T.  WIDE_TYPE, if non-null, is a
   wider type to use for the computation when allowed.  Returns an
   equivalent form of the original computation (not necessarily in the
   original type), or 0 if no simplification is possible.  */

static tree
extract_muldiv_1 (t, c, code, wide_type)
     tree t;
     tree c;
     enum tree_code code;
     tree wide_type;
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  /* Compute in WIDE_TYPE only when it is strictly wider than T's type;
     otherwise stay in T's type.  */
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  /* Pick up the operand(s) for unary and binary tree codes.  */
  if (TREE_CODE_CLASS (tcode) == '1')
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == '2')
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
	return const_binop (code, convert (ctype, t), convert (ctype, c), 0);
      break;

    case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
      /* If op0 is an expression ...  */
      if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
	   || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
	   || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
	   || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
	  /* ... and is unsigned, and its type is smaller than ctype,
	     then we cannot pass through as widening.  */
	  && ((TREE_UNSIGNED (TREE_TYPE (op0))
	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
	       && (GET_MODE_SIZE (TYPE_MODE (ctype))
	           > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
	      /* ... or its type is larger than ctype,
		 then we cannot pass through this truncation.  */
	      || (GET_MODE_SIZE (TYPE_MODE (ctype))
		  < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TREE_UNSIGNED (ctype)
		      != TREE_UNSIGNED (TREE_TYPE (op0))))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if (0 != (t1 = extract_muldiv (op0, convert (TREE_TYPE (op0), c), code,
				     code == MULT_EXPR ? ctype : NULL_TREE)))
	return t1;
      break;

    case NEGATE_EXPR:  case ABS_EXPR:
      /* Distribute through negation/absolute value when the inner
	 expression simplifies.  */
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold (build1 (tcode, ctype, convert (ctype, t1)));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
	{
	  /* Multiplying or dividing by a negative constant reverses the
	     ordering, so MIN and MAX must be swapped.  */
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);

	  return fold (build (tcode, ctype, convert (ctype, t1),
			      convert (ctype, t2)));
	}
      break;

    case WITH_RECORD_EXPR:
      /* Simplify the underlying expression, preserving the record
	 context.  */
      if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
	return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
		      TREE_OPERAND (t, 1));
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = convert (ctype,
				 const_binop (LSHIFT_EXPR, size_one_node,
					      op1, 0)))
	  && ! TREE_OVERFLOW (t1))
	return extract_muldiv (build (tcode == LSHIFT_EXPR
				      ? MULT_EXPR : FLOOR_DIV_EXPR,
				      ctype, convert (ctype, op0), t1),
			       c, code, wide_type);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      t1 = extract_muldiv (op0, c, code, wide_type);
      t2 = extract_muldiv (op1, c, code, wide_type);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
	          && multiple_of_p (ctype, op1, c))))
	return fold (build (tcode, ctype, convert (ctype, t1),
			    convert (ctype, t2)));

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	tcode = PLUS_EXPR, op1 = negate_expr (op1);

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
         of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	{
	  op1 = const_binop (code, convert (ctype, op1), convert (ctype, c), 0);
	  if (op1 == 0 || TREE_OVERFLOW (op1))
	    break;
	}
      else
	break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
	 the operation since it will change the result if the original
	 computation overflowed.  */
      if (TREE_UNSIGNED (ctype)
	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
	  && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold (build (tcode, ctype, convert (ctype, t1), op1));

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
	return fold (build (tcode, ctype, fold (build (code, ctype,
						       convert (ctype, op0),
						       convert (ctype, c))),
			    op1));

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	return omit_one_operand (type, integer_zero_node, op0);

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold (build (tcode, ctype, convert (ctype, t1),
			    convert (ctype, op1)));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
	return fold (build (tcode, ctype, convert (ctype, op0),
			    convert (ctype, t1)));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code
	  && 0 != (t1 = const_binop (MULT_EXPR, convert (ctype, op1),
				     convert (ctype, c), 0))
	  && ! TREE_OVERFLOW (t1))
	return fold (build (tcode, ctype, convert (ctype, op0), t1));

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with either an
	 operation of CODE or TCODE.

	 If we have an unsigned type that is not a sizetype, we cannot do
	 this since it will change the result if the original computation
	 overflowed.  */
      if ((! TREE_UNSIGNED (ctype)
	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	    return fold (build (tcode, ctype, convert (ctype, op0),
				convert (ctype,
					 const_binop (TRUNC_DIV_EXPR,
						      op1, c, 0))));
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
	    return fold (build (code, ctype, convert (ctype, op0),
				convert (ctype,
					 const_binop (TRUNC_DIV_EXPR,
						      c, op1, 0))));
	}
      break;

    default:
      break;
    }

  return 0;
}
4327 
4328 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4329    S, a SAVE_EXPR, return the expression actually being evaluated.   Note
4330    that we may sometimes modify the tree.  */
4331 
4332 static tree
4333 strip_compound_expr (t, s)
4334      tree t;
4335      tree s;
4336 {
4337   enum tree_code code = TREE_CODE (t);
4338 
4339   /* See if this is the COMPOUND_EXPR we want to eliminate.  */
4340   if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4341       && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4342     return TREE_OPERAND (t, 1);
4343 
4344   /* See if this is a COND_EXPR or a simple arithmetic operator.   We
4345      don't bother handling any other types.  */
4346   else if (code == COND_EXPR)
4347     {
4348       TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4349       TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4350       TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4351     }
4352   else if (TREE_CODE_CLASS (code) == '1')
4353     TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4354   else if (TREE_CODE_CLASS (code) == '<'
4355 	   || TREE_CODE_CLASS (code) == '2')
4356     {
4357       TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4358       TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4359     }
4360 
4361   return t;
4362 }
4363 
4364 /* Return a node which has the indicated constant VALUE (either 0 or
4365    1), and is of the indicated TYPE.  */
4366 
4367 static tree
4368 constant_boolean_node (value, type)
4369      int value;
4370      tree type;
4371 {
4372   if (type == integer_type_node)
4373     return value ? integer_one_node : integer_zero_node;
4374   else if (TREE_CODE (type) == BOOLEAN_TYPE)
4375     return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node :
4376 						integer_zero_node);
4377   else
4378     {
4379       tree t = build_int_2 (value, 0);
4380 
4381       TREE_TYPE (t) = type;
4382       return t;
4383     }
4384 }
4385 
4386 /* Utility function for the following routine, to see how complex a nesting of
4387    COND_EXPRs can be.  EXPR is the expression and LIMIT is a count beyond which
4388    we don't care (to avoid spending too much time on complex expressions.).  */
4389 
4390 static int
4391 count_cond (expr, lim)
4392      tree expr;
4393      int lim;
4394 {
4395   int ctrue, cfalse;
4396 
4397   if (TREE_CODE (expr) != COND_EXPR)
4398     return 0;
4399   else if (lim <= 0)
4400     return 0;
4401 
4402   ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4403   cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4404   return MIN (lim, 1 + ctrue + cfalse);
4405 }
4406 
4407 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4408    Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
4409    CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4410    expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
4411    COND is the first argument to CODE; otherwise (as in the example
4412    given here), it is the second argument.  TYPE is the type of the
4413    original expression.  */
4414 
static tree
fold_binary_op_with_conditional_arg (code, type, cond, arg, cond_first_p)
     enum tree_code code;
     tree type;
     tree cond;
     tree arg;
     int cond_first_p;
{
  /* TEST is the condition of the resulting COND_EXPR; TRUE_VALUE and
     FALSE_VALUE are the arms taken from COND (or synthesized 1/0 when
     COND is a bare comparison).  LHS and RHS become the two arms of
     the result; they stay NULL_TREE until computed below.  */
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;
  /* In the end, we'll produce a COND_EXPR.  Both arms of the
     conditional expression will be binary operations.  The left-hand
     side of the expression to be executed if the condition is true
     will be pointed to by TRUE_LHS.  Similarly, the right-hand side
     of the expression to be executed if the condition is true will be
     pointed to by TRUE_RHS.  FALSE_LHS and FALSE_RHS are analogous --
     but apply to the expression to be executed if the conditional is
     false.  */
  tree *true_lhs;
  tree *true_rhs;
  tree *false_lhs;
  tree *false_rhs;
  /* These are the codes to use for the left-hand side and right-hand
     side of the COND_EXPR.  Normally, they are the same as CODE.  */
  enum tree_code lhs_code = code;
  enum tree_code rhs_code = code;
  /* And these are the types of the expressions.  */
  tree lhs_type = type;
  tree rhs_type = type;
  /* Nonzero when ARG must be evaluated once, ahead of the COND_EXPR
     (either it already is a SAVE_EXPR or we wrap it in one below).  */
  int save = 0;

  /* Point the operand slots so that ARG appears on whichever side of
     each binary operation the caller had it, and the COND arm value
     appears on the other side.  The pointers alias ARG, TRUE_VALUE
     and FALSE_VALUE, so later writes to those locals are seen through
     these pointers.  */
  if (cond_first_p)
    {
      true_rhs = false_rhs = &arg;
      true_lhs = &true_value;
      false_lhs = &false_value;
    }
  else
    {
      true_lhs = false_lhs = &arg;
      true_rhs = &true_value;
      false_rhs = &false_value;
    }

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an exception, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  Instead of building `a + throw 3' for example,
	 we simply build `a, throw 3'.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	{
	  if (! cond_first_p)
	    {
	      lhs_code = COMPOUND_EXPR;
	      lhs_type = void_type_node;
	    }
	  else
	    lhs = true_value;
	}
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	{
	  if (! cond_first_p)
	    {
	      rhs_code = COMPOUND_EXPR;
	      rhs_type = void_type_node;
	    }
	  else
	    rhs = false_value;
	}
    }
  else
    {
      /* COND is a bare comparison `x < y'; treat it as the COND_EXPR
	 `(x < y) ? 1 : 0' with the 1 and 0 in the comparison's type.  */
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = convert (testtype, integer_one_node);
      false_value = convert (testtype, integer_zero_node);
    }

  /* If ARG is complex we want to make sure we only evaluate
     it once.  Though this is only required if it is volatile, it
     might be more efficient even if it is not.  However, if we
     succeed in folding one part to a constant, we do not need
     to make this SAVE_EXPR.  Since we do this optimization
     primarily to see if we do end up with constant and this
     SAVE_EXPR interferes with later optimizations, suppressing
     it when we can is important.

     If we are not in a function, we can't make a SAVE_EXPR, so don't
     try to do so.  Don't try to see if the result is a constant
     if an arm is a COND_EXPR since we get exponential behavior
     in that case.  */

  if (TREE_CODE (arg) == SAVE_EXPR)
    save = 1;
  else if (lhs == 0 && rhs == 0
	   && !TREE_CONSTANT (arg)
	   && (*lang_hooks.decls.global_bindings_p) () == 0
	   && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
	       || TREE_SIDE_EFFECTS (arg)))
    {
      /* Tentatively fold each arm (skipping COND_EXPR arms, see
	 above) to find out whether anything becomes constant.  */
      if (TREE_CODE (true_value) != COND_EXPR)
	lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));

      if (TREE_CODE (false_value) != COND_EXPR)
	rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));

      /* Neither arm folded to a constant, so ARG would be evaluated
	 twice.  Wrap it in a SAVE_EXPR and rebuild both arms below
	 (the pointers above now see the SAVE_EXPR through ARG).  */
      if ((lhs == 0 || ! TREE_CONSTANT (lhs))
	  && (rhs == 0 || !TREE_CONSTANT (rhs)))
	{
	  arg = save_expr (arg);
	  lhs = rhs = 0;
	  save = 1;
	}
    }

  /* Build whichever arms were not already produced above.  */
  if (lhs == 0)
    lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
  if (rhs == 0)
    rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));

  test = fold (build (COND_EXPR, type, test, lhs, rhs));

  /* When ARG was wrapped in a SAVE_EXPR, evaluate it first via a
     COMPOUND_EXPR so the SAVE_EXPR is initialized exactly once;
     strip_compound_expr removes any redundant inner reference.  */
  if (save)
    return build (COMPOUND_EXPR, type,
		  convert (void_type_node, arg),
		  strip_compound_expr (test, arg));
  else
    return convert (type, test);
}
4549 
4550 
4551 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4552 
4553    If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4554    TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
4555    ADDEND is the same as X.
4556 
4557    X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4558    and finite.  The problematic cases are when X is zero, and its mode
4559    has signed zeros.  In the case of rounding towards -infinity,
4560    X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
4561    modes, X + 0 is not the same as X because -0 + 0 is 0.  */
4562 
4563 static bool
4564 fold_real_zero_addition_p (type, addend, negate)
4565      tree type, addend;
4566      int negate;
4567 {
4568   if (!real_zerop (addend))
4569     return false;
4570 
4571   /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
4572   if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4573     return true;
4574 
4575   /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
4576   if (TREE_CODE (addend) == REAL_CST
4577       && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4578     negate = !negate;
4579 
4580   /* The mode has signed zeros, and we have to honor their sign.
4581      In this situation, there is only one case we can return true for.
4582      X - 0 is the same as X unless rounding towards -infinity is
4583      supported.  */
4584   return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
4585 }
4586 
4587 
4588 /* Perform constant folding and related simplification of EXPR.
4589    The related simplifications include x*1 => x, x*0 => 0, etc.,
4590    and application of the associative law.
4591    NOP_EXPR conversions may be removed freely (as long as we
4592    are careful not to change the C type of the overall expression)
4593    We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
4594    but we can constant-fold them if they have constant operands.  */
4595 
4596 tree
4597 fold (expr)
4598      tree expr;
4599 {
4600   tree t = expr;
4601   tree t1 = NULL_TREE;
4602   tree tem;
4603   tree type = TREE_TYPE (expr);
4604   tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4605   enum tree_code code = TREE_CODE (t);
4606   int kind = TREE_CODE_CLASS (code);
4607   int invert;
4608   /* WINS will be nonzero when the switch is done
4609      if all operands are constant.  */
4610   int wins = 1;
4611 
4612   /* Don't try to process an RTL_EXPR since its operands aren't trees.
4613      Likewise for a SAVE_EXPR that's already been evaluated.  */
4614   if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
4615     return t;
4616 
4617   /* Return right away if a constant.  */
4618   if (kind == 'c')
4619     return t;
4620 
4621 #ifdef MAX_INTEGER_COMPUTATION_MODE
4622   check_max_integer_computation_mode (expr);
4623 #endif
4624 
4625   if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
4626     {
4627       tree subop;
4628 
4629       /* Special case for conversion ops that can have fixed point args.  */
4630       arg0 = TREE_OPERAND (t, 0);
4631 
4632       /* Don't use STRIP_NOPS, because signedness of argument type matters.  */
4633       if (arg0 != 0)
4634 	STRIP_SIGN_NOPS (arg0);
4635 
4636       if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
4637 	subop = TREE_REALPART (arg0);
4638       else
4639 	subop = arg0;
4640 
4641       if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
4642 	  && TREE_CODE (subop) != REAL_CST
4643 	  )
4644 	/* Note that TREE_CONSTANT isn't enough:
4645 	   static var addresses are constant but we can't
4646 	   do arithmetic on them.  */
4647 	wins = 0;
4648     }
4649   else if (IS_EXPR_CODE_CLASS (kind) || kind == 'r')
4650     {
4651       int len = first_rtl_op (code);
4652       int i;
4653       for (i = 0; i < len; i++)
4654 	{
4655 	  tree op = TREE_OPERAND (t, i);
4656 	  tree subop;
4657 
4658 	  if (op == 0)
4659 	    continue;		/* Valid for CALL_EXPR, at least.  */
4660 
4661 	  if (kind == '<' || code == RSHIFT_EXPR)
4662 	    {
4663 	      /* Signedness matters here.  Perhaps we can refine this
4664 		 later.  */
4665 	      STRIP_SIGN_NOPS (op);
4666 	    }
4667 	  else
4668 	    /* Strip any conversions that don't change the mode.  */
4669 	    STRIP_NOPS (op);
4670 
4671 	  if (TREE_CODE (op) == COMPLEX_CST)
4672 	    subop = TREE_REALPART (op);
4673 	  else
4674 	    subop = op;
4675 
4676 	  if (TREE_CODE (subop) != INTEGER_CST
4677 	      && TREE_CODE (subop) != REAL_CST)
4678 	    /* Note that TREE_CONSTANT isn't enough:
4679 	       static var addresses are constant but we can't
4680 	       do arithmetic on them.  */
4681 	    wins = 0;
4682 
4683 	  if (i == 0)
4684 	    arg0 = op;
4685 	  else if (i == 1)
4686 	    arg1 = op;
4687 	}
4688     }
4689 
4690   /* If this is a commutative operation, and ARG0 is a constant, move it
4691      to ARG1 to reduce the number of tests below.  */
4692   if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
4693        || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
4694        || code == BIT_AND_EXPR)
4695       && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
4696     {
4697       tem = arg0; arg0 = arg1; arg1 = tem;
4698 
4699       tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
4700       TREE_OPERAND (t, 1) = tem;
4701     }
4702 
4703   /* Now WINS is set as described above,
4704      ARG0 is the first operand of EXPR,
4705      and ARG1 is the second operand (if it has more than one operand).
4706 
4707      First check for cases where an arithmetic operation is applied to a
4708      compound, conditional, or comparison operation.  Push the arithmetic
4709      operation inside the compound or conditional to see if any folding
4710      can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
4712      expand_expr.
4713 
4714      Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
4715      one of the operands is a comparison and the other is a comparison, a
4716      BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
4717      code below would make the expression more complex.  Change it to a
4718      TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
4719      TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */
4720 
4721   if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
4722        || code == EQ_EXPR || code == NE_EXPR)
4723       && ((truth_value_p (TREE_CODE (arg0))
4724 	   && (truth_value_p (TREE_CODE (arg1))
4725 	       || (TREE_CODE (arg1) == BIT_AND_EXPR
4726 		   && integer_onep (TREE_OPERAND (arg1, 1)))))
4727 	  || (truth_value_p (TREE_CODE (arg1))
4728 	      && (truth_value_p (TREE_CODE (arg0))
4729 		  || (TREE_CODE (arg0) == BIT_AND_EXPR
4730 		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
4731     {
4732       t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
4733 		       : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
4734 		       : TRUTH_XOR_EXPR,
4735 		       type, arg0, arg1));
4736 
4737       if (code == EQ_EXPR)
4738 	t = invert_truthvalue (t);
4739 
4740       return t;
4741     }
4742 
4743   if (TREE_CODE_CLASS (code) == '1')
4744     {
4745       if (TREE_CODE (arg0) == COMPOUND_EXPR)
4746 	return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4747 		      fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
4748       else if (TREE_CODE (arg0) == COND_EXPR)
4749 	{
4750 	  tree arg01 = TREE_OPERAND (arg0, 1);
4751 	  tree arg02 = TREE_OPERAND (arg0, 2);
4752 	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
4753 	    arg01 = fold (build1 (code, type, arg01));
4754 	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
4755 	    arg02 = fold (build1 (code, type, arg02));
4756 	  t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
4757 			   arg01, arg02));
4758 
4759 	  /* If this was a conversion, and all we did was to move into
4760 	     inside the COND_EXPR, bring it back out.  But leave it if
4761 	     it is a conversion from integer to integer and the
4762 	     result precision is no wider than a word since such a
4763 	     conversion is cheap and may be optimized away by combine,
4764 	     while it couldn't if it were outside the COND_EXPR.  Then return
4765 	     so we don't get into an infinite recursion loop taking the
4766 	     conversion out and then back in.  */
4767 
4768 	  if ((code == NOP_EXPR || code == CONVERT_EXPR
4769 	       || code == NON_LVALUE_EXPR)
4770 	      && TREE_CODE (t) == COND_EXPR
4771 	      && TREE_CODE (TREE_OPERAND (t, 1)) == code
4772 	      && TREE_CODE (TREE_OPERAND (t, 2)) == code
4773 	      && ! VOID_TYPE_P (TREE_OPERAND (t, 1))
4774 	      && ! VOID_TYPE_P (TREE_OPERAND (t, 2))
4775 	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
4776 		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
4777 	      && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
4778 		    && (INTEGRAL_TYPE_P
4779 			(TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
4780 		    && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
4781 	    t = build1 (code, type,
4782 			build (COND_EXPR,
4783 			       TREE_TYPE (TREE_OPERAND
4784 					  (TREE_OPERAND (t, 1), 0)),
4785 			       TREE_OPERAND (t, 0),
4786 			       TREE_OPERAND (TREE_OPERAND (t, 1), 0),
4787 			       TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
4788 	  return t;
4789 	}
4790       else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
4791 	return fold (build (COND_EXPR, type, arg0,
4792 			    fold (build1 (code, type, integer_one_node)),
4793 			    fold (build1 (code, type, integer_zero_node))));
4794    }
4795   else if (TREE_CODE_CLASS (code) == '2'
4796 	   || TREE_CODE_CLASS (code) == '<')
4797     {
4798       if (TREE_CODE (arg1) == COMPOUND_EXPR
4799 	  && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg1, 0))
4800 	  && ! TREE_SIDE_EFFECTS (arg0))
4801 	return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
4802 		      fold (build (code, type,
4803 				   arg0, TREE_OPERAND (arg1, 1))));
4804       else if ((TREE_CODE (arg1) == COND_EXPR
4805 		|| (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
4806 		    && TREE_CODE_CLASS (code) != '<'))
4807 	       && (TREE_CODE (arg0) != COND_EXPR
4808 		   || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4809 	       && (! TREE_SIDE_EFFECTS (arg0)
4810 		   || ((*lang_hooks.decls.global_bindings_p) () == 0
4811 		       && ! contains_placeholder_p (arg0))))
4812 	return
4813 	  fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
4814 					       /*cond_first_p=*/0);
4815       else if (TREE_CODE (arg0) == COMPOUND_EXPR)
4816 	return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4817 		      fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4818       else if ((TREE_CODE (arg0) == COND_EXPR
4819 		|| (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
4820 		    && TREE_CODE_CLASS (code) != '<'))
4821 	       && (TREE_CODE (arg1) != COND_EXPR
4822 		   || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4823 	       && (! TREE_SIDE_EFFECTS (arg1)
4824 		   || ((*lang_hooks.decls.global_bindings_p) () == 0
4825 		       && ! contains_placeholder_p (arg1))))
4826 	return
4827 	  fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
4828 					       /*cond_first_p=*/1);
4829     }
4830   else if (TREE_CODE_CLASS (code) == '<'
4831 	   && TREE_CODE (arg0) == COMPOUND_EXPR)
4832     return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4833 		  fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4834   else if (TREE_CODE_CLASS (code) == '<'
4835 	   && TREE_CODE (arg1) == COMPOUND_EXPR)
4836     return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
4837 		  fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
4838 
4839   switch (code)
4840     {
4841     case INTEGER_CST:
4842     case REAL_CST:
4843     case VECTOR_CST:
4844     case STRING_CST:
4845     case COMPLEX_CST:
4846     case CONSTRUCTOR:
4847       return t;
4848 
4849     case CONST_DECL:
4850       return fold (DECL_INITIAL (t));
4851 
4852     case NOP_EXPR:
4853     case FLOAT_EXPR:
4854     case CONVERT_EXPR:
4855     case FIX_TRUNC_EXPR:
4856       /* Other kinds of FIX are not handled properly by fold_convert.  */
4857 
4858       if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
4859 	return TREE_OPERAND (t, 0);
4860 
4861       /* Handle cases of two conversions in a row.  */
4862       if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
4863 	  || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
4864 	{
4865 	  tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4866 	  tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
4867 	  tree final_type = TREE_TYPE (t);
4868 	  int inside_int = INTEGRAL_TYPE_P (inside_type);
4869 	  int inside_ptr = POINTER_TYPE_P (inside_type);
4870 	  int inside_float = FLOAT_TYPE_P (inside_type);
4871 	  unsigned int inside_prec = TYPE_PRECISION (inside_type);
4872 	  int inside_unsignedp = TREE_UNSIGNED (inside_type);
4873 	  int inter_int = INTEGRAL_TYPE_P (inter_type);
4874 	  int inter_ptr = POINTER_TYPE_P (inter_type);
4875 	  int inter_float = FLOAT_TYPE_P (inter_type);
4876 	  unsigned int inter_prec = TYPE_PRECISION (inter_type);
4877 	  int inter_unsignedp = TREE_UNSIGNED (inter_type);
4878 	  int final_int = INTEGRAL_TYPE_P (final_type);
4879 	  int final_ptr = POINTER_TYPE_P (final_type);
4880 	  int final_float = FLOAT_TYPE_P (final_type);
4881 	  unsigned int final_prec = TYPE_PRECISION (final_type);
4882 	  int final_unsignedp = TREE_UNSIGNED (final_type);
4883 
4884 	  /* In addition to the cases of two conversions in a row
4885 	     handled below, if we are converting something to its own
4886 	     type via an object of identical or wider precision, neither
4887 	     conversion is needed.  */
4888 	  if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
4889 	      && ((inter_int && final_int) || (inter_float && final_float))
4890 	      && inter_prec >= final_prec)
4891 	    return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4892 
4893 	  /* Likewise, if the intermediate and final types are either both
4894 	     float or both integer, we don't need the middle conversion if
4895 	     it is wider than the final type and doesn't change the signedness
4896 	     (for integers).  Avoid this if the final type is a pointer
4897 	     since then we sometimes need the inner conversion.  Likewise if
4898 	     the outer has a precision not equal to the size of its mode.  */
4899 	  if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
4900 	       || (inter_float && inside_float))
4901 	      && inter_prec >= inside_prec
4902 	      && (inter_float || inter_unsignedp == inside_unsignedp)
4903 	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4904 		    && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4905 	      && ! final_ptr)
4906 	    return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4907 
4908 	  /* If we have a sign-extension of a zero-extended value, we can
4909 	     replace that by a single zero-extension.  */
4910 	  if (inside_int && inter_int && final_int
4911 	      && inside_prec < inter_prec && inter_prec < final_prec
4912 	      && inside_unsignedp && !inter_unsignedp)
4913 	    return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4914 
4915 	  /* Two conversions in a row are not needed unless:
4916 	     - some conversion is floating-point (overstrict for now), or
4917 	     - the intermediate type is narrower than both initial and
4918 	       final, or
4919 	     - the intermediate type and innermost type differ in signedness,
4920 	       and the outermost type is wider than the intermediate, or
4921 	     - the initial type is a pointer type and the precisions of the
4922 	       intermediate and final types differ, or
4923 	     - the final type is a pointer type and the precisions of the
4924 	       initial and intermediate types differ.  */
4925 	  if (! inside_float && ! inter_float && ! final_float
4926 	      && (inter_prec > inside_prec || inter_prec > final_prec)
4927 	      && ! (inside_int && inter_int
4928 		    && inter_unsignedp != inside_unsignedp
4929 		    && inter_prec < final_prec)
4930 	      && ((inter_unsignedp && inter_prec > inside_prec)
4931 		  == (final_unsignedp && final_prec > inter_prec))
4932 	      && ! (inside_ptr && inter_prec != final_prec)
4933 	      && ! (final_ptr && inside_prec != inter_prec)
4934 	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4935 		    && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4936 	      && ! final_ptr)
4937 	    return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4938 	}
4939 
4940       if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
4941 	  && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
4942 	  /* Detect assigning a bitfield.  */
4943 	  && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
4944 	       && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
4945 	{
4946 	  /* Don't leave an assignment inside a conversion
4947 	     unless assigning a bitfield.  */
4948 	  tree prev = TREE_OPERAND (t, 0);
4949 	  TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
4950 	  /* First do the assignment, then return converted constant.  */
4951 	  t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
4952 	  TREE_USED (t) = 1;
4953 	  return t;
4954 	}
4955 
4956       /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
4957 	 constants (if x has signed type, the sign bit cannot be set
4958 	 in c).  This folds extension into the BIT_AND_EXPR.  */
4959       if (INTEGRAL_TYPE_P (TREE_TYPE (t))
4960 	  && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
4961 	  && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
4962 	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
4963 	{
4964 	  tree and = TREE_OPERAND (t, 0);
4965 	  tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
4966 	  int change = 0;
4967 
4968 	  if (TREE_UNSIGNED (TREE_TYPE (and))
4969 	      || (TYPE_PRECISION (TREE_TYPE (t))
4970 		  <= TYPE_PRECISION (TREE_TYPE (and))))
4971 	    change = 1;
4972 	  else if (TYPE_PRECISION (TREE_TYPE (and1))
4973 		   <= HOST_BITS_PER_WIDE_INT
4974 		   && host_integerp (and1, 1))
4975 	    {
4976 	      unsigned HOST_WIDE_INT cst;
4977 
4978 	      cst = tree_low_cst (and1, 1);
4979 	      cst &= (HOST_WIDE_INT) -1
4980 		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
4981 	      change = (cst == 0);
4982 #ifdef LOAD_EXTEND_OP
4983 	      if (change
4984 		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
4985 		      == ZERO_EXTEND))
4986 		{
4987 		  tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
4988 		  and0 = convert (uns, and0);
4989 	  	  and1 = convert (uns, and1);
4990 		}
4991 #endif
4992 	    }
4993 	  if (change)
4994 	    return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
4995 				convert (TREE_TYPE (t), and0),
4996 				convert (TREE_TYPE (t), and1)));
4997 	}
4998 
4999       if (!wins)
5000 	{
5001 	  TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
5002 	  return t;
5003 	}
5004       return fold_convert (t, arg0);
5005 
5006     case VIEW_CONVERT_EXPR:
5007       if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
5008 	return build1 (VIEW_CONVERT_EXPR, type,
5009 		       TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5010       return t;
5011 
5012     case COMPONENT_REF:
5013       if (TREE_CODE (arg0) == CONSTRUCTOR)
5014 	{
5015 	  tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5016 	  if (m)
5017 	    t = TREE_VALUE (m);
5018 	}
5019       return t;
5020 
5021     case RANGE_EXPR:
5022       TREE_CONSTANT (t) = wins;
5023       return t;
5024 
5025     case NEGATE_EXPR:
5026       if (wins)
5027 	{
5028 	  if (TREE_CODE (arg0) == INTEGER_CST)
5029 	    {
5030 	      unsigned HOST_WIDE_INT low;
5031 	      HOST_WIDE_INT high;
5032 	      int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5033 					 TREE_INT_CST_HIGH (arg0),
5034 					 &low, &high);
5035 	      t = build_int_2 (low, high);
5036 	      TREE_TYPE (t) = type;
5037 	      TREE_OVERFLOW (t)
5038 		= (TREE_OVERFLOW (arg0)
5039 		   | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
5040 	      TREE_CONSTANT_OVERFLOW (t)
5041 		= TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5042 	    }
5043 	  else if (TREE_CODE (arg0) == REAL_CST)
5044 	    t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5045 	}
5046       else if (TREE_CODE (arg0) == NEGATE_EXPR)
5047 	return TREE_OPERAND (arg0, 0);
5048 
5049       /* Convert - (a - b) to (b - a) for non-floating-point.  */
5050       else if (TREE_CODE (arg0) == MINUS_EXPR
5051 	       && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
5052 	return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
5053 		      TREE_OPERAND (arg0, 0));
5054 
5055       return t;
5056 
5057     case ABS_EXPR:
5058       if (wins)
5059 	{
5060 	  if (TREE_CODE (arg0) == INTEGER_CST)
5061 	    {
5062 	      /* If the value is unsigned, then the absolute value is
5063 		 the same as the ordinary value.  */
5064 	      if (TREE_UNSIGNED (type))
5065 		return arg0;
5066 	      /* Similarly, if the value is non-negative.  */
5067 	      else if (INT_CST_LT (integer_minus_one_node, arg0))
5068 		return arg0;
5069 	      /* If the value is negative, then the absolute value is
5070 		 its negation.  */
5071 	      else
5072 		{
5073 		  unsigned HOST_WIDE_INT low;
5074 		  HOST_WIDE_INT high;
5075 		  int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5076 					     TREE_INT_CST_HIGH (arg0),
5077 					     &low, &high);
5078 		  t = build_int_2 (low, high);
5079 		  TREE_TYPE (t) = type;
5080 		  TREE_OVERFLOW (t)
5081 		    = (TREE_OVERFLOW (arg0)
5082 		       | force_fit_type (t, overflow));
5083 		  TREE_CONSTANT_OVERFLOW (t)
5084 		    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5085 		}
5086 	    }
5087 	  else if (TREE_CODE (arg0) == REAL_CST)
5088 	    {
5089 	      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
5090 		t = build_real (type,
5091 				REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5092 	    }
5093 	}
5094       else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
5095 	return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
5096       return t;
5097 
5098     case CONJ_EXPR:
5099       if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5100 	return convert (type, arg0);
5101       else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5102 	return build (COMPLEX_EXPR, type,
5103 		      TREE_OPERAND (arg0, 0),
5104 		      negate_expr (TREE_OPERAND (arg0, 1)));
5105       else if (TREE_CODE (arg0) == COMPLEX_CST)
5106 	return build_complex (type, TREE_REALPART (arg0),
5107 			      negate_expr (TREE_IMAGPART (arg0)));
5108       else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5109 	return fold (build (TREE_CODE (arg0), type,
5110 			    fold (build1 (CONJ_EXPR, type,
5111 					  TREE_OPERAND (arg0, 0))),
5112 			    fold (build1 (CONJ_EXPR,
5113 					  type, TREE_OPERAND (arg0, 1)))));
5114       else if (TREE_CODE (arg0) == CONJ_EXPR)
5115 	return TREE_OPERAND (arg0, 0);
5116       return t;
5117 
5118     case BIT_NOT_EXPR:
5119       if (wins)
5120 	{
5121 	  t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5122 			   ~ TREE_INT_CST_HIGH (arg0));
5123 	  TREE_TYPE (t) = type;
5124 	  force_fit_type (t, 0);
5125 	  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5126 	  TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5127 	}
5128       else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5129 	return TREE_OPERAND (arg0, 0);
5130       return t;
5131 
5132     case PLUS_EXPR:
5133       /* A + (-B) -> A - B */
5134       if (TREE_CODE (arg1) == NEGATE_EXPR)
5135 	return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5136       /* (-A) + B -> B - A */
5137       if (TREE_CODE (arg0) == NEGATE_EXPR)
5138 	return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5139       else if (! FLOAT_TYPE_P (type))
5140 	{
5141 	  if (integer_zerop (arg1))
5142 	    return non_lvalue (convert (type, arg0));
5143 
5144 	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5145 	     with a constant, and the two constants have no bits in common,
5146 	     we should treat this as a BIT_IOR_EXPR since this may produce more
5147 	     simplifications.  */
5148 	  if (TREE_CODE (arg0) == BIT_AND_EXPR
5149 	      && TREE_CODE (arg1) == BIT_AND_EXPR
5150 	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5151 	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5152 	      && integer_zerop (const_binop (BIT_AND_EXPR,
5153 					     TREE_OPERAND (arg0, 1),
5154 					     TREE_OPERAND (arg1, 1), 0)))
5155 	    {
5156 	      code = BIT_IOR_EXPR;
5157 	      goto bit_ior;
5158 	    }
5159 
5160 	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5161 	     (plus (plus (mult) (mult)) (foo)) so that we can
5162 	     take advantage of the factoring cases below.  */
5163 	  if ((TREE_CODE (arg0) == PLUS_EXPR
5164 	       && TREE_CODE (arg1) == MULT_EXPR)
5165 	      || (TREE_CODE (arg1) == PLUS_EXPR
5166 		  && TREE_CODE (arg0) == MULT_EXPR))
5167 	    {
5168 	      tree parg0, parg1, parg, marg;
5169 
5170 	      if (TREE_CODE (arg0) == PLUS_EXPR)
5171 		parg = arg0, marg = arg1;
5172 	      else
5173 		parg = arg1, marg = arg0;
5174 	      parg0 = TREE_OPERAND (parg, 0);
5175 	      parg1 = TREE_OPERAND (parg, 1);
5176 	      STRIP_NOPS (parg0);
5177 	      STRIP_NOPS (parg1);
5178 
5179 	      if (TREE_CODE (parg0) == MULT_EXPR
5180 		  && TREE_CODE (parg1) != MULT_EXPR)
5181 		return fold (build (PLUS_EXPR, type,
5182 				    fold (build (PLUS_EXPR, type, parg0, marg)),
5183 				    parg1));
5184 	      if (TREE_CODE (parg0) != MULT_EXPR
5185 		  && TREE_CODE (parg1) == MULT_EXPR)
5186 		return fold (build (PLUS_EXPR, type,
5187 				    fold (build (PLUS_EXPR, type, parg1, marg)),
5188 				    parg0));
5189 	    }
5190 
	  if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
	    {
	      tree arg00, arg01, arg10, arg11;
	      tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

	      /* (A * C) + (B * C) -> (A+B) * C.
		 We are most concerned about the case where C is a constant,
		 but other combinations show up during loop reduction.  Since
		 it is not difficult, try all four possibilities.  */

	      arg00 = TREE_OPERAND (arg0, 0);
	      arg01 = TREE_OPERAND (arg0, 1);
	      arg10 = TREE_OPERAND (arg1, 0);
	      arg11 = TREE_OPERAND (arg1, 1);
	      same = NULL_TREE;

	      if (operand_equal_p (arg01, arg11, 0))
		same = arg01, alt0 = arg00, alt1 = arg10;
	      else if (operand_equal_p (arg00, arg10, 0))
		same = arg00, alt0 = arg01, alt1 = arg11;
	      else if (operand_equal_p (arg00, arg11, 0))
		same = arg00, alt0 = arg01, alt1 = arg10;
	      else if (operand_equal_p (arg01, arg10, 0))
		same = arg01, alt0 = arg00, alt1 = arg11;

	      /* No identical multiplicands; see if we can find a common
		 power-of-two factor in non-power-of-two multiplies.  This
		 can help in multi-dimensional array access.  */
	      else if (TREE_CODE (arg01) == INTEGER_CST
		       && TREE_CODE (arg11) == INTEGER_CST
		       && TREE_INT_CST_HIGH (arg01) == 0
		       && TREE_INT_CST_HIGH (arg11) == 0)
		{
		  HOST_WIDE_INT int01, int11, tmp;
		  int01 = TREE_INT_CST_LOW (arg01);
		  int11 = TREE_INT_CST_LOW (arg11);

		  /* Move min of absolute values to int11.  */
		  if ((int01 >= 0 ? int01 : -int01)
		      < (int11 >= 0 ? int11 : -int11))
		    {
		      tmp = int01, int01 = int11, int11 = tmp;
		      alt0 = arg00, arg00 = arg10, arg10 = alt0;
		      alt0 = arg01, arg01 = arg11, arg11 = alt0;
		    }

		  /* If the smaller multiplier is a power of two and divides
		     the larger one, fold the quotient into the other
		     multiplicand so both sides share factor int11.  */
		  if (exact_log2 (int11) > 0 && int01 % int11 == 0)
		    {
		      alt0 = fold (build (MULT_EXPR, type, arg00,
					  build_int_2 (int01 / int11, 0)));
		      alt1 = arg10;
		      same = arg11;
		    }
		}

	      /* A common multiplicand was found: distribute the addition
		 over it, i.e. build (alt0 + alt1) * same.  */
	      if (same)
		return fold (build (MULT_EXPR, type,
				    fold (build (PLUS_EXPR, type, alt0, alt1)),
				    same));
	    }
	}

      /* See if ARG1 is zero and X + ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
	return non_lvalue (convert (type, arg0));

      /* Likewise if the operands are reversed.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	return non_lvalue (convert (type, arg1));

      /* Reached by fall-through from PLUS_EXPR and by "goto bit_rotate"
	 from the BIT_IOR_EXPR and BIT_XOR_EXPR cases below.  */
     bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
	 is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
	 is a rotate of A by B bits.  */
      {
	enum tree_code code0, code1;
	code0 = TREE_CODE (arg0);
	code1 = TREE_CODE (arg1);
	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
	    && operand_equal_p (TREE_OPERAND (arg0, 0),
			        TREE_OPERAND (arg1, 0), 0)
	    && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	  {
	    tree tree01, tree11;
	    enum tree_code code01, code11;

	    tree01 = TREE_OPERAND (arg0, 1);
	    tree11 = TREE_OPERAND (arg1, 1);
	    STRIP_NOPS (tree01);
	    STRIP_NOPS (tree11);
	    code01 = TREE_CODE (tree01);
	    code11 = TREE_CODE (tree11);
	    /* Both shift counts constant and summing to the width of A:
	       this is a rotate by the left-shift count.  */
	    if (code01 == INTEGER_CST
		&& code11 == INTEGER_CST
		&& TREE_INT_CST_HIGH (tree01) == 0
		&& TREE_INT_CST_HIGH (tree11) == 0
		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
		    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
	      return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
			    code0 == LSHIFT_EXPR ? tree01 : tree11);
	    /* Variable count: (A << B) + (A >> (Z - B)) form.  */
	    else if (code11 == MINUS_EXPR)
	      {
		tree tree110, tree111;
		tree110 = TREE_OPERAND (tree11, 0);
		tree111 = TREE_OPERAND (tree11, 1);
		STRIP_NOPS (tree110);
		STRIP_NOPS (tree111);
		if (TREE_CODE (tree110) == INTEGER_CST
		    && 0 == compare_tree_int (tree110,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree01, tree111, 0))
		  return build ((code0 == LSHIFT_EXPR
				 ? LROTATE_EXPR
				 : RROTATE_EXPR),
				type, TREE_OPERAND (arg0, 0), tree01);
	      }
	    /* Mirror image: (A >> B) + (A << (Z - B)) form.  */
	    else if (code01 == MINUS_EXPR)
	      {
		tree tree010, tree011;
		tree010 = TREE_OPERAND (tree01, 0);
		tree011 = TREE_OPERAND (tree01, 1);
		STRIP_NOPS (tree010);
		STRIP_NOPS (tree011);
		if (TREE_CODE (tree010) == INTEGER_CST
		    && 0 == compare_tree_int (tree010,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree11, tree011, 0))
		  return build ((code0 != LSHIFT_EXPR
				 ? LROTATE_EXPR
				 : RROTATE_EXPR),
				type, TREE_OPERAND (arg0, 0), tree11);
	      }
	  }
      }

      /* Reached via "goto associate" from the MINUS_EXPR, MULT_EXPR,
	 BIT_AND_EXPR, MIN_EXPR and MAX_EXPR cases below.  */
    associate:
      /* In most languages, can't associate operations on floats through
	 parentheses.  Rather than remember where the parentheses were, we
	 don't associate floats at all.  It shouldn't matter much.  However,
	 associating multiplications is only very slightly inaccurate, so do
	 that if -funsafe-math-optimizations is specified.  */

      if (! wins
	  && (! FLOAT_TYPE_P (type)
	      || (flag_unsafe_math_optimizations && code == MULT_EXPR)))
	{
	  tree var0, con0, lit0, minus_lit0;
	  tree var1, con1, lit1, minus_lit1;

	  /* Split both trees into variables, constants, and literals.  Then
	     associate each group together, the constants with literals,
	     then the result with variables.  This increases the chances of
	     literals being recombined later and of generating relocatable
	     expressions for the sum of a constant and literal.  */
	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
			     code == MINUS_EXPR);

	  /* Only do something if we found more than two objects.  Otherwise,
	     nothing has changed and we risk infinite recursion.  */
	  if (2 < ((var0 != 0) + (var1 != 0)
		   + (con0 != 0) + (con1 != 0)
		   + (lit0 != 0) + (lit1 != 0)
		   + (minus_lit0 != 0) + (minus_lit1 != 0)))
	    {
	      /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
	      if (code == MINUS_EXPR)
		code = PLUS_EXPR;

	      var0 = associate_trees (var0, var1, code, type);
	      con0 = associate_trees (con0, con1, code, type);
	      lit0 = associate_trees (lit0, lit1, code, type);
	      minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);

	      /* Preserve the MINUS_EXPR if the negative part of the literal is
		 greater than the positive part.  Otherwise, the multiplicative
		 folding code (i.e extract_muldiv) may be fooled in case
		 unsigned constants are substracted, like in the following
		 example: ((X*2 + 4) - 8U)/2.  */
	      if (minus_lit0 && lit0)
		{
		  if (tree_int_cst_lt (lit0, minus_lit0))
		    {
		      minus_lit0 = associate_trees (minus_lit0, lit0,
						    MINUS_EXPR, type);
		      lit0 = 0;
		    }
		  else
		    {
		      lit0 = associate_trees (lit0, minus_lit0,
					      MINUS_EXPR, type);
		      minus_lit0 = 0;
		    }
		}
	      if (minus_lit0)
		{
		  if (con0 == 0)
		    return convert (type, associate_trees (var0, minus_lit0,
							   MINUS_EXPR, type));
		  else
		    {
		      con0 = associate_trees (con0, minus_lit0,
					      MINUS_EXPR, type);
		      return convert (type, associate_trees (var0, con0,
							     PLUS_EXPR, type));
		    }
		}

	      con0 = associate_trees (con0, lit0, code, type);
	      return convert (type, associate_trees (var0, con0, code, type));
	    }
	}

      /* Reached via "goto binary" from several cases below: generic
	 constant folding of a binary operator.  NOTE(review): "wins" is
	 set earlier in fold (), outside this excerpt; it appears to be
	 nonzero when the operands are literal constants -- confirm.  */
    binary:
      if (wins)
	t1 = const_binop (code, arg0, arg1, 0);
      if (t1 != NULL_TREE)
	{
	  /* The return value should always have
	     the same type as the original expression.  */
	  if (TREE_TYPE (t1) != TREE_TYPE (t))
	    t1 = convert (TREE_TYPE (t), t1);

	  return t1;
	}
      return t;
5423 
    case MINUS_EXPR:
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
      /* (-A) - CST -> (-CST) - A   for floating point (what about ints ?)  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
	return
	  fold (build (MINUS_EXPR, type,
		       build_real (TREE_TYPE (arg1),
				   REAL_VALUE_NEGATE (TREE_REAL_CST (arg1))),
		       TREE_OPERAND (arg0, 0)));

      if (! FLOAT_TYPE_P (type))
	{
	  /* 0 - A -> -A.  NOTE(review): skipped when "wins" is set
	     (operands all constant, handled by const folding at the
	     "binary" label) -- "wins" is set outside this excerpt.  */
	  if (! wins && integer_zerop (arg0))
	    return negate_expr (convert (type, arg1));
	  /* A - 0 -> A.  */
	  if (integer_zerop (arg1))
	    return non_lvalue (convert (type, arg0));

	  /* (A * C) - (B * C) -> (A-B) * C.  Since we are most concerned
	     about the case where C is a constant, just try one of the
	     four possibilities.  */

	  if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 1), 0))
	    return fold (build (MULT_EXPR, type,
				fold (build (MINUS_EXPR, type,
					     TREE_OPERAND (arg0, 0),
					     TREE_OPERAND (arg1, 0))),
				TREE_OPERAND (arg0, 1)));
	}

      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
	return non_lvalue (convert (type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
	 ARG0 is zero and X + ARG0 reduces to X, since that would mean
	 (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	return negate_expr (convert (type, arg1));

      /* Fold &x - &x.  This can happen from &x.foo - &x.
	 This is unsafe for certain floats even in non-IEEE formats.
	 In IEEE, it is unsafe because it does wrong for NaNs.
	 Also note that operand_equal_p is always false if an operand
	 is volatile.  */

      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	  && operand_equal_p (arg0, arg1, 0))
	return convert (type, integer_zero_node);

      /* Remaining simplification is reassociation, shared with PLUS_EXPR.  */
      goto associate;

    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold (build (MULT_EXPR, type, TREE_OPERAND (arg0, 0),
			    TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
	{
	  /* A * 0 -> 0 (A still evaluated for side effects).  */
	  if (integer_zerop (arg1))
	    return omit_one_operand (type, arg1, arg0);
	  /* A * 1 -> A.  */
	  if (integer_onep (arg1))
	    return non_lvalue (convert (type, arg0));

	  /* (a * (1 << b)) is (a << b)  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg1, 0)))
	    return fold (build (LSHIFT_EXPR, type, arg0,
				TREE_OPERAND (arg1, 1)));
	  if (TREE_CODE (arg0) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg0, 0)))
	    return fold (build (LSHIFT_EXPR, type, arg1,
				TREE_OPERAND (arg0, 1)));

	  /* Try distributing a constant multiplier into the operand.  */
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
					     code, NULL_TREE)))
	    return convert (type, tem);

	}
      else
	{
	  /* Maybe fold x * 0 to 0.  The expressions aren't the same
	     when x is NaN, since x * 0 is also NaN.  Nor are they the
	     same in modes with signed zeros, since multiplying a
	     negative value by 0 gives -0, not +0.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_zerop (arg1))
	    return omit_one_operand (type, arg1, arg0);
	  /* In IEEE floating point, x*1 is not equivalent to x for snans.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_onep (arg1))
	    return non_lvalue (convert (type, arg0));

	  /* Transform x * -1.0 into -x.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_minus_onep (arg1))
	    return fold (build1 (NEGATE_EXPR, type, arg0));

	  /* x*2 is x+x */
	  if (! wins && real_twop (arg1)
	      && (*lang_hooks.decls.global_bindings_p) () == 0
	      && ! contains_placeholder_p (arg0))
	    {
	      tree arg = save_expr (arg0);
	      return build (PLUS_EXPR, type, arg, arg);
	    }
	}
      goto associate;
5538 
    case BIT_IOR_EXPR:
      /* "bit_ior" is also reached by goto from the BIT_XOR_EXPR case when
	 the XOR can be treated as an IOR.  */
    bit_ior:
      /* A | ~0 -> ~0 (A still evaluated for side effects).  */
      if (integer_all_onesp (arg1))
	return omit_one_operand (type, arg1, arg0);
      /* A | 0 -> A.  */
      if (integer_zerop (arg1))
	return non_lvalue (convert (type, arg0));
      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

	 This results in more efficient code for machines without a NAND
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NAND instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold (build1 (BIT_NOT_EXPR, type,
			       build (BIT_AND_EXPR, type,
				      TREE_OPERAND (arg0, 0),
				      TREE_OPERAND (arg1, 0))));
	}

      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      /* A ^ 0 -> A.  */
      if (integer_zerop (arg1))
	return non_lvalue (convert (type, arg0));
      /* A ^ ~0 -> ~A.  */
      if (integer_all_onesp (arg1))
	return fold (build1 (BIT_NOT_EXPR, type, arg0));

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
         with a constant, and the two constants have no bits in common,
	 we should treat this as a BIT_IOR_EXPR since this may produce more
	 simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (BIT_AND_EXPR,
					 TREE_OPERAND (arg0, 1),
					 TREE_OPERAND (arg1, 1), 0)))
	{
	  code = BIT_IOR_EXPR;
	  goto bit_ior;
	}

      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      /* "bit_and" is also reached by goto from the BIT_ANDTC_EXPR case
	 after folding its constant operand.  */
    bit_and:
      /* A & ~0 -> A.  */
      if (integer_all_onesp (arg1))
	return non_lvalue (convert (type, arg0));
      /* A & 0 -> 0 (A still evaluated for side effects).  */
      if (integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
      /* Simplify ((int)c & 0x377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	  && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  unsigned int prec
	    = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	  /* The mask is redundant if it keeps every bit the narrower
	     unsigned inner type can carry.  */
	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
	      && (~TREE_INT_CST_LOW (arg1)
		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
	    return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
	}

      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

	 This results in more efficient code for machines without a NOR
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NOR instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold (build1 (BIT_NOT_EXPR, type,
			       build (BIT_IOR_EXPR, type,
				      TREE_OPERAND (arg0, 0),
				      TREE_OPERAND (arg1, 0))));
	}

      goto associate;

    case BIT_ANDTC_EXPR:
      /* ~0 & A -> A (operand order here is arg0 = mask source).  */
      if (integer_all_onesp (arg0))
	return non_lvalue (convert (type, arg1));
      if (integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);
      /* With a constant arg1, fold the complement now and reuse the
	 plain BIT_AND_EXPR simplifications above.  */
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
	  code = BIT_AND_EXPR;
	  goto bit_and;
	}
      goto binary;
5645 
    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
	 of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	  && real_zerop (arg1))
	return t;

      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
			    TREE_OPERAND (arg1, 0)));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_onep (arg1))
	return non_lvalue (convert (type, arg0));

      /* If ARG1 is a constant, we can convert this to a multiply by the
	 reciprocal.  This does not have the same rounding properties,
	 so only do this if -funsafe-math-optimizations.  We can actually
	 always safely do it if ARG1 is a power of two, but it's hard to
	 tell if it is or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  if (flag_unsafe_math_optimizations
	      && 0 != (tem = const_binop (code, build_real (type, dconst1),
					  arg1, 0)))
	    return fold (build (MULT_EXPR, type, arg0, tem));
	  /* Find the reciprocal if optimizing and the result is exact.  */
	  else if (optimize)
	    {
	      REAL_VALUE_TYPE r;
	      r = TREE_REAL_CST (arg1);
	      /* An exact inverse loses no precision, so the transform is
		 safe even without -funsafe-math-optimizations.  */
	      if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
		{
		  tem = build_real (type, r);
		  return fold (build (MULT_EXPR, type, arg0, tem));
		}
	    }
	}
      /* Convert A/B/C to A/(B*C).  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg0) == RDIV_EXPR)
	{
	  return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
			      build (MULT_EXPR, type, TREE_OPERAND (arg0, 1),
				     arg1)));
	}
      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == RDIV_EXPR)
	{
	  return fold (build (MULT_EXPR, type,
			      build (RDIV_EXPR, type, arg0,
			     	     TREE_OPERAND (arg1, 0)),
	 		      TREE_OPERAND (arg1, 1)));
	}
      goto binary;
5705 
    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* A / 1 -> A.  */
      if (integer_onep (arg1))
	return non_lvalue (convert (type, arg0));
      /* Division by zero is left alone for the back end to diagnose.  */
      if (integer_zerop (arg1))
	return t;

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, it's not clear if they do
	 after the last round to changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));

      /* Try distributing the constant divisor into the operand.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
					 code, NULL_TREE)))
	return convert (type, tem);

      goto binary;

    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* A % 1 -> 0 (A still evaluated for side effects).  */
      if (integer_onep (arg1))
	return omit_one_operand (type, integer_zero_node, arg0);
      /* Modulo by zero is left alone, as for division above.  */
      if (integer_zerop (arg1))
	return t;

      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
					 code, NULL_TREE)))
	return convert (type, tem);

      goto binary;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* A shift or rotate by zero is A.  */
      if (integer_zerop (arg1))
	return non_lvalue (convert (type, arg0));
      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return t;
      /* Rewrite an LROTATE_EXPR by a constant into an
	 RROTATE_EXPR by a new constant.  Note this mutates T in place.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	{
	  TREE_SET_CODE (t, RROTATE_EXPR);
	  code = RROTATE_EXPR;
	  TREE_OPERAND (t, 1) = arg1
	    = const_binop
	      (MINUS_EXPR,
	       convert (TREE_TYPE (arg1),
			build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
	       arg1, 0);
	  /* The rewritten count can be negative if the original count
	     exceeded the mode width; give up in that case.  */
	  if (tree_int_cst_sgn (arg1) < 0)
	    return t;
	}

      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_ANDTC_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold (build (TREE_CODE (arg0), type,
			    fold (build (code, type,
					 TREE_OPERAND (arg0, 0), arg1)),
			    fold (build (code, type,
					 TREE_OPERAND (arg0, 1), arg1))));

      /* Two consecutive rotates adding up to the width of the mode can
	 be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (arg1) == 0
	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
	  && ((TREE_INT_CST_LOW (arg1)
	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
	      == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
	return TREE_OPERAND (arg0, 0);

      goto binary;
5803 
    case MIN_EXPR:
      /* MIN (A, A) -> A.  */
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, arg0, arg1);
      /* MIN (A, TYPE_MIN) -> TYPE_MIN (A still evaluated).  */
      if (INTEGRAL_TYPE_P (type)
	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
	return omit_one_operand (type, arg1, arg0);
      goto associate;

    case MAX_EXPR:
      /* MAX (A, A) -> A.  */
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, arg0, arg1);
      /* MAX (A, TYPE_MAX) -> TYPE_MAX, when the type records a maximum.  */
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_MAX_VALUE (type)
	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
	return omit_one_operand (type, arg1, arg0);
      goto associate;

    case TRUTH_NOT_EXPR:
      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = invert_truthvalue (arg0);
      /* Avoid infinite recursion.  */
      if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
	return t;
      return convert (type, tem);
5831 
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
	return convert (type, arg0);
      /* Fall through: the TRUTH_AND_EXPR simplifications below also
	 apply to ANDIF (guarded where sequence points matter).  */
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return non_lvalue (convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue (convert (type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);

      /* "truth_andor" is also reached by goto from the TRUTH_ORIF_EXPR /
	 TRUTH_OR_EXPR case below.  */
    truth_andor:
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
	return t;

      /* Check for things like (A || B) && (A || C).  We can convert this
	 to A || (B && C).  Note that either operator can be any of the four
	 truth and/or operations and the transformation will still be
	 valid.   Also note that we only care about order for the
	 ANDIF and ORIF operators.  If B contains side effects, this
	 might change the truth-value of A.  */
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
	  && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_AND_EXPR
	      || TREE_CODE (arg0) == TRUTH_OR_EXPR)
	  && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
	{
	  tree a00 = TREE_OPERAND (arg0, 0);
	  tree a01 = TREE_OPERAND (arg0, 1);
	  tree a10 = TREE_OPERAND (arg1, 0);
	  tree a11 = TREE_OPERAND (arg1, 1);
	  int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			      || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			     && (code == TRUTH_AND_EXPR
				 || code == TRUTH_OR_EXPR));

	  if (operand_equal_p (a00, a10, 0))
	    return fold (build (TREE_CODE (arg0), type, a00,
				fold (build (code, type, a01, a11))));
	  else if (commutative && operand_equal_p (a00, a11, 0))
	    return fold (build (TREE_CODE (arg0), type, a00,
				fold (build (code, type, a01, a10))));
	  else if (commutative && operand_equal_p (a01, a10, 0))
	    return fold (build (TREE_CODE (arg0), type, a01,
				fold (build (code, type, a00, a11))));

	  /* This case if tricky because we must either have commutative
	     operators or else A10 must not have side-effects.  */

	  else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
		   && operand_equal_p (a01, a11, 0))
	    return fold (build (TREE_CODE (arg0), type,
				fold (build (code, type, a00, a10)),
				a01));
	}

      /* See if we can build a range comparison.  */
      if (0 != (tem = fold_range_test (t)))
	return tem;

      /* Check for the possibility of merging component references.  If our
	 lhs is another similar operation, try to merge its rhs with our
	 rhs.  Then try to merge our lhs and rhs.  */
      if (TREE_CODE (arg0) == code
	  && 0 != (tem = fold_truthop (code, type,
				       TREE_OPERAND (arg0, 1), arg1)))
	return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));

      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
	return tem;

      return t;

    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or true.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return convert (type, arg0);
      /* Fall through to the shared TRUTH_OR_EXPR simplifications.  */
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	return non_lvalue (convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue (convert (type, arg0));
      /* If second arg is constant true, result is true, but we must
	 evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
	 TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);
      goto truth_andor;

    case TRUTH_XOR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (integer_zerop (arg0))
	return non_lvalue (convert (type, arg1));
      if (integer_zerop (arg1))
	return non_lvalue (convert (type, arg0));
      /* If either arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg0))
	return non_lvalue (convert (type, invert_truthvalue (arg1)));
      if (integer_onep (arg1))
	return non_lvalue (convert (type, invert_truthvalue (arg0)));
      return t;
5957 
5958     case EQ_EXPR:
5959     case NE_EXPR:
5960     case LT_EXPR:
5961     case GT_EXPR:
5962     case LE_EXPR:
5963     case GE_EXPR:
5964       /* If one arg is a real or integer constant, put it last.  */
5965       if ((TREE_CODE (arg0) == INTEGER_CST
5966 	   && TREE_CODE (arg1) != INTEGER_CST)
5967 	  || (TREE_CODE (arg0) == REAL_CST
5968 	      && TREE_CODE (arg0) != REAL_CST))
5969 	{
5970 	  TREE_OPERAND (t, 0) = arg1;
5971 	  TREE_OPERAND (t, 1) = arg0;
5972 	  arg0 = TREE_OPERAND (t, 0);
5973 	  arg1 = TREE_OPERAND (t, 1);
5974 	  code = swap_tree_comparison (code);
5975 	  TREE_SET_CODE (t, code);
5976 	}
5977 
      /* Floating-point comparison simplifications.  */
      if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
	{
	  /* (-a) CMP (-b) -> b CMP a  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR
	      && TREE_CODE (arg1) == NEGATE_EXPR)
	    return fold (build (code, type, TREE_OPERAND (arg1, 0),
				TREE_OPERAND (arg0, 0)));
	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
	    return
	      fold (build
		    (swap_tree_comparison (code), type,
		     TREE_OPERAND (arg0, 0),
		     build_real (TREE_TYPE (arg1),
				 REAL_VALUE_NEGATE (TREE_REAL_CST (arg1)))));
	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	  /* a CMP (-0) -> a CMP 0  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (arg1)))
	    return fold (build (code, type, arg0,
				build_real (TREE_TYPE (arg1), dconst0)));

	  /* If this is a comparison of a real constant with a PLUS_EXPR
	     or a MINUS_EXPR of a real constant, we can convert it into a
	     comparison with a revised real constant as long as no overflow
	     occurs when unsafe_math_optimizations are enabled.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == REAL_CST
	      && (TREE_CODE (arg0) == PLUS_EXPR
		  || TREE_CODE (arg0) == MINUS_EXPR)
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	      && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
					  ? MINUS_EXPR : PLUS_EXPR,
					  arg1, TREE_OPERAND (arg0, 1), 0))
	      && ! TREE_CONSTANT_OVERFLOW (tem))
	    return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
	}
6015 
6016       /* Convert foo++ == CONST into ++foo == CONST + INCR.
6017 	 First, see if one arg is constant; find the constant arg
6018 	 and the other one.  */
6019       {
6020 	tree constop = 0, varop = NULL_TREE;
6021 	int constopnum = -1;
6022 
6023 	if (TREE_CONSTANT (arg1))
6024 	  constopnum = 1, constop = arg1, varop = arg0;
6025 	if (TREE_CONSTANT (arg0))
6026 	  constopnum = 0, constop = arg0, varop = arg1;
6027 
6028 	if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
6029 	  {
6030 	    /* This optimization is invalid for ordered comparisons
6031 	       if CONST+INCR overflows or if foo+incr might overflow.
6032 	       This optimization is invalid for floating point due to rounding.
6033 	       For pointer types we assume overflow doesn't happen.  */
6034 	    if (POINTER_TYPE_P (TREE_TYPE (varop))
6035 		|| (! FLOAT_TYPE_P (TREE_TYPE (varop))
6036 		    && (code == EQ_EXPR || code == NE_EXPR)))
6037 	      {
6038 		tree newconst
6039 		  = fold (build (PLUS_EXPR, TREE_TYPE (varop),
6040 				 constop, TREE_OPERAND (varop, 1)));
6041 
6042 		/* Do not overwrite the current varop to be a preincrement,
6043 		   create a new node so that we won't confuse our caller who
6044 		   might create trees and throw them away, reusing the
6045 		   arguments that they passed to build.  This shows up in
6046 		   the THEN or ELSE parts of ?: being postincrements.  */
6047 		varop = build (PREINCREMENT_EXPR, TREE_TYPE (varop),
6048 			       TREE_OPERAND (varop, 0),
6049 			       TREE_OPERAND (varop, 1));
6050 
6051 		/* If VAROP is a reference to a bitfield, we must mask
6052 		   the constant by the width of the field.  */
6053 		if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
6054 		    && DECL_BIT_FIELD(TREE_OPERAND
6055 				      (TREE_OPERAND (varop, 0), 1)))
6056 		  {
6057 		    int size
6058 		      = TREE_INT_CST_LOW (DECL_SIZE
6059 					  (TREE_OPERAND
6060 					   (TREE_OPERAND (varop, 0), 1)));
6061 		    tree mask, unsigned_type;
6062 		    unsigned int precision;
6063 		    tree folded_compare;
6064 
6065 		    /* First check whether the comparison would come out
6066 		       always the same.  If we don't do that we would
6067 		       change the meaning with the masking.  */
6068 		    if (constopnum == 0)
6069 		      folded_compare = fold (build (code, type, constop,
6070 						    TREE_OPERAND (varop, 0)));
6071 		    else
6072 		      folded_compare = fold (build (code, type,
6073 						    TREE_OPERAND (varop, 0),
6074 						    constop));
6075 		    if (integer_zerop (folded_compare)
6076 			|| integer_onep (folded_compare))
6077 		      return omit_one_operand (type, folded_compare, varop);
6078 
6079 		    unsigned_type = (*lang_hooks.types.type_for_size)(size, 1);
6080 		    precision = TYPE_PRECISION (unsigned_type);
6081 		    mask = build_int_2 (~0, ~0);
6082 		    TREE_TYPE (mask) = unsigned_type;
6083 		    force_fit_type (mask, 0);
6084 		    mask = const_binop (RSHIFT_EXPR, mask,
6085 					size_int (precision - size), 0);
6086 		    newconst = fold (build (BIT_AND_EXPR,
6087 					    TREE_TYPE (varop), newconst,
6088 					    convert (TREE_TYPE (varop),
6089 						     mask)));
6090 		  }
6091 
6092 		t = build (code, type,
6093 			   (constopnum == 0) ? newconst : varop,
6094 			   (constopnum == 1) ? newconst : varop);
6095 		return t;
6096 	      }
6097 	  }
6098 	else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
6099 	  {
6100 	    if (POINTER_TYPE_P (TREE_TYPE (varop))
6101 		|| (! FLOAT_TYPE_P (TREE_TYPE (varop))
6102 		    && (code == EQ_EXPR || code == NE_EXPR)))
6103 	      {
6104 		tree newconst
6105 		  = fold (build (MINUS_EXPR, TREE_TYPE (varop),
6106 				 constop, TREE_OPERAND (varop, 1)));
6107 
6108 		/* Do not overwrite the current varop to be a predecrement,
6109 		   create a new node so that we won't confuse our caller who
6110 		   might create trees and throw them away, reusing the
6111 		   arguments that they passed to build.  This shows up in
6112 		   the THEN or ELSE parts of ?: being postdecrements.  */
6113 		varop = build (PREDECREMENT_EXPR, TREE_TYPE (varop),
6114 			       TREE_OPERAND (varop, 0),
6115 			       TREE_OPERAND (varop, 1));
6116 
6117 		if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
6118 		    && DECL_BIT_FIELD(TREE_OPERAND
6119 				      (TREE_OPERAND (varop, 0), 1)))
6120 		  {
6121 		    int size
6122 		      = TREE_INT_CST_LOW (DECL_SIZE
6123 					  (TREE_OPERAND
6124 					   (TREE_OPERAND (varop, 0), 1)));
6125 		    tree mask, unsigned_type;
6126 		    unsigned int precision;
6127 		    tree folded_compare;
6128 
6129 		    if (constopnum == 0)
6130 		      folded_compare = fold (build (code, type, constop,
6131 						    TREE_OPERAND (varop, 0)));
6132 		    else
6133 		      folded_compare = fold (build (code, type,
6134 						    TREE_OPERAND (varop, 0),
6135 						    constop));
6136 		    if (integer_zerop (folded_compare)
6137 			|| integer_onep (folded_compare))
6138 		      return omit_one_operand (type, folded_compare, varop);
6139 
6140 		    unsigned_type = (*lang_hooks.types.type_for_size)(size, 1);
6141 		    precision = TYPE_PRECISION (unsigned_type);
6142 		    mask = build_int_2 (~0, ~0);
6143 		    TREE_TYPE (mask) = TREE_TYPE (varop);
6144 		    force_fit_type (mask, 0);
6145 		    mask = const_binop (RSHIFT_EXPR, mask,
6146 					size_int (precision - size), 0);
6147 		    newconst = fold (build (BIT_AND_EXPR,
6148 					    TREE_TYPE (varop), newconst,
6149 					    convert (TREE_TYPE (varop),
6150 						     mask)));
6151 		  }
6152 
6153 		t = build (code, type,
6154 			   (constopnum == 0) ? newconst : varop,
6155 			   (constopnum == 1) ? newconst : varop);
6156 		return t;
6157 	      }
6158 	  }
6159       }
6160 
6161       /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
6162 	 This transformation affects the cases which are handled in later
6163 	 optimizations involving comparisons with non-negative constants.  */
6164       if (TREE_CODE (arg1) == INTEGER_CST
6165 	  && TREE_CODE (arg0) != INTEGER_CST
6166 	  && tree_int_cst_sgn (arg1) > 0)
6167 	{
6168 	  switch (code)
6169 	    {
6170 	    case GE_EXPR:
6171 	      code = GT_EXPR;
6172 	      arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6173 	      t = build (code, type, TREE_OPERAND (t, 0), arg1);
6174 	      break;
6175 
6176 	    case LT_EXPR:
6177 	      code = LE_EXPR;
6178 	      arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6179 	      t = build (code, type, TREE_OPERAND (t, 0), arg1);
6180 	      break;
6181 
6182 	    default:
6183 	      break;
6184 	    }
6185 	}
6186 
6187       /* Comparisons with the highest or lowest possible integer of
6188 	 the specified size will have known values.  */
6189       {
6190 	int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
6191 
6192 	if (TREE_CODE (arg1) == INTEGER_CST
6193 	    && ! TREE_CONSTANT_OVERFLOW (arg1)
6194 	    && width <= HOST_BITS_PER_WIDE_INT
6195 	    && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6196 		|| POINTER_TYPE_P (TREE_TYPE (arg1))))
6197 	  {
6198 	    unsigned HOST_WIDE_INT signed_max;
6199 	    unsigned HOST_WIDE_INT max, min;
6200 
6201 	    signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
6202 
6203 	    if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6204 	      {
6205 	        max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
6206 		min = 0;
6207 	      }
6208 	    else
6209 	      {
6210 	        max = signed_max;
6211 		min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
6212 	      }
6213 
6214 	    if (TREE_INT_CST_HIGH (arg1) == 0
6215 		&& TREE_INT_CST_LOW (arg1) == max)
6216 	      switch (code)
6217 		{
6218 		case GT_EXPR:
6219 		  return omit_one_operand (type,
6220 					   convert (type, integer_zero_node),
6221 					   arg0);
6222 		case GE_EXPR:
6223 		  code = EQ_EXPR;
6224 		  TREE_SET_CODE (t, EQ_EXPR);
6225 		  break;
6226 		case LE_EXPR:
6227 		  return omit_one_operand (type,
6228 					   convert (type, integer_one_node),
6229 					   arg0);
6230 		case LT_EXPR:
6231 		  code = NE_EXPR;
6232 		  TREE_SET_CODE (t, NE_EXPR);
6233 		  break;
6234 
6235 		/* The GE_EXPR and LT_EXPR cases above are not normally
6236 		   reached because of  previous transformations.  */
6237 
6238 		default:
6239 		  break;
6240 		}
6241 	    else if (TREE_INT_CST_HIGH (arg1) == 0
6242 		     && TREE_INT_CST_LOW (arg1) == max - 1)
6243 	      switch (code)
6244 		{
6245 		case GT_EXPR:
6246 		  code = EQ_EXPR;
6247 		  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
6248 		  t = build (code, type, TREE_OPERAND (t, 0), arg1);
6249 		  break;
6250 		case LE_EXPR:
6251 		  code = NE_EXPR;
6252 		  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
6253 		  t = build (code, type, TREE_OPERAND (t, 0), arg1);
6254 		  break;
6255 		default:
6256 		  break;
6257 		}
6258 	    else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
6259 		     && TREE_INT_CST_LOW (arg1) == min)
6260 	      switch (code)
6261 		{
6262 		case LT_EXPR:
6263 		  return omit_one_operand (type,
6264 					   convert (type, integer_zero_node),
6265 					   arg0);
6266 		case LE_EXPR:
6267 		  code = EQ_EXPR;
6268 		  TREE_SET_CODE (t, EQ_EXPR);
6269 		  break;
6270 
6271 		case GE_EXPR:
6272 		  return omit_one_operand (type,
6273 					   convert (type, integer_one_node),
6274 					   arg0);
6275 		case GT_EXPR:
6276 		  code = NE_EXPR;
6277 		  TREE_SET_CODE (t, NE_EXPR);
6278 		  break;
6279 
6280 		default:
6281 		  break;
6282 		}
6283 	    else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
6284 		     && TREE_INT_CST_LOW (arg1) == min + 1)
6285 	      switch (code)
6286 		{
6287 		case GE_EXPR:
6288 		  code = NE_EXPR;
6289 		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6290 		  t = build (code, type, TREE_OPERAND (t, 0), arg1);
6291 		  break;
6292 		case LT_EXPR:
6293 		  code = EQ_EXPR;
6294 		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6295 		  t = build (code, type, TREE_OPERAND (t, 0), arg1);
6296 		  break;
6297 		default:
6298 		  break;
6299 		}
6300 
6301 	    else if (TREE_INT_CST_HIGH (arg1) == 0
6302 		     && TREE_INT_CST_LOW (arg1) == signed_max
6303 		     && TREE_UNSIGNED (TREE_TYPE (arg1))
6304 		     /* signed_type does not work on pointer types.  */
6305 		     && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
6306 	      {
6307 		/* The following case also applies to X < signed_max+1
		   and X >= signed_max+1 because of previous transformations.  */
6309 		if (code == LE_EXPR || code == GT_EXPR)
6310 		  {
6311 		    tree st0, st1;
6312 		    st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
6313 		    st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
6314 		    return fold
6315 		      (build (code == LE_EXPR ? GE_EXPR: LT_EXPR,
6316 			      type, convert (st0, arg0),
6317 			      convert (st1, integer_zero_node)));
6318 		  }
6319 	      }
6320 	  }
6321       }
6322 
6323       /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
6324 	 a MINUS_EXPR of a constant, we can convert it into a comparison with
6325 	 a revised constant as long as no overflow occurs.  */
6326       if ((code == EQ_EXPR || code == NE_EXPR)
6327 	  && TREE_CODE (arg1) == INTEGER_CST
6328 	  && (TREE_CODE (arg0) == PLUS_EXPR
6329 	      || TREE_CODE (arg0) == MINUS_EXPR)
6330 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6331 	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
6332 				      ? MINUS_EXPR : PLUS_EXPR,
6333 				      arg1, TREE_OPERAND (arg0, 1), 0))
6334 	  && ! TREE_CONSTANT_OVERFLOW (tem))
6335 	return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6336 
6337       /* Similarly for a NEGATE_EXPR.  */
6338       else if ((code == EQ_EXPR || code == NE_EXPR)
6339 	       && TREE_CODE (arg0) == NEGATE_EXPR
6340 	       && TREE_CODE (arg1) == INTEGER_CST
6341 	       && 0 != (tem = negate_expr (arg1))
6342 	       && TREE_CODE (tem) == INTEGER_CST
6343 	       && ! TREE_CONSTANT_OVERFLOW (tem))
6344 	return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6345 
6346       /* If we have X - Y == 0, we can convert that to X == Y and similarly
6347 	 for !=.  Don't do this for ordered comparisons due to overflow.  */
6348       else if ((code == NE_EXPR || code == EQ_EXPR)
6349 	       && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
6350 	return fold (build (code, type,
6351 			    TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
6352 
6353       /* If we are widening one operand of an integer comparison,
6354 	 see if the other operand is similarly being widened.  Perhaps we
6355 	 can do the comparison in the narrower type.  */
6356       else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
6357 	       && TREE_CODE (arg0) == NOP_EXPR
6358 	       && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
6359 	       && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
6360 	       && (TREE_TYPE (t1) == TREE_TYPE (tem)
6361 		   || (TREE_CODE (t1) == INTEGER_CST
6362 		       && int_fits_type_p (t1, TREE_TYPE (tem)))))
6363 	return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));
6364 
6365       /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
6366 	 constant, we can simplify it.  */
6367       else if (TREE_CODE (arg1) == INTEGER_CST
6368 	       && (TREE_CODE (arg0) == MIN_EXPR
6369 		   || TREE_CODE (arg0) == MAX_EXPR)
6370 	       && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6371 	return optimize_minmax_comparison (t);
6372 
6373       /* If we are comparing an ABS_EXPR with a constant, we can
6374 	 convert all the cases into explicit comparisons, but they may
6375 	 well not be faster than doing the ABS and one comparison.
6376 	 But ABS (X) <= C is a range comparison, which becomes a subtraction
6377 	 and a comparison, and is probably faster.  */
6378       else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6379 	       && TREE_CODE (arg0) == ABS_EXPR
6380 	       && ! TREE_SIDE_EFFECTS (arg0)
6381 	       && (0 != (tem = negate_expr (arg1)))
6382 	       && TREE_CODE (tem) == INTEGER_CST
6383 	       && ! TREE_CONSTANT_OVERFLOW (tem))
6384 	return fold (build (TRUTH_ANDIF_EXPR, type,
6385 			    build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
6386 			    build (LE_EXPR, type,
6387 				   TREE_OPERAND (arg0, 0), arg1)));
6388 
6389       /* If this is an EQ or NE comparison with zero and ARG0 is
6390 	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
6391 	 two operations, but the latter can be done in one less insn
6392 	 on machines that have only two-operand insns or on which a
6393 	 constant cannot be the first operand.  */
6394       if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
6395 	  && TREE_CODE (arg0) == BIT_AND_EXPR)
6396 	{
6397 	  if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
6398 	      && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
6399 	    return
6400 	      fold (build (code, type,
6401 			   build (BIT_AND_EXPR, TREE_TYPE (arg0),
6402 				  build (RSHIFT_EXPR,
6403 					 TREE_TYPE (TREE_OPERAND (arg0, 0)),
6404 					 TREE_OPERAND (arg0, 1),
6405 					 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
6406 				  convert (TREE_TYPE (arg0),
6407 					   integer_one_node)),
6408 			   arg1));
6409 	  else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
6410 		   && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
6411 	    return
6412 	      fold (build (code, type,
6413 			   build (BIT_AND_EXPR, TREE_TYPE (arg0),
6414 				  build (RSHIFT_EXPR,
6415 					 TREE_TYPE (TREE_OPERAND (arg0, 1)),
6416 					 TREE_OPERAND (arg0, 0),
6417 					 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
6418 				  convert (TREE_TYPE (arg0),
6419 					   integer_one_node)),
6420 			   arg1));
6421 	}
6422 
6423       /* If this is an NE or EQ comparison of zero against the result of a
6424 	 signed MOD operation whose second operand is a power of 2, make
6425 	 the MOD operation unsigned since it is simpler and equivalent.  */
6426       if ((code == NE_EXPR || code == EQ_EXPR)
6427 	  && integer_zerop (arg1)
6428 	  && ! TREE_UNSIGNED (TREE_TYPE (arg0))
6429 	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
6430 	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
6431 	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
6432 	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
6433 	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
6434 	{
6435 	  tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
6436 	  tree newmod = build (TREE_CODE (arg0), newtype,
6437 			       convert (newtype, TREE_OPERAND (arg0, 0)),
6438 			       convert (newtype, TREE_OPERAND (arg0, 1)));
6439 
6440 	  return build (code, type, newmod, convert (newtype, arg1));
6441 	}
6442 
6443       /* If this is an NE comparison of zero with an AND of one, remove the
6444 	 comparison since the AND will give the correct value.  */
6445       if (code == NE_EXPR && integer_zerop (arg1)
6446 	  && TREE_CODE (arg0) == BIT_AND_EXPR
6447 	  && integer_onep (TREE_OPERAND (arg0, 1)))
6448 	return convert (type, arg0);
6449 
6450       /* If we have (A & C) == C where C is a power of 2, convert this into
6451 	 (A & C) != 0.  Similarly for NE_EXPR.  */
6452       if ((code == EQ_EXPR || code == NE_EXPR)
6453 	  && TREE_CODE (arg0) == BIT_AND_EXPR
6454 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
6455 	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
6456 	return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
6457 			    arg0, integer_zero_node));
6458 
6459       /* If we have (A & C) != 0 where C is the sign bit of A, convert
6460 	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
6461       if ((code == EQ_EXPR || code == NE_EXPR)
6462 	  && TREE_CODE (arg0) == BIT_AND_EXPR
6463 	  && integer_zerop (arg1))
6464 	{
6465 	  tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0),
6466 				   TREE_OPERAND (arg0, 1));
6467 	  if (arg00 != NULL_TREE)
6468 	  {
6469 	    tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
6470 	    return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
6471 			        convert (stype, arg00),
6472 				convert (stype, integer_zero_node)));
6473 	  }
6474 	}
6475 
6476       /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
6477 	 and similarly for >= into !=.  */
6478       if ((code == LT_EXPR || code == GE_EXPR)
6479 	  && TREE_UNSIGNED (TREE_TYPE (arg0))
6480 	  && TREE_CODE (arg1) == LSHIFT_EXPR
6481 	  && integer_onep (TREE_OPERAND (arg1, 0)))
6482 	return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6483 		      build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6484 			     TREE_OPERAND (arg1, 1)),
6485 		      convert (TREE_TYPE (arg0), integer_zero_node));
6486 
6487       else if ((code == LT_EXPR || code == GE_EXPR)
6488 	       && TREE_UNSIGNED (TREE_TYPE (arg0))
6489 	       && (TREE_CODE (arg1) == NOP_EXPR
6490 		   || TREE_CODE (arg1) == CONVERT_EXPR)
6491 	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
6492 	       && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
6493 	return
6494 	  build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6495 		 convert (TREE_TYPE (arg0),
6496 			  build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6497 				 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
6498 		 convert (TREE_TYPE (arg0), integer_zero_node));
6499 
6500       /* Simplify comparison of something with itself.  (For IEEE
6501 	 floating-point, we can only do some of these simplifications.)  */
6502       if (operand_equal_p (arg0, arg1, 0))
6503 	{
6504 	  switch (code)
6505 	    {
6506 	    case EQ_EXPR:
6507 	    case GE_EXPR:
6508 	    case LE_EXPR:
6509 	      if (! FLOAT_TYPE_P (TREE_TYPE (arg0)))
6510 		return constant_boolean_node (1, type);
6511 	      code = EQ_EXPR;
6512 	      TREE_SET_CODE (t, code);
6513 	      break;
6514 
6515 	    case NE_EXPR:
6516 	      /* For NE, we can only do this simplification if integer.  */
6517 	      if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
6518 		break;
6519 	      /* ... fall through ...  */
6520 	    case GT_EXPR:
6521 	    case LT_EXPR:
6522 	      return constant_boolean_node (0, type);
6523 	    default:
6524 	      abort ();
6525 	    }
6526 	}
6527 
6528       /* If we are comparing an expression that just has comparisons
6529 	 of two integer values, arithmetic expressions of those comparisons,
6530 	 and constants, we can simplify it.  There are only three cases
6531 	 to check: the two values can either be equal, the first can be
6532 	 greater, or the second can be greater.  Fold the expression for
6533 	 those three values.  Since each value must be 0 or 1, we have
6534 	 eight possibilities, each of which corresponds to the constant 0
6535 	 or 1 or one of the six possible comparisons.
6536 
6537 	 This handles common cases like (a > b) == 0 but also handles
6538 	 expressions like  ((x > y) - (y > x)) > 0, which supposedly
6539 	 occur in macroized code.  */
6540 
6541       if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
6542 	{
6543 	  tree cval1 = 0, cval2 = 0;
6544 	  int save_p = 0;
6545 
6546 	  if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
6547 	      /* Don't handle degenerate cases here; they should already
6548 		 have been handled anyway.  */
6549 	      && cval1 != 0 && cval2 != 0
6550 	      && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
6551 	      && TREE_TYPE (cval1) == TREE_TYPE (cval2)
6552 	      && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
6553 	      && TYPE_MAX_VALUE (TREE_TYPE (cval1))
6554 	      && TYPE_MAX_VALUE (TREE_TYPE (cval2))
6555 	      && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
6556 				    TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
6557 	    {
6558 	      tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
6559 	      tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
6560 
6561 	      /* We can't just pass T to eval_subst in case cval1 or cval2
6562 		 was the same as ARG1.  */
6563 
6564 	      tree high_result
6565 		= fold (build (code, type,
6566 			       eval_subst (arg0, cval1, maxval, cval2, minval),
6567 			       arg1));
6568 	      tree equal_result
6569 		= fold (build (code, type,
6570 			       eval_subst (arg0, cval1, maxval, cval2, maxval),
6571 			       arg1));
6572 	      tree low_result
6573 		= fold (build (code, type,
6574 			       eval_subst (arg0, cval1, minval, cval2, maxval),
6575 			       arg1));
6576 
6577 	      /* All three of these results should be 0 or 1.  Confirm they
6578 		 are.  Then use those values to select the proper code
6579 		 to use.  */
6580 
6581 	      if ((integer_zerop (high_result)
6582 		   || integer_onep (high_result))
6583 		  && (integer_zerop (equal_result)
6584 		      || integer_onep (equal_result))
6585 		  && (integer_zerop (low_result)
6586 		      || integer_onep (low_result)))
6587 		{
6588 		  /* Make a 3-bit mask with the high-order bit being the
6589 		     value for `>', the next for '=', and the low for '<'.  */
6590 		  switch ((integer_onep (high_result) * 4)
6591 			  + (integer_onep (equal_result) * 2)
6592 			  + integer_onep (low_result))
6593 		    {
6594 		    case 0:
6595 		      /* Always false.  */
6596 		      return omit_one_operand (type, integer_zero_node, arg0);
6597 		    case 1:
6598 		      code = LT_EXPR;
6599 		      break;
6600 		    case 2:
6601 		      code = EQ_EXPR;
6602 		      break;
6603 		    case 3:
6604 		      code = LE_EXPR;
6605 		      break;
6606 		    case 4:
6607 		      code = GT_EXPR;
6608 		      break;
6609 		    case 5:
6610 		      code = NE_EXPR;
6611 		      break;
6612 		    case 6:
6613 		      code = GE_EXPR;
6614 		      break;
6615 		    case 7:
6616 		      /* Always true.  */
6617 		      return omit_one_operand (type, integer_one_node, arg0);
6618 		    }
6619 
6620 		  t = build (code, type, cval1, cval2);
6621 		  if (save_p)
6622 		    return save_expr (t);
6623 		  else
6624 		    return fold (t);
6625 		}
6626 	    }
6627 	}
6628 
6629       /* If this is a comparison of a field, we may be able to simplify it.  */
6630       if (((TREE_CODE (arg0) == COMPONENT_REF
6631 	    && (*lang_hooks.can_use_bit_fields_p) ())
6632 	   || TREE_CODE (arg0) == BIT_FIELD_REF)
6633 	  && (code == EQ_EXPR || code == NE_EXPR)
6634 	  /* Handle the constant case even without -O
6635 	     to make sure the warnings are given.  */
6636 	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
6637 	{
6638 	  t1 = optimize_bit_field_compare (code, type, arg0, arg1);
6639 	  return t1 ? t1 : t;
6640 	}
6641 
6642       /* If this is a comparison of complex values and either or both sides
6643 	 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
6644 	 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
6645 	 This may prevent needless evaluations.  */
6646       if ((code == EQ_EXPR || code == NE_EXPR)
6647 	  && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
6648 	  && (TREE_CODE (arg0) == COMPLEX_EXPR
6649 	      || TREE_CODE (arg1) == COMPLEX_EXPR
6650 	      || TREE_CODE (arg0) == COMPLEX_CST
6651 	      || TREE_CODE (arg1) == COMPLEX_CST))
6652 	{
6653 	  tree subtype = TREE_TYPE (TREE_TYPE (arg0));
6654 	  tree real0, imag0, real1, imag1;
6655 
6656 	  arg0 = save_expr (arg0);
6657 	  arg1 = save_expr (arg1);
6658 	  real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
6659 	  imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
6660 	  real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
6661 	  imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
6662 
6663 	  return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
6664 			       : TRUTH_ORIF_EXPR),
6665 			      type,
6666 			      fold (build (code, type, real0, real1)),
6667 			      fold (build (code, type, imag0, imag1))));
6668 	}
6669 
6670       /* Optimize comparisons of strlen vs zero to a compare of the
6671 	 first character of the string vs zero.  To wit,
6672 	 	strlen(ptr) == 0   =>  *ptr == 0
6673 		strlen(ptr) != 0   =>  *ptr != 0
6674 	 Other cases should reduce to one of these two (or a constant)
6675 	 due to the return value of strlen being unsigned.  */
6676       if ((code == EQ_EXPR || code == NE_EXPR)
6677 	  && integer_zerop (arg1)
6678 	  && TREE_CODE (arg0) == CALL_EXPR
6679 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR)
6680 	{
6681 	  tree fndecl = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6682 	  tree arglist;
6683 
6684 	  if (TREE_CODE (fndecl) == FUNCTION_DECL
6685 	      && DECL_BUILT_IN (fndecl)
6686 	      && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
6687 	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
6688 	      && (arglist = TREE_OPERAND (arg0, 1))
6689 	      && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
6690 	      && ! TREE_CHAIN (arglist))
6691 	    return fold (build (code, type,
6692 				build1 (INDIRECT_REF, char_type_node,
6693 					TREE_VALUE(arglist)),
6694 				integer_zero_node));
6695 	}
6696 
6697       /* From here on, the only cases we handle are when the result is
6698 	 known to be a constant.
6699 
6700 	 To compute GT, swap the arguments and do LT.
6701 	 To compute GE, do LT and invert the result.
6702 	 To compute LE, swap the arguments, do LT and invert the result.
6703 	 To compute NE, do EQ and invert the result.
6704 
6705 	 Therefore, the code below must handle only EQ and LT.  */
6706 
6707       if (code == LE_EXPR || code == GT_EXPR)
6708 	{
6709 	  tem = arg0, arg0 = arg1, arg1 = tem;
6710 	  code = swap_tree_comparison (code);
6711 	}
6712 
6713       /* Note that it is safe to invert for real values here because we
6714 	 will check below in the one case that it matters.  */
6715 
6716       t1 = NULL_TREE;
6717       invert = 0;
6718       if (code == NE_EXPR || code == GE_EXPR)
6719 	{
6720 	  invert = 1;
6721 	  code = invert_tree_comparison (code);
6722 	}
6723 
6724       /* Compute a result for LT or EQ if args permit;
6725 	 otherwise return T.  */
6726       if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
6727 	{
6728 	  if (code == EQ_EXPR)
6729 	    t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
6730 	  else
6731 	    t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
6732 			       ? INT_CST_LT_UNSIGNED (arg0, arg1)
6733 			       : INT_CST_LT (arg0, arg1)),
6734 			      0);
6735 	}
6736 
6737 #if 0 /* This is no longer useful, but breaks some real code.  */
6738       /* Assume a nonexplicit constant cannot equal an explicit one,
6739 	 since such code would be undefined anyway.
6740 	 Exception: on sysvr4, using #pragma weak,
6741 	 a label can come out as 0.  */
6742       else if (TREE_CODE (arg1) == INTEGER_CST
6743 	       && !integer_zerop (arg1)
6744 	       && TREE_CONSTANT (arg0)
6745 	       && TREE_CODE (arg0) == ADDR_EXPR
6746 	       && code == EQ_EXPR)
6747 	t1 = build_int_2 (0, 0);
6748 #endif
6749       /* Two real constants can be compared explicitly.  */
6750       else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
6751 	{
6752 	  /* If either operand is a NaN, the result is false with two
6753 	     exceptions: First, an NE_EXPR is true on NaNs, but that case
6754 	     is already handled correctly since we will be inverting the
6755 	     result for NE_EXPR.  Second, if we had inverted a LE_EXPR
6756 	     or a GE_EXPR into a LT_EXPR, we must return true so that it
6757 	     will be inverted into false.  */
6758 
6759 	  if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
6760 	      || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
6761 	    t1 = build_int_2 (invert && code == LT_EXPR, 0);
6762 
6763 	  else if (code == EQ_EXPR)
6764 	    t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
6765 						 TREE_REAL_CST (arg1)),
6766 			      0);
6767 	  else
6768 	    t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
6769 						TREE_REAL_CST (arg1)),
6770 			      0);
6771 	}
6772 
6773       if (t1 == NULL_TREE)
6774 	return t;
6775 
6776       if (invert)
6777 	TREE_INT_CST_LOW (t1) ^= 1;
6778 
6779       TREE_TYPE (t1) = type;
6780       if (TREE_CODE (type) == BOOLEAN_TYPE)
6781 	return (*lang_hooks.truthvalue_conversion) (t1);
6782       return t1;
6783 
6784     case COND_EXPR:
6785       /* Pedantic ANSI C says that a conditional expression is never an lvalue,
6786 	 so all simple results must be passed through pedantic_non_lvalue.  */
6787       if (TREE_CODE (arg0) == INTEGER_CST)
6788 	return pedantic_non_lvalue
6789 	  (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
6790       else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
6791 	return pedantic_omit_one_operand (type, arg1, arg0);
6792 
6793       /* If the second operand is zero, invert the comparison and swap
6794 	 the second and third operands.  Likewise if the second operand
6795 	 is constant and the third is not or if the third operand is
6796 	 equivalent to the first operand of the comparison.  */
6797 
6798       if (integer_zerop (arg1)
6799 	  || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
6800 	  || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
6801 	      && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
6802 						 TREE_OPERAND (t, 2),
6803 						 TREE_OPERAND (arg0, 1))))
6804 	{
6805 	  /* See if this can be inverted.  If it can't, possibly because
6806 	     it was a floating-point inequality comparison, don't do
6807 	     anything.  */
6808 	  tem = invert_truthvalue (arg0);
6809 
6810 	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
6811 	    {
6812 	      t = build (code, type, tem,
6813 			 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
6814 	      arg0 = tem;
6815 	      /* arg1 should be the first argument of the new T.  */
6816 	      arg1 = TREE_OPERAND (t, 1);
6817 	      STRIP_NOPS (arg1);
6818 	    }
6819 	}
6820 
6821       /* If we have A op B ? A : C, we may be able to convert this to a
6822 	 simpler expression, depending on the operation and the values
6823 	 of B and C.  Signed zeros prevent all of these transformations,
6824 	 for reasons given above each one.  */
6825 
6826       if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
6827 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
6828 					     arg1, TREE_OPERAND (arg0, 1))
6829 	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
6830 	{
6831 	  tree arg2 = TREE_OPERAND (t, 2);
6832 	  enum tree_code comp_code = TREE_CODE (arg0);
6833 
6834 	  STRIP_NOPS (arg2);
6835 
6836 	  /* If we have A op 0 ? A : -A, consider applying the following
6837 	     transformations:
6838 
6839 	     A == 0? A : -A    same as -A
6840 	     A != 0? A : -A    same as A
6841 	     A >= 0? A : -A    same as abs (A)
6842 	     A > 0?  A : -A    same as abs (A)
6843 	     A <= 0? A : -A    same as -abs (A)
6844 	     A < 0?  A : -A    same as -abs (A)
6845 
6846 	     None of these transformations work for modes with signed
6847 	     zeros.  If A is +/-0, the first two transformations will
6848 	     change the sign of the result (from +0 to -0, or vice
6849 	     versa).  The last four will fix the sign of the result,
6850 	     even though the original expressions could be positive or
6851 	     negative, depending on the sign of A.
6852 
6853 	     Note that all these transformations are correct if A is
6854 	     NaN, since the two alternatives (A and -A) are also NaNs.  */
6855 	  if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
6856 	       ? real_zerop (TREE_OPERAND (arg0, 1))
6857 	       : integer_zerop (TREE_OPERAND (arg0, 1)))
6858 	      && TREE_CODE (arg2) == NEGATE_EXPR
6859 	      && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
6860 	    switch (comp_code)
6861 	      {
6862 	      case EQ_EXPR:
6863 		return
6864 		  pedantic_non_lvalue
6865 		    (convert (type,
6866 			      negate_expr
6867 			      (convert (TREE_TYPE (TREE_OPERAND (t, 1)),
6868 					arg1))));
6869 	      case NE_EXPR:
6870 		return pedantic_non_lvalue (convert (type, arg1));
6871 	      case GE_EXPR:
6872 	      case GT_EXPR:
6873 		if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6874 		  arg1 = convert ((*lang_hooks.types.signed_type)
6875 				  (TREE_TYPE (arg1)), arg1);
6876 		return pedantic_non_lvalue
6877 		  (convert (type, fold (build1 (ABS_EXPR,
6878 						TREE_TYPE (arg1), arg1))));
6879 	      case LE_EXPR:
6880 	      case LT_EXPR:
6881 		if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6882 		  arg1 = convert ((lang_hooks.types.signed_type)
6883 				  (TREE_TYPE (arg1)), arg1);
6884 		return pedantic_non_lvalue
6885 		  (negate_expr (convert (type,
6886 					 fold (build1 (ABS_EXPR,
6887 						       TREE_TYPE (arg1),
6888 						       arg1)))));
6889 	      default:
6890 		abort ();
6891 	      }
6892 
6893 	  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
6894 	     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
6895 	     both transformations are correct when A is NaN: A != 0
6896 	     is then true, and A == 0 is false.  */
6897 
6898 	  if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
6899 	    {
6900 	      if (comp_code == NE_EXPR)
6901 		return pedantic_non_lvalue (convert (type, arg1));
6902 	      else if (comp_code == EQ_EXPR)
6903 		return pedantic_non_lvalue (convert (type, integer_zero_node));
6904 	    }
6905 
6906 	  /* Try some transformations of A op B ? A : B.
6907 
6908 	     A == B? A : B    same as B
6909 	     A != B? A : B    same as A
6910 	     A >= B? A : B    same as max (A, B)
6911 	     A > B?  A : B    same as max (B, A)
6912 	     A <= B? A : B    same as min (A, B)
6913 	     A < B?  A : B    same as min (B, A)
6914 
6915 	     As above, these transformations don't work in the presence
6916 	     of signed zeros.  For example, if A and B are zeros of
6917 	     opposite sign, the first two transformations will change
6918 	     the sign of the result.  In the last four, the original
6919 	     expressions give different results for (A=+0, B=-0) and
6920 	     (A=-0, B=+0), but the transformed expressions do not.
6921 
6922 	     The first two transformations are correct if either A or B
6923 	     is a NaN.  In the first transformation, the condition will
6924 	     be false, and B will indeed be chosen.  In the case of the
6925 	     second transformation, the condition A != B will be true,
6926 	     and A will be chosen.
6927 
6928 	     The conversions to max() and min() are not correct if B is
6929 	     a number and A is not.  The conditions in the original
6930 	     expressions will be false, so all four give B.  The min()
6931 	     and max() versions would give a NaN instead.  */
6932 	  if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
6933 					      arg2, TREE_OPERAND (arg0, 0)))
6934 	    {
6935 	      tree comp_op0 = TREE_OPERAND (arg0, 0);
6936 	      tree comp_op1 = TREE_OPERAND (arg0, 1);
6937 	      tree comp_type = TREE_TYPE (comp_op0);
6938 
6939 	      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
6940 	      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
6941 		comp_type = type;
6942 
6943 	      switch (comp_code)
6944 		{
6945 		case EQ_EXPR:
6946 		  return pedantic_non_lvalue (convert (type, arg2));
6947 		case NE_EXPR:
6948 		  return pedantic_non_lvalue (convert (type, arg1));
6949 		case LE_EXPR:
6950 		case LT_EXPR:
6951 		  /* In C++ a ?: expression can be an lvalue, so put the
6952 		     operand which will be used if they are equal first
6953 		     so that we can convert this back to the
6954 		     corresponding COND_EXPR.  */
6955 		  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
6956 		    return pedantic_non_lvalue
6957 		      (convert (type, fold (build (MIN_EXPR, comp_type,
6958 						   (comp_code == LE_EXPR
6959 						    ? comp_op0 : comp_op1),
6960 						   (comp_code == LE_EXPR
6961 						    ? comp_op1 : comp_op0)))));
6962 		  break;
6963 		case GE_EXPR:
6964 		case GT_EXPR:
6965 		  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
6966 		    return pedantic_non_lvalue
6967 		      (convert (type, fold (build (MAX_EXPR, comp_type,
6968 						   (comp_code == GE_EXPR
6969 						    ? comp_op0 : comp_op1),
6970 						   (comp_code == GE_EXPR
6971 						    ? comp_op1 : comp_op0)))));
6972 		  break;
6973 		default:
6974 		  abort ();
6975 		}
6976 	    }
6977 
6978 	  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
6979 	     we might still be able to simplify this.  For example,
6980 	     if C1 is one less or one more than C2, this might have started
6981 	     out as a MIN or MAX and been transformed by this function.
6982 	     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */
6983 
6984 	  if (INTEGRAL_TYPE_P (type)
6985 	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6986 	      && TREE_CODE (arg2) == INTEGER_CST)
6987 	    switch (comp_code)
6988 	      {
6989 	      case EQ_EXPR:
6990 		/* We can replace A with C1 in this case.  */
6991 		arg1 = convert (type, TREE_OPERAND (arg0, 1));
6992 		t = build (code, type, TREE_OPERAND (t, 0), arg1,
6993 			   TREE_OPERAND (t, 2));
6994 		break;
6995 
6996 	      case LT_EXPR:
6997 		/* If C1 is C2 + 1, this is min(A, C2).  */
6998 		if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
6999 		    && operand_equal_p (TREE_OPERAND (arg0, 1),
7000 					const_binop (PLUS_EXPR, arg2,
7001 						     integer_one_node, 0), 1))
7002 		  return pedantic_non_lvalue
7003 		    (fold (build (MIN_EXPR, type, arg1, arg2)));
7004 		break;
7005 
7006 	      case LE_EXPR:
7007 		/* If C1 is C2 - 1, this is min(A, C2).  */
7008 		if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
7009 		    && operand_equal_p (TREE_OPERAND (arg0, 1),
7010 					const_binop (MINUS_EXPR, arg2,
7011 						     integer_one_node, 0), 1))
7012 		  return pedantic_non_lvalue
7013 		    (fold (build (MIN_EXPR, type, arg1, arg2)));
7014 		break;
7015 
7016 	      case GT_EXPR:
7017 		/* If C1 is C2 - 1, this is max(A, C2).  */
7018 		if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
7019 		    && operand_equal_p (TREE_OPERAND (arg0, 1),
7020 					const_binop (MINUS_EXPR, arg2,
7021 						     integer_one_node, 0), 1))
7022 		  return pedantic_non_lvalue
7023 		    (fold (build (MAX_EXPR, type, arg1, arg2)));
7024 		break;
7025 
7026 	      case GE_EXPR:
7027 		/* If C1 is C2 + 1, this is max(A, C2).  */
7028 		if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
7029 		    && operand_equal_p (TREE_OPERAND (arg0, 1),
7030 					const_binop (PLUS_EXPR, arg2,
7031 						     integer_one_node, 0), 1))
7032 		  return pedantic_non_lvalue
7033 		    (fold (build (MAX_EXPR, type, arg1, arg2)));
7034 		break;
7035 	      case NE_EXPR:
7036 		break;
7037 	      default:
7038 		abort ();
7039 	      }
7040 	}
7041 
7042       /* If the second operand is simpler than the third, swap them
7043 	 since that produces better jump optimization results.  */
7044       if ((TREE_CONSTANT (arg1) || DECL_P (arg1)
7045 	   || TREE_CODE (arg1) == SAVE_EXPR)
7046 	  && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
7047 		|| DECL_P (TREE_OPERAND (t, 2))
7048 		|| TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
7049 	{
7050 	  /* See if this can be inverted.  If it can't, possibly because
7051 	     it was a floating-point inequality comparison, don't do
7052 	     anything.  */
7053 	  tem = invert_truthvalue (arg0);
7054 
7055 	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
7056 	    {
7057 	      t = build (code, type, tem,
7058 			 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
7059 	      arg0 = tem;
7060 	      /* arg1 should be the first argument of the new T.  */
7061 	      arg1 = TREE_OPERAND (t, 1);
7062 	      STRIP_NOPS (arg1);
7063 	    }
7064 	}
7065 
7066       /* Convert A ? 1 : 0 to simply A.  */
7067       if (integer_onep (TREE_OPERAND (t, 1))
7068 	  && integer_zerop (TREE_OPERAND (t, 2))
7069 	  /* If we try to convert TREE_OPERAND (t, 0) to our type, the
7070 	     call to fold will try to move the conversion inside
7071 	     a COND, which will recurse.  In that case, the COND_EXPR
7072 	     is probably the best choice, so leave it alone.  */
7073 	  && type == TREE_TYPE (arg0))
7074 	return pedantic_non_lvalue (arg0);
7075 
7076       /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
7077 	 over COND_EXPR in cases such as floating point comparisons.  */
7078       if (integer_zerop (TREE_OPERAND (t, 1))
7079 	  && integer_onep (TREE_OPERAND (t, 2))
7080 	  && truth_value_p (TREE_CODE (arg0)))
7081 	return pedantic_non_lvalue (convert (type,
7082 					     invert_truthvalue (arg0)));
7083 
7084       /* Look for expressions of the form A & 2 ? 2 : 0.  The result of this
7085 	 operation is simply A & 2.  */
7086 
7087       if (integer_zerop (TREE_OPERAND (t, 2))
7088 	  && TREE_CODE (arg0) == NE_EXPR
7089 	  && integer_zerop (TREE_OPERAND (arg0, 1))
7090 	  && integer_pow2p (arg1)
7091 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
7092 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
7093 			      arg1, 1))
7094 	return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
7095 
7096       /* Convert A ? B : 0 into A && B if A and B are truth values.  */
7097       if (integer_zerop (TREE_OPERAND (t, 2))
7098 	  && truth_value_p (TREE_CODE (arg0))
7099 	  && truth_value_p (TREE_CODE (arg1)))
7100 	return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
7101 						 arg0, arg1)));
7102 
7103       /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
7104       if (integer_onep (TREE_OPERAND (t, 2))
7105 	  && truth_value_p (TREE_CODE (arg0))
7106 	  && truth_value_p (TREE_CODE (arg1)))
7107 	{
7108 	  /* Only perform transformation if ARG0 is easily inverted.  */
7109 	  tem = invert_truthvalue (arg0);
7110 	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
7111 	    return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
7112 						     tem, arg1)));
7113 	}
7114 
7115       return t;
7116 
7117     case COMPOUND_EXPR:
7118       /* When pedantic, a compound expression can be neither an lvalue
7119 	 nor an integer constant expression.  */
7120       if (TREE_SIDE_EFFECTS (arg0) || pedantic)
7121 	return t;
7122       /* Don't let (0, 0) be null pointer constant.  */
7123       if (integer_zerop (arg1))
7124 	return build1 (NOP_EXPR, type, arg1);
7125       return convert (type, arg1);
7126 
7127     case COMPLEX_EXPR:
7128       if (wins)
7129 	return build_complex (type, arg0, arg1);
7130       return t;
7131 
7132     case REALPART_EXPR:
7133       if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7134 	return t;
7135       else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7136 	return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7137 				 TREE_OPERAND (arg0, 1));
7138       else if (TREE_CODE (arg0) == COMPLEX_CST)
7139 	return TREE_REALPART (arg0);
7140       else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7141 	return fold (build (TREE_CODE (arg0), type,
7142 			    fold (build1 (REALPART_EXPR, type,
7143 					  TREE_OPERAND (arg0, 0))),
7144 			    fold (build1 (REALPART_EXPR,
7145 					  type, TREE_OPERAND (arg0, 1)))));
7146       return t;
7147 
7148     case IMAGPART_EXPR:
7149       if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7150 	return convert (type, integer_zero_node);
7151       else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7152 	return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7153 				 TREE_OPERAND (arg0, 0));
7154       else if (TREE_CODE (arg0) == COMPLEX_CST)
7155 	return TREE_IMAGPART (arg0);
7156       else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7157 	return fold (build (TREE_CODE (arg0), type,
7158 			    fold (build1 (IMAGPART_EXPR, type,
7159 					  TREE_OPERAND (arg0, 0))),
7160 			    fold (build1 (IMAGPART_EXPR, type,
7161 					  TREE_OPERAND (arg0, 1)))));
7162       return t;
7163 
7164       /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
7165          appropriate.  */
7166     case CLEANUP_POINT_EXPR:
7167       if (! has_cleanups (arg0))
7168 	return TREE_OPERAND (t, 0);
7169 
7170       {
7171 	enum tree_code code0 = TREE_CODE (arg0);
7172 	int kind0 = TREE_CODE_CLASS (code0);
7173 	tree arg00 = TREE_OPERAND (arg0, 0);
7174 	tree arg01;
7175 
7176 	if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
7177 	  return fold (build1 (code0, type,
7178 			       fold (build1 (CLEANUP_POINT_EXPR,
7179 					     TREE_TYPE (arg00), arg00))));
7180 
7181 	if (kind0 == '<' || kind0 == '2'
7182 	    || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
7183 	    || code0 == TRUTH_AND_EXPR   || code0 == TRUTH_OR_EXPR
7184 	    || code0 == TRUTH_XOR_EXPR)
7185 	  {
7186 	    arg01 = TREE_OPERAND (arg0, 1);
7187 
7188 	    if (TREE_CONSTANT (arg00)
7189 		|| ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
7190 		    && ! has_cleanups (arg00)))
7191 	      return fold (build (code0, type, arg00,
7192 				  fold (build1 (CLEANUP_POINT_EXPR,
7193 						TREE_TYPE (arg01), arg01))));
7194 
7195 	    if (TREE_CONSTANT (arg01))
7196 	      return fold (build (code0, type,
7197 				  fold (build1 (CLEANUP_POINT_EXPR,
7198 						TREE_TYPE (arg00), arg00)),
7199 				  arg01));
7200 	  }
7201 
7202 	return t;
7203       }
7204 
7205     case CALL_EXPR:
7206       /* Check for a built-in function.  */
7207       if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
7208 	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
7209 	      == FUNCTION_DECL)
7210 	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
7211 	{
7212 	  tree tmp = fold_builtin (expr);
7213 	  if (tmp)
7214 	    return tmp;
7215 	}
7216       return t;
7217 
7218     default:
7219       return t;
7220     } /* switch (code) */
7221 }
7222 
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.
7225 
7226    An example of the sort of thing we care about (at this point; this routine
7227    could surely be made more general, and expanded to do what the *_DIV_EXPR's
7228    fold cases do now) is discovering that
7229 
7230      SAVE_EXPR (I) * SAVE_EXPR (J * 8)
7231 
7232    is a multiple of
7233 
7234      SAVE_EXPR (J * 8)
7235 
7236    when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
7237 
7238    This code also handles discovering that
7239 
7240      SAVE_EXPR (I) * SAVE_EXPR (J * 8)
7241 
7242    is a multiple of 8 so we don't have to worry about dealing with a
7243    possible remainder.
7244 
7245    Note that we *look* inside a SAVE_EXPR only to determine how it was
7246    calculated; it is not safe for fold to do much of anything else with the
7247    internals of a SAVE_EXPR, since it cannot know when it will be evaluated
7248    at run time.  For example, the latter example above *cannot* be implemented
7249    as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
7250    evaluation time of the original SAVE_EXPR is not necessarily the same at
7251    the time the new expression is evaluated.  The only optimization of this
7252    sort that would be valid is changing
7253 
7254      SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
7255 
7256    divided by 8 to
7257 
7258      SAVE_EXPR (I) * SAVE_EXPR (J)
7259 
7260    (where the same SAVE_EXPR (J) is used in the original and the
7261    transformed version).  */
7262 
7263 static int
7264 multiple_of_p (type, top, bottom)
7265      tree type;
7266      tree top;
7267      tree bottom;
7268 {
7269   if (operand_equal_p (top, bottom, 0))
7270     return 1;
7271 
7272   if (TREE_CODE (type) != INTEGER_TYPE)
7273     return 0;
7274 
7275   switch (TREE_CODE (top))
7276     {
7277     case MULT_EXPR:
7278       return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
7279 	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
7280 
7281     case PLUS_EXPR:
7282     case MINUS_EXPR:
7283       return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
7284 	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
7285 
7286     case LSHIFT_EXPR:
7287       if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
7288 	{
7289 	  tree op1, t1;
7290 
7291 	  op1 = TREE_OPERAND (top, 1);
7292 	  /* const_binop may not detect overflow correctly,
7293 	     so check for it explicitly here.  */
7294 	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
7295 	      > TREE_INT_CST_LOW (op1)
7296 	      && TREE_INT_CST_HIGH (op1) == 0
7297 	      && 0 != (t1 = convert (type,
7298 				     const_binop (LSHIFT_EXPR, size_one_node,
7299 						  op1, 0)))
7300 	      && ! TREE_OVERFLOW (t1))
7301 	    return multiple_of_p (type, t1, bottom);
7302 	}
7303       return 0;
7304 
7305     case NOP_EXPR:
7306       /* Can't handle conversions from non-integral or wider integral type.  */
7307       if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
7308 	  || (TYPE_PRECISION (type)
7309 	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
7310 	return 0;
7311 
7312       /* .. fall through ...  */
7313 
7314     case SAVE_EXPR:
7315       return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
7316 
7317     case INTEGER_CST:
7318       if (TREE_CODE (bottom) != INTEGER_CST
7319 	  || (TREE_UNSIGNED (type)
7320 	      && (tree_int_cst_sgn (top) < 0
7321 		  || tree_int_cst_sgn (bottom) < 0)))
7322 	return 0;
7323       return integer_zerop (const_binop (TRUNC_MOD_EXPR,
7324 					 top, bottom, 0));
7325 
7326     default:
7327       return 0;
7328     }
7329 }
7330 
7331 /* Return true if `t' is known to be non-negative.  */
7332 
7333 int
7334 tree_expr_nonnegative_p (t)
7335      tree t;
7336 {
7337   switch (TREE_CODE (t))
7338     {
7339     case ABS_EXPR:
7340     case FFS_EXPR:
7341       return 1;
7342     case INTEGER_CST:
7343       return tree_int_cst_sgn (t) >= 0;
7344     case TRUNC_DIV_EXPR:
7345     case CEIL_DIV_EXPR:
7346     case FLOOR_DIV_EXPR:
7347     case ROUND_DIV_EXPR:
7348       return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
7349 	&& tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7350     case TRUNC_MOD_EXPR:
7351     case CEIL_MOD_EXPR:
7352     case FLOOR_MOD_EXPR:
7353     case ROUND_MOD_EXPR:
7354       return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
7355     case COND_EXPR:
7356       return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
7357 	&& tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
7358     case COMPOUND_EXPR:
7359       return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7360     case MIN_EXPR:
7361       return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
7362 	&& tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7363     case MAX_EXPR:
7364       return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
7365 	|| tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7366     case MODIFY_EXPR:
7367       return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7368     case BIND_EXPR:
7369       return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7370     case SAVE_EXPR:
7371       return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
7372     case NON_LVALUE_EXPR:
7373       return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
7374     case RTL_EXPR:
7375       return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
7376 
7377     default:
7378       if (truth_value_p (TREE_CODE (t)))
7379 	/* Truth values evaluate to 0 or 1, which is nonnegative.  */
7380 	return 1;
7381       else
7382 	/* We don't know sign of `t', so be conservative and return false.  */
7383 	return 0;
7384     }
7385 }
7386 
7387 /* Return true if `r' is known to be non-negative.
7388    Only handles constants at the moment.  */
7389 
7390 int
7391 rtl_expr_nonnegative_p (r)
7392      rtx r;
7393 {
7394   switch (GET_CODE (r))
7395     {
7396     case CONST_INT:
7397       return INTVAL (r) >= 0;
7398 
7399     case CONST_DOUBLE:
7400       if (GET_MODE (r) == VOIDmode)
7401 	return CONST_DOUBLE_HIGH (r) >= 0;
7402       return 0;
7403 
7404     case CONST_VECTOR:
7405       {
7406 	int units, i;
7407 	rtx elt;
7408 
7409 	units = CONST_VECTOR_NUNITS (r);
7410 
7411 	for (i = 0; i < units; ++i)
7412 	  {
7413 	    elt = CONST_VECTOR_ELT (r, i);
7414 	    if (!rtl_expr_nonnegative_p (elt))
7415 	      return 0;
7416 	  }
7417 
7418 	return 1;
7419       }
7420 
7421     case SYMBOL_REF:
7422     case LABEL_REF:
7423       /* These are always nonnegative.  */
7424       return 1;
7425 
7426     default:
7427       return 0;
7428     }
7429 }
7430 
7431 #include "gt-fold-const.h"
7432